gitlab.com/gitlab-org/gitaly.git
author     Kirill <g4s8.public@gmail.com>  2021-05-21 11:31:59 +0300
committer  Kirill <g4s8.public@gmail.com>  2021-05-21 11:31:59 +0300
commit     5e0b9d5b157c8f203472a817929c261d5c526472 (patch)
tree       939a2ad5cd2e817d77de49c3f62a74f394b12bcc
parent     544d2a5b3abf788bf0c8169d852bc5882b64230b (diff)
parent     c202c0760cecf7b29ddb9ec46a794e208f4da82b (diff)
Merge branch 'master' into 3078-license-grpczj-mirror-license-grpc
-rw-r--r--  .gitignore | 12
-rw-r--r--  .gitlab-ci.yml | 24
-rw-r--r--  .gitlab/changelog_config.yml | 38
-rw-r--r--  .golangci.yml | 40
-rw-r--r--  .tool-versions (renamed from .tool-version) | 2
-rw-r--r--  CHANGELOG.md | 20
-rw-r--r--  Dangerfile | 2
-rw-r--r--  Makefile | 69
-rw-r--r--  NOTICE | 3326
-rw-r--r--  README.md | 4
-rw-r--r--  VERSION | 2
-rw-r--r--  _support/gitlab-test.git-packed-refs | 4
-rw-r--r--  _support/noticegen/noticegen.go | 2
-rwxr-xr-x  _support/test-boot | 7
-rw-r--r--  changelogs/unreleased/gitaly-backup-restore.yml | 5
-rw-r--r--  changelogs/unreleased/pks-git-fetch-fsck-objects.yml | 5
-rw-r--r--  changelogs/unreleased/pks-gitlab-internal-api-latency.yml | 5
-rw-r--r--  changelogs/unreleased/pks-lfs-pointers-latency.yml | 5
-rw-r--r--  changelogs/unreleased/pks-makefile-git-profile.yml | 5
-rw-r--r--  changelogs/unreleased/pks-remote-find-root-ref-inmemory-remote.yml | 5
-rw-r--r--  changelogs/unreleased/pks-remotes-voting.yml | 5
-rw-r--r--  changelogs/unreleased/pks-resolve-conflicts-drop-ruby-implementation.yml | 5
-rw-r--r--  changelogs/unreleased/pks-ssh-receive-pack-voting.yml | 5
-rw-r--r--  changelogs/unreleased/remove_ff_gitaly_go_user_revert.yml | 5
-rw-r--r--  changelogs/unreleased/tc-default-enable-go-rebase.yml | 5
-rw-r--r--  changelogs/unreleased/user_revert_default.yml | 5
-rw-r--r--  changelogs/unreleased/wc-no-housekeeping-cleanup.yml | 5
-rw-r--r--  changelogs/unreleased/zj-remove-wiki-find-file.yml | 5
-rw-r--r--  cmd/gitaly-backup/create.go | 36
-rw-r--r--  cmd/gitaly-backup/create_test.go | 4
-rw-r--r--  cmd/gitaly-backup/main.go | 3
-rw-r--r--  cmd/gitaly-backup/restore.go | 62
-rw-r--r--  cmd/gitaly-backup/restore_test.go | 77
-rw-r--r--  cmd/gitaly-git2go/main.go | 1
-rw-r--r--  cmd/gitaly-git2go/merge_test.go | 2
-rw-r--r--  cmd/gitaly-git2go/rebase.go | 142
-rw-r--r--  cmd/gitaly-git2go/rebase_test.go | 203
-rw-r--r--  cmd/gitaly-git2go/submodule_test.go | 32
-rw-r--r--  cmd/gitaly-hooks/hooks.go | 44
-rw-r--r--  cmd/gitaly-hooks/hooks_test.go | 65
-rw-r--r--  cmd/gitaly-lfs-smudge/lfs_smudge.go | 5
-rw-r--r--  cmd/gitaly-lfs-smudge/lfs_smudge_test.go | 10
-rw-r--r--  cmd/gitaly-ssh/auth_test.go | 7
-rw-r--r--  cmd/gitaly-ssh/upload_pack_test.go | 9
-rw-r--r--  cmd/gitaly/main.go | 45
-rw-r--r--  cmd/praefect/main.go | 19
-rw-r--r--  cmd/praefect/main_test.go | 60
-rw-r--r--  cmd/praefect/subcmd_reconcile.go | 2
-rw-r--r--  config.toml.example | 2
-rw-r--r--  danger/assignees/Dangerfile | 2
-rw-r--r--  danger/changelog/Dangerfile | 58
-rw-r--r--  doc/DESIGN.md | 43
-rw-r--r--  doc/virtual_storage.md | 110
-rw-r--r--  internal/backup/backup.go | 167
-rw-r--r--  internal/backup/backup_test.go | 113
-rw-r--r--  internal/backup/pipeline.go | 167
-rw-r--r--  internal/backup/pipeline_test.go | 109
-rw-r--r--  internal/bootstrap/starter/starter.go | 19
-rw-r--r--  internal/cache/cache.go | 360
-rw-r--r--  internal/cache/cache_test.go (renamed from internal/cache/cachedb_test.go) | 61
-rw-r--r--  internal/cache/cachedb.go | 157
-rw-r--r--  internal/cache/export_test.go | 46
-rw-r--r--  internal/cache/keyer.go | 92
-rw-r--r--  internal/cache/prometheus.go | 99
-rw-r--r--  internal/cache/testhelper_test.go | 19
-rw-r--r--  internal/cache/walker.go | 77
-rw-r--r--  internal/cache/walker_internal_test.go | 64
-rw-r--r--  internal/cache/walker_test.go | 96
-rw-r--r--  internal/cgroups/v1_linux_test.go | 7
-rw-r--r--  internal/command/command.go | 159
-rw-r--r--  internal/command/command_test.go | 283
-rw-r--r--  internal/command/stderrbuffer.go | 101
-rw-r--r--  internal/command/stderrbuffer_test.go | 126
-rwxr-xr-x  internal/command/testdata/stderr_binary_null.sh | 4
-rwxr-xr-x  internal/command/testdata/stderr_many_lines.sh | 9
-rwxr-xr-x  internal/command/testdata/stderr_max_bytes_edge_case.sh | 14
-rwxr-xr-x  internal/command/testdata/stderr_repeat_a.sh | 2
-rwxr-xr-x  internal/command/testdata/stderr_script.sh | 4
-rw-r--r--  internal/git/catfile/batch.go | 118
-rw-r--r--  internal/git/catfile/batch_cache.go | 198
-rw-r--r--  internal/git/catfile/batch_cache_test.go | 50
-rw-r--r--  internal/git/catfile/batch_check_process.go | 17
-rw-r--r--  internal/git/catfile/batch_process.go | 13
-rw-r--r--  internal/git/catfile/batch_test.go | 87
-rw-r--r--  internal/git/catfile/commit.go (renamed from internal/git/log/commit.go) | 21
-rw-r--r--  internal/git/catfile/commit_test.go (renamed from internal/git/log/commit_test.go) | 39
-rw-r--r--  internal/git/catfile/tag.go (renamed from internal/git/log/tag.go) | 23
-rw-r--r--  internal/git/catfile/tag_test.go (renamed from internal/git/log/tag_test.go) | 7
-rw-r--r--  internal/git/command_description.go | 50
-rw-r--r--  internal/git/command_factory.go | 25
-rw-r--r--  internal/git/command_factory_test.go | 6
-rw-r--r--  internal/git/command_options_test.go | 26
-rw-r--r--  internal/git/gittest/command.go | 4
-rw-r--r--  internal/git/gittest/commit.go | 197
-rw-r--r--  internal/git/gittest/commit_test.go | 172
-rw-r--r--  internal/git/gittest/delta_islands.go | 47
-rw-r--r--  internal/git/gittest/objects.go | 10
-rw-r--r--  internal/git/gittest/remote.go | 6
-rw-r--r--  internal/git/gittest/repo.go | 57
-rw-r--r--  internal/git/gittest/repository_suite.go | 16
-rw-r--r--  internal/git/gittest/tag.go (renamed from internal/testhelper/tag.go) | 9
-rw-r--r--  internal/git/gittest/testhelper_test.go | 57
-rw-r--r--  internal/git/gittest/tree.go | 75
-rw-r--r--  internal/git/gittest/tree_test.go | 154
-rw-r--r--  internal/git/gittest/user.go | 20
-rw-r--r--  internal/git/hooks_options.go | 4
-rw-r--r--  internal/git/hooks_payload.go | 12
-rw-r--r--  internal/git/hooks_payload_test.go | 8
-rw-r--r--  internal/git/housekeeping/housekeeping_test.go | 14
-rw-r--r--  internal/git/lfs.go | 5
-rw-r--r--  internal/git/localrepo/config_test.go | 52
-rw-r--r--  internal/git/localrepo/objects.go | 9
-rw-r--r--  internal/git/localrepo/objects_test.go | 10
-rw-r--r--  internal/git/localrepo/refs_test.go | 25
-rw-r--r--  internal/git/localrepo/remote_test.go | 104
-rw-r--r--  internal/git/localrepo/repo.go | 13
-rw-r--r--  internal/git/localrepo/repo_test.go | 13
-rw-r--r--  internal/git/log/last_commit.go | 2
-rw-r--r--  internal/git/log/parser.go (renamed from internal/git/log/log.go) | 9
-rw-r--r--  internal/git/objectpool/clone_test.go | 12
-rw-r--r--  internal/git/objectpool/fetch.go | 8
-rw-r--r--  internal/git/objectpool/fetch_test.go | 77
-rw-r--r--  internal/git/objectpool/link_test.go | 36
-rw-r--r--  internal/git/objectpool/pool.go | 22
-rw-r--r--  internal/git/objectpool/pool_test.go | 21
-rw-r--r--  internal/git/objectpool/proto.go | 11
-rw-r--r--  internal/git/packfile/packfile_test.go | 16
-rw-r--r--  internal/git/reference.go | 9
-rw-r--r--  internal/git/remoterepo/repository_test.go | 17
-rw-r--r--  internal/git/repository.go | 11
-rw-r--r--  internal/git/ssh_test.go | 7
-rw-r--r--  internal/git/stats/git_test.go | 6
-rw-r--r--  internal/git/stats/profile_test.go | 19
-rw-r--r--  internal/git/updateref/updateref.go | 7
-rw-r--r--  internal/git/updateref/updateref_test.go | 8
-rw-r--r--  internal/git2go/apply_test.go | 9
-rw-r--r--  internal/git2go/commit_test.go | 6
-rw-r--r--  internal/git2go/rebase.go | 25
-rw-r--r--  internal/gitaly/config/config.go | 28
-rw-r--r--  internal/gitaly/config/config_test.go | 16
-rw-r--r--  internal/gitaly/hook/check.go | 51
-rw-r--r--  internal/gitaly/hook/manager.go | 19
-rw-r--r--  internal/gitaly/hook/postreceive.go | 7
-rw-r--r--  internal/gitaly/hook/postreceive_test.go | 39
-rw-r--r--  internal/gitaly/hook/prereceive.go | 7
-rw-r--r--  internal/gitaly/hook/prereceive_test.go | 35
-rw-r--r--  internal/gitaly/hook/referencetransaction.go | 11
-rw-r--r--  internal/gitaly/hook/transactions.go | 15
-rw-r--r--  internal/gitaly/hook/transactions_test.go | 20
-rw-r--r--  internal/gitaly/hook/update_test.go | 13
-rw-r--r--  internal/gitaly/maintenance/optimize_test.go | 9
-rw-r--r--  internal/gitaly/rubyserver/proxy.go | 10
-rw-r--r--  internal/gitaly/rubyserver/rubyserver_test.go | 2
-rw-r--r--  internal/gitaly/server/auth_test.go | 20
-rw-r--r--  internal/gitaly/server/server.go | 15
-rw-r--r--  internal/gitaly/server/server_factory.go | 94
-rw-r--r--  internal/gitaly/server/server_factory_test.go | 180
-rw-r--r--  internal/gitaly/service/blob/blob_filter.go | 65
-rw-r--r--  internal/gitaly/service/blob/blob_filter_test.go (renamed from internal/gitaly/service/blob/lfs_pointer_filter_test.go) | 27
-rw-r--r--  internal/gitaly/service/blob/get_blob.go | 4
-rw-r--r--  internal/gitaly/service/blob/get_blobs.go | 4
-rw-r--r--  internal/gitaly/service/blob/get_blobs_test.go | 5
-rw-r--r--  internal/gitaly/service/blob/lfs_pointer_filter.go | 60
-rw-r--r--  internal/gitaly/service/blob/lfs_pointers.go | 268
-rw-r--r--  internal/gitaly/service/blob/lfs_pointers_test.go | 485
-rw-r--r--  internal/gitaly/service/blob/server.go | 11
-rw-r--r--  internal/gitaly/service/blob/testhelper_test.go | 9
-rw-r--r--  internal/gitaly/service/cleanup/apply_bfg_object_map_stream.go | 6
-rw-r--r--  internal/gitaly/service/cleanup/apply_bfg_object_map_stream_test.go | 7
-rw-r--r--  internal/gitaly/service/cleanup/internalrefs/cleaner.go | 21
-rw-r--r--  internal/gitaly/service/cleanup/notifier/notifier.go | 4
-rw-r--r--  internal/gitaly/service/cleanup/server.go | 16
-rw-r--r--  internal/gitaly/service/cleanup/testhelper_test.go | 6
-rw-r--r--  internal/gitaly/service/commit/between.go | 6
-rw-r--r--  internal/gitaly/service/commit/commit_messages.go | 7
-rw-r--r--  internal/gitaly/service/commit/commit_messages_test.go | 15
-rw-r--r--  internal/gitaly/service/commit/commit_signatures.go | 3
-rw-r--r--  internal/gitaly/service/commit/commit_signatures_test.go | 5
-rw-r--r--  internal/gitaly/service/commit/commits_by_message.go | 5
-rw-r--r--  internal/gitaly/service/commit/commits_helper.go | 14
-rw-r--r--  internal/gitaly/service/commit/count_commits_test.go | 8
-rw-r--r--  internal/gitaly/service/commit/count_diverging_commits_test.go | 14
-rw-r--r--  internal/gitaly/service/commit/filter_shas_with_signatures.go | 7
-rw-r--r--  internal/gitaly/service/commit/find_all_commits.go | 10
-rw-r--r--  internal/gitaly/service/commit/find_all_commits_test.go | 2
-rw-r--r--  internal/gitaly/service/commit/find_commit.go | 2
-rw-r--r--  internal/gitaly/service/commit/find_commit_test.go | 23
-rw-r--r--  internal/gitaly/service/commit/find_commits.go | 13
-rw-r--r--  internal/gitaly/service/commit/find_commits_test.go | 8
-rw-r--r--  internal/gitaly/service/commit/isancestor_test.go | 2
-rw-r--r--  internal/gitaly/service/commit/languages.go | 18
-rw-r--r--  internal/gitaly/service/commit/last_commit_for_path.go | 5
-rw-r--r--  internal/gitaly/service/commit/last_commit_for_path_test.go | 14
-rw-r--r--  internal/gitaly/service/commit/list_commits_by_oid.go | 6
-rw-r--r--  internal/gitaly/service/commit/list_commits_by_ref_name.go | 6
-rw-r--r--  internal/gitaly/service/commit/list_files.go | 13
-rw-r--r--  internal/gitaly/service/commit/list_files_test.go | 4
-rw-r--r--  internal/gitaly/service/commit/list_last_commits_for_tree.go | 5
-rw-r--r--  internal/gitaly/service/commit/list_last_commits_for_tree_test.go | 23
-rw-r--r--  internal/gitaly/service/commit/server.go | 24
-rw-r--r--  internal/gitaly/service/commit/stats.go | 3
-rw-r--r--  internal/gitaly/service/commit/testhelper_test.go | 34
-rw-r--r--  internal/gitaly/service/commit/tree_entries.go | 4
-rw-r--r--  internal/gitaly/service/commit/tree_entries_test.go | 17
-rw-r--r--  internal/gitaly/service/commit/tree_entry.go | 5
-rw-r--r--  internal/gitaly/service/conflicts/list_conflict_files.go | 3
-rw-r--r--  internal/gitaly/service/conflicts/list_conflict_files_test.go | 9
-rw-r--r--  internal/gitaly/service/conflicts/resolve_conflicts.go | 49
-rw-r--r--  internal/gitaly/service/conflicts/resolve_conflicts_test.go | 222
-rw-r--r--  internal/gitaly/service/conflicts/server.go | 14
-rw-r--r--  internal/gitaly/service/conflicts/testhelper_test.go | 52
-rw-r--r--  internal/gitaly/service/dependencies.go | 23
-rw-r--r--  internal/gitaly/service/diff/commit_test.go | 11
-rw-r--r--  internal/gitaly/service/diff/find_changed_paths.go | 3
-rw-r--r--  internal/gitaly/service/diff/find_changed_paths_test.go | 3
-rw-r--r--  internal/gitaly/service/diff/raw_test.go | 28
-rw-r--r--  internal/gitaly/service/diff/server.go | 11
-rw-r--r--  internal/gitaly/service/diff/testhelper_test.go | 49
-rw-r--r--  internal/gitaly/service/hook/pack_objects_test.go | 16
-rw-r--r--  internal/gitaly/service/hook/post_receive_test.go | 17
-rw-r--r--  internal/gitaly/service/hook/pre_receive_test.go | 29
-rw-r--r--  internal/gitaly/service/hook/reference_transaction_test.go | 285
-rw-r--r--  internal/gitaly/service/hook/server_test.go | 5
-rw-r--r--  internal/gitaly/service/hook/testhelper_test.go | 2
-rw-r--r--  internal/gitaly/service/internalgitaly/testhelper_test.go | 40
-rw-r--r--  internal/gitaly/service/internalgitaly/walkrepos_test.go | 20
-rw-r--r--  internal/gitaly/service/namespace/namespace_test.go | 5
-rw-r--r--  internal/gitaly/service/namespace/testhelper_test.go | 50
-rw-r--r--  internal/gitaly/service/objectpool/alternates_test.go | 22
-rw-r--r--  internal/gitaly/service/objectpool/create.go | 2
-rw-r--r--  internal/gitaly/service/objectpool/create_test.go | 15
-rw-r--r--  internal/gitaly/service/objectpool/fetch_into_object_pool.go | 2
-rw-r--r--  internal/gitaly/service/objectpool/fetch_into_object_pool_test.go | 35
-rw-r--r--  internal/gitaly/service/objectpool/get.go | 2
-rw-r--r--  internal/gitaly/service/objectpool/get_test.go | 2
-rw-r--r--  internal/gitaly/service/objectpool/link_test.go | 42
-rw-r--r--  internal/gitaly/service/objectpool/reduplicate_test.go | 6
-rw-r--r--  internal/gitaly/service/objectpool/server.go | 16
-rw-r--r--  internal/gitaly/service/objectpool/testhelper_test.go | 71
-rw-r--r--  internal/gitaly/service/operations/apply_patch_test.go | 43
-rw-r--r--  internal/gitaly/service/operations/branches.go | 7
-rw-r--r--  internal/gitaly/service/operations/branches_test.go | 361
-rw-r--r--  internal/gitaly/service/operations/cherry_pick.go | 23
-rw-r--r--  internal/gitaly/service/operations/cherry_pick_test.go | 171
-rw-r--r--  internal/gitaly/service/operations/commit_files.go | 2
-rw-r--r--  internal/gitaly/service/operations/commit_files_test.go | 90
-rw-r--r--  internal/gitaly/service/operations/merge.go | 7
-rw-r--r--  internal/gitaly/service/operations/merge_test.go | 158
-rw-r--r--  internal/gitaly/service/operations/rebase.go | 119
-rw-r--r--  internal/gitaly/service/operations/rebase_test.go | 295
-rw-r--r--  internal/gitaly/service/operations/revert.go | 24
-rw-r--r--  internal/gitaly/service/operations/revert_test.go | 169
-rw-r--r--  internal/gitaly/service/operations/server.go | 19
-rw-r--r--  internal/gitaly/service/operations/squash.go | 4
-rw-r--r--  internal/gitaly/service/operations/squash_test.go | 119
-rw-r--r--  internal/gitaly/service/operations/submodules.go | 21
-rw-r--r--  internal/gitaly/service/operations/submodules_test.go | 142
-rw-r--r--  internal/gitaly/service/operations/tags.go | 13
-rw-r--r--  internal/gitaly/service/operations/tags_test.go | 435
-rw-r--r--  internal/gitaly/service/operations/testhelper_test.go | 70
-rw-r--r--  internal/gitaly/service/operations/update_branches_test.go | 46
-rw-r--r--  internal/gitaly/service/operations/update_with_hooks.go | 17
-rw-r--r--  internal/gitaly/service/operations/update_with_hooks_test.go | 23
-rw-r--r--  internal/gitaly/service/ref/branches.go | 3
-rw-r--r--  internal/gitaly/service/ref/branches_test.go | 2
-rw-r--r--  internal/gitaly/service/ref/delete_refs.go | 13
-rw-r--r--  internal/gitaly/service/ref/delete_refs_test.go | 32
-rw-r--r--  internal/gitaly/service/ref/list_new_blobs.go | 8
-rw-r--r--  internal/gitaly/service/ref/list_new_blobs_test.go | 5
-rw-r--r--  internal/gitaly/service/ref/list_new_commits.go | 9
-rw-r--r--  internal/gitaly/service/ref/list_new_commits_test.go | 5
-rw-r--r--  internal/gitaly/service/ref/pack_refs_test.go | 5
-rw-r--r--  internal/gitaly/service/ref/refs.go | 67
-rw-r--r--  internal/gitaly/service/ref/refs_test.go | 159
-rw-r--r--  internal/gitaly/service/ref/remote_branches.go | 7
-rw-r--r--  internal/gitaly/service/ref/remote_branches_test.go | 2
-rw-r--r--  internal/gitaly/service/ref/server.go | 17
-rw-r--r--  internal/gitaly/service/ref/tag_messages.go | 6
-rw-r--r--  internal/gitaly/service/ref/tag_messages_test.go | 7
-rw-r--r--  internal/gitaly/service/ref/testhelper_test.go | 10
-rw-r--r--  internal/gitaly/service/ref/util.go | 7
-rw-r--r--  internal/gitaly/service/remote/fetch_internal_remote.go | 10
-rw-r--r--  internal/gitaly/service/remote/fetch_internal_remote_test.go | 32
-rw-r--r--  internal/gitaly/service/remote/find_remote_root_ref.go | 55
-rw-r--r--  internal/gitaly/service/remote/find_remote_root_ref_test.go | 160
-rw-r--r--  internal/gitaly/service/remote/remotes.go | 66
-rw-r--r--  internal/gitaly/service/remote/remotes_test.go | 100
-rw-r--r--  internal/gitaly/service/remote/server.go | 21
-rw-r--r--  internal/gitaly/service/remote/testhelper_test.go | 20
-rw-r--r--  internal/gitaly/service/remote/update_remote_mirror.go | 9
-rw-r--r--  internal/gitaly/service/remote/update_remote_mirror_test.go | 118
-rw-r--r--  internal/gitaly/service/repository/apply_gitattributes.go | 16
-rw-r--r--  internal/gitaly/service/repository/apply_gitattributes_test.go | 236
-rw-r--r--  internal/gitaly/service/repository/archive.go | 20
-rw-r--r--  internal/gitaly/service/repository/archive_test.go | 24
-rw-r--r--  internal/gitaly/service/repository/calculate_checksum_test.go | 4
-rw-r--r--  internal/gitaly/service/repository/cleanup.go | 7
-rw-r--r--  internal/gitaly/service/repository/cleanup_test.go | 197
-rw-r--r--  internal/gitaly/service/repository/clone_from_pool.go | 4
-rw-r--r--  internal/gitaly/service/repository/clone_from_pool_internal.go | 4
-rw-r--r--  internal/gitaly/service/repository/clone_from_pool_internal_test.go | 25
-rw-r--r--  internal/gitaly/service/repository/clone_from_pool_test.go | 9
-rw-r--r--  internal/gitaly/service/repository/commit_graph.go | 45
-rw-r--r--  internal/gitaly/service/repository/commit_graph_test.go | 151
-rw-r--r--  internal/gitaly/service/repository/config.go | 116
-rw-r--r--  internal/gitaly/service/repository/config_test.go | 161
-rw-r--r--  internal/gitaly/service/repository/create.go | 40
-rw-r--r--  internal/gitaly/service/repository/create_bundle_test.go | 14
-rw-r--r--  internal/gitaly/service/repository/create_from_bundle_test.go | 14
-rw-r--r--  internal/gitaly/service/repository/create_from_snapshot_test.go | 12
-rw-r--r--  internal/gitaly/service/repository/create_from_url_test.go | 4
-rw-r--r--  internal/gitaly/service/repository/create_test.go | 95
-rw-r--r--  internal/gitaly/service/repository/fetch.go | 3
-rw-r--r--  internal/gitaly/service/repository/fetch_remote.go | 9
-rw-r--r--  internal/gitaly/service/repository/fetch_remote_test.go | 120
-rw-r--r--  internal/gitaly/service/repository/fetch_test.go | 20
-rw-r--r--  internal/gitaly/service/repository/fork_test.go | 29
-rw-r--r--  internal/gitaly/service/repository/gc.go | 44
-rw-r--r--  internal/gitaly/service/repository/gc_test.go | 206
-rw-r--r--  internal/gitaly/service/repository/license.go | 2
-rw-r--r--  internal/gitaly/service/repository/midx_test.go | 7
-rw-r--r--  internal/gitaly/service/repository/optimize.go | 3
-rw-r--r--  internal/gitaly/service/repository/optimize_test.go | 56
-rw-r--r--  internal/gitaly/service/repository/raw_changes.go | 8
-rw-r--r--  internal/gitaly/service/repository/raw_changes_test.go | 26
-rw-r--r--  internal/gitaly/service/repository/rebase_in_progress_test.go | 8
-rw-r--r--  internal/gitaly/service/repository/remove_test.go | 2
-rw-r--r--  internal/gitaly/service/repository/rename_test.go | 6
-rw-r--r--  internal/gitaly/service/repository/repack.go | 4
-rw-r--r--  internal/gitaly/service/repository/repack_test.go | 29
-rw-r--r--  internal/gitaly/service/repository/replicate.go | 67
-rw-r--r--  internal/gitaly/service/repository/replicate_test.go | 70
-rw-r--r--  internal/gitaly/service/repository/repository.go | 3
-rw-r--r--  internal/gitaly/service/repository/repository_test.go | 4
-rw-r--r--  internal/gitaly/service/repository/search_files_test.go | 30
-rw-r--r--  internal/gitaly/service/repository/server.go | 16
-rw-r--r--  internal/gitaly/service/repository/snapshot_test.go | 30
-rw-r--r--  internal/gitaly/service/repository/squash_in_progress_test.go | 4
-rw-r--r--  internal/gitaly/service/repository/testhelper_test.go | 55
-rw-r--r--  internal/gitaly/service/repository/write_ref.go | 20
-rw-r--r--  internal/gitaly/service/repository/write_ref_test.go | 5
-rw-r--r--  internal/gitaly/service/server/disk_stats_test.go | 11
-rw-r--r--  internal/gitaly/service/server/info_test.go | 61
-rw-r--r--  internal/gitaly/service/server/storage_status_unix.go | 4
-rw-r--r--  internal/gitaly/service/setup/register.go | 80
-rw-r--r--  internal/gitaly/service/smarthttp/inforefs_test.go | 35
-rw-r--r--  internal/gitaly/service/smarthttp/receive_pack_test.go | 182
-rw-r--r--  internal/gitaly/service/smarthttp/server.go | 5
-rw-r--r--  internal/gitaly/service/smarthttp/testhelper_test.go | 8
-rw-r--r--  internal/gitaly/service/smarthttp/upload_pack.go | 58
-rw-r--r--  internal/gitaly/service/smarthttp/upload_pack_test.go | 46
-rw-r--r--  internal/gitaly/service/ssh/receive_pack.go | 13
-rw-r--r--  internal/gitaly/service/ssh/receive_pack_test.go | 262
-rw-r--r--  internal/gitaly/service/ssh/server.go | 11
-rw-r--r--  internal/gitaly/service/ssh/testhelper_test.go | 7
-rw-r--r--  internal/gitaly/service/ssh/upload_pack_test.go | 16
-rw-r--r--  internal/gitaly/service/wiki/delete_page_test.go | 5
-rw-r--r--  internal/gitaly/service/wiki/find_file.go | 46
-rw-r--r--  internal/gitaly/service/wiki/find_file_test.go | 232
-rw-r--r--  internal/gitaly/service/wiki/find_page_test.go | 6
-rw-r--r--  internal/gitaly/service/wiki/get_page_versions.go | 41
-rw-r--r--  internal/gitaly/service/wiki/get_page_versions_test.go | 185
-rw-r--r--  internal/gitaly/service/wiki/testhelper_test.go | 41
-rw-r--r--  internal/gitaly/service/wiki/update_page_test.go | 7
-rw-r--r--  internal/gitaly/service/wiki/write_page_test.go | 7
-rw-r--r--  internal/gitaly/transaction/manager.go | 37
-rw-r--r--  internal/gitaly/transaction/manager_test.go | 271
-rw-r--r--  internal/gitaly/transaction/mock.go | 11
-rw-r--r--  internal/gitalyssh/gitalyssh_test.go | 7
-rw-r--r--  internal/gitlab/client.go | 55
-rw-r--r--  internal/gitlab/http_client.go (renamed from internal/gitaly/hook/access.go) | 253
-rw-r--r--  internal/gitlab/http_client_test.go (renamed from internal/gitaly/hook/access_test.go) | 101
-rw-r--r--  internal/gitlab/mock_client.go | 38
-rw-r--r--  internal/gitlab/testdata/certs/server.crt (renamed from internal/gitaly/hook/testdata/certs/server.crt) | 0
-rw-r--r--  internal/gitlab/testdata/certs/server.key (renamed from internal/gitaly/hook/testdata/certs/server.key) | 0
-rw-r--r--  internal/gitlab/testhelper_test.go | 15
-rw-r--r--  internal/helper/suppressed_context.go | 18
-rw-r--r--  internal/helper/suppressed_context_test.go | 58
-rw-r--r--  internal/metadata/featureflag/feature_flags.go | 28
-rw-r--r--  internal/middleware/commandstatshandler/commandstatshandler_test.go | 16
-rw-r--r--  internal/praefect/coordinator.go | 81
-rw-r--r--  internal/praefect/coordinator_pg_test.go | 142
-rw-r--r--  internal/praefect/coordinator_test.go | 332
-rw-r--r--  internal/praefect/datastore/repository_store.go | 33
-rw-r--r--  internal/praefect/datastore/repository_store_mock.go | 6
-rw-r--r--  internal/praefect/datastore/repository_store_test.go | 58
-rw-r--r--  internal/praefect/grpc-proxy/proxy/handler.go | 17
-rw-r--r--  internal/praefect/grpc-proxy/proxy/handler_ext_test.go | 523
-rw-r--r--  internal/praefect/grpc-proxy/proxy/handler_test.go | 532
-rw-r--r--  internal/praefect/info_service_test.go | 11
-rw-r--r--  internal/praefect/middleware/errorhandler_test.go | 13
-rw-r--r--  internal/praefect/nodes/local_elector_test.go | 12
-rw-r--r--  internal/praefect/nodes/manager_test.go | 27
-rw-r--r--  internal/praefect/nodes/per_repository_test.go | 56
-rw-r--r--  internal/praefect/nodes/sql_elector_test.go | 13
-rw-r--r--  internal/praefect/protoregistry/protoregistry_test.go | 24
-rw-r--r--  internal/praefect/replicator_test.go | 56
-rw-r--r--  internal/praefect/repository_exists_test.go | 2
-rw-r--r--  internal/praefect/router_node_manager.go | 27
-rw-r--r--  internal/praefect/router_per_repository.go | 47
-rw-r--r--  internal/praefect/router_per_repository_test.go | 208
-rw-r--r--  internal/praefect/server_factory_test.go | 14
-rw-r--r--  internal/praefect/server_test.go | 74
-rw-r--r--  internal/praefect/service/info/consistencycheck_test.go | 10
-rw-r--r--  internal/praefect/service/transaction/server.go | 7
-rw-r--r--  internal/praefect/transactions/manager.go | 74
-rw-r--r--  internal/praefect/transactions/subtransaction.go | 87
-rw-r--r--  internal/praefect/transactions/subtransaction_test.go | 172
-rw-r--r--  internal/praefect/transactions/transaction.go | 32
-rw-r--r--  internal/praefect/transactions/transaction_test.go | 6
-rw-r--r--  internal/prometheus/metrics/metrics.go | 3
-rw-r--r--  internal/safe/file_writer_test.go | 5
-rw-r--r--  internal/tempdir/tempdir_test.go | 2
-rw-r--r--  internal/testhelper/configure.go | 50
-rw-r--r--  internal/testhelper/grpc.go | 10
-rw-r--r--  internal/testhelper/promtest/histogram.go | 23
-rw-r--r--  internal/testhelper/testcfg/gitaly_builder.go | 2
-rw-r--r--  internal/testhelper/testhelper.go | 90
-rw-r--r--  internal/testhelper/testserver.go | 335
-rw-r--r--  internal/testhelper/testserver/gitaly.go | 46
-rw-r--r--  internal/transaction/txinfo/server.go (renamed from internal/praefect/metadata/server.go) | 3
-rw-r--r--  internal/transaction/txinfo/server_test.go (renamed from internal/praefect/metadata/server_test.go) | 2
-rw-r--r--  internal/transaction/txinfo/transaction.go (renamed from internal/praefect/metadata/transaction.go) | 28
-rw-r--r--  internal/transaction/voting/testhelper_test.go | 21
-rw-r--r--  internal/transaction/voting/vote.go (renamed from internal/gitaly/transaction/vote.go) | 8
-rw-r--r--  internal/transaction/voting/vote_test.go (renamed from internal/gitaly/transaction/vote_test.go) | 2
-rw-r--r--  proto/blob.proto | 68
-rw-r--r--  proto/go/gitalypb/blob.pb.go | 472
-rw-r--r--  proto/go/gitalypb/remote.pb.go | 154
-rw-r--r--  proto/go/gitalypb/repository-service.pb.go | 692
-rw-r--r--  proto/go/gitalypb/wiki.pb.go | 519
-rw-r--r--  proto/remote.proto | 24
-rw-r--r--  proto/repository-service.proto | 34
-rw-r--r--  proto/wiki.proto | 37
-rw-r--r--  ruby/lib/gitaly_server/operations_service.rb | 16
-rw-r--r--  ruby/lib/gitaly_server/wiki_service.rb | 59
-rw-r--r--  ruby/lib/gitlab/git/hooks_service.rb | 3
-rw-r--r--  ruby/lib/gitlab/git/repository.rb | 10
-rw-r--r--  ruby/lib/gitlab/git/wiki.rb | 12
-rw-r--r--  ruby/lib/gitlab/git/wiki_file.rb | 20
-rw-r--r--  ruby/proto/gitaly/blob_pb.rb | 20
-rw-r--r--  ruby/proto/gitaly/blob_services_pb.rb | 19
-rw-r--r--  ruby/proto/gitaly/remote_pb.rb | 2
-rw-r--r--  ruby/proto/gitaly/remote_services_pb.rb | 5
-rw-r--r--  ruby/proto/gitaly/repository-service_pb.rb | 13
-rw-r--r--  ruby/proto/gitaly/repository-service_services_pb.rb | 3
-rw-r--r--  ruby/proto/gitaly/version.rb | 2
-rw-r--r--  ruby/proto/gitaly/wiki_pb.rb | 24
-rw-r--r--  ruby/proto/gitaly/wiki_services_pb.rb | 2
-rw-r--r--  ruby/spec/lib/gitlab/git/repository_spec.rb | 47
450 files changed, 13792 insertions, 11691 deletions
diff --git a/.gitignore b/.gitignore
index 1d71a918c..8e0817d8f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,15 +1,3 @@
-# Top-level executables
-/gitaly
-/gitaly-backup
-/gitaly-blackbox
-/gitaly-debug
-/gitaly-git2go
-/gitaly-hooks
-/gitaly-lfs-smudge
-/gitaly-ssh
-/gitaly-wrapper
-/praefect
-
# Generic artifacts
/_build/
/*.deb
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 187a2158f..e54f793a4 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,6 +2,7 @@ stages:
- build
- test
- publish
+ - qa
default:
image: registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.7-golang-1.15-git-2.31
@@ -129,11 +130,12 @@ binaries:
script:
# Just in case we start running CI builds on other architectures in future
- go version
- - make binaries
+ - make build
+ - cd _build && sha256sum bin/* | tee checksums.sha256.txt
artifacts:
paths:
- - _build/assembly/checksums.sha256.txt
- - _build/assembly/bin/
+ - _build/checksums.sha256.txt
+ - _build/bin/
name: "${CI_JOB_NAME}:go-${GO_VERSION}-git-${GIT_VERSION}"
expire_in: 6 months
parallel:
@@ -244,8 +246,8 @@ praefect_sql_connect:
# Sanity check: direct ping with psql
- PGPASSWORD=$POSTGRES_PASSWORD psql -h postgres -U $POSTGRES_USER -d $POSTGRES_DB -c 'select now()'
- ruby -rerb -e 'ERB.new(ARGF.read).run' _support/config.praefect.toml.ci-sql-test.erb > config.praefect.toml
- - ./praefect -config config.praefect.toml sql-ping
- - ./praefect -config config.praefect.toml sql-migrate
+ - ./_build/bin/praefect -config config.praefect.toml sql-ping
+ - ./_build/bin/praefect -config config.praefect.toml sql-migrate
praefect_sql_test:
<<: *test_definition
@@ -295,8 +297,18 @@ objectinfo_fuzz_test:
code_navigation:
allow_failure: true
script:
- - go get github.com/sourcegraph/lsif-go/cmd/lsif-go
+ - go get github.com/sourcegraph/lsif-go/cmd/lsif-go@v1.3.1
- ~/go/bin/lsif-go
artifacts:
reports:
lsif: dump.lsif
+
+trigger-qa:
+ stage: qa
+ when: manual
+ trigger:
+ project: gitlab-org/build/omnibus-gitlab-mirror
+ variables:
+ ALTERNATIVE_SOURCES: "true"
+ GITALY_SERVER_VERSION: $CI_COMMIT_SHA
+ EE: "true"
diff --git a/.gitlab/changelog_config.yml b/.gitlab/changelog_config.yml
new file mode 100644
index 000000000..7aa18cc8f
--- /dev/null
+++ b/.gitlab/changelog_config.yml
@@ -0,0 +1,38 @@
+---
+# Settings for generating changelogs using the GitLab API. See
+# https://docs.gitlab.com/ee/api/repositories.html#generate-changelog-data for
+# more information.
+categories:
+ added: Added
+ fixed: Fixed
+ changed: Changed
+ deprecated: Deprecated
+ removed: Removed
+ security: Security
+ performance: Performance
+ other: Other
+template: |
+ {% if categories %}
+ {% each categories %}
+ ### {{ title }} ({% if single_change %}1 change{% else %}{{ count }} changes{% end %})
+
+ {% each entries %}
+ - [{{ title }}]({{ commit.reference }})\
+ {% if author.contributor %} by {{ author.reference }}{% end %}\
+ {% if commit.trailers.MR %}\
+ ([merge request]({{ commit.trailers.MR }}))\
+ {% else %}\
+ {% if merge_request %}\
+ ([merge request]({{ merge_request.reference }}))\
+ {% end %}\
+ {% end %}\
+ {% if commit.trailers.EE %}\
+ **GitLab Enterprise Edition**\
+ {% end %}
+
+ {% end %}
+
+ {% end %}
+ {% else %}
+ No changes.
+ {% end %}
diff --git a/.golangci.yml b/.golangci.yml
index ef59d1c85..299976cbf 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -529,10 +529,6 @@ issues:
text: "exported method `StaticHealthChecker.HealthyNodes` should have comment or be unexported"
- linters:
- golint
- path: "internal/praefect/metadata/server.go"
- text: "exported method `PraefectServer.Address` should have comment or be unexported"
- - linters:
- - golint
path: "internal/praefect/metrics/prometheus.go"
text: "exported var `MethodTypeCounter` should have comment or be unexported"
- linters:
@@ -657,10 +653,6 @@ issues:
text: "exported var `ErrNotFound` should have comment or be unexported"
- linters:
- golint
- path: "internal/prometheus/metrics/metrics.go"
- text: "exported type `HistogramVec` should have comment or be unexported"
- - linters:
- - golint
path: "internal/storage/locator.go"
text: "exported var `ErrRelativePathEscapesRoot` should have comment or be unexported"
- linters:
@@ -679,38 +671,6 @@ issues:
- golint
path: "internal/testhelper/promtest/counter.go"
text: "exported type `MockCounter` should have comment or be unexported"
- - linters:
- - golint
- path: "internal/testhelper/promtest/histogram.go"
- text: "exported function `NewMockHistogramVec` should have comment or be unexported"
- - linters:
- - golint
- path: "internal/testhelper/promtest/histogram.go"
- text: "exported type `MockHistogramVec` should have comment or be unexported"
- - linters:
- - golint
- path: "internal/testhelper/promtest/histogram.go"
- text: "exported method `MockHistogramVec.LabelsCalled` should have comment or be unexported"
- - linters:
- - golint
- path: "internal/testhelper/promtest/histogram.go"
- text: "exported method `MockHistogramVec.Observer` should have comment or be unexported"
- - linters:
- - golint
- path: "internal/testhelper/promtest/histogram.go"
- text: "exported method `MockHistogramVec.WithLabelValues` should have comment or be unexported"
- - linters:
- - golint
- path: "internal/testhelper/promtest/histogram.go"
- text: "exported type `MockObserver` should have comment or be unexported"
- - linters:
- - golint
- path: "internal/testhelper/promtest/histogram.go"
- text: "exported method `MockObserver.Observe` should have comment or be unexported"
- - linters:
- - golint
- path: "internal/testhelper/promtest/histogram.go"
- text: "exported method `MockObserver.Observed` should have comment or be unexported"
## END golint exclusions
##
## BEGIN errcheck exclusions
diff --git a/.tool-version b/.tool-versions
index 56d98ca03..0861384d5 100644
--- a/.tool-version
+++ b/.tool-versions
@@ -1,3 +1,3 @@
# Versions of Gitaly dependencies managed by asdf.
-golang 1.15.11 1.16.3
+golang 1.15.11 1.16.4
ruby 2.7.2
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a58fa2fad..aa980e61b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,17 @@
# Gitaly changelog
+## 13.11.4 (2021-05-14)
+
+- No changes.
+
+## 13.11.3 (2021-04-30)
+
+- No changes.
+
+## 13.11.2 (2021-04-27)
+
+- No changes.
+
## 13.11.1 (2021-04-22)
- No changes.
@@ -62,6 +74,10 @@
- Update gitlab-gollum-rugged_adapter to 0.4.4.4.gitlab.1. !3357 (Takuya Noguchi)
+## 13.10.4 (2021-04-27)
+
+- No changes.
+
## 13.10.3 (2021-04-13)
- No changes.
@@ -138,6 +154,10 @@
- Remove unused Ruby code. !3203
+## 13.9.7 (2021-04-27)
+
+- No changes.
+
## 13.9.6 (2021-04-13)
- No changes.
diff --git a/Dangerfile b/Dangerfile
index 0e7836403..8289c7713 100644
--- a/Dangerfile
+++ b/Dangerfile
@@ -9,8 +9,6 @@ GITALY_TEAM = %w[
zj-gitlab
]
-REVIEWERS = GITALY_TEAM - %w[zj-gitlab]
-
danger.import_dangerfile(path: 'danger/assignees')
danger.import_dangerfile(path: 'danger/changelog')
danger.import_dangerfile(path: 'danger/labels')
diff --git a/Makefile b/Makefile
index 1e0d86b57..cfac5b032 100644
--- a/Makefile
+++ b/Makefile
@@ -31,7 +31,6 @@ prefix ?= ${PREFIX}
exec_prefix ?= ${prefix}
bindir ?= ${exec_prefix}/bin
INSTALL_DEST_DIR := ${DESTDIR}${bindir}
-ASSEMBLY_ROOT ?= ${BUILD_DIR}/assembly
GIT_PREFIX ?= ${GIT_INSTALL_DIR}
# Tools
@@ -81,8 +80,6 @@ endif
# Git target
GIT_REPO_URL ?= https://gitlab.com/gitlab-org/gitlab-git.git
-GIT_BINARIES_URL ?= https://gitlab.com/gitlab-org/gitlab-git/-/jobs/artifacts/${GIT_VERSION}/raw/git_full_bins.tgz?job=build
-GIT_BINARIES_HASH ?= 9d8a5f0177eb723c10a63423280d47ab1e67de7a6f2608ec420ead009f6b7394
GIT_INSTALL_DIR := ${DEPENDENCY_DIR}/git/install
GIT_SOURCE_DIR := ${DEPENDENCY_DIR}/git/source
GIT_QUIET :=
@@ -136,7 +133,7 @@ ifndef LIBGIT2_BUILD_OPTIONS
endif
# These variables control test options and artifacts
-TEST_PACKAGES ?= $(call find_go_packages)
+TEST_PACKAGES ?= ${SOURCE_DIR}/...
TEST_OPTIONS ?= -v -count=1
TEST_REPORT_DIR ?= ${BUILD_DIR}/reports
TEST_OUTPUT_NAME ?= go-${GO_VERSION}-git-${GIT_VERSION}
@@ -148,21 +145,12 @@ TEST_REPO := ${TEST_REPO_DIR}/gitlab-test.git
TEST_REPO_GIT := ${TEST_REPO_DIR}/gitlab-git-test.git
BENCHMARK_REPO := ${TEST_REPO_DIR}/benchmark.git
-# uniq is a helper function to filter out any duplicate values in the single
-# parameter it accepts.
-#
-# Credits go to https://stackoverflow.com/questions/16144115/makefile-remove-duplicate-words-without-sorting
-uniq = $(if $(1),$(firstword $(1)) $(call uniq,$(filter-out $(firstword $(1)),$(1))))
-
# Find all commands.
find_commands = $(notdir $(shell find ${SOURCE_DIR}/cmd -mindepth 1 -maxdepth 1 -type d -print))
# Find all command binaries.
find_command_binaries = $(addprefix ${BUILD_DIR}/bin/, $(call find_commands))
-
# Find all Go source files.
-find_go_sources = $(shell find ${SOURCE_DIR} -type d \( -name ruby -o -name vendor -o -name testdata -o -name '_*' -o -path '*/proto/go' \) -prune -o -type f -name '*.go' -not -name '*.pb.go' -print | sort -u)
-# Find all Go packages.
-find_go_packages = $(call uniq,$(dir $(call find_go_sources)))
+find_go_sources = $(shell find ${SOURCE_DIR} -type d \( -name ruby -o -name vendor -o -name testdata -o -name '_*' -o -path '*/proto/go' \) -prune -o -type f -name '*.go' -not -name '*.pb.go' -print | sort -u)
# run_go_tests will execute Go tests with all required parameters. Its
# behaviour can be modified via the following variables:
@@ -193,8 +181,7 @@ export CGO_LDFLAGS_ALLOW = -D_THREAD_SAFE
.SECONDARY:
.PHONY: all
-all: INSTALL_DEST_DIR = ${SOURCE_DIR}
-all: install
+all: build
.PHONY: build
build: ${SOURCE_DIR}/.ruby-bundle libgit2
@@ -205,38 +192,6 @@ install: build
${Q}mkdir -p ${INSTALL_DEST_DIR}
install $(call find_command_binaries) ${INSTALL_DEST_DIR}
-.PHONY: force-ruby-bundle
-force-ruby-bundle:
- ${Q}rm -f ${SOURCE_DIR}/.ruby-bundle
-
-# Assembles all runtime components into a directory
-# Used by the GDK: run 'make assemble ASSEMBLY_ROOT=.../gitaly'
-.PHONY: assemble
-assemble: force-ruby-bundle assemble-internal
-
-# assemble-internal does not force 'bundle install' to run again
-.PHONY: assemble-internal
-assemble-internal: assemble-ruby assemble-go
-
-.PHONY: assemble-go
-assemble-go: build
- ${Q}rm -rf ${ASSEMBLY_ROOT}/bin
- ${Q}mkdir -p ${ASSEMBLY_ROOT}/bin
- install $(call find_command_binaries) ${ASSEMBLY_ROOT}/bin
-
-.PHONY: assemble-ruby
-assemble-ruby:
- ${Q}mkdir -p ${ASSEMBLY_ROOT}
- ${Q}rm -rf ${GITALY_RUBY_DIR}/tmp
- ${Q}mkdir -p ${ASSEMBLY_ROOT}/ruby/
- rsync -a --delete ${GITALY_RUBY_DIR}/ ${ASSEMBLY_ROOT}/ruby/
- ${Q}rm -rf ${ASSEMBLY_ROOT}/ruby/spec
-
-.PHONY: binaries
-binaries: assemble
- ${Q}if [ ${ARCH} != 'x86_64' ]; then echo Incorrect architecture for build: ${ARCH}; exit 1; fi
- ${Q}cd ${ASSEMBLY_ROOT} && sha256sum bin/* | tee checksums.sha256.txt
-
.PHONY: prepare-tests
prepare-tests: git libgit2 prepare-test-repos ${SOURCE_DIR}/.ruby-bundle
@@ -279,7 +234,7 @@ race-go: TEST_OPTIONS := ${TEST_OPTIONS} -race
race-go: test-go
.PHONY: rspec
-rspec: assemble-go prepare-tests
+rspec: build prepare-tests
${Q}cd ${GITALY_RUBY_DIR} && PATH='${SOURCE_DIR}/internal/testhelper/testdata/home/bin:${PATH}' bundle exec rspec
.PHONY: verify
@@ -319,7 +274,7 @@ notice: ${SOURCE_DIR}/NOTICE
.PHONY: clean
clean:
- rm -rf ${BUILD_DIR} ${SOURCE_DIR}/internal/testhelper/testdata/data/ ${SOURCE_DIR}/ruby/.bundle/ ${SOURCE_DIR}/ruby/vendor/bundle/ $(addprefix ${SOURCE_DIR}/, $(notdir $(call find_commands)))
+ rm -rf ${BUILD_DIR} ${SOURCE_DIR}/internal/testhelper/testdata/data/ ${SOURCE_DIR}/ruby/.bundle/ ${SOURCE_DIR}/ruby/vendor/bundle/
.PHONY: clean-ruby-vendor-go
clean-ruby-vendor-go:
@@ -440,7 +395,6 @@ ${LIBGIT2_INSTALL_DIR}/lib/libgit2.a: ${DEPENDENCY_DIR}/libgit2.version
${Q}CMAKE_BUILD_PARALLEL_LEVEL=$(shell nproc) cmake --build ${LIBGIT2_BUILD_DIR} --target install
go install -a github.com/libgit2/git2go/${GIT2GO_VERSION}
-ifndef GIT_USE_PREBUILT_BINARIES
${GIT_INSTALL_DIR}/bin/git: ${DEPENDENCY_DIR}/git.version
${Q}${GIT} -c init.defaultBranch=master init ${GIT_QUIET} ${GIT_SOURCE_DIR}
${Q}${GIT} -C "${GIT_SOURCE_DIR}" config remote.origin.url ${GIT_REPO_URL}
@@ -453,18 +407,7 @@ ifneq (${GIT_PATCHES},)
endif
${Q}rm -rf ${GIT_INSTALL_DIR}
${Q}mkdir -p ${GIT_INSTALL_DIR}
- env -u MAKEFLAGS -u GIT_VERSION ${MAKE} -C ${GIT_SOURCE_DIR} -j$(shell nproc) prefix=${GIT_PREFIX} ${GIT_BUILD_OPTIONS} install
-else
-${DEPENDENCY_DIR}/git_full_bins.tgz: ${DEPENDENCY_DIR}/git.version
- curl -o $@.tmp --silent --show-error -L ${GIT_BINARIES_URL}
- ${Q}printf '${GIT_BINARIES_HASH} $@.tmp' | sha256sum -c -
- ${Q}mv $@.tmp $@
-
-${GIT_INSTALL_DIR}/bin/git: ${DEPENDENCY_DIR}/git_full_bins.tgz
- ${Q}rm -rf ${GIT_INSTALL_DIR}
- ${Q}mkdir -p ${GIT_INSTALL_DIR}
- tar -C ${GIT_INSTALL_DIR} -xvzf ${DEPENDENCY_DIR}/git_full_bins.tgz
-endif
+ env -u PROFILE -u MAKEFLAGS -u GIT_VERSION ${MAKE} -C ${GIT_SOURCE_DIR} -j$(shell nproc) prefix=${GIT_PREFIX} ${GIT_BUILD_OPTIONS} install
${TOOLS_DIR}/protoc.zip: TOOL_VERSION = ${PROTOC_VERSION}
${TOOLS_DIR}/protoc.zip: ${TOOLS_DIR}/protoc.version
diff --git a/NOTICE b/NOTICE
index 7d91f0614..9d3f4de91 100644
--- a/NOTICE
+++ b/NOTICE
@@ -65,38 +65,38 @@ LICENSE - cloud.google.com/go
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -104,21 +104,21 @@ LICENSE - cloud.google.com/go
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -162,7 +162,7 @@ LICENSE - cloud.google.com/go
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -201,7 +201,7 @@ LICENSE - cloud.google.com/go
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -237,24 +237,24 @@ LICENSE - cloud.google.com/go
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -265,7 +265,7 @@ Copyright (C) 2013 Blake Mizerany
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
-&#34;Software&#34;), to deal in the Software without restriction, including
+"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
@@ -274,7 +274,7 @@ the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND,
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
@@ -289,7 +289,7 @@ The MIT License (MIT)
Copyright (c) 2015 Nick Galbreath
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -298,7 +298,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -319,7 +319,7 @@ Redistribution and use in source and binary forms, with or without modification,
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS &#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - github.com/codahale/hdrhistogram
@@ -328,7 +328,7 @@ The MIT License (MIT)
Copyright (c) 2014 Coda Hale
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -337,7 +337,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -355,38 +355,38 @@ LICENSE - github.com/containerd/cgroups
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -394,21 +394,21 @@ LICENSE - github.com/containerd/cgroups
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -452,7 +452,7 @@ LICENSE - github.com/containerd/cgroups
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -491,7 +491,7 @@ LICENSE - github.com/containerd/cgroups
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -527,24 +527,24 @@ LICENSE - github.com/containerd/cgroups
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -559,55 +559,55 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
-&#34;License&#34; shall mean the terms and conditions for use, reproduction, and
+"License" shall mean the terms and conditions for use, reproduction, and
distribution as defined by Sections 1 through 9 of this document.
-&#34;Licensor&#34; shall mean the copyright owner or entity authorized by the copyright
+"Licensor" shall mean the copyright owner or entity authorized by the copyright
owner that is granting the License.
-&#34;Legal Entity&#34; shall mean the union of the acting entity and all other entities
+"Legal Entity" shall mean the union of the acting entity and all other entities
that control, are controlled by, or are under common control with that entity.
-For the purposes of this definition, &#34;control&#34; means (i) the power, direct or
+For the purposes of this definition, "control" means (i) the power, direct or
indirect, to cause the direction or management of such entity, whether by
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
-&#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity exercising
+"You" (or "Your") shall mean an individual or Legal Entity exercising
permissions granted by this License.
-&#34;Source&#34; form shall mean the preferred form for making modifications, including
+"Source" form shall mean the preferred form for making modifications, including
but not limited to software source code, documentation source, and configuration
files.
-&#34;Object&#34; form shall mean any form resulting from mechanical transformation or
+"Object" form shall mean any form resulting from mechanical transformation or
translation of a Source form, including but not limited to compiled object code,
generated documentation, and conversions to other media types.
-&#34;Work&#34; shall mean the work of authorship, whether in Source or Object form, made
+"Work" shall mean the work of authorship, whether in Source or Object form, made
available under the License, as indicated by a copyright notice that is included
in or attached to the work (an example is provided in the Appendix below).
-&#34;Derivative Works&#34; shall mean any work, whether in Source or Object form, that
+"Derivative Works" shall mean any work, whether in Source or Object form, that
is based on (or derived from) the Work and for which the editorial revisions,
annotations, elaborations, or other modifications represent, as a whole, an
original work of authorship. For the purposes of this License, Derivative Works
shall not include works that remain separable from, or merely link (or bind by
name) to the interfaces of, the Work and Derivative Works thereof.
-&#34;Contribution&#34; shall mean any work of authorship, including the original version
+"Contribution" shall mean any work of authorship, including the original version
of the Work and any modifications or additions to that Work or Derivative Works
thereof, that is intentionally submitted to Licensor for inclusion in the Work
by the copyright owner or by an individual or Legal Entity authorized to submit
on behalf of the copyright owner. For the purposes of this definition,
-&#34;submitted&#34; means any form of electronic, verbal, or written communication sent
+"submitted" means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems, and
issue tracking systems that are managed by, or on behalf of, the Licensor for
the purpose of discussing and improving the Work, but excluding communication
that is conspicuously marked or otherwise designated in writing by the copyright
-owner as &#34;Not a Contribution.&#34;
+owner as "Not a Contribution."
-&#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity on behalf
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
of whom a Contribution has been received by Licensor and subsequently
incorporated within the Work.
@@ -648,7 +648,7 @@ You must retain, in the Source form of any Derivative Works that You distribute,
all copyright, patent, trademark, and attribution notices from the Source form
of the Work, excluding those notices that do not pertain to any part of the
Derivative Works; and
-If the Work includes a &#34;NOTICE&#34; text file as part of its distribution, then any
+If the Work includes a "NOTICE" text file as part of its distribution, then any
Derivative Works that You distribute must include a readable copy of the
attribution notices contained within such NOTICE file, excluding those notices
that do not pertain to any part of the Derivative Works, in at least one of the
@@ -686,7 +686,7 @@ reproducing the content of the NOTICE file.
7. Disclaimer of Warranty.
Unless required by applicable law or agreed to in writing, Licensor provides the
-Work (and each Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
including, without limitation, any warranties or conditions of TITLE,
NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
@@ -722,23 +722,23 @@ END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work
To apply the Apache License to your work, attach the following boilerplate
-notice, with the fields enclosed by brackets &#34;[]&#34; replaced with your own
-identifying information. (Don&#39;t include the brackets!) The text should be
+notice, with the fields enclosed by brackets "[]" replaced with your own
+identifying information. (Don't include the brackets!) The text should be
enclosed in the appropriate comment syntax for the file format. We also
recommend that a file or class name and description of purpose be included on
-the same &#34;printed page&#34; as the copyright notice for easier identification within
+the same "printed page" as the copyright notice for easier identification within
third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -755,13 +755,13 @@ This product includes software developed at CoreOS, Inc.
LICENSE - github.com/davecgh/go-spew/spew
ISC License
-Copyright (c) 2012-2016 Dave Collins &lt;dave@davec.name&gt;
+Copyright (c) 2012-2016 Dave Collins <dave@davec.name>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34; AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
@@ -776,7 +776,7 @@ The MIT License (MIT)
Copyright (c) 2016 Damian Gryski damian@gryski.com
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -785,7 +785,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -804,38 +804,38 @@ LICENSE - github.com/docker/go-units
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -843,21 +843,21 @@ LICENSE - github.com/docker/go-units
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -901,7 +901,7 @@ LICENSE - github.com/docker/go-units
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -940,7 +940,7 @@ LICENSE - github.com/docker/go-units
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -975,14 +975,14 @@ LICENSE - github.com/docker/go-units
Copyright 2015 Docker, Inc.
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -994,7 +994,7 @@ MIT License
Copyright (c) 2018 Daniel Potapov
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -1003,7 +1003,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -1017,7 +1017,7 @@ MIT License
Copyright (c) 2017 Eric Zhu
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -1026,7 +1026,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -1049,7 +1049,7 @@ modification, are permitted provided that the following conditions are met:
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS &#34;AS IS&#34;
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
@@ -1064,13 +1064,13 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
AVL Tree:
-Copyright (c) 2017 Benjamin Scher Purcell &lt;benjapurcell@gmail.com&gt;
+Copyright (c) 2017 Benjamin Scher Purcell <benjapurcell@gmail.com>
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34; AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
@@ -1088,7 +1088,7 @@ Redistribution and use in source and binary forms, with or without modification,
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS &#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE.md - github.com/git-lfs/git-lfs
@@ -1097,7 +1097,7 @@ MIT License
Copyright (c) 2014-2020 GitHub, Inc. and Git LFS contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -1106,7 +1106,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -1134,7 +1134,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -1155,7 +1155,7 @@ MIT License
Copyright (c) 2017- GitHub, Inc. and Git LFS contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -1164,7 +1164,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -1174,11 +1174,11 @@ SOFTWARE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - github.com/git-lfs/go-netrc/netrc
-Original version Copyright © 2010 Fazlul Shahriar &lt;fshahriar@gmail.com&gt;. Newer
-portions Copyright © 2014 Blake Gentry &lt;blakesgentry@gmail.com&gt;.
+Original version Copyright © 2010 Fazlul Shahriar <fshahriar@gmail.com>. Newer
+portions Copyright © 2014 Blake Gentry <blakesgentry@gmail.com>.
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -1187,7 +1187,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -1202,7 +1202,7 @@ MIT License
Copyright (c) 2018- GitHub, Inc. and Git LFS contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -1211,7 +1211,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -1225,7 +1225,7 @@ Apache License
==============
_Version 2.0, January 2004_
-_&amp;lt;&lt;http://www.apache.org/licenses/&gt;&amp;gt;_
+_&lt;<http://www.apache.org/licenses/>&gt;_
### Terms and Conditions for use, reproduction, and distribution
@@ -1396,7 +1396,7 @@ _END OF TERMS AND CONDITIONS_
To apply the Apache License to your work, attach the following boilerplate
notice, with the fields enclosed by brackets `[]` replaced with your own
-identifying information. (Don&#39;t include the brackets!) The text should be
+identifying information. (Don't include the brackets!) The text should be
enclosed in the appropriate comment syntax for the file format. We also
recommend that a file or class name and description of purpose be included on
the same “printed page” as the copyright notice for easier identification within
@@ -1404,14 +1404,14 @@ third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -1436,7 +1436,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -1457,38 +1457,38 @@ LICENSE - github.com/go-git/go-billy/v5
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -1496,21 +1496,21 @@ LICENSE - github.com/go-git/go-billy/v5
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -1554,7 +1554,7 @@ LICENSE - github.com/go-git/go-billy/v5
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -1593,7 +1593,7 @@ LICENSE - github.com/go-git/go-billy/v5
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -1629,24 +1629,24 @@ LICENSE - github.com/go-git/go-billy/v5
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;{}&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2017 Sourced Technologies S.L.
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -1661,38 +1661,38 @@ LICENSE - github.com/go-git/go-git/v5
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -1700,21 +1700,21 @@ LICENSE - github.com/go-git/go-git/v5
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -1758,7 +1758,7 @@ LICENSE - github.com/go-git/go-git/v5
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -1797,7 +1797,7 @@ LICENSE - github.com/go-git/go-git/v5
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -1833,31 +1833,31 @@ LICENSE - github.com/go-git/go-git/v5
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;{}&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2018 Sourced Technologies, S.L.
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - github.com/godbus/dbus/v5
-Copyright (c) 2013, Georg Reinke (&lt;guelfey at gmail dot com&gt;), Google
+Copyright (c) 2013, Georg Reinke (<guelfey at gmail dot com>), Google
All rights reserved.
Redistribution and use in source and binary forms, with or without
@@ -1872,7 +1872,7 @@ notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -1889,7 +1889,7 @@ Copyright (c) 2013, The GoGo Authors. All rights reserved.
Protocol Buffers for Go with Gadgets
-Go support for Protocol Buffers - Google&#39;s data interchange format
+Go support for Protocol Buffers - Google's data interchange format
Copyright 2010 The Go Authors. All rights reserved.
https://github.com/golang/protobuf
@@ -1909,7 +1909,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -1931,55 +1931,55 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
-&#34;License&#34; shall mean the terms and conditions for use, reproduction, and
+"License" shall mean the terms and conditions for use, reproduction, and
distribution as defined by Sections 1 through 9 of this document.
-&#34;Licensor&#34; shall mean the copyright owner or entity authorized by the copyright
+"Licensor" shall mean the copyright owner or entity authorized by the copyright
owner that is granting the License.
-&#34;Legal Entity&#34; shall mean the union of the acting entity and all other entities
+"Legal Entity" shall mean the union of the acting entity and all other entities
that control, are controlled by, or are under common control with that entity.
-For the purposes of this definition, &#34;control&#34; means (i) the power, direct or
+For the purposes of this definition, "control" means (i) the power, direct or
indirect, to cause the direction or management of such entity, whether by
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
-&#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity exercising
+"You" (or "Your") shall mean an individual or Legal Entity exercising
permissions granted by this License.
-&#34;Source&#34; form shall mean the preferred form for making modifications, including
+"Source" form shall mean the preferred form for making modifications, including
but not limited to software source code, documentation source, and configuration
files.
-&#34;Object&#34; form shall mean any form resulting from mechanical transformation or
+"Object" form shall mean any form resulting from mechanical transformation or
translation of a Source form, including but not limited to compiled object code,
generated documentation, and conversions to other media types.
-&#34;Work&#34; shall mean the work of authorship, whether in Source or Object form, made
+"Work" shall mean the work of authorship, whether in Source or Object form, made
available under the License, as indicated by a copyright notice that is included
in or attached to the work (an example is provided in the Appendix below).
-&#34;Derivative Works&#34; shall mean any work, whether in Source or Object form, that
+"Derivative Works" shall mean any work, whether in Source or Object form, that
is based on (or derived from) the Work and for which the editorial revisions,
annotations, elaborations, or other modifications represent, as a whole, an
original work of authorship. For the purposes of this License, Derivative Works
shall not include works that remain separable from, or merely link (or bind by
name) to the interfaces of, the Work and Derivative Works thereof.
-&#34;Contribution&#34; shall mean any work of authorship, including the original version
+"Contribution" shall mean any work of authorship, including the original version
of the Work and any modifications or additions to that Work or Derivative Works
thereof, that is intentionally submitted to Licensor for inclusion in the Work
by the copyright owner or by an individual or Legal Entity authorized to submit
on behalf of the copyright owner. For the purposes of this definition,
-&#34;submitted&#34; means any form of electronic, verbal, or written communication sent
+"submitted" means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems, and
issue tracking systems that are managed by, or on behalf of, the Licensor for
the purpose of discussing and improving the Work, but excluding communication
that is conspicuously marked or otherwise designated in writing by the copyright
-owner as &#34;Not a Contribution.&#34;
+owner as "Not a Contribution."
-&#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity on behalf
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
of whom a Contribution has been received by Licensor and subsequently
incorporated within the Work.
@@ -2020,7 +2020,7 @@ You must retain, in the Source form of any Derivative Works that You distribute,
all copyright, patent, trademark, and attribution notices from the Source form
of the Work, excluding those notices that do not pertain to any part of the
Derivative Works; and
-If the Work includes a &#34;NOTICE&#34; text file as part of its distribution, then any
+If the Work includes a "NOTICE" text file as part of its distribution, then any
Derivative Works that You distribute must include a readable copy of the
attribution notices contained within such NOTICE file, excluding those notices
that do not pertain to any part of the Derivative Works, in at least one of the
@@ -2058,7 +2058,7 @@ reproducing the content of the NOTICE file.
7. Disclaimer of Warranty.
Unless required by applicable law or agreed to in writing, Licensor provides the
-Work (and each Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
including, without limitation, any warranties or conditions of TITLE,
NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
@@ -2094,23 +2094,23 @@ END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work
To apply the Apache License to your work, attach the following boilerplate
-notice, with the fields enclosed by brackets &#34;[]&#34; replaced with your own
-identifying information. (Don&#39;t include the brackets!) The text should be
+notice, with the fields enclosed by brackets "[]" replaced with your own
+identifying information. (Don't include the brackets!) The text should be
enclosed in the appropriate comment syntax for the file format. We also
recommend that a file or class name and description of purpose be included on
-the same &#34;printed page&#34; as the copyright notice for easier identification within
+the same "printed page" as the copyright notice for easier identification within
third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -2134,7 +2134,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -2165,7 +2165,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -2187,38 +2187,38 @@ LICENSE - github.com/google/pprof/profile
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -2226,21 +2226,21 @@ LICENSE - github.com/google/pprof/profile
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -2284,7 +2284,7 @@ LICENSE - github.com/google/pprof/profile
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -2323,7 +2323,7 @@ LICENSE - github.com/google/pprof/profile
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -2359,24 +2359,24 @@ LICENSE - github.com/google/pprof/profile
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -2400,7 +2400,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -2430,7 +2430,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -2451,38 +2451,38 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-middleware
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -2490,21 +2490,21 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-middleware
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -2548,7 +2548,7 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-middleware
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -2587,7 +2587,7 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-middleware
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -2623,24 +2623,24 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-middleware
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -2654,38 +2654,38 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-prometheus
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -2693,21 +2693,21 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-prometheus
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -2751,7 +2751,7 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-prometheus
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -2790,7 +2790,7 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-prometheus
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -2826,24 +2826,24 @@ LICENSE - github.com/grpc-ecosystem/go-grpc-prometheus
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -2868,28 +2868,28 @@ Mozilla Public License, version 2.0
1. Definitions
-1.1. &#34;Contributor&#34;
+1.1. "Contributor"
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
-1.2. &#34;Contributor Version&#34;
+1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor&#39;s Contribution.
+ Contributor and that particular Contributor's Contribution.
-1.3. &#34;Contribution&#34;
+1.3. "Contribution"
means Covered Software of a particular Contributor.
-1.4. &#34;Covered Software&#34;
+1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
-1.5. &#34;Incompatible With Secondary Licenses&#34;
+1.5. "Incompatible With Secondary Licenses"
means
a. that the initial Contributor has attached the notice described in
@@ -2899,26 +2899,26 @@ Mozilla Public License, version 2.0
version 1.1 or earlier of the License, but not also under the terms of
a Secondary License.
-1.6. &#34;Executable Form&#34;
+1.6. "Executable Form"
means any form of the work other than Source Code Form.
-1.7. &#34;Larger Work&#34;
+1.7. "Larger Work"
means a work that combines Covered Software with other material, in a
separate file or files, that is not Covered Software.
-1.8. &#34;License&#34;
+1.8. "License"
means this document.
-1.9. &#34;Licensable&#34;
+1.9. "Licensable"
means having the right to grant, to the maximum extent possible, whether
at the time of the initial grant or subsequently, any and all of the
rights conveyed by this License.
-1.10. &#34;Modifications&#34;
+1.10. "Modifications"
means any of the following:
@@ -2927,7 +2927,7 @@ Mozilla Public License, version 2.0
b. any new file in Source Code Form that contains any Covered Software.
-1.11. &#34;Patent Claims&#34; of a Contributor
+1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
@@ -2935,22 +2935,22 @@ Mozilla Public License, version 2.0
by the making, using, selling, offering for sale, having made, import,
or transfer of either its Contributions or its Contributor Version.
-1.12. &#34;Secondary License&#34;
+1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
-1.13. &#34;Source Code Form&#34;
+1.13. "Source Code Form"
means the form of the work preferred for making modifications.
-1.14. &#34;You&#34; (or &#34;Your&#34;)
+1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
- License. For legal entities, &#34;You&#34; includes any entity that controls, is
+ License. For legal entities, "You" includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
- definition, &#34;control&#34; means (a) the power, direct or indirect, to cause
+ definition, "control" means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
@@ -2989,7 +2989,7 @@ Mozilla Public License, version 2.0
a. for any code that a Contributor has removed from Covered Software; or
- b. for infringements caused by: (i) Your and any other third party&#39;s
+ b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
@@ -3035,7 +3035,7 @@ Mozilla Public License, version 2.0
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients&#39; rights in the Source Code
+ attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
@@ -3051,7 +3051,7 @@ Mozilla Public License, version 2.0
b. You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter the
- recipients&#39; rights in the Source Code Form under this License.
+ recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
@@ -3129,7 +3129,7 @@ Mozilla Public License, version 2.0
6. Disclaimer of Warranty
- Covered Software is provided under this License on an &#34;as is&#34; basis,
+ Covered Software is provided under this License on an "as is" basis,
without warranty of any kind, either expressed, implied, or statutory,
including, without limitation, warranties that the Covered Software is free
of defects, merchantable, fit for a particular purpose or non-infringing.
@@ -3151,7 +3151,7 @@ Mozilla Public License, version 2.0
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from
- such party&#39;s negligence to the extent applicable law prohibits such
+ such party's negligence to the extent applicable law prohibits such
limitation. Some jurisdictions do not allow the exclusion or limitation of
incidental or consequential damages, so this exclusion and limitation may
not apply to You.
@@ -3162,7 +3162,7 @@ Mozilla Public License, version 2.0
of a jurisdiction where the defendant maintains its principal place of
business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party&#39;s ability to bring cross-claims or
+ in this Section shall prevent a party's ability to bring cross-claims or
counter-claims.
9. Miscellaneous
@@ -3221,10 +3221,10 @@ notice.
You may add additional accurate notices of copyright ownership.
-Exhibit B - &#34;Incompatible With Secondary Licenses&#34; Notice
+Exhibit B - "Incompatible With Secondary Licenses" Notice
- This Source Code Form is &#34;Incompatible
- With Secondary Licenses&#34;, as defined by
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
the Mozilla Public License, v. 2.0.
@@ -3248,10 +3248,10 @@ uuid.go - github.com/hashicorp/go-uuid
package uuid
import (
- &#34;crypto/rand&#34;
- &#34;encoding/hex&#34;
- &#34;fmt&#34;
- &#34;io&#34;
+ "crypto/rand"
+ "encoding/hex"
+ "fmt"
+ "io"
)
// GenerateRandomBytes is used to generate random bytes of given size.
@@ -3262,11 +3262,11 @@ func GenerateRandomBytes(size int) ([]byte, error) {
// GenerateRandomBytesWithReader is used to generate random bytes of given size read from a given reader.
func GenerateRandomBytesWithReader(size int, reader io.Reader) ([]byte, error) {
if reader == nil {
- return nil, fmt.Errorf(&#34;provided reader is nil&#34;)
+ return nil, fmt.Errorf("provided reader is nil")
}
buf := make([]byte, size)
if _, err := io.ReadFull(reader, buf); err != nil {
- return nil, fmt.Errorf(&#34;failed to read random bytes: %v&#34;, err)
+ return nil, fmt.Errorf("failed to read random bytes: %v", err)
}
return buf, nil
}
@@ -3282,21 +3282,21 @@ func GenerateUUID() (string, error) {
// GenerateUUIDWithReader is used to generate a random UUID with a given Reader
func GenerateUUIDWithReader(reader io.Reader) (string, error) {
if reader == nil {
- return &#34;&#34;, fmt.Errorf(&#34;provided reader is nil&#34;)
+ return "", fmt.Errorf("provided reader is nil")
}
buf, err := GenerateRandomBytesWithReader(uuidLen, reader)
if err != nil {
- return &#34;&#34;, err
+ return "", err
}
return FormatUUID(buf)
}
func FormatUUID(buf []byte) (string, error) {
if buflen := len(buf); buflen != uuidLen {
- return &#34;&#34;, fmt.Errorf(&#34;wrong length byte slice (%d)&#34;, buflen)
+ return "", fmt.Errorf("wrong length byte slice (%d)", buflen)
}
- return fmt.Sprintf(&#34;%x-%x-%x-%x-%x&#34;,
+ return fmt.Sprintf("%x-%x-%x-%x-%x",
buf[0:4],
buf[4:6],
buf[6:8],
@@ -3305,25 +3305,25 @@ func FormatUUID(buf []byte) (string, error) {
}
func ParseUUID(uuid string) ([]byte, error) {
- if len(uuid) != 2 * uuidLen &#43; 4 {
- return nil, fmt.Errorf(&#34;uuid string is wrong length&#34;)
+ if len(uuid) != 2 * uuidLen + 4 {
+ return nil, fmt.Errorf("uuid string is wrong length")
}
- if uuid[8] != &#39;-&#39; ||
- uuid[13] != &#39;-&#39; ||
- uuid[18] != &#39;-&#39; ||
- uuid[23] != &#39;-&#39; {
- return nil, fmt.Errorf(&#34;uuid is improperly formatted&#34;)
+ if uuid[8] != '-' ||
+ uuid[13] != '-' ||
+ uuid[18] != '-' ||
+ uuid[23] != '-' {
+ return nil, fmt.Errorf("uuid is improperly formatted")
}
- hexStr := uuid[0:8] &#43; uuid[9:13] &#43; uuid[14:18] &#43; uuid[19:23] &#43; uuid[24:36]
+ hexStr := uuid[0:8] + uuid[9:13] + uuid[14:18] + uuid[19:23] + uuid[24:36]
ret, err := hex.DecodeString(hexStr)
if err != nil {
return nil, err
}
if len(ret) != uuidLen {
- return nil, fmt.Errorf(&#34;decoded hex is the wrong length&#34;)
+ return nil, fmt.Errorf("decoded hex is the wrong length")
}
return ret, nil
@@ -3334,11 +3334,11 @@ uuid_test.go - github.com/hashicorp/go-uuid
package uuid
import (
- &#34;crypto/rand&#34;
- &#34;io&#34;
- &#34;reflect&#34;
- &#34;regexp&#34;
- &#34;testing&#34;
+ "crypto/rand"
+ "io"
+ "reflect"
+ "regexp"
+ "testing"
)
func TestGenerateUUID(t *testing.T) {
@@ -3346,19 +3346,19 @@ func TestGenerateUUID(t *testing.T) {
if err != nil {
t.Fatal(err)
}
- for i := 0; i &lt; 100; i&#43;&#43; {
+ for i := 0; i < 100; i++ {
id, err := GenerateUUID()
if err != nil {
t.Fatal(err)
}
if prev == id {
- t.Fatalf(&#34;Should get a new ID!&#34;)
+ t.Fatalf("Should get a new ID!")
}
matched, err := regexp.MatchString(
- &#34;[\\da-f]{8}-[\\da-f]{4}-[\\da-f]{4}-[\\da-f]{4}-[\\da-f]{12}&#34;, id)
+ "[\\da-f]{8}-[\\da-f]{4}-[\\da-f]{4}-[\\da-f]{4}-[\\da-f]{12}", id)
if !matched || err != nil {
- t.Fatalf(&#34;expected match %s %v %s&#34;, id, matched, err)
+ t.Fatalf("expected match %s %v %s", id, matched, err)
}
}
}
@@ -3367,10 +3367,10 @@ func TestGenerateUUIDWithReader(t *testing.T) {
var nilReader io.Reader
str, err := GenerateUUIDWithReader(nilReader)
if err == nil {
- t.Fatalf(&#34;should get an error with a nilReader&#34;)
+ t.Fatalf("should get an error with a nilReader")
}
- if str != &#34;&#34; {
- t.Fatalf(&#34;should get an empty string&#34;)
+ if str != "" {
+ t.Fatalf("should get an empty string")
}
prev, err := GenerateUUIDWithReader(rand.Reader)
@@ -3383,20 +3383,20 @@ func TestGenerateUUIDWithReader(t *testing.T) {
t.Fatal(err)
}
if prev == id {
- t.Fatalf(&#34;Should get a new ID!&#34;)
+ t.Fatalf("Should get a new ID!")
}
matched, err := regexp.MatchString(
- &#34;[\\da-f]{8}-[\\da-f]{4}-[\\da-f]{4}-[\\da-f]{4}-[\\da-f]{12}&#34;, id)
+ "[\\da-f]{8}-[\\da-f]{4}-[\\da-f]{4}-[\\da-f]{4}-[\\da-f]{12}", id)
if !matched || err != nil {
- t.Fatalf(&#34;expected match %s %v %s&#34;, id, matched, err)
+ t.Fatalf("expected match %s %v %s", id, matched, err)
}
}
func TestParseUUID(t *testing.T) {
buf := make([]byte, 16)
if _, err := rand.Read(buf); err != nil {
- t.Fatalf(&#34;failed to read random bytes: %v&#34;, err)
+ t.Fatalf("failed to read random bytes: %v", err)
}
uuidStr, err := FormatUUID(buf)
@@ -3410,18 +3410,18 @@ func TestParseUUID(t *testing.T) {
}
if !reflect.DeepEqual(parsedStr, buf) {
- t.Fatalf(&#34;mismatched buffers&#34;)
+ t.Fatalf("mismatched buffers")
}
}
func BenchmarkGenerateUUID(b *testing.B) {
- for n := 0; n &lt; b.N; n&#43;&#43; {
+ for n := 0; n < b.N; n++ {
_, _ = GenerateUUID()
}
}
func BenchmarkGenerateUUIDWithReader(b *testing.B) {
- for n := 0; n &lt; b.N; n&#43;&#43; {
+ for n := 0; n < b.N; n++ {
_, _ = GenerateUUIDWithReader(rand.Reader)
}
}
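
For readers skimming the vendored go-uuid excerpt above, here is a minimal usage sketch of the exported helpers it defines (GenerateUUID, FormatUUID, ParseUUID). The surrounding main program and the printed output are illustrative only and are not part of the NOTICE diff:

```go
package main

import (
	"fmt"

	uuid "github.com/hashicorp/go-uuid"
)

func main() {
	// GenerateUUID returns a random, dash-formatted UUID string.
	id, err := uuid.GenerateUUID()
	if err != nil {
		panic(err)
	}

	// ParseUUID reverses FormatUUID: it checks the dash positions and decodes
	// the hex back into the raw 16-byte slice.
	raw, err := uuid.ParseUUID(id)
	if err != nil {
		panic(err)
	}

	fmt.Println(id, len(raw)) // prints the UUID and 16
}
```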
@@ -3457,10 +3457,10 @@ _testmain.go
package lru
import (
- &#34;fmt&#34;
- &#34;sync&#34;
+ "fmt"
+ "sync"
- &#34;github.com/hashicorp/golang-lru/simplelru&#34;
+ "github.com/hashicorp/golang-lru/simplelru"
)
const (
@@ -3501,14 +3501,14 @@ func New2Q(size int) (*TwoQueueCache, error) {
// New2QParams creates a new TwoQueueCache using the provided
// parameter values.
func New2QParams(size int, recentRatio float64, ghostRatio float64) (*TwoQueueCache, error) {
- if size &lt;= 0 {
- return nil, fmt.Errorf(&#34;invalid size&#34;)
+ if size <= 0 {
+ return nil, fmt.Errorf("invalid size")
}
- if recentRatio &lt; 0.0 || recentRatio &gt; 1.0 {
- return nil, fmt.Errorf(&#34;invalid recent ratio&#34;)
+ if recentRatio < 0.0 || recentRatio > 1.0 {
+ return nil, fmt.Errorf("invalid recent ratio")
}
- if ghostRatio &lt; 0.0 || ghostRatio &gt; 1.0 {
- return nil, fmt.Errorf(&#34;invalid ghost ratio&#34;)
+ if ghostRatio < 0.0 || ghostRatio > 1.0 {
+ return nil, fmt.Errorf("invalid ghost ratio")
}
// Determine the sub-sizes
@@ -3530,7 +3530,7 @@ func New2QParams(size int, recentRatio float64, ghostRatio float64) (*TwoQueueCa
}
// Initialize the cache
- c := &amp;TwoQueueCache{
+ c := &TwoQueueCache{
size: size,
recentSize: recentSize,
recent: recent,
@@ -3540,7 +3540,7 @@ func New2QParams(size int, recentRatio float64, ghostRatio float64) (*TwoQueueCa
return c, nil
}
-// Get looks up a key&#39;s value from the cache.
+// Get looks up a key's value from the cache.
func (c *TwoQueueCache) Get(key interface{}) (value interface{}, ok bool) {
c.lock.Lock()
defer c.lock.Unlock()
@@ -3602,13 +3602,13 @@ func (c *TwoQueueCache) ensureSpace(recentEvict bool) {
// If we have space, nothing to do
recentLen := c.recent.Len()
freqLen := c.frequent.Len()
- if recentLen&#43;freqLen &lt; c.size {
+ if recentLen+freqLen < c.size {
return
}
// If the recent buffer is larger than
// the target, evict from there
- if recentLen &gt; 0 &amp;&amp; (recentLen &gt; c.recentSize || (recentLen == c.recentSize &amp;&amp; !recentEvict)) {
+ if recentLen > 0 && (recentLen > c.recentSize || (recentLen == c.recentSize && !recentEvict)) {
k, _, _ := c.recent.RemoveOldest()
c.recentEvict.Add(k, nil)
return
@@ -3622,7 +3622,7 @@ func (c *TwoQueueCache) ensureSpace(recentEvict bool) {
func (c *TwoQueueCache) Len() int {
c.lock.RLock()
defer c.lock.RUnlock()
- return c.recent.Len() &#43; c.frequent.Len()
+ return c.recent.Len() + c.frequent.Len()
}
// Keys returns a slice of the keys in the cache.
@@ -3683,47 +3683,47 @@ func (c *TwoQueueCache) Peek(key interface{}) (value interface{}, ok bool) {
package lru
import (
- &#34;math/rand&#34;
- &#34;testing&#34;
+ "math/rand"
+ "testing"
)
func Benchmark2Q_Rand(b *testing.B) {
l, err := New2Q(8192)
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
trace := make([]int64, b.N*2)
- for i := 0; i &lt; b.N*2; i&#43;&#43; {
+ for i := 0; i < b.N*2; i++ {
trace[i] = rand.Int63() % 32768
}
b.ResetTimer()
var hit, miss int
- for i := 0; i &lt; 2*b.N; i&#43;&#43; {
+ for i := 0; i < 2*b.N; i++ {
if i%2 == 0 {
l.Add(trace[i], trace[i])
} else {
_, ok := l.Get(trace[i])
if ok {
- hit&#43;&#43;
+ hit++
} else {
- miss&#43;&#43;
+ miss++
}
}
}
- b.Logf(&#34;hit: %d miss: %d ratio: %f&#34;, hit, miss, float64(hit)/float64(miss))
+ b.Logf("hit: %d miss: %d ratio: %f", hit, miss, float64(hit)/float64(miss))
}
func Benchmark2Q_Freq(b *testing.B) {
l, err := New2Q(8192)
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
trace := make([]int64, b.N*2)
- for i := 0; i &lt; b.N*2; i&#43;&#43; {
+ for i := 0; i < b.N*2; i++ {
if i%2 == 0 {
trace[i] = rand.Int63() % 16384
} else {
@@ -3733,30 +3733,30 @@ func Benchmark2Q_Freq(b *testing.B) {
b.ResetTimer()
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
l.Add(trace[i], trace[i])
}
var hit, miss int
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
_, ok := l.Get(trace[i])
if ok {
- hit&#43;&#43;
+ hit++
} else {
- miss&#43;&#43;
+ miss++
}
}
- b.Logf(&#34;hit: %d miss: %d ratio: %f&#34;, hit, miss, float64(hit)/float64(miss))
+ b.Logf("hit: %d miss: %d ratio: %f", hit, miss, float64(hit)/float64(miss))
}
func Test2Q_RandomOps(t *testing.T) {
size := 128
l, err := New2Q(128)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
n := 200000
- for i := 0; i &lt; n; i&#43;&#43; {
+ for i := 0; i < n; i++ {
key := rand.Int63() % 512
r := rand.Int63()
switch r % 3 {
@@ -3768,8 +3768,8 @@ func Test2Q_RandomOps(t *testing.T) {
l.Remove(key)
}
- if l.recent.Len()&#43;l.frequent.Len() &gt; size {
- t.Fatalf(&#34;bad: recent: %d freq: %d&#34;,
+ if l.recent.Len()+l.frequent.Len() > size {
+ t.Fatalf("bad: recent: %d freq: %d",
l.recent.Len(), l.frequent.Len())
}
}
@@ -3778,212 +3778,212 @@ func Test2Q_RandomOps(t *testing.T) {
func Test2Q_Get_RecentToFrequent(t *testing.T) {
l, err := New2Q(128)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
// Touch all the entries, should be in t1
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
l.Add(i, i)
}
if n := l.recent.Len(); n != 128 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Get should upgrade to t2
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
_, ok := l.Get(i)
if !ok {
- t.Fatalf(&#34;missing: %d&#34;, i)
+ t.Fatalf("missing: %d", i)
}
}
if n := l.recent.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 128 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Get be from t2
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
_, ok := l.Get(i)
if !ok {
- t.Fatalf(&#34;missing: %d&#34;, i)
+ t.Fatalf("missing: %d", i)
}
}
if n := l.recent.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 128 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
}
func Test2Q_Add_RecentToFrequent(t *testing.T) {
l, err := New2Q(128)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
// Add initially to recent
l.Add(1, 1)
if n := l.recent.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Add should upgrade to frequent
l.Add(1, 1)
if n := l.recent.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Add should remain in frequent
l.Add(1, 1)
if n := l.recent.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
}
func Test2Q_Add_RecentEvict(t *testing.T) {
l, err := New2Q(4)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- // Add 1,2,3,4,5 -&gt; Evict 1
+ // Add 1,2,3,4,5 -> Evict 1
l.Add(1, 1)
l.Add(2, 2)
l.Add(3, 3)
l.Add(4, 4)
l.Add(5, 5)
if n := l.recent.Len(); n != 4 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.recentEvict.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Pull in the recently evicted
l.Add(1, 1)
if n := l.recent.Len(); n != 3 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.recentEvict.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Add 6, should cause another recent evict
l.Add(6, 6)
if n := l.recent.Len(); n != 3 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.recentEvict.Len(); n != 2 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.frequent.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
}
func Test2Q(t *testing.T) {
l, err := New2Q(128)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- for i := 0; i &lt; 256; i&#43;&#43; {
+ for i := 0; i < 256; i++ {
l.Add(i, i)
}
if l.Len() != 128 {
- t.Fatalf(&#34;bad len: %v&#34;, l.Len())
+ t.Fatalf("bad len: %v", l.Len())
}
for i, k := range l.Keys() {
- if v, ok := l.Get(k); !ok || v != k || v != i&#43;128 {
- t.Fatalf(&#34;bad key: %v&#34;, k)
+ if v, ok := l.Get(k); !ok || v != k || v != i+128 {
+ t.Fatalf("bad key: %v", k)
}
}
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
_, ok := l.Get(i)
if ok {
- t.Fatalf(&#34;should be evicted&#34;)
+ t.Fatalf("should be evicted")
}
}
- for i := 128; i &lt; 256; i&#43;&#43; {
+ for i := 128; i < 256; i++ {
_, ok := l.Get(i)
if !ok {
- t.Fatalf(&#34;should not be evicted&#34;)
+ t.Fatalf("should not be evicted")
}
}
- for i := 128; i &lt; 192; i&#43;&#43; {
+ for i := 128; i < 192; i++ {
l.Remove(i)
_, ok := l.Get(i)
if ok {
- t.Fatalf(&#34;should be deleted&#34;)
+ t.Fatalf("should be deleted")
}
}
l.Purge()
if l.Len() != 0 {
- t.Fatalf(&#34;bad len: %v&#34;, l.Len())
+ t.Fatalf("bad len: %v", l.Len())
}
if _, ok := l.Get(200); ok {
- t.Fatalf(&#34;should contain nothing&#34;)
+ t.Fatalf("should contain nothing")
}
}
-// Test that Contains doesn&#39;t update recent-ness
+// Test that Contains doesn't update recent-ness
func Test2Q_Contains(t *testing.T) {
l, err := New2Q(2)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
if !l.Contains(1) {
- t.Errorf(&#34;1 should be contained&#34;)
+ t.Errorf("1 should be contained")
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;Contains should not have updated recent-ness of 1&#34;)
+ t.Errorf("Contains should not have updated recent-ness of 1")
}
}
-// Test that Peek doesn&#39;t update recent-ness
+// Test that Peek doesn't update recent-ness
func Test2Q_Peek(t *testing.T) {
l, err := New2Q(2)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
if v, ok := l.Peek(1); !ok || v != 1 {
- t.Errorf(&#34;1 should be set to 1: %v, %v&#34;, v, ok)
+ t.Errorf("1 should be set to 1: %v, %v", v, ok)
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;should not have updated recent-ness of 1&#34;)
+ t.Errorf("should not have updated recent-ness of 1")
}
}
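
As a quick orientation to the TwoQueueCache code diffed above, here is a small hedged sketch of how the 2Q cache behaves from the outside. The cache size and loop bounds are arbitrary illustration; only the New2Q/Add/Get/Len calls come from the vendored code:

```go
package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// New2Q builds a thread-safe 2Q cache with the default recent/ghost ratios.
	cache, err := lru.New2Q(128)
	if err != nil {
		panic(err)
	}

	// Overfill the cache; older entries spill out of the "recent" queue.
	for i := 0; i < 256; i++ {
		cache.Add(i, i)
	}

	// Get promotes surviving entries from the recent queue to the frequent
	// queue, which is what the Test2Q_* tests above assert.
	_, ok := cache.Get(200)
	fmt.Println("still cached:", ok, "len:", cache.Len())
}
```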
@@ -3993,28 +3993,28 @@ Mozilla Public License, version 2.0
1. Definitions
-1.1. &#34;Contributor&#34;
+1.1. "Contributor"
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
-1.2. &#34;Contributor Version&#34;
+1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor&#39;s Contribution.
+ Contributor and that particular Contributor's Contribution.
-1.3. &#34;Contribution&#34;
+1.3. "Contribution"
means Covered Software of a particular Contributor.
-1.4. &#34;Covered Software&#34;
+1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
-1.5. &#34;Incompatible With Secondary Licenses&#34;
+1.5. "Incompatible With Secondary Licenses"
means
a. that the initial Contributor has attached the notice described in
@@ -4024,26 +4024,26 @@ Mozilla Public License, version 2.0
version 1.1 or earlier of the License, but not also under the terms of
a Secondary License.
-1.6. &#34;Executable Form&#34;
+1.6. "Executable Form"
means any form of the work other than Source Code Form.
-1.7. &#34;Larger Work&#34;
+1.7. "Larger Work"
means a work that combines Covered Software with other material, in a
separate file or files, that is not Covered Software.
-1.8. &#34;License&#34;
+1.8. "License"
means this document.
-1.9. &#34;Licensable&#34;
+1.9. "Licensable"
means having the right to grant, to the maximum extent possible, whether
at the time of the initial grant or subsequently, any and all of the
rights conveyed by this License.
-1.10. &#34;Modifications&#34;
+1.10. "Modifications"
means any of the following:
@@ -4052,7 +4052,7 @@ Mozilla Public License, version 2.0
b. any new file in Source Code Form that contains any Covered Software.
-1.11. &#34;Patent Claims&#34; of a Contributor
+1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
@@ -4060,22 +4060,22 @@ Mozilla Public License, version 2.0
by the making, using, selling, offering for sale, having made, import,
or transfer of either its Contributions or its Contributor Version.
-1.12. &#34;Secondary License&#34;
+1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
-1.13. &#34;Source Code Form&#34;
+1.13. "Source Code Form"
means the form of the work preferred for making modifications.
-1.14. &#34;You&#34; (or &#34;Your&#34;)
+1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
- License. For legal entities, &#34;You&#34; includes any entity that controls, is
+ License. For legal entities, "You" includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
- definition, &#34;control&#34; means (a) the power, direct or indirect, to cause
+ definition, "control" means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
@@ -4114,7 +4114,7 @@ Mozilla Public License, version 2.0
a. for any code that a Contributor has removed from Covered Software; or
- b. for infringements caused by: (i) Your and any other third party&#39;s
+ b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
@@ -4160,7 +4160,7 @@ Mozilla Public License, version 2.0
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients&#39; rights in the Source Code
+ attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
@@ -4176,7 +4176,7 @@ Mozilla Public License, version 2.0
b. You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter the
- recipients&#39; rights in the Source Code Form under this License.
+ recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
@@ -4254,7 +4254,7 @@ Mozilla Public License, version 2.0
6. Disclaimer of Warranty
- Covered Software is provided under this License on an &#34;as is&#34; basis,
+ Covered Software is provided under this License on an "as is" basis,
without warranty of any kind, either expressed, implied, or statutory,
including, without limitation, warranties that the Covered Software is free
of defects, merchantable, fit for a particular purpose or non-infringing.
@@ -4276,7 +4276,7 @@ Mozilla Public License, version 2.0
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from
- such party&#39;s negligence to the extent applicable law prohibits such
+ such party's negligence to the extent applicable law prohibits such
limitation. Some jurisdictions do not allow the exclusion or limitation of
incidental or consequential damages, so this exclusion and limitation may
not apply to You.
@@ -4287,7 +4287,7 @@ Mozilla Public License, version 2.0
of a jurisdiction where the defendant maintains its principal place of
business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party&#39;s ability to bring cross-claims or
+ in this Section shall prevent a party's ability to bring cross-claims or
counter-claims.
9. Miscellaneous
@@ -4346,10 +4346,10 @@ notice.
You may add additional accurate notices of copyright ownership.
-Exhibit B - &#34;Incompatible With Secondary Licenses&#34; Notice
+Exhibit B - "Incompatible With Secondary Licenses" Notice
- This Source Code Form is &#34;Incompatible
- With Secondary Licenses&#34;, as defined by
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
the Mozilla Public License, v. 2.0.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -4372,11 +4372,11 @@ Using the LRU is very simple:
```go
l, _ := New(128)
-for i := 0; i &lt; 256; i&#43;&#43; {
+for i := 0; i < 256; i++ {
l.Add(i, nil)
}
if l.Len() != 128 {
- panic(fmt.Sprintf(&#34;bad len: %v&#34;, l.Len()))
+ panic(fmt.Sprintf("bad len: %v", l.Len()))
}
```
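
The README snippet above covers the basic Add/Len flow. As a follow-up sketch (illustrative, not part of the vendored README), the same cache can be built with NewWithEvict to observe evictions, and Peek reads a value without refreshing its recent-ness:

```go
package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// NewWithEvict registers a callback that runs whenever an entry is evicted.
	cache, err := lru.NewWithEvict(2, func(key, value interface{}) {
		fmt.Printf("evicted %v=%v\n", key, value)
	})
	if err != nil {
		panic(err)
	}

	cache.Add("a", 1)
	cache.Add("b", 2)
	cache.Add("c", 3) // size is 2, so "a" is evicted and the callback fires

	// Peek, unlike Get, does not update the "recently used"-ness of the key.
	if v, ok := cache.Peek("b"); ok {
		fmt.Println("b =", v)
	}
}
```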
@@ -4385,9 +4385,9 @@ arc.go - github.com/hashicorp/golang-lru
package lru
import (
- &#34;sync&#34;
+ "sync"
- &#34;github.com/hashicorp/golang-lru/simplelru&#34;
+ "github.com/hashicorp/golang-lru/simplelru"
)
// ARCCache is a thread-safe fixed size Adaptive Replacement Cache (ARC).
@@ -4432,7 +4432,7 @@ func NewARC(size int) (*ARCCache, error) {
}
// Initialize the ARC
- c := &amp;ARCCache{
+ c := &ARCCache{
size: size,
p: 0,
t1: t1,
@@ -4443,7 +4443,7 @@ func NewARC(size int) (*ARCCache, error) {
return c, nil
}
-// Get looks up a key&#39;s value from the cache.
+// Get looks up a key's value from the cache.
func (c *ARCCache) Get(key interface{}) (value interface{}, ok bool) {
c.lock.Lock()
defer c.lock.Unlock()
@@ -4491,17 +4491,17 @@ func (c *ARCCache) Add(key, value interface{}) {
delta := 1
b1Len := c.b1.Len()
b2Len := c.b2.Len()
- if b2Len &gt; b1Len {
+ if b2Len > b1Len {
delta = b2Len / b1Len
}
- if c.p&#43;delta &gt;= c.size {
+ if c.p+delta >= c.size {
c.p = c.size
} else {
- c.p &#43;= delta
+ c.p += delta
}
// Potentially need to make room in the cache
- if c.t1.Len()&#43;c.t2.Len() &gt;= c.size {
+ if c.t1.Len()+c.t2.Len() >= c.size {
c.replace(false)
}
@@ -4520,17 +4520,17 @@ func (c *ARCCache) Add(key, value interface{}) {
delta := 1
b1Len := c.b1.Len()
b2Len := c.b2.Len()
- if b1Len &gt; b2Len {
+ if b1Len > b2Len {
delta = b1Len / b2Len
}
- if delta &gt;= c.p {
+ if delta >= c.p {
c.p = 0
} else {
c.p -= delta
}
// Potentially need to make room in the cache
- if c.t1.Len()&#43;c.t2.Len() &gt;= c.size {
+ if c.t1.Len()+c.t2.Len() >= c.size {
c.replace(true)
}
@@ -4543,15 +4543,15 @@ func (c *ARCCache) Add(key, value interface{}) {
}
// Potentially need to make room in the cache
- if c.t1.Len()&#43;c.t2.Len() &gt;= c.size {
+ if c.t1.Len()+c.t2.Len() >= c.size {
c.replace(false)
}
// Keep the size of the ghost buffers trim
- if c.b1.Len() &gt; c.size-c.p {
+ if c.b1.Len() > c.size-c.p {
c.b1.RemoveOldest()
}
- if c.b2.Len() &gt; c.p {
+ if c.b2.Len() > c.p {
c.b2.RemoveOldest()
}
@@ -4564,7 +4564,7 @@ func (c *ARCCache) Add(key, value interface{}) {
// based on the current learned value of P
func (c *ARCCache) replace(b2ContainsKey bool) {
t1Len := c.t1.Len()
- if t1Len &gt; 0 &amp;&amp; (t1Len &gt; c.p || (t1Len == c.p &amp;&amp; b2ContainsKey)) {
+ if t1Len > 0 && (t1Len > c.p || (t1Len == c.p && b2ContainsKey)) {
k, _, ok := c.t1.RemoveOldest()
if ok {
c.b1.Add(k, nil)
@@ -4581,7 +4581,7 @@ func (c *ARCCache) replace(b2ContainsKey bool) {
func (c *ARCCache) Len() int {
c.lock.RLock()
defer c.lock.RUnlock()
- return c.t1.Len() &#43; c.t2.Len()
+ return c.t1.Len() + c.t2.Len()
}
// Keys returns all the cached keys
@@ -4645,9 +4645,9 @@ arc_test.go - github.com/hashicorp/golang-lru
package lru
import (
- &#34;math/rand&#34;
- &#34;testing&#34;
- &#34;time&#34;
+ "math/rand"
+ "testing"
+ "time"
)
func init() {
@@ -4657,40 +4657,40 @@ func init() {
func BenchmarkARC_Rand(b *testing.B) {
l, err := NewARC(8192)
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
trace := make([]int64, b.N*2)
- for i := 0; i &lt; b.N*2; i&#43;&#43; {
+ for i := 0; i < b.N*2; i++ {
trace[i] = rand.Int63() % 32768
}
b.ResetTimer()
var hit, miss int
- for i := 0; i &lt; 2*b.N; i&#43;&#43; {
+ for i := 0; i < 2*b.N; i++ {
if i%2 == 0 {
l.Add(trace[i], trace[i])
} else {
_, ok := l.Get(trace[i])
if ok {
- hit&#43;&#43;
+ hit++
} else {
- miss&#43;&#43;
+ miss++
}
}
}
- b.Logf(&#34;hit: %d miss: %d ratio: %f&#34;, hit, miss, float64(hit)/float64(miss))
+ b.Logf("hit: %d miss: %d ratio: %f", hit, miss, float64(hit)/float64(miss))
}
func BenchmarkARC_Freq(b *testing.B) {
l, err := NewARC(8192)
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
trace := make([]int64, b.N*2)
- for i := 0; i &lt; b.N*2; i&#43;&#43; {
+ for i := 0; i < b.N*2; i++ {
if i%2 == 0 {
trace[i] = rand.Int63() % 16384
} else {
@@ -4700,30 +4700,30 @@ func BenchmarkARC_Freq(b *testing.B) {
b.ResetTimer()
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
l.Add(trace[i], trace[i])
}
var hit, miss int
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
_, ok := l.Get(trace[i])
if ok {
- hit&#43;&#43;
+ hit++
} else {
- miss&#43;&#43;
+ miss++
}
}
- b.Logf(&#34;hit: %d miss: %d ratio: %f&#34;, hit, miss, float64(hit)/float64(miss))
+ b.Logf("hit: %d miss: %d ratio: %f", hit, miss, float64(hit)/float64(miss))
}
func TestARC_RandomOps(t *testing.T) {
size := 128
l, err := NewARC(128)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
n := 200000
- for i := 0; i &lt; n; i&#43;&#43; {
+ for i := 0; i < n; i++ {
key := rand.Int63() % 512
r := rand.Int63()
switch r % 3 {
@@ -4735,12 +4735,12 @@ func TestARC_RandomOps(t *testing.T) {
l.Remove(key)
}
- if l.t1.Len()&#43;l.t2.Len() &gt; size {
- t.Fatalf(&#34;bad: t1: %d t2: %d b1: %d b2: %d p: %d&#34;,
+ if l.t1.Len()+l.t2.Len() > size {
+ t.Fatalf("bad: t1: %d t2: %d b1: %d b2: %d p: %d",
l.t1.Len(), l.t2.Len(), l.b1.Len(), l.b2.Len(), l.p)
}
- if l.b1.Len()&#43;l.b2.Len() &gt; size {
- t.Fatalf(&#34;bad: t1: %d t2: %d b1: %d b2: %d p: %d&#34;,
+ if l.b1.Len()+l.b2.Len() > size {
+ t.Fatalf("bad: t1: %d t2: %d b1: %d b2: %d p: %d",
l.t1.Len(), l.t2.Len(), l.b1.Len(), l.b2.Len(), l.p)
}
}
@@ -4749,108 +4749,108 @@ func TestARC_RandomOps(t *testing.T) {
func TestARC_Get_RecentToFrequent(t *testing.T) {
l, err := NewARC(128)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
// Touch all the entries, should be in t1
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
l.Add(i, i)
}
if n := l.t1.Len(); n != 128 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Get should upgrade to t2
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
_, ok := l.Get(i)
if !ok {
- t.Fatalf(&#34;missing: %d&#34;, i)
+ t.Fatalf("missing: %d", i)
}
}
if n := l.t1.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 128 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Get be from t2
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
_, ok := l.Get(i)
if !ok {
- t.Fatalf(&#34;missing: %d&#34;, i)
+ t.Fatalf("missing: %d", i)
}
}
if n := l.t1.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 128 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
}
func TestARC_Add_RecentToFrequent(t *testing.T) {
l, err := NewARC(128)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
// Add initially to t1
l.Add(1, 1)
if n := l.t1.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Add should upgrade to t2
l.Add(1, 1)
if n := l.t1.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Add should remain in t2
l.Add(1, 1)
if n := l.t1.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
}
func TestARC_Adaptive(t *testing.T) {
l, err := NewARC(4)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
// Fill t1
- for i := 0; i &lt; 4; i&#43;&#43; {
+ for i := 0; i < 4; i++ {
l.Add(i, i)
}
if n := l.t1.Len(); n != 4 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Move to t2
l.Get(0)
l.Get(1)
if n := l.t2.Len(); n != 2 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Evict from t1
l.Add(4, 4)
if n := l.b1.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Current state
@@ -4862,13 +4862,13 @@ func TestARC_Adaptive(t *testing.T) {
// Add 2, should cause hit on b1
l.Add(2, 2)
if n := l.b1.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if l.p != 1 {
- t.Fatalf(&#34;bad: %d&#34;, l.p)
+ t.Fatalf("bad: %d", l.p)
}
if n := l.t2.Len(); n != 3 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Current state
@@ -4880,10 +4880,10 @@ func TestARC_Adaptive(t *testing.T) {
// Add 4, should migrate to t2
l.Add(4, 4)
if n := l.t1.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 4 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Current state
@@ -4895,13 +4895,13 @@ func TestARC_Adaptive(t *testing.T) {
// Add 4, should evict to b2
l.Add(5, 5)
if n := l.t1.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 3 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.b2.Len(); n != 1 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
// Current state
@@ -4913,19 +4913,19 @@ func TestARC_Adaptive(t *testing.T) {
// Add 0, should decrease p
l.Add(0, 0)
if n := l.t1.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.t2.Len(); n != 4 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.b1.Len(); n != 2 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if n := l.b2.Len(); n != 0 {
- t.Fatalf(&#34;bad: %d&#34;, n)
+ t.Fatalf("bad: %d", n)
}
if l.p != 0 {
- t.Fatalf(&#34;bad: %d&#34;, l.p)
+ t.Fatalf("bad: %d", l.p)
}
// Current state
@@ -4938,85 +4938,85 @@ func TestARC_Adaptive(t *testing.T) {
func TestARC(t *testing.T) {
l, err := NewARC(128)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- for i := 0; i &lt; 256; i&#43;&#43; {
+ for i := 0; i < 256; i++ {
l.Add(i, i)
}
if l.Len() != 128 {
- t.Fatalf(&#34;bad len: %v&#34;, l.Len())
+ t.Fatalf("bad len: %v", l.Len())
}
for i, k := range l.Keys() {
- if v, ok := l.Get(k); !ok || v != k || v != i&#43;128 {
- t.Fatalf(&#34;bad key: %v&#34;, k)
+ if v, ok := l.Get(k); !ok || v != k || v != i+128 {
+ t.Fatalf("bad key: %v", k)
}
}
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
_, ok := l.Get(i)
if ok {
- t.Fatalf(&#34;should be evicted&#34;)
+ t.Fatalf("should be evicted")
}
}
- for i := 128; i &lt; 256; i&#43;&#43; {
+ for i := 128; i < 256; i++ {
_, ok := l.Get(i)
if !ok {
- t.Fatalf(&#34;should not be evicted&#34;)
+ t.Fatalf("should not be evicted")
}
}
- for i := 128; i &lt; 192; i&#43;&#43; {
+ for i := 128; i < 192; i++ {
l.Remove(i)
_, ok := l.Get(i)
if ok {
- t.Fatalf(&#34;should be deleted&#34;)
+ t.Fatalf("should be deleted")
}
}
l.Purge()
if l.Len() != 0 {
- t.Fatalf(&#34;bad len: %v&#34;, l.Len())
+ t.Fatalf("bad len: %v", l.Len())
}
if _, ok := l.Get(200); ok {
- t.Fatalf(&#34;should contain nothing&#34;)
+ t.Fatalf("should contain nothing")
}
}
-// Test that Contains doesn&#39;t update recent-ness
+// Test that Contains doesn't update recent-ness
func TestARC_Contains(t *testing.T) {
l, err := NewARC(2)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
if !l.Contains(1) {
- t.Errorf(&#34;1 should be contained&#34;)
+ t.Errorf("1 should be contained")
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;Contains should not have updated recent-ness of 1&#34;)
+ t.Errorf("Contains should not have updated recent-ness of 1")
}
}
-// Test that Peek doesn&#39;t update recent-ness
+// Test that Peek doesn't update recent-ness
func TestARC_Peek(t *testing.T) {
l, err := NewARC(2)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
if v, ok := l.Peek(1); !ok || v != 1 {
- t.Errorf(&#34;1 should be set to 1: %v, %v&#34;, v, ok)
+ t.Errorf("1 should be set to 1: %v, %v", v, ok)
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;should not have updated recent-ness of 1&#34;)
+ t.Errorf("should not have updated recent-ness of 1")
}
}
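
For reference, the ARC cache exercised by these tests is consumed through the top-level golang-lru package; a minimal usage sketch, assuming the module imports as shown (the size and keys below are illustrative only):

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// NewARC builds an adaptively sized cache that balances recency (t1)
	// and frequency (t2), tracked via the ghost lists b1/b2 seen in the tests.
	cache, err := lru.NewARC(128)
	if err != nil {
		panic(err)
	}
	for i := 0; i < 256; i++ {
		cache.Add(i, i)
	}
	// Only the most recently added half survives eviction.
	fmt.Println(cache.Len(), cache.Contains(255), cache.Contains(0))
}
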
@@ -5055,9 +5055,9 @@ lru.go - github.com/hashicorp/golang-lru
package lru
import (
- &#34;sync&#34;
+ "sync"
- &#34;github.com/hashicorp/golang-lru/simplelru&#34;
+ "github.com/hashicorp/golang-lru/simplelru"
)
// Cache is a thread-safe fixed size LRU cache.
@@ -5078,7 +5078,7 @@ func NewWithEvict(size int, onEvicted func(key interface{}, value interface{}))
if err != nil {
return nil, err
}
- c := &amp;Cache{
+ c := &Cache{
lru: lru,
}
return c, nil
@@ -5099,7 +5099,7 @@ func (c *Cache) Add(key, value interface{}) (evicted bool) {
return evicted
}
-// Get looks up a key&#39;s value from the cache.
+// Get looks up a key's value from the cache.
func (c *Cache) Get(key interface{}) (value interface{}, ok bool) {
c.lock.Lock()
value, ok = c.lru.Get(key)
@@ -5117,7 +5117,7 @@ func (c *Cache) Contains(key interface{}) bool {
}
// Peek returns the key value (or undefined if not found) without updating
-// the &#34;recently used&#34;-ness of the key.
+// the "recently used"-ness of the key.
func (c *Cache) Peek(key interface{}) (value interface{}, ok bool) {
c.lock.RLock()
value, ok = c.lru.Peek(key)
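
The thread-safe Cache above is a mutex-guarded wrapper around simplelru; a short usage sketch assuming the standard golang-lru constructors shown in the listing (keys and values are illustrative):

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// NewWithEvict registers a callback that fires whenever an entry is evicted.
	cache, err := lru.NewWithEvict(2, func(key, value interface{}) {
		fmt.Println("evicted:", key)
	})
	if err != nil {
		panic(err)
	}
	cache.Add("a", 1)
	cache.Add("b", 2)
	cache.Get("a")    // marks "a" as recently used
	cache.Add("c", 3) // evicts "b", the least recently used entry
	if v, ok := cache.Peek("a"); ok {
		fmt.Println("a =", v) // Peek does not update recency
	}
}
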
@@ -5208,47 +5208,47 @@ lru_test.go - github.com/hashicorp/golang-lru
package lru
import (
- &#34;math/rand&#34;
- &#34;testing&#34;
+ "math/rand"
+ "testing"
)
func BenchmarkLRU_Rand(b *testing.B) {
l, err := New(8192)
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
trace := make([]int64, b.N*2)
- for i := 0; i &lt; b.N*2; i&#43;&#43; {
+ for i := 0; i < b.N*2; i++ {
trace[i] = rand.Int63() % 32768
}
b.ResetTimer()
var hit, miss int
- for i := 0; i &lt; 2*b.N; i&#43;&#43; {
+ for i := 0; i < 2*b.N; i++ {
if i%2 == 0 {
l.Add(trace[i], trace[i])
} else {
_, ok := l.Get(trace[i])
if ok {
- hit&#43;&#43;
+ hit++
} else {
- miss&#43;&#43;
+ miss++
}
}
}
- b.Logf(&#34;hit: %d miss: %d ratio: %f&#34;, hit, miss, float64(hit)/float64(miss))
+ b.Logf("hit: %d miss: %d ratio: %f", hit, miss, float64(hit)/float64(miss))
}
func BenchmarkLRU_Freq(b *testing.B) {
l, err := New(8192)
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
trace := make([]int64, b.N*2)
- for i := 0; i &lt; b.N*2; i&#43;&#43; {
+ for i := 0; i < b.N*2; i++ {
if i%2 == 0 {
trace[i] = rand.Int63() % 16384
} else {
@@ -5258,84 +5258,84 @@ func BenchmarkLRU_Freq(b *testing.B) {
b.ResetTimer()
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
l.Add(trace[i], trace[i])
}
var hit, miss int
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
_, ok := l.Get(trace[i])
if ok {
- hit&#43;&#43;
+ hit++
} else {
- miss&#43;&#43;
+ miss++
}
}
- b.Logf(&#34;hit: %d miss: %d ratio: %f&#34;, hit, miss, float64(hit)/float64(miss))
+ b.Logf("hit: %d miss: %d ratio: %f", hit, miss, float64(hit)/float64(miss))
}
func TestLRU(t *testing.T) {
evictCounter := 0
onEvicted := func(k interface{}, v interface{}) {
if k != v {
- t.Fatalf(&#34;Evict values not equal (%v!=%v)&#34;, k, v)
+ t.Fatalf("Evict values not equal (%v!=%v)", k, v)
}
- evictCounter&#43;&#43;
+ evictCounter++
}
l, err := NewWithEvict(128, onEvicted)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- for i := 0; i &lt; 256; i&#43;&#43; {
+ for i := 0; i < 256; i++ {
l.Add(i, i)
}
if l.Len() != 128 {
- t.Fatalf(&#34;bad len: %v&#34;, l.Len())
+ t.Fatalf("bad len: %v", l.Len())
}
if evictCounter != 128 {
- t.Fatalf(&#34;bad evict count: %v&#34;, evictCounter)
+ t.Fatalf("bad evict count: %v", evictCounter)
}
for i, k := range l.Keys() {
- if v, ok := l.Get(k); !ok || v != k || v != i&#43;128 {
- t.Fatalf(&#34;bad key: %v&#34;, k)
+ if v, ok := l.Get(k); !ok || v != k || v != i+128 {
+ t.Fatalf("bad key: %v", k)
}
}
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
_, ok := l.Get(i)
if ok {
- t.Fatalf(&#34;should be evicted&#34;)
+ t.Fatalf("should be evicted")
}
}
- for i := 128; i &lt; 256; i&#43;&#43; {
+ for i := 128; i < 256; i++ {
_, ok := l.Get(i)
if !ok {
- t.Fatalf(&#34;should not be evicted&#34;)
+ t.Fatalf("should not be evicted")
}
}
- for i := 128; i &lt; 192; i&#43;&#43; {
+ for i := 128; i < 192; i++ {
l.Remove(i)
_, ok := l.Get(i)
if ok {
- t.Fatalf(&#34;should be deleted&#34;)
+ t.Fatalf("should be deleted")
}
}
l.Get(192) // expect 192 to be last key in l.Keys()
for i, k := range l.Keys() {
- if (i &lt; 63 &amp;&amp; k != i&#43;193) || (i == 63 &amp;&amp; k != 192) {
- t.Fatalf(&#34;out of order key: %v&#34;, k)
+ if (i < 63 && k != i+193) || (i == 63 && k != 192) {
+ t.Fatalf("out of order key: %v", k)
}
}
l.Purge()
if l.Len() != 0 {
- t.Fatalf(&#34;bad len: %v&#34;, l.Len())
+ t.Fatalf("bad len: %v", l.Len())
}
if _, ok := l.Get(200); ok {
- t.Fatalf(&#34;should contain nothing&#34;)
+ t.Fatalf("should contain nothing")
}
}
@@ -5343,120 +5343,120 @@ func TestLRU(t *testing.T) {
func TestLRUAdd(t *testing.T) {
evictCounter := 0
onEvicted := func(k interface{}, v interface{}) {
- evictCounter&#43;&#43;
+ evictCounter++
}
l, err := NewWithEvict(1, onEvicted)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if l.Add(1, 1) == true || evictCounter != 0 {
- t.Errorf(&#34;should not have an eviction&#34;)
+ t.Errorf("should not have an eviction")
}
if l.Add(2, 2) == false || evictCounter != 1 {
- t.Errorf(&#34;should have an eviction&#34;)
+ t.Errorf("should have an eviction")
}
}
-// test that Contains doesn&#39;t update recent-ness
+// test that Contains doesn't update recent-ness
func TestLRUContains(t *testing.T) {
l, err := New(2)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
if !l.Contains(1) {
- t.Errorf(&#34;1 should be contained&#34;)
+ t.Errorf("1 should be contained")
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;Contains should not have updated recent-ness of 1&#34;)
+ t.Errorf("Contains should not have updated recent-ness of 1")
}
}
-// test that ContainsOrAdd doesn&#39;t update recent-ness
+// test that ContainsOrAdd doesn't update recent-ness
func TestLRUContainsOrAdd(t *testing.T) {
l, err := New(2)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
contains, evict := l.ContainsOrAdd(1, 1)
if !contains {
- t.Errorf(&#34;1 should be contained&#34;)
+ t.Errorf("1 should be contained")
}
if evict {
- t.Errorf(&#34;nothing should be evicted here&#34;)
+ t.Errorf("nothing should be evicted here")
}
l.Add(3, 3)
contains, evict = l.ContainsOrAdd(1, 1)
if contains {
- t.Errorf(&#34;1 should not have been contained&#34;)
+ t.Errorf("1 should not have been contained")
}
if !evict {
- t.Errorf(&#34;an eviction should have occurred&#34;)
+ t.Errorf("an eviction should have occurred")
}
if !l.Contains(1) {
- t.Errorf(&#34;now 1 should be contained&#34;)
+ t.Errorf("now 1 should be contained")
}
}
-// test that PeekOrAdd doesn&#39;t update recent-ness
+// test that PeekOrAdd doesn't update recent-ness
func TestLRUPeekOrAdd(t *testing.T) {
l, err := New(2)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
previous, contains, evict := l.PeekOrAdd(1, 1)
if !contains {
- t.Errorf(&#34;1 should be contained&#34;)
+ t.Errorf("1 should be contained")
}
if evict {
- t.Errorf(&#34;nothing should be evicted here&#34;)
+ t.Errorf("nothing should be evicted here")
}
if previous != 1 {
- t.Errorf(&#34;previous is not equal to 1&#34;)
+ t.Errorf("previous is not equal to 1")
}
l.Add(3, 3)
contains, evict = l.ContainsOrAdd(1, 1)
if contains {
- t.Errorf(&#34;1 should not have been contained&#34;)
+ t.Errorf("1 should not have been contained")
}
if !evict {
- t.Errorf(&#34;an eviction should have occurred&#34;)
+ t.Errorf("an eviction should have occurred")
}
if !l.Contains(1) {
- t.Errorf(&#34;now 1 should be contained&#34;)
+ t.Errorf("now 1 should be contained")
}
}
-// test that Peek doesn&#39;t update recent-ness
+// test that Peek doesn't update recent-ness
func TestLRUPeek(t *testing.T) {
l, err := New(2)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
if v, ok := l.Peek(1); !ok || v != 1 {
- t.Errorf(&#34;1 should be set to 1: %v, %v&#34;, v, ok)
+ t.Errorf("1 should be set to 1: %v, %v", v, ok)
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;should not have updated recent-ness of 1&#34;)
+ t.Errorf("should not have updated recent-ness of 1")
}
}
@@ -5464,11 +5464,11 @@ func TestLRUPeek(t *testing.T) {
func TestLRUResize(t *testing.T) {
onEvictCounter := 0
onEvicted := func(k interface{}, v interface{}) {
- onEvictCounter&#43;&#43;
+ onEvictCounter++
}
l, err := NewWithEvict(2, onEvicted)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
// Downsize
@@ -5476,26 +5476,26 @@ func TestLRUResize(t *testing.T) {
l.Add(2, 2)
evicted := l.Resize(1);
if evicted != 1 {
- t.Errorf(&#34;1 element should have been evicted: %v&#34;, evicted)
+ t.Errorf("1 element should have been evicted: %v", evicted)
}
if onEvictCounter != 1 {
- t.Errorf(&#34;onEvicted should have been called 1 time: %v&#34;, onEvictCounter)
+ t.Errorf("onEvicted should have been called 1 time: %v", onEvictCounter)
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;Element 1 should have been evicted&#34;)
+ t.Errorf("Element 1 should have been evicted")
}
// Upsize
evicted = l.Resize(2);
if evicted != 0 {
- t.Errorf(&#34;0 elements should have been evicted: %v&#34;, evicted)
+ t.Errorf("0 elements should have been evicted: %v", evicted)
}
l.Add(4, 4)
if !l.Contains(3) || !l.Contains(4) {
- t.Errorf(&#34;Cache should have contained 2 elements&#34;)
+ t.Errorf("Cache should have contained 2 elements")
}
}
@@ -5504,8 +5504,8 @@ lru.go - github.com/hashicorp/golang-lru/simplelru
package simplelru
import (
- &#34;container/list&#34;
- &#34;errors&#34;
+ "container/list"
+ "errors"
)
// EvictCallback is used to get a callback when a cache entry is evicted
@@ -5527,10 +5527,10 @@ type entry struct {
// NewLRU constructs an LRU of the given size
func NewLRU(size int, onEvict EvictCallback) (*LRU, error) {
- if size &lt;= 0 {
- return nil, errors.New(&#34;Must provide a positive size&#34;)
+ if size <= 0 {
+ return nil, errors.New("Must provide a positive size")
}
- c := &amp;LRU{
+ c := &LRU{
size: size,
evictList: list.New(),
items: make(map[interface{}]*list.Element),
@@ -5560,11 +5560,11 @@ func (c *LRU) Add(key, value interface{}) (evicted bool) {
}
// Add new item
- ent := &amp;entry{key, value}
+ ent := &entry{key, value}
entry := c.evictList.PushFront(ent)
c.items[key] = entry
- evict := c.evictList.Len() &gt; c.size
+ evict := c.evictList.Len() > c.size
// Verify size not exceeded
if evict {
c.removeOldest()
@@ -5572,7 +5572,7 @@ func (c *LRU) Add(key, value interface{}) (evicted bool) {
return evict
}
-// Get looks up a key&#39;s value from the cache.
+// Get looks up a key's value from the cache.
func (c *LRU) Get(key interface{}) (value interface{}, ok bool) {
if ent, ok := c.items[key]; ok {
c.evictList.MoveToFront(ent)
@@ -5592,7 +5592,7 @@ func (c *LRU) Contains(key interface{}) (ok bool) {
}
// Peek returns the key value (or undefined if not found) without updating
-// the &#34;recently used&#34;-ness of the key.
+// the "recently used"-ness of the key.
func (c *LRU) Peek(key interface{}) (value interface{}, ok bool) {
var ent *list.Element
if ent, ok = c.items[key]; ok {
@@ -5638,7 +5638,7 @@ func (c *LRU) Keys() []interface{} {
i := 0
for ent := c.evictList.Back(); ent != nil; ent = ent.Prev() {
keys[i] = ent.Value.(*entry).key
- i&#43;&#43;
+ i++
}
return keys
}
@@ -5651,10 +5651,10 @@ func (c *LRU) Len() int {
// Resize changes the cache size.
func (c *LRU) Resize(size int) (evicted int) {
diff := c.Len() - size
- if diff &lt; 0 {
+ if diff < 0 {
diff = 0
}
- for i := 0; i &lt; diff; i&#43;&#43; {
+ for i := 0; i < diff; i++ {
c.removeOldest()
}
c.size = size
@@ -5686,17 +5686,17 @@ package simplelru
// LRUCache is the interface for simple LRU cache.
type LRUCache interface {
// Adds a value to the cache, returns true if an eviction occurred and
- // updates the &#34;recently used&#34;-ness of the key.
+ // updates the "recently used"-ness of the key.
Add(key, value interface{}) bool
- // Returns key&#39;s value from the cache and
- // updates the &#34;recently used&#34;-ness of the key. #value, isFound
+ // Returns key's value from the cache and
+ // updates the "recently used"-ness of the key. #value, isFound
Get(key interface{}) (value interface{}, ok bool)
// Checks if a key exists in cache without updating the recent-ness.
Contains(key interface{}) (ok bool)
- // Returns key&#39;s value without updating the &#34;recently used&#34;-ness of the key.
+ // Returns key's value without updating the "recently used"-ness of the key.
Peek(key interface{}) (value interface{}, ok bool)
// Removes a key from the cache.
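
The LRUCache interface above lets callers program against any of the cache variants; a hedged sketch using the non-thread-safe simplelru implementation directly (the warmest helper is illustrative, not part of the package):

package main

import (
	"fmt"

	"github.com/hashicorp/golang-lru/simplelru"
)

// warmest returns the most recently used key, relying only on the LRUCache
// interface; Keys is ordered oldest to newest.
func warmest(c simplelru.LRUCache) (interface{}, bool) {
	keys := c.Keys()
	if len(keys) == 0 {
		return nil, false
	}
	return keys[len(keys)-1], true
}

func main() {
	// NewLRU is not safe for concurrent use; callers add their own locking.
	c, err := simplelru.NewLRU(3, nil)
	if err != nil {
		panic(err)
	}
	c.Add("x", 1)
	c.Add("y", 2)
	c.Add("z", 3)
	c.Get("x")              // refreshes "x"
	fmt.Println(warmest(c)) // "x", true
}
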
@@ -5725,111 +5725,111 @@ type LRUCache interface {
lru_test.go - github.com/hashicorp/golang-lru/simplelru
package simplelru
-import &#34;testing&#34;
+import "testing"
func TestLRU(t *testing.T) {
evictCounter := 0
onEvicted := func(k interface{}, v interface{}) {
if k != v {
- t.Fatalf(&#34;Evict values not equal (%v!=%v)&#34;, k, v)
+ t.Fatalf("Evict values not equal (%v!=%v)", k, v)
}
- evictCounter&#43;&#43;
+ evictCounter++
}
l, err := NewLRU(128, onEvicted)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- for i := 0; i &lt; 256; i&#43;&#43; {
+ for i := 0; i < 256; i++ {
l.Add(i, i)
}
if l.Len() != 128 {
- t.Fatalf(&#34;bad len: %v&#34;, l.Len())
+ t.Fatalf("bad len: %v", l.Len())
}
if evictCounter != 128 {
- t.Fatalf(&#34;bad evict count: %v&#34;, evictCounter)
+ t.Fatalf("bad evict count: %v", evictCounter)
}
for i, k := range l.Keys() {
- if v, ok := l.Get(k); !ok || v != k || v != i&#43;128 {
- t.Fatalf(&#34;bad key: %v&#34;, k)
+ if v, ok := l.Get(k); !ok || v != k || v != i+128 {
+ t.Fatalf("bad key: %v", k)
}
}
- for i := 0; i &lt; 128; i&#43;&#43; {
+ for i := 0; i < 128; i++ {
_, ok := l.Get(i)
if ok {
- t.Fatalf(&#34;should be evicted&#34;)
+ t.Fatalf("should be evicted")
}
}
- for i := 128; i &lt; 256; i&#43;&#43; {
+ for i := 128; i < 256; i++ {
_, ok := l.Get(i)
if !ok {
- t.Fatalf(&#34;should not be evicted&#34;)
+ t.Fatalf("should not be evicted")
}
}
- for i := 128; i &lt; 192; i&#43;&#43; {
+ for i := 128; i < 192; i++ {
ok := l.Remove(i)
if !ok {
- t.Fatalf(&#34;should be contained&#34;)
+ t.Fatalf("should be contained")
}
ok = l.Remove(i)
if ok {
- t.Fatalf(&#34;should not be contained&#34;)
+ t.Fatalf("should not be contained")
}
_, ok = l.Get(i)
if ok {
- t.Fatalf(&#34;should be deleted&#34;)
+ t.Fatalf("should be deleted")
}
}
l.Get(192) // expect 192 to be last key in l.Keys()
for i, k := range l.Keys() {
- if (i &lt; 63 &amp;&amp; k != i&#43;193) || (i == 63 &amp;&amp; k != 192) {
- t.Fatalf(&#34;out of order key: %v&#34;, k)
+ if (i < 63 && k != i+193) || (i == 63 && k != 192) {
+ t.Fatalf("out of order key: %v", k)
}
}
l.Purge()
if l.Len() != 0 {
- t.Fatalf(&#34;bad len: %v&#34;, l.Len())
+ t.Fatalf("bad len: %v", l.Len())
}
if _, ok := l.Get(200); ok {
- t.Fatalf(&#34;should contain nothing&#34;)
+ t.Fatalf("should contain nothing")
}
}
func TestLRU_GetOldest_RemoveOldest(t *testing.T) {
l, err := NewLRU(128, nil)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- for i := 0; i &lt; 256; i&#43;&#43; {
+ for i := 0; i < 256; i++ {
l.Add(i, i)
}
k, _, ok := l.GetOldest()
if !ok {
- t.Fatalf(&#34;missing&#34;)
+ t.Fatalf("missing")
}
if k.(int) != 128 {
- t.Fatalf(&#34;bad: %v&#34;, k)
+ t.Fatalf("bad: %v", k)
}
k, _, ok = l.RemoveOldest()
if !ok {
- t.Fatalf(&#34;missing&#34;)
+ t.Fatalf("missing")
}
if k.(int) != 128 {
- t.Fatalf(&#34;bad: %v&#34;, k)
+ t.Fatalf("bad: %v", k)
}
k, _, ok = l.RemoveOldest()
if !ok {
- t.Fatalf(&#34;missing&#34;)
+ t.Fatalf("missing")
}
if k.(int) != 129 {
- t.Fatalf(&#34;bad: %v&#34;, k)
+ t.Fatalf("bad: %v", k)
}
}
@@ -5837,57 +5837,57 @@ func TestLRU_GetOldest_RemoveOldest(t *testing.T) {
func TestLRU_Add(t *testing.T) {
evictCounter := 0
onEvicted := func(k interface{}, v interface{}) {
- evictCounter&#43;&#43;
+ evictCounter++
}
l, err := NewLRU(1, onEvicted)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if l.Add(1, 1) == true || evictCounter != 0 {
- t.Errorf(&#34;should not have an eviction&#34;)
+ t.Errorf("should not have an eviction")
}
if l.Add(2, 2) == false || evictCounter != 1 {
- t.Errorf(&#34;should have an eviction&#34;)
+ t.Errorf("should have an eviction")
}
}
-// Test that Contains doesn&#39;t update recent-ness
+// Test that Contains doesn't update recent-ness
func TestLRU_Contains(t *testing.T) {
l, err := NewLRU(2, nil)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
if !l.Contains(1) {
- t.Errorf(&#34;1 should be contained&#34;)
+ t.Errorf("1 should be contained")
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;Contains should not have updated recent-ness of 1&#34;)
+ t.Errorf("Contains should not have updated recent-ness of 1")
}
}
-// Test that Peek doesn&#39;t update recent-ness
+// Test that Peek doesn't update recent-ness
func TestLRU_Peek(t *testing.T) {
l, err := NewLRU(2, nil)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
l.Add(1, 1)
l.Add(2, 2)
if v, ok := l.Peek(1); !ok || v != 1 {
- t.Errorf(&#34;1 should be set to 1: %v, %v&#34;, v, ok)
+ t.Errorf("1 should be set to 1: %v, %v", v, ok)
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;should not have updated recent-ness of 1&#34;)
+ t.Errorf("should not have updated recent-ness of 1")
}
}
@@ -5895,11 +5895,11 @@ func TestLRU_Peek(t *testing.T) {
func TestLRU_Resize(t *testing.T) {
onEvictCounter := 0
onEvicted := func(k interface{}, v interface{}) {
- onEvictCounter&#43;&#43;
+ onEvictCounter++
}
l, err := NewLRU(2, onEvicted)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
// Downsize
@@ -5907,26 +5907,26 @@ func TestLRU_Resize(t *testing.T) {
l.Add(2, 2)
evicted := l.Resize(1);
if evicted != 1 {
- t.Errorf(&#34;1 element should have been evicted: %v&#34;, evicted)
+ t.Errorf("1 element should have been evicted: %v", evicted)
}
if onEvictCounter != 1 {
- t.Errorf(&#34;onEvicted should have been called 1 time: %v&#34;, onEvictCounter)
+ t.Errorf("onEvicted should have been called 1 time: %v", onEvictCounter)
}
l.Add(3, 3)
if l.Contains(1) {
- t.Errorf(&#34;Element 1 should have been evicted&#34;)
+ t.Errorf("Element 1 should have been evicted")
}
// Upsize
evicted = l.Resize(2);
if evicted != 0 {
- t.Errorf(&#34;0 elements should have been evicted: %v&#34;, evicted)
+ t.Errorf("0 elements should have been evicted: %v", evicted)
}
l.Add(4, 4)
if !l.Contains(3) || !l.Contains(4) {
- t.Errorf(&#34;Cache should have contained 2 elements&#34;)
+ t.Errorf("Cache should have contained 2 elements")
}
}
@@ -5962,28 +5962,28 @@ Mozilla Public License, version 2.0
1. Definitions
-1.1. &#34;Contributor&#34;
+1.1. "Contributor"
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
-1.2. &#34;Contributor Version&#34;
+1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor&#39;s Contribution.
+ Contributor and that particular Contributor's Contribution.
-1.3. &#34;Contribution&#34;
+1.3. "Contribution"
means Covered Software of a particular Contributor.
-1.4. &#34;Covered Software&#34;
+1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
-1.5. &#34;Incompatible With Secondary Licenses&#34;
+1.5. "Incompatible With Secondary Licenses"
means
a. that the initial Contributor has attached the notice described in
@@ -5993,26 +5993,26 @@ Mozilla Public License, version 2.0
version 1.1 or earlier of the License, but not also under the terms of
a Secondary License.
-1.6. &#34;Executable Form&#34;
+1.6. "Executable Form"
means any form of the work other than Source Code Form.
-1.7. &#34;Larger Work&#34;
+1.7. "Larger Work"
means a work that combines Covered Software with other material, in a
separate file or files, that is not Covered Software.
-1.8. &#34;License&#34;
+1.8. "License"
means this document.
-1.9. &#34;Licensable&#34;
+1.9. "Licensable"
means having the right to grant, to the maximum extent possible, whether
at the time of the initial grant or subsequently, any and all of the
rights conveyed by this License.
-1.10. &#34;Modifications&#34;
+1.10. "Modifications"
means any of the following:
@@ -6021,7 +6021,7 @@ Mozilla Public License, version 2.0
b. any new file in Source Code Form that contains any Covered Software.
-1.11. &#34;Patent Claims&#34; of a Contributor
+1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
@@ -6029,22 +6029,22 @@ Mozilla Public License, version 2.0
by the making, using, selling, offering for sale, having made, import,
or transfer of either its Contributions or its Contributor Version.
-1.12. &#34;Secondary License&#34;
+1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
-1.13. &#34;Source Code Form&#34;
+1.13. "Source Code Form"
means the form of the work preferred for making modifications.
-1.14. &#34;You&#34; (or &#34;Your&#34;)
+1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
- License. For legal entities, &#34;You&#34; includes any entity that controls, is
+ License. For legal entities, "You" includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
- definition, &#34;control&#34; means (a) the power, direct or indirect, to cause
+ definition, "control" means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
@@ -6083,7 +6083,7 @@ Mozilla Public License, version 2.0
a. for any code that a Contributor has removed from Covered Software; or
- b. for infringements caused by: (i) Your and any other third party&#39;s
+ b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
@@ -6129,7 +6129,7 @@ Mozilla Public License, version 2.0
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients&#39; rights in the Source Code
+ attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
@@ -6145,7 +6145,7 @@ Mozilla Public License, version 2.0
b. You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter the
- recipients&#39; rights in the Source Code Form under this License.
+ recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
@@ -6223,7 +6223,7 @@ Mozilla Public License, version 2.0
6. Disclaimer of Warranty
- Covered Software is provided under this License on an &#34;as is&#34; basis,
+ Covered Software is provided under this License on an "as is" basis,
without warranty of any kind, either expressed, implied, or statutory,
including, without limitation, warranties that the Covered Software is free
of defects, merchantable, fit for a particular purpose or non-infringing.
@@ -6245,7 +6245,7 @@ Mozilla Public License, version 2.0
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from
- such party&#39;s negligence to the extent applicable law prohibits such
+ such party's negligence to the extent applicable law prohibits such
limitation. Some jurisdictions do not allow the exclusion or limitation of
incidental or consequential damages, so this exclusion and limitation may
not apply to You.
@@ -6256,7 +6256,7 @@ Mozilla Public License, version 2.0
of a jurisdiction where the defendant maintains its principal place of
business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party&#39;s ability to bring cross-claims or
+ in this Section shall prevent a party's ability to bring cross-claims or
counter-claims.
9. Miscellaneous
@@ -6315,10 +6315,10 @@ notice.
You may add additional accurate notices of copyright ownership.
-Exhibit B - &#34;Incompatible With Secondary Licenses&#34; Notice
+Exhibit B - "Incompatible With Secondary Licenses" Notice
- This Source Code Form is &#34;Incompatible
- With Secondary Licenses&#34;, as defined by
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
the Mozilla Public License, v. 2.0.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
README.md - github.com/hashicorp/yamux
@@ -6379,7 +6379,7 @@ func client() {
}
// Stream implements net.Conn
- stream.Write([]byte(&#34;ping&#34;))
+ stream.Write([]byte("ping"))
}
func server() {
@@ -6414,8 +6414,8 @@ addr.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;fmt&#34;
- &#34;net&#34;
+ "fmt"
+ "net"
)
// hasAddr is used to get the address from the underlying connection
@@ -6430,11 +6430,11 @@ type yamuxAddr struct {
}
func (*yamuxAddr) Network() string {
- return &#34;yamux&#34;
+ return "yamux"
}
func (y *yamuxAddr) String() string {
- return fmt.Sprintf(&#34;yamux:%s&#34;, y.Addr)
+ return fmt.Sprintf("yamux:%s", y.Addr)
}
// Addr is used to get the address of the listener.
@@ -6447,7 +6447,7 @@ func (s *Session) Addr() net.Addr {
func (s *Session) LocalAddr() net.Addr {
addr, ok := s.conn.(hasAddr)
if !ok {
- return &amp;yamuxAddr{&#34;local&#34;}
+ return &yamuxAddr{"local"}
}
return addr.LocalAddr()
}
@@ -6457,7 +6457,7 @@ func (s *Session) LocalAddr() net.Addr {
func (s *Session) RemoteAddr() net.Addr {
addr, ok := s.conn.(hasAddr)
if !ok {
- return &amp;yamuxAddr{&#34;remote&#34;}
+ return &yamuxAddr{"remote"}
}
return addr.RemoteAddr()
}
@@ -6477,9 +6477,9 @@ bench_test.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;io&#34;
- &#34;io/ioutil&#34;
- &#34;testing&#34;
+ "io"
+ "io/ioutil"
+ "testing"
)
func BenchmarkPing(b *testing.B) {
@@ -6492,13 +6492,13 @@ func BenchmarkPing(b *testing.B) {
b.ReportAllocs()
b.ResetTimer()
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
rtt, err := client.Ping()
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
if rtt == 0 {
- b.Fatalf(&#34;bad: %v&#34;, rtt)
+ b.Fatalf("bad: %v", rtt)
}
}
}
@@ -6517,7 +6517,7 @@ func BenchmarkAccept(b *testing.B) {
go func() {
defer close(doneCh)
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
stream, err := server.AcceptStream()
if err != nil {
return
@@ -6526,14 +6526,14 @@ func BenchmarkAccept(b *testing.B) {
}
}()
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
stream, err := client.Open()
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
stream.Close()
}
- &lt;-doneCh
+ <-doneCh
}
func BenchmarkSendRecv32(b *testing.B) {
@@ -6608,21 +6608,21 @@ func benchmarkSendRecv(b *testing.B, sendSize, recvSize int) {
switch {
case sendSize == recvSize:
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
if _, err := stream.Read(recvBuf); err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
}
- case recvSize &gt; sendSize:
- b.Fatalf(&#34;bad test case; recvSize was: %d and sendSize was: %d, but recvSize must be &lt;= sendSize!&#34;, recvSize, sendSize)
+ case recvSize > sendSize:
+ b.Fatalf("bad test case; recvSize was: %d and sendSize was: %d, but recvSize must be <= sendSize!", recvSize, sendSize)
default:
chunks := sendSize / recvSize
- for i := 0; i &lt; b.N; i&#43;&#43; {
- for j := 0; j &lt; chunks; j&#43;&#43; {
+ for i := 0; i < b.N; i++ {
+ for j := 0; j < chunks; j++ {
if _, err := stream.Read(recvBuf); err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
}
}
@@ -6631,16 +6631,16 @@ func benchmarkSendRecv(b *testing.B, sendSize, recvSize int) {
stream, err := client.Open()
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
defer stream.Close()
- for i := 0; i &lt; b.N; i&#43;&#43; {
+ for i := 0; i < b.N; i++ {
if _, err := stream.Write(sendBuf); err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
}
- &lt;-doneCh
+ <-doneCh
}
func BenchmarkSendRecvParallel32(b *testing.B) {
@@ -6709,23 +6709,23 @@ func benchmarkSendRecvParallel(b *testing.B, sendSize int) {
defer stream.Close()
if _, err := discarder.ReadFrom(stream); err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
}()
stream, err := client.Open()
if err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
for pb.Next() {
if _, err := stream.Write(sendBuf); err != nil {
- b.Fatalf(&#34;err: %v&#34;, err)
+ b.Fatalf("err: %v", err)
}
}
stream.Close()
- &lt;-doneCh
+ <-doneCh
})
}
@@ -6734,56 +6734,56 @@ const.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;encoding/binary&#34;
- &#34;fmt&#34;
+ "encoding/binary"
+ "fmt"
)
var (
// ErrInvalidVersion means we received a frame with an
// invalid version
- ErrInvalidVersion = fmt.Errorf(&#34;invalid protocol version&#34;)
+ ErrInvalidVersion = fmt.Errorf("invalid protocol version")
// ErrInvalidMsgType means we received a frame with an
// invalid message type
- ErrInvalidMsgType = fmt.Errorf(&#34;invalid msg type&#34;)
+ ErrInvalidMsgType = fmt.Errorf("invalid msg type")
// ErrSessionShutdown is used if there is a shutdown during
// an operation
- ErrSessionShutdown = fmt.Errorf(&#34;session shutdown&#34;)
+ ErrSessionShutdown = fmt.Errorf("session shutdown")
// ErrStreamsExhausted is returned if we have no more
// stream ids to issue
- ErrStreamsExhausted = fmt.Errorf(&#34;streams exhausted&#34;)
+ ErrStreamsExhausted = fmt.Errorf("streams exhausted")
// ErrDuplicateStream is used if a duplicate stream is
// opened inbound
- ErrDuplicateStream = fmt.Errorf(&#34;duplicate stream initiated&#34;)
+ ErrDuplicateStream = fmt.Errorf("duplicate stream initiated")
// ErrReceiveWindowExceeded indicates the window was exceeded
- ErrRecvWindowExceeded = fmt.Errorf(&#34;recv window exceeded&#34;)
+ ErrRecvWindowExceeded = fmt.Errorf("recv window exceeded")
// ErrTimeout is used when we reach an IO deadline
- ErrTimeout = fmt.Errorf(&#34;i/o deadline reached&#34;)
+ ErrTimeout = fmt.Errorf("i/o deadline reached")
// ErrStreamClosed is returned when using a closed stream
- ErrStreamClosed = fmt.Errorf(&#34;stream closed&#34;)
+ ErrStreamClosed = fmt.Errorf("stream closed")
// ErrUnexpectedFlag is set when we get an unexpected flag
- ErrUnexpectedFlag = fmt.Errorf(&#34;unexpected flag&#34;)
+ ErrUnexpectedFlag = fmt.Errorf("unexpected flag")
// ErrRemoteGoAway is used when we get a go away from the other side
- ErrRemoteGoAway = fmt.Errorf(&#34;remote end is not accepting connections&#34;)
+ ErrRemoteGoAway = fmt.Errorf("remote end is not accepting connections")
// ErrConnectionReset is sent if a stream is reset. This can happen
// if the backlog is exceeded, or if there was a remote GoAway.
- ErrConnectionReset = fmt.Errorf(&#34;connection reset&#34;)
+ ErrConnectionReset = fmt.Errorf("connection reset")
- // ErrConnectionWriteTimeout indicates that we hit the &#34;safety valve&#34;
+ // ErrConnectionWriteTimeout indicates that we hit the "safety valve"
// timeout writing to the underlying stream connection.
- ErrConnectionWriteTimeout = fmt.Errorf(&#34;connection write timeout&#34;)
+ ErrConnectionWriteTimeout = fmt.Errorf("connection write timeout")
// ErrKeepAliveTimeout is sent if a missed keepalive caused the stream close
- ErrKeepAliveTimeout = fmt.Errorf(&#34;keepalive timeout&#34;)
+ ErrKeepAliveTimeout = fmt.Errorf("keepalive timeout")
)
const (
@@ -6814,7 +6814,7 @@ const (
const (
// SYN is sent to signal a new stream. May
// be sent with a data payload
- flagSYN uint16 = 1 &lt;&lt; iota
+ flagSYN uint16 = 1 << iota
// ACK is sent to acknowledge a new stream. May
// be sent with a data payload
@@ -6850,8 +6850,8 @@ const (
sizeOfFlags = 2
sizeOfStreamID = 4
sizeOfLength = 4
- headerSize = sizeOfVersion &#43; sizeOfType &#43; sizeOfFlags &#43;
- sizeOfStreamID &#43; sizeOfLength
+ headerSize = sizeOfVersion + sizeOfType + sizeOfFlags +
+ sizeOfStreamID + sizeOfLength
)
type header []byte
@@ -6877,7 +6877,7 @@ func (h header) Length() uint32 {
}
func (h header) String() string {
- return fmt.Sprintf(&#34;Vsn:%d Type:%d Flags:%d StreamID:%d Length:%d&#34;,
+ return fmt.Sprintf("Vsn:%d Type:%d Flags:%d StreamID:%d Length:%d",
h.Version(), h.MsgType(), h.Flags(), h.StreamID(), h.Length())
}
@@ -6894,52 +6894,52 @@ const_test.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;testing&#34;
+ "testing"
)
func TestConst(t *testing.T) {
if protoVersion != 0 {
- t.Fatalf(&#34;bad: %v&#34;, protoVersion)
+ t.Fatalf("bad: %v", protoVersion)
}
if typeData != 0 {
- t.Fatalf(&#34;bad: %v&#34;, typeData)
+ t.Fatalf("bad: %v", typeData)
}
if typeWindowUpdate != 1 {
- t.Fatalf(&#34;bad: %v&#34;, typeWindowUpdate)
+ t.Fatalf("bad: %v", typeWindowUpdate)
}
if typePing != 2 {
- t.Fatalf(&#34;bad: %v&#34;, typePing)
+ t.Fatalf("bad: %v", typePing)
}
if typeGoAway != 3 {
- t.Fatalf(&#34;bad: %v&#34;, typeGoAway)
+ t.Fatalf("bad: %v", typeGoAway)
}
if flagSYN != 1 {
- t.Fatalf(&#34;bad: %v&#34;, flagSYN)
+ t.Fatalf("bad: %v", flagSYN)
}
if flagACK != 2 {
- t.Fatalf(&#34;bad: %v&#34;, flagACK)
+ t.Fatalf("bad: %v", flagACK)
}
if flagFIN != 4 {
- t.Fatalf(&#34;bad: %v&#34;, flagFIN)
+ t.Fatalf("bad: %v", flagFIN)
}
if flagRST != 8 {
- t.Fatalf(&#34;bad: %v&#34;, flagRST)
+ t.Fatalf("bad: %v", flagRST)
}
if goAwayNormal != 0 {
- t.Fatalf(&#34;bad: %v&#34;, goAwayNormal)
+ t.Fatalf("bad: %v", goAwayNormal)
}
if goAwayProtoErr != 1 {
- t.Fatalf(&#34;bad: %v&#34;, goAwayProtoErr)
+ t.Fatalf("bad: %v", goAwayProtoErr)
}
if goAwayInternalErr != 2 {
- t.Fatalf(&#34;bad: %v&#34;, goAwayInternalErr)
+ t.Fatalf("bad: %v", goAwayInternalErr)
}
if headerSize != 12 {
- t.Fatalf(&#34;bad header size&#34;)
+ t.Fatalf("bad header size")
}
}
@@ -6948,19 +6948,19 @@ func TestEncodeDecode(t *testing.T) {
hdr.encode(typeWindowUpdate, flagACK|flagRST, 1234, 4321)
if hdr.Version() != protoVersion {
- t.Fatalf(&#34;bad: %v&#34;, hdr)
+ t.Fatalf("bad: %v", hdr)
}
if hdr.MsgType() != typeWindowUpdate {
- t.Fatalf(&#34;bad: %v&#34;, hdr)
+ t.Fatalf("bad: %v", hdr)
}
if hdr.Flags() != flagACK|flagRST {
- t.Fatalf(&#34;bad: %v&#34;, hdr)
+ t.Fatalf("bad: %v", hdr)
}
if hdr.StreamID() != 1234 {
- t.Fatalf(&#34;bad: %v&#34;, hdr)
+ t.Fatalf("bad: %v", hdr)
}
if hdr.Length() != 4321 {
- t.Fatalf(&#34;bad: %v&#34;, hdr)
+ t.Fatalf("bad: %v", hdr)
}
}
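
TestEncodeDecode relies on the unexported hdr.encode, but the 12-byte layout it checks (1-byte version, 1-byte type, 2-byte flags, 4-byte stream ID, 4-byte length) can be reproduced externally. A sketch, assuming big-endian field encoding as in the upstream implementation:

package main

import (
	"encoding/binary"
	"fmt"
)

// packHeader mirrors the yamux frame header layout:
// version (1) | type (1) | flags (2) | stream ID (4) | length (4).
func packHeader(version, msgType uint8, flags uint16, streamID, length uint32) []byte {
	hdr := make([]byte, 12)
	hdr[0] = version
	hdr[1] = msgType
	binary.BigEndian.PutUint16(hdr[2:4], flags)
	binary.BigEndian.PutUint32(hdr[4:8], streamID)
	binary.BigEndian.PutUint32(hdr[8:12], length)
	return hdr
}

func main() {
	// typeWindowUpdate = 1, flagACK|flagRST = 2|8, as in TestEncodeDecode.
	hdr := packHeader(0, 1, 2|8, 1234, 4321)
	fmt.Printf("% x\n", hdr)
	fmt.Println(binary.BigEndian.Uint32(hdr[4:8])) // 1234
}
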
@@ -6975,11 +6975,11 @@ mux.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;fmt&#34;
- &#34;io&#34;
- &#34;log&#34;
- &#34;os&#34;
- &#34;time&#34;
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "time"
)
// Config is used to tune the Yamux session
@@ -6995,9 +6995,9 @@ type Config struct {
// KeepAliveInterval is how often to perform the keep alive
KeepAliveInterval time.Duration
- // ConnectionWriteTimeout is meant to be a &#34;safety valve&#34; timeout after
+ // ConnectionWriteTimeout is meant to be a "safety valve" timeout after
	// which we will suspect a problem with the underlying connection and
- // close it. This is only applied to writes, where&#39;s there&#39;s generally
+	// close it. This is only applied to writes, where there's generally
// an expectation that things will move along quickly.
ConnectionWriteTimeout time.Duration
@@ -7023,7 +7023,7 @@ type Config struct {
// DefaultConfig is used to return a default configuration
func DefaultConfig() *Config {
- return &amp;Config{
+ return &Config{
AcceptBacklog: 256,
EnableKeepAlive: true,
KeepAliveInterval: 30 * time.Second,
@@ -7036,19 +7036,19 @@ func DefaultConfig() *Config {
// VerifyConfig is used to verify the sanity of configuration
func VerifyConfig(config *Config) error {
- if config.AcceptBacklog &lt;= 0 {
- return fmt.Errorf(&#34;backlog must be positive&#34;)
+ if config.AcceptBacklog <= 0 {
+ return fmt.Errorf("backlog must be positive")
}
if config.KeepAliveInterval == 0 {
- return fmt.Errorf(&#34;keep-alive interval must be positive&#34;)
+ return fmt.Errorf("keep-alive interval must be positive")
}
- if config.MaxStreamWindowSize &lt; initialStreamWindow {
- return fmt.Errorf(&#34;MaxStreamWindowSize must be larger than %d&#34;, initialStreamWindow)
+ if config.MaxStreamWindowSize < initialStreamWindow {
+ return fmt.Errorf("MaxStreamWindowSize must be larger than %d", initialStreamWindow)
}
- if config.LogOutput != nil &amp;&amp; config.Logger != nil {
- return fmt.Errorf(&#34;both Logger and LogOutput may not be set, select one&#34;)
- } else if config.LogOutput == nil &amp;&amp; config.Logger == nil {
- return fmt.Errorf(&#34;one of Logger or LogOutput must be set, select one&#34;)
+ if config.LogOutput != nil && config.Logger != nil {
+ return fmt.Errorf("both Logger and LogOutput may not be set, select one")
+ } else if config.LogOutput == nil && config.Logger == nil {
+ return fmt.Errorf("one of Logger or LogOutput must be set, select one")
}
return nil
}
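
VerifyConfig above enforces the documented invariants before a session is created; a hedged client-side sketch that adjusts the defaults and then multiplexes a stream (the dial address is a placeholder):

package main

import (
	"log"
	"net"
	"time"

	"github.com/hashicorp/yamux"
)

func main() {
	conn, err := net.Dial("tcp", "127.0.0.1:8080") // placeholder address
	if err != nil {
		log.Fatal(err)
	}
	cfg := yamux.DefaultConfig()
	cfg.KeepAliveInterval = 10 * time.Second     // ping more aggressively
	cfg.ConnectionWriteTimeout = 5 * time.Second // the "safety valve" described above
	// Client validates the configuration before wrapping the connection.
	session, err := yamux.Client(conn, cfg)
	if err != nil {
		log.Fatal(err)
	}
	stream, err := session.Open()
	if err != nil {
		log.Fatal(err)
	}
	defer stream.Close()
	stream.Write([]byte("ping"))
}
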
@@ -7084,17 +7084,17 @@ session.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;bufio&#34;
- &#34;fmt&#34;
- &#34;io&#34;
- &#34;io/ioutil&#34;
- &#34;log&#34;
- &#34;math&#34;
- &#34;net&#34;
- &#34;strings&#34;
- &#34;sync&#34;
- &#34;sync/atomic&#34;
- &#34;time&#34;
+ "bufio"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "math"
+ "net"
+ "strings"
+ "sync"
+ "sync/atomic"
+ "time"
)
// Session is used to wrap a reliable ordered connection and to
@@ -7171,10 +7171,10 @@ type sendReady struct {
func newSession(config *Config, conn io.ReadWriteCloser, client bool) *Session {
logger := config.Logger
if logger == nil {
- logger = log.New(config.LogOutput, &#34;&#34;, log.LstdFlags)
+ logger = log.New(config.LogOutput, "", log.LstdFlags)
}
- s := &amp;Session{
+ s := &Session{
config: config,
logger: logger,
conn: conn,
@@ -7204,7 +7204,7 @@ func newSession(config *Config, conn io.ReadWriteCloser, client bool) *Session {
// IsClosed does a safe check to see if we have shutdown
func (s *Session) IsClosed() bool {
select {
- case &lt;-s.shutdownCh:
+ case <-s.shutdownCh:
return true
default:
return false
@@ -7213,7 +7213,7 @@ func (s *Session) IsClosed() bool {
// CloseChan returns a read-only channel which is closed as
// soon as the session is closed.
-func (s *Session) CloseChan() &lt;-chan struct{} {
+func (s *Session) CloseChan() <-chan struct{} {
return s.shutdownCh
}
@@ -7239,24 +7239,24 @@ func (s *Session) OpenStream() (*Stream, error) {
if s.IsClosed() {
return nil, ErrSessionShutdown
}
- if atomic.LoadInt32(&amp;s.remoteGoAway) == 1 {
+ if atomic.LoadInt32(&s.remoteGoAway) == 1 {
return nil, ErrRemoteGoAway
}
// Block if we have too many inflight SYNs
select {
- case s.synCh &lt;- struct{}{}:
- case &lt;-s.shutdownCh:
+ case s.synCh <- struct{}{}:
+ case <-s.shutdownCh:
return nil, ErrSessionShutdown
}
GET_ID:
// Get an ID, and check for stream exhaustion
- id := atomic.LoadUint32(&amp;s.nextStreamID)
- if id &gt;= math.MaxUint32-1 {
+ id := atomic.LoadUint32(&s.nextStreamID)
+ if id >= math.MaxUint32-1 {
return nil, ErrStreamsExhausted
}
- if !atomic.CompareAndSwapUint32(&amp;s.nextStreamID, id, id&#43;2) {
+ if !atomic.CompareAndSwapUint32(&s.nextStreamID, id, id+2) {
goto GET_ID
}
@@ -7270,9 +7270,9 @@ GET_ID:
// Send the window update to create
if err := stream.sendWindowUpdate(); err != nil {
select {
- case &lt;-s.synCh:
+ case <-s.synCh:
default:
- s.logger.Printf(&#34;[ERR] yamux: aborted stream open without inflight syn semaphore&#34;)
+ s.logger.Printf("[ERR] yamux: aborted stream open without inflight syn semaphore")
}
return nil, err
}
@@ -7293,12 +7293,12 @@ func (s *Session) Accept() (net.Conn, error) {
// is ready to be accepted.
func (s *Session) AcceptStream() (*Stream, error) {
select {
- case stream := &lt;-s.acceptCh:
+ case stream := <-s.acceptCh:
if err := stream.sendWindowUpdate(); err != nil {
return nil, err
}
return stream, nil
- case &lt;-s.shutdownCh:
+ case <-s.shutdownCh:
return nil, s.shutdownErr
}
}
@@ -7318,7 +7318,7 @@ func (s *Session) Close() error {
}
close(s.shutdownCh)
s.conn.Close()
- &lt;-s.recvDoneCh
+ <-s.recvDoneCh
s.streamLock.Lock()
defer s.streamLock.Unlock()
@@ -7347,7 +7347,7 @@ func (s *Session) GoAway() error {
// goAway is used to send a goAway message
func (s *Session) goAway(reason uint32) header {
- atomic.SwapInt32(&amp;s.localGoAway, 1)
+ atomic.SwapInt32(&s.localGoAway, 1)
hdr := header(make([]byte, headerSize))
hdr.encode(typeGoAway, 0, 0, reason)
return hdr
@@ -7361,7 +7361,7 @@ func (s *Session) Ping() (time.Duration, error) {
// Get a new ping id, mark as pending
s.pingLock.Lock()
id := s.pingID
- s.pingID&#43;&#43;
+ s.pingID++
s.pings[id] = ch
s.pingLock.Unlock()
@@ -7375,13 +7375,13 @@ func (s *Session) Ping() (time.Duration, error) {
// Wait for a response
start := time.Now()
select {
- case &lt;-ch:
- case &lt;-time.After(s.config.ConnectionWriteTimeout):
+ case <-ch:
+ case <-time.After(s.config.ConnectionWriteTimeout):
s.pingLock.Lock()
delete(s.pings, id) // Ignore it if a response comes later.
s.pingLock.Unlock()
return 0, ErrTimeout
- case &lt;-s.shutdownCh:
+ case <-s.shutdownCh:
return 0, ErrSessionShutdown
}
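
Ping above measures round-trip time and gives up with ErrTimeout after ConnectionWriteTimeout; a hedged health-check sketch built on the public Session API (address and interval are illustrative):

package main

import (
	"log"
	"net"
	"time"

	"github.com/hashicorp/yamux"
)

// healthCheck pings the session periodically and reports when the peer stops
// answering; it exits once the session's shutdown channel is closed.
func healthCheck(session *yamux.Session, every time.Duration) {
	ticker := time.NewTicker(every)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			rtt, err := session.Ping()
			if err != nil {
				log.Printf("session unhealthy: %v", err)
				return
			}
			log.Printf("session rtt: %v", rtt)
		case <-session.CloseChan():
			return
		}
	}
}

func main() {
	conn, err := net.Dial("tcp", "127.0.0.1:8080") // placeholder address
	if err != nil {
		log.Fatal(err)
	}
	session, err := yamux.Client(conn, nil) // nil selects the default config
	if err != nil {
		log.Fatal(err)
	}
	healthCheck(session, 15*time.Second)
}
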
@@ -7394,16 +7394,16 @@ func (s *Session) Ping() (time.Duration, error) {
func (s *Session) keepalive() {
for {
select {
- case &lt;-time.After(s.config.KeepAliveInterval):
+ case <-time.After(s.config.KeepAliveInterval):
_, err := s.Ping()
if err != nil {
if err != ErrSessionShutdown {
- s.logger.Printf(&#34;[ERR] yamux: keepalive failed: %v&#34;, err)
+ s.logger.Printf("[ERR] yamux: keepalive failed: %v", err)
s.exitErr(ErrKeepAliveTimeout)
}
return
}
- case &lt;-s.shutdownCh:
+ case <-s.shutdownCh:
return
}
}
@@ -7416,7 +7416,7 @@ func (s *Session) waitForSend(hdr header, body io.Reader) error {
}
// waitForSendErr waits to send a header with optional data, checking for a
-// potential shutdown. Since there&#39;s the expectation that sends can happen
+// potential shutdown. Since there's the expectation that sends can happen
// in a timely manner, we enforce the connection write timeout here.
func (s *Session) waitForSendErr(hdr header, body io.Reader, errCh chan error) error {
t := timerPool.Get()
@@ -7425,7 +7425,7 @@ func (s *Session) waitForSendErr(hdr header, body io.Reader, errCh chan error) e
defer func() {
timer.Stop()
select {
- case &lt;-timer.C:
+ case <-timer.C:
default:
}
timerPool.Put(t)
@@ -7433,26 +7433,26 @@ func (s *Session) waitForSendErr(hdr header, body io.Reader, errCh chan error) e
ready := sendReady{Hdr: hdr, Body: body, Err: errCh}
select {
- case s.sendCh &lt;- ready:
- case &lt;-s.shutdownCh:
+ case s.sendCh <- ready:
+ case <-s.shutdownCh:
return ErrSessionShutdown
- case &lt;-timer.C:
+ case <-timer.C:
return ErrConnectionWriteTimeout
}
select {
- case err := &lt;-errCh:
+ case err := <-errCh:
return err
- case &lt;-s.shutdownCh:
+ case <-s.shutdownCh:
return ErrSessionShutdown
- case &lt;-timer.C:
+ case <-timer.C:
return ErrConnectionWriteTimeout
}
}
-// sendNoWait does a send without waiting. Since there&#39;s the expectation that
+// sendNoWait does a send without waiting. Since there's the expectation that
// the send happens right here, we enforce the connection write timeout if we
-// can&#39;t queue the header to be sent.
+// can't queue the header to be sent.
func (s *Session) sendNoWait(hdr header) error {
t := timerPool.Get()
timer := t.(*time.Timer)
@@ -7460,18 +7460,18 @@ func (s *Session) sendNoWait(hdr header) error {
defer func() {
timer.Stop()
select {
- case &lt;-timer.C:
+ case <-timer.C:
default:
}
timerPool.Put(t)
}()
select {
- case s.sendCh &lt;- sendReady{Hdr: hdr}:
+ case s.sendCh <- sendReady{Hdr: hdr}:
return nil
- case &lt;-s.shutdownCh:
+ case <-s.shutdownCh:
return ErrSessionShutdown
- case &lt;-timer.C:
+ case <-timer.C:
return ErrConnectionWriteTimeout
}
}
@@ -7480,19 +7480,19 @@ func (s *Session) sendNoWait(hdr header) error {
func (s *Session) send() {
for {
select {
- case ready := &lt;-s.sendCh:
+ case ready := <-s.sendCh:
// Send a header if ready
if ready.Hdr != nil {
sent := 0
- for sent &lt; len(ready.Hdr) {
+ for sent < len(ready.Hdr) {
n, err := s.conn.Write(ready.Hdr[sent:])
if err != nil {
- s.logger.Printf(&#34;[ERR] yamux: Failed to write header: %v&#34;, err)
+ s.logger.Printf("[ERR] yamux: Failed to write header: %v", err)
asyncSendErr(ready.Err, err)
s.exitErr(err)
return
}
- sent &#43;= n
+ sent += n
}
}
@@ -7500,7 +7500,7 @@ func (s *Session) send() {
if ready.Body != nil {
_, err := io.Copy(s.conn, ready.Body)
if err != nil {
- s.logger.Printf(&#34;[ERR] yamux: Failed to write body: %v&#34;, err)
+ s.logger.Printf("[ERR] yamux: Failed to write body: %v", err)
asyncSendErr(ready.Err, err)
s.exitErr(err)
return
@@ -7509,7 +7509,7 @@ func (s *Session) send() {
// No error, successful send
asyncSendErr(ready.Err, nil)
- case &lt;-s.shutdownCh:
+ case <-s.shutdownCh:
return
}
}
@@ -7539,20 +7539,20 @@ func (s *Session) recvLoop() error {
for {
// Read the header
if _, err := io.ReadFull(s.bufRead, hdr); err != nil {
- if err != io.EOF &amp;&amp; !strings.Contains(err.Error(), &#34;closed&#34;) &amp;&amp; !strings.Contains(err.Error(), &#34;reset by peer&#34;) {
- s.logger.Printf(&#34;[ERR] yamux: Failed to read header: %v&#34;, err)
+ if err != io.EOF && !strings.Contains(err.Error(), "closed") && !strings.Contains(err.Error(), "reset by peer") {
+ s.logger.Printf("[ERR] yamux: Failed to read header: %v", err)
}
return err
}
// Verify the version
if hdr.Version() != protoVersion {
- s.logger.Printf(&#34;[ERR] yamux: Invalid protocol version: %d&#34;, hdr.Version())
+ s.logger.Printf("[ERR] yamux: Invalid protocol version: %d", hdr.Version())
return ErrInvalidVersion
}
mt := hdr.MsgType()
- if mt &lt; typeData || mt &gt; typeGoAway {
+ if mt < typeData || mt > typeGoAway {
return ErrInvalidMsgType
}
@@ -7567,7 +7567,7 @@ func (s *Session) handleStreamMessage(hdr header) error {
// Check for a new stream creation
id := hdr.StreamID()
flags := hdr.Flags()
- if flags&amp;flagSYN == flagSYN {
+ if flags&flagSYN == flagSYN {
if err := s.incomingStream(id); err != nil {
return err
}
@@ -7581,14 +7581,14 @@ func (s *Session) handleStreamMessage(hdr header) error {
// If we do not have a stream, likely we sent a RST
if stream == nil {
// Drain any data on the wire
- if hdr.MsgType() == typeData &amp;&amp; hdr.Length() &gt; 0 {
- s.logger.Printf(&#34;[WARN] yamux: Discarding data for stream: %d&#34;, id)
+ if hdr.MsgType() == typeData && hdr.Length() > 0 {
+ s.logger.Printf("[WARN] yamux: Discarding data for stream: %d", id)
if _, err := io.CopyN(ioutil.Discard, s.bufRead, int64(hdr.Length())); err != nil {
- s.logger.Printf(&#34;[ERR] yamux: Failed to discard data: %v&#34;, err)
+ s.logger.Printf("[ERR] yamux: Failed to discard data: %v", err)
return nil
}
} else {
- s.logger.Printf(&#34;[WARN] yamux: frame for missing stream: %v&#34;, hdr)
+ s.logger.Printf("[WARN] yamux: frame for missing stream: %v", hdr)
}
return nil
}
@@ -7597,7 +7597,7 @@ func (s *Session) handleStreamMessage(hdr header) error {
if hdr.MsgType() == typeWindowUpdate {
if err := stream.incrSendWindow(hdr, flags); err != nil {
if sendErr := s.sendNoWait(s.goAway(goAwayProtoErr)); sendErr != nil {
- s.logger.Printf(&#34;[WARN] yamux: failed to send go away: %v&#34;, sendErr)
+ s.logger.Printf("[WARN] yamux: failed to send go away: %v", sendErr)
}
return err
}
@@ -7607,7 +7607,7 @@ func (s *Session) handleStreamMessage(hdr header) error {
// Read the new data
if err := stream.readData(hdr, flags, s.bufRead); err != nil {
if sendErr := s.sendNoWait(s.goAway(goAwayProtoErr)); sendErr != nil {
- s.logger.Printf(&#34;[WARN] yamux: failed to send go away: %v&#34;, sendErr)
+ s.logger.Printf("[WARN] yamux: failed to send go away: %v", sendErr)
}
return err
}
@@ -7620,13 +7620,13 @@ func (s *Session) handlePing(hdr header) error {
pingID := hdr.Length()
// Check if this is a query, respond back in a separate context so we
- // don&#39;t interfere with the receiving thread blocking for the write.
- if flags&amp;flagSYN == flagSYN {
+ // don't interfere with the receiving thread blocking for the write.
+ if flags&flagSYN == flagSYN {
go func() {
hdr := header(make([]byte, headerSize))
hdr.encode(typePing, flagACK, 0, pingID)
if err := s.sendNoWait(hdr); err != nil {
- s.logger.Printf(&#34;[WARN] yamux: failed to send ping reply: %v&#34;, err)
+ s.logger.Printf("[WARN] yamux: failed to send ping reply: %v", err)
}
}()
return nil
@@ -7648,16 +7648,16 @@ func (s *Session) handleGoAway(hdr header) error {
code := hdr.Length()
switch code {
case goAwayNormal:
- atomic.SwapInt32(&amp;s.remoteGoAway, 1)
+ atomic.SwapInt32(&s.remoteGoAway, 1)
case goAwayProtoErr:
- s.logger.Printf(&#34;[ERR] yamux: received protocol error go away&#34;)
- return fmt.Errorf(&#34;yamux protocol error&#34;)
+ s.logger.Printf("[ERR] yamux: received protocol error go away")
+ return fmt.Errorf("yamux protocol error")
case goAwayInternalErr:
- s.logger.Printf(&#34;[ERR] yamux: received internal error go away&#34;)
- return fmt.Errorf(&#34;remote yamux internal error&#34;)
+ s.logger.Printf("[ERR] yamux: received internal error go away")
+ return fmt.Errorf("remote yamux internal error")
default:
- s.logger.Printf(&#34;[ERR] yamux: received unexpected go away&#34;)
- return fmt.Errorf(&#34;unexpected go away received&#34;)
+ s.logger.Printf("[ERR] yamux: received unexpected go away")
+ return fmt.Errorf("unexpected go away received")
}
return nil
}
@@ -7665,7 +7665,7 @@ func (s *Session) handleGoAway(hdr header) error {
// incomingStream is used to create a new incoming stream
func (s *Session) incomingStream(id uint32) error {
// Reject immediately if we are doing a go away
- if atomic.LoadInt32(&amp;s.localGoAway) == 1 {
+ if atomic.LoadInt32(&s.localGoAway) == 1 {
hdr := header(make([]byte, headerSize))
hdr.encode(typeWindowUpdate, flagRST, id, 0)
return s.sendNoWait(hdr)
@@ -7679,9 +7679,9 @@ func (s *Session) incomingStream(id uint32) error {
// Check if stream already exists
if _, ok := s.streams[id]; ok {
- s.logger.Printf(&#34;[ERR] yamux: duplicate stream declared&#34;)
+ s.logger.Printf("[ERR] yamux: duplicate stream declared")
if sendErr := s.sendNoWait(s.goAway(goAwayProtoErr)); sendErr != nil {
- s.logger.Printf(&#34;[WARN] yamux: failed to send go away: %v&#34;, sendErr)
+ s.logger.Printf("[WARN] yamux: failed to send go away: %v", sendErr)
}
return ErrDuplicateStream
}
@@ -7689,13 +7689,13 @@ func (s *Session) incomingStream(id uint32) error {
// Register the stream
s.streams[id] = stream
- // Check if we&#39;ve exceeded the backlog
+ // Check if we've exceeded the backlog
select {
- case s.acceptCh &lt;- stream:
+ case s.acceptCh <- stream:
return nil
default:
// Backlog exceeded! RST the stream
- s.logger.Printf(&#34;[WARN] yamux: backlog exceeded, forcing connection reset&#34;)
+ s.logger.Printf("[WARN] yamux: backlog exceeded, forcing connection reset")
delete(s.streams, id)
stream.sendHdr.encode(typeWindowUpdate, flagRST, id, 0)
return s.sendNoWait(stream.sendHdr)
@@ -7709,9 +7709,9 @@ func (s *Session) closeStream(id uint32) {
s.streamLock.Lock()
if _, ok := s.inflight[id]; ok {
select {
- case &lt;-s.synCh:
+ case <-s.synCh:
default:
- s.logger.Printf(&#34;[ERR] yamux: SYN tracking out of sync&#34;)
+ s.logger.Printf("[ERR] yamux: SYN tracking out of sync")
}
}
delete(s.streams, id)
@@ -7725,12 +7725,12 @@ func (s *Session) establishStream(id uint32) {
if _, ok := s.inflight[id]; ok {
delete(s.inflight, id)
} else {
- s.logger.Printf(&#34;[ERR] yamux: established stream without inflight SYN (no tracking entry)&#34;)
+ s.logger.Printf("[ERR] yamux: established stream without inflight SYN (no tracking entry)")
}
select {
- case &lt;-s.synCh:
+ case <-s.synCh:
default:
- s.logger.Printf(&#34;[ERR] yamux: established stream without inflight SYN (didn&#39;t have semaphore)&#34;)
+ s.logger.Printf("[ERR] yamux: established stream without inflight SYN (didn't have semaphore)")
}
s.streamLock.Unlock()
}
@@ -7740,23 +7740,23 @@ session_test.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;bytes&#34;
- &#34;fmt&#34;
- &#34;io&#34;
- &#34;io/ioutil&#34;
- &#34;log&#34;
- &#34;reflect&#34;
- &#34;runtime&#34;
- &#34;strings&#34;
- &#34;sync&#34;
- &#34;testing&#34;
- &#34;time&#34;
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "reflect"
+ "runtime"
+ "strings"
+ "sync"
+ "testing"
+ "time"
)
type logCapture struct{ bytes.Buffer }
func (l *logCapture) logs() []string {
- return strings.Split(strings.TrimSpace(l.String()), &#34;\n&#34;)
+ return strings.Split(strings.TrimSpace(l.String()), "\n")
}
func (l *logCapture) match(expect []string) bool {
@@ -7765,7 +7765,7 @@ func (l *logCapture) match(expect []string) bool {
func captureLogs(s *Session) *logCapture {
buf := new(logCapture)
- s.logger = log.New(buf, &#34;&#34;, 0)
+ s.logger = log.New(buf, "", 0)
return buf
}
@@ -7793,8 +7793,8 @@ func (p *pipeConn) Close() error {
func testConn() (io.ReadWriteCloser, io.ReadWriteCloser) {
read1, write1 := io.Pipe()
read2, write2 := io.Pipe()
- conn1 := &amp;pipeConn{reader: read1, writer: write2}
- conn2 := &amp;pipeConn{reader: read2, writer: write1}
+ conn1 := &pipeConn{reader: read1, writer: write2}
+ conn2 := &pipeConn{reader: read2, writer: write1}
return conn1, conn2
}
@@ -7830,18 +7830,18 @@ func TestPing(t *testing.T) {
rtt, err := client.Ping()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if rtt == 0 {
- t.Fatalf(&#34;bad: %v&#34;, rtt)
+ t.Fatalf("bad: %v", rtt)
}
rtt, err = server.Ping()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if rtt == 0 {
- t.Fatalf(&#34;bad: %v&#34;, rtt)
+ t.Fatalf("bad: %v", rtt)
}
}
@@ -7857,16 +7857,16 @@ func TestPing_Timeout(t *testing.T) {
errCh := make(chan error, 1)
go func() {
_, err := server.Ping() // Ping via the server session
- errCh &lt;- err
+ errCh <- err
}()
select {
- case err := &lt;-errCh:
+ case err := <-errCh:
if err != ErrTimeout {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- case &lt;-time.After(client.config.ConnectionWriteTimeout * 2):
- t.Fatalf(&#34;failed to timeout within expected %v&#34;, client.config.ConnectionWriteTimeout)
+ case <-time.After(client.config.ConnectionWriteTimeout * 2):
+ t.Fatalf("failed to timeout within expected %v", client.config.ConnectionWriteTimeout)
}
// Verify that we recover, even if we gave up
@@ -7874,16 +7874,16 @@ func TestPing_Timeout(t *testing.T) {
go func() {
_, err := server.Ping() // Ping via the server session
- errCh &lt;- err
+ errCh <- err
}()
select {
- case err := &lt;-errCh:
+ case err := <-errCh:
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- case &lt;-time.After(client.config.ConnectionWriteTimeout):
- t.Fatalf(&#34;timeout&#34;)
+ case <-time.After(client.config.ConnectionWriteTimeout):
+ t.Fatalf("timeout")
}
}
@@ -7895,7 +7895,7 @@ func TestCloseBeforeAck(t *testing.T) {
defer client.Close()
defer server.Close()
- for i := 0; i &lt; 8; i&#43;&#43; {
+ for i := 0; i < 8; i++ {
s, err := client.OpenStream()
if err != nil {
t.Fatal(err)
@@ -7903,7 +7903,7 @@ func TestCloseBeforeAck(t *testing.T) {
s.Close()
}
- for i := 0; i &lt; 8; i&#43;&#43; {
+ for i := 0; i < 8; i++ {
s, err := server.AcceptStream()
if err != nil {
t.Fatal(err)
@@ -7922,9 +7922,9 @@ func TestCloseBeforeAck(t *testing.T) {
}()
select {
- case &lt;-done:
- case &lt;-time.After(time.Second * 5):
- t.Fatal(&#34;timed out trying to open stream&#34;)
+ case <-done:
+ case <-time.After(time.Second * 5):
+ t.Fatal("timed out trying to open stream")
}
}
@@ -7934,26 +7934,26 @@ func TestAccept(t *testing.T) {
defer server.Close()
if client.NumStreams() != 0 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
if server.NumStreams() != 0 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
- wg := &amp;sync.WaitGroup{}
+ wg := &sync.WaitGroup{}
wg.Add(4)
go func() {
defer wg.Done()
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if id := stream.StreamID(); id != 1 {
- t.Fatalf(&#34;bad: %v&#34;, id)
+ t.Fatalf("bad: %v", id)
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -7961,13 +7961,13 @@ func TestAccept(t *testing.T) {
defer wg.Done()
stream, err := client.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if id := stream.StreamID(); id != 2 {
- t.Fatalf(&#34;bad: %v&#34;, id)
+ t.Fatalf("bad: %v", id)
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -7975,13 +7975,13 @@ func TestAccept(t *testing.T) {
defer wg.Done()
stream, err := server.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if id := stream.StreamID(); id != 2 {
- t.Fatalf(&#34;bad: %v&#34;, id)
+ t.Fatalf("bad: %v", id)
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -7989,13 +7989,13 @@ func TestAccept(t *testing.T) {
defer wg.Done()
stream, err := client.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if id := stream.StreamID(); id != 1 {
- t.Fatalf(&#34;bad: %v&#34;, id)
+ t.Fatalf("bad: %v", id)
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -8006,9 +8006,9 @@ func TestAccept(t *testing.T) {
}()
select {
- case &lt;-doneCh:
- case &lt;-time.After(time.Second):
- panic(&#34;timeout&#34;)
+ case <-doneCh:
+ case <-time.After(time.Second):
+ panic("timeout")
}
}
@@ -8020,13 +8020,13 @@ func TestClose_closeTimeout(t *testing.T) {
defer server.Close()
if client.NumStreams() != 0 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
if server.NumStreams() != 0 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
- wg := &amp;sync.WaitGroup{}
+ wg := &sync.WaitGroup{}
wg.Add(2)
// Open a stream on the client but only close it on the server.
@@ -8038,7 +8038,7 @@ func TestClose_closeTimeout(t *testing.T) {
var err error
clientStream, err = client.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -8046,10 +8046,10 @@ func TestClose_closeTimeout(t *testing.T) {
defer wg.Done()
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -8060,25 +8060,25 @@ func TestClose_closeTimeout(t *testing.T) {
}()
select {
- case &lt;-doneCh:
- case &lt;-time.After(time.Second):
- panic(&#34;timeout&#34;)
+ case <-doneCh:
+ case <-time.After(time.Second):
+ panic("timeout")
}
// We should have zero streams after our timeout period
time.Sleep(100 * time.Millisecond)
- if v := server.NumStreams(); v &gt; 0 {
- t.Fatalf(&#34;should have zero streams: %d&#34;, v)
+ if v := server.NumStreams(); v > 0 {
+ t.Fatalf("should have zero streams: %d", v)
}
- if v := client.NumStreams(); v &gt; 0 {
- t.Fatalf(&#34;should have zero streams: %d&#34;, v)
+ if v := client.NumStreams(); v > 0 {
+ t.Fatalf("should have zero streams: %d", v)
}
- if _, err := clientStream.Write([]byte(&#34;hello&#34;)); err == nil {
- t.Fatal(&#34;should error on write&#34;)
- } else if err.Error() != &#34;connection reset&#34; {
- t.Fatalf(&#34;expected connection reset, got %q&#34;, err)
+ if _, err := clientStream.Write([]byte("hello")); err == nil {
+ t.Fatal("should error on write")
+ } else if err.Error() != "connection reset" {
+ t.Fatalf("expected connection reset, got %q", err)
}
}
@@ -8087,13 +8087,13 @@ func TestNonNilInterface(t *testing.T) {
server.Close()
conn, err := server.Accept()
- if err != nil &amp;&amp; conn != nil {
- t.Error(&#34;bad: accept should return a connection of nil value&#34;)
+ if err != nil && conn != nil {
+ t.Error("bad: accept should return a connection of nil value")
}
conn, err = server.Open()
- if err != nil &amp;&amp; conn != nil {
- t.Error(&#34;bad: open should return a connection of nil value&#34;)
+ if err != nil && conn != nil {
+ t.Error("bad: open should return a connection of nil value")
}
}
@@ -8102,36 +8102,36 @@ func TestSendData_Small(t *testing.T) {
defer client.Close()
defer server.Close()
- wg := &amp;sync.WaitGroup{}
+ wg := &sync.WaitGroup{}
wg.Add(2)
go func() {
defer wg.Done()
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if server.NumStreams() != 1 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
buf := make([]byte, 4)
- for i := 0; i &lt; 1000; i&#43;&#43; {
+ for i := 0; i < 1000; i++ {
n, err := stream.Read(buf)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 4 {
- t.Fatalf(&#34;short read: %d&#34;, n)
+ t.Fatalf("short read: %d", n)
}
- if string(buf) != &#34;test&#34; {
- t.Fatalf(&#34;bad: %s&#34;, buf)
+ if string(buf) != "test" {
+ t.Fatalf("bad: %s", buf)
}
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -8139,25 +8139,25 @@ func TestSendData_Small(t *testing.T) {
defer wg.Done()
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if client.NumStreams() != 1 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
- for i := 0; i &lt; 1000; i&#43;&#43; {
- n, err := stream.Write([]byte(&#34;test&#34;))
+ for i := 0; i < 1000; i++ {
+ n, err := stream.Write([]byte("test"))
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 4 {
- t.Fatalf(&#34;short write %d&#34;, n)
+ t.Fatalf("short write %d", n)
}
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -8167,16 +8167,16 @@ func TestSendData_Small(t *testing.T) {
close(doneCh)
}()
select {
- case &lt;-doneCh:
- case &lt;-time.After(time.Second):
- panic(&#34;timeout&#34;)
+ case <-doneCh:
+ case <-time.After(time.Second):
+ panic("timeout")
}
if client.NumStreams() != 0 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
if server.NumStreams() != 0 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
}
@@ -8195,57 +8195,57 @@ func TestSendData_Large(t *testing.T) {
data[idx] = byte(idx % 256)
}
- wg := &amp;sync.WaitGroup{}
+ wg := &sync.WaitGroup{}
wg.Add(2)
go func() {
defer wg.Done()
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
var sz int
buf := make([]byte, recvSize)
- for i := 0; i &lt; sendSize/recvSize; i&#43;&#43; {
+ for i := 0; i < sendSize/recvSize; i++ {
n, err := stream.Read(buf)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != recvSize {
- t.Fatalf(&#34;short read: %d&#34;, n)
+ t.Fatalf("short read: %d", n)
}
- sz &#43;= n
+ sz += n
for idx := range buf {
if buf[idx] != byte(idx%256) {
- t.Fatalf(&#34;bad: %v %v %v&#34;, i, idx, buf[idx])
+ t.Fatalf("bad: %v %v %v", i, idx, buf[idx])
}
}
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- t.Logf(&#34;cap=%d, n=%d\n&#34;, stream.recvBuf.Cap(), sz)
+ t.Logf("cap=%d, n=%d\n", stream.recvBuf.Cap(), sz)
}()
go func() {
defer wg.Done()
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
n, err := stream.Write(data)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != len(data) {
- t.Fatalf(&#34;short write %d&#34;, n)
+ t.Fatalf("short write %d", n)
}
if err := stream.Close(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -8255,9 +8255,9 @@ func TestSendData_Large(t *testing.T) {
close(doneCh)
}()
select {
- case &lt;-doneCh:
- case &lt;-time.After(5 * time.Second):
- panic(&#34;timeout&#34;)
+ case <-doneCh:
+ case <-time.After(5 * time.Second):
+ panic("timeout")
}
}
@@ -8267,12 +8267,12 @@ func TestGoAway(t *testing.T) {
defer server.Close()
if err := server.GoAway(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
_, err := client.Open()
if err != ErrRemoteGoAway {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}
@@ -8281,13 +8281,13 @@ func TestManyStreams(t *testing.T) {
defer client.Close()
defer server.Close()
- wg := &amp;sync.WaitGroup{}
+ wg := &sync.WaitGroup{}
acceptor := func(i int) {
defer wg.Done()
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
@@ -8298,10 +8298,10 @@ func TestManyStreams(t *testing.T) {
return
}
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n == 0 {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}
}
@@ -8309,23 +8309,23 @@ func TestManyStreams(t *testing.T) {
defer wg.Done()
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
- msg := fmt.Sprintf(&#34;%08d&#34;, i)
- for i := 0; i &lt; 1000; i&#43;&#43; {
+ msg := fmt.Sprintf("%08d", i)
+ for i := 0; i < 1000; i++ {
n, err := stream.Write([]byte(msg))
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != len(msg) {
- t.Fatalf(&#34;short write %d&#34;, n)
+ t.Fatalf("short write %d", n)
}
}
}
- for i := 0; i &lt; 50; i&#43;&#43; {
+ for i := 0; i < 50; i++ {
wg.Add(2)
go acceptor(i)
go sender(i)
@@ -8339,46 +8339,46 @@ func TestManyStreams_PingPong(t *testing.T) {
defer client.Close()
defer server.Close()
- wg := &amp;sync.WaitGroup{}
+ wg := &sync.WaitGroup{}
- ping := []byte(&#34;ping&#34;)
- pong := []byte(&#34;pong&#34;)
+ ping := []byte("ping")
+ pong := []byte("pong")
acceptor := func(i int) {
defer wg.Done()
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
buf := make([]byte, 4)
for {
- // Read the &#39;ping&#39;
+ // Read the 'ping'
n, err := stream.Read(buf)
if err == io.EOF {
return
}
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 4 {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if !bytes.Equal(buf, ping) {
- t.Fatalf(&#34;bad: %s&#34;, buf)
+ t.Fatalf("bad: %s", buf)
}
// Shrink the internal buffer!
stream.Shrink()
- // Write out the &#39;pong&#39;
+ // Write out the 'pong'
n, err = stream.Write(pong)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 4 {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}
}
@@ -8386,31 +8386,31 @@ func TestManyStreams_PingPong(t *testing.T) {
defer wg.Done()
stream, err := client.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
buf := make([]byte, 4)
- for i := 0; i &lt; 1000; i&#43;&#43; {
- // Send the &#39;ping&#39;
+ for i := 0; i < 1000; i++ {
+ // Send the 'ping'
n, err := stream.Write(ping)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 4 {
- t.Fatalf(&#34;short write %d&#34;, n)
+ t.Fatalf("short write %d", n)
}
- // Read the &#39;pong&#39;
+ // Read the 'pong'
n, err = stream.Read(buf)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 4 {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if !bytes.Equal(buf, pong) {
- t.Fatalf(&#34;bad: %s&#34;, buf)
+ t.Fatalf("bad: %s", buf)
}
// Shrink the buffer
@@ -8418,7 +8418,7 @@ func TestManyStreams_PingPong(t *testing.T) {
}
}
- for i := 0; i &lt; 50; i&#43;&#43; {
+ for i := 0; i < 50; i++ {
wg.Add(2)
go acceptor(i)
go sender(i)
@@ -8434,49 +8434,49 @@ func TestHalfClose(t *testing.T) {
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- if _, err = stream.Write([]byte(&#34;a&#34;)); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ if _, err = stream.Write([]byte("a")); err != nil {
+ t.Fatalf("err: %v", err)
}
stream2, err := server.Accept()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
stream2.Close() // Half close
buf := make([]byte, 4)
n, err := stream2.Read(buf)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 1 {
- t.Fatalf(&#34;bad: %v&#34;, n)
+ t.Fatalf("bad: %v", n)
}
// Send more
- if _, err = stream.Write([]byte(&#34;bcd&#34;)); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ if _, err = stream.Write([]byte("bcd")); err != nil {
+ t.Fatalf("err: %v", err)
}
stream.Close()
// Read after close
n, err = stream2.Read(buf)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 3 {
- t.Fatalf(&#34;bad: %v&#34;, n)
+ t.Fatalf("bad: %v", n)
}
// EOF after close
n, err = stream2.Read(buf)
if err != io.EOF {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 0 {
- t.Fatalf(&#34;bad: %v&#34;, n)
+ t.Fatalf("bad: %v", n)
}
}
@@ -8487,23 +8487,23 @@ func TestReadDeadline(t *testing.T) {
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
stream2, err := server.Accept()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream2.Close()
if err := stream.SetReadDeadline(time.Now().Add(5 * time.Millisecond)); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
buf := make([]byte, 4)
if _, err := stream.Read(buf); err != ErrTimeout {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}
@@ -8514,13 +8514,13 @@ func TestReadDeadline_BlockedRead(t *testing.T) {
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
stream2, err := server.Accept()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream2.Close()
@@ -8529,7 +8529,7 @@ func TestReadDeadline_BlockedRead(t *testing.T) {
go func() {
buf := make([]byte, 4)
_, err := stream.Read(buf)
- errCh &lt;- err
+ errCh <- err
close(errCh)
}()
@@ -8538,15 +8538,15 @@ func TestReadDeadline_BlockedRead(t *testing.T) {
// Update the read deadline
if err := stream.SetReadDeadline(time.Now().Add(5 * time.Millisecond)); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
select {
- case &lt;-time.After(100 * time.Millisecond):
- t.Fatal(&#34;expected read timeout&#34;)
- case err := &lt;-errCh:
+ case <-time.After(100 * time.Millisecond):
+ t.Fatal("expected read timeout")
+ case err := <-errCh:
if err != ErrTimeout {
- t.Fatalf(&#34;expected ErrTimeout; got %v&#34;, err)
+ t.Fatalf("expected ErrTimeout; got %v", err)
}
}
}
@@ -8558,30 +8558,30 @@ func TestWriteDeadline(t *testing.T) {
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
stream2, err := server.Accept()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream2.Close()
if err := stream.SetWriteDeadline(time.Now().Add(50 * time.Millisecond)); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
buf := make([]byte, 512)
- for i := 0; i &lt; int(initialStreamWindow); i&#43;&#43; {
+ for i := 0; i < int(initialStreamWindow); i++ {
_, err := stream.Write(buf)
- if err != nil &amp;&amp; err == ErrTimeout {
+ if err != nil && err == ErrTimeout {
return
} else if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}
- t.Fatalf(&#34;Expected timeout&#34;)
+ t.Fatalf("Expected timeout")
}
func TestWriteDeadline_BlockedWrite(t *testing.T) {
@@ -8591,13 +8591,13 @@ func TestWriteDeadline_BlockedWrite(t *testing.T) {
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
stream2, err := server.Accept()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream2.Close()
@@ -8605,13 +8605,13 @@ func TestWriteDeadline_BlockedWrite(t *testing.T) {
errCh := make(chan error, 1)
go func() {
buf := make([]byte, 512)
- for i := 0; i &lt; int(initialStreamWindow); i&#43;&#43; {
+ for i := 0; i < int(initialStreamWindow); i++ {
_, err := stream.Write(buf)
if err == nil {
continue
}
- errCh &lt;- err
+ errCh <- err
close(errCh)
return
}
@@ -8624,15 +8624,15 @@ func TestWriteDeadline_BlockedWrite(t *testing.T) {
// Update the write deadline
if err := stream.SetWriteDeadline(time.Now().Add(5 * time.Millisecond)); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
select {
- case &lt;-time.After(1 * time.Second):
- t.Fatal(&#34;expected write timeout&#34;)
- case err := &lt;-errCh:
+ case <-time.After(1 * time.Second):
+ t.Fatal("expected write timeout")
+ case err := <-errCh:
if err != ErrTimeout {
- t.Fatalf(&#34;expected ErrTimeout; got %v&#34;, err)
+ t.Fatalf("expected ErrTimeout; got %v", err)
}
}
}
@@ -8644,15 +8644,15 @@ func TestBacklogExceeded(t *testing.T) {
// Fill the backlog
max := client.config.AcceptBacklog
- for i := 0; i &lt; max; i&#43;&#43; {
+ for i := 0; i < max; i++ {
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
- if _, err := stream.Write([]byte(&#34;foo&#34;)); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ if _, err := stream.Write([]byte("foo")); err != nil {
+ t.Fatalf("err: %v", err)
}
}
@@ -8660,7 +8660,7 @@ func TestBacklogExceeded(t *testing.T) {
errCh := make(chan error, 1)
go func() {
_, err := client.Open()
- errCh &lt;- err
+ errCh <- err
}()
// Shutdown the server
@@ -8670,12 +8670,12 @@ func TestBacklogExceeded(t *testing.T) {
}()
select {
- case err := &lt;-errCh:
+ case err := <-errCh:
if err == nil {
- t.Fatalf(&#34;open should fail&#34;)
+ t.Fatalf("open should fail")
}
- case &lt;-time.After(time.Second):
- t.Fatalf(&#34;timeout&#34;)
+ case <-time.After(time.Second):
+ t.Fatalf("timeout")
}
}
@@ -8690,13 +8690,13 @@ func TestKeepAlive(t *testing.T) {
client.pingLock.Lock()
defer client.pingLock.Unlock()
if client.pingID == 0 {
- t.Fatalf(&#34;should ping&#34;)
+ t.Fatalf("should ping")
}
server.pingLock.Lock()
defer server.pingLock.Unlock()
if server.pingID == 0 {
- t.Fatalf(&#34;should ping&#34;)
+ t.Fatalf("should ping")
}
}
@@ -8704,21 +8704,21 @@ func TestKeepAlive_Timeout(t *testing.T) {
conn1, conn2 := testConn()
clientConf := testConf()
- clientConf.ConnectionWriteTimeout = time.Hour // We&#39;re testing keep alives, not connection writes
- clientConf.EnableKeepAlive = false // Just test one direction, so it&#39;s deterministic who hangs up on whom
+ clientConf.ConnectionWriteTimeout = time.Hour // We're testing keep alives, not connection writes
+ clientConf.EnableKeepAlive = false // Just test one direction, so it's deterministic who hangs up on whom
client, _ := Client(conn1, clientConf)
defer client.Close()
server, _ := Server(conn2, testConf())
defer server.Close()
- _ = captureLogs(client) // Client logs aren&#39;t part of the test
+ _ = captureLogs(client) // Client logs aren't part of the test
serverLogs := captureLogs(server)
errCh := make(chan error, 1)
go func() {
_, err := server.Accept() // Wait until server closes
- errCh &lt;- err
+ errCh <- err
}()
// Prevent the client from responding
@@ -8726,20 +8726,20 @@ func TestKeepAlive_Timeout(t *testing.T) {
clientConn.writeBlocker.Lock()
select {
- case err := &lt;-errCh:
+ case err := <-errCh:
if err != ErrKeepAliveTimeout {
- t.Fatalf(&#34;unexpected error: %v&#34;, err)
+ t.Fatalf("unexpected error: %v", err)
}
- case &lt;-time.After(1 * time.Second):
- t.Fatalf(&#34;timeout waiting for timeout&#34;)
+ case <-time.After(1 * time.Second):
+ t.Fatalf("timeout waiting for timeout")
}
if !server.IsClosed() {
- t.Fatalf(&#34;server should have closed&#34;)
+ t.Fatalf("server should have closed")
}
- if !serverLogs.match([]string{&#34;[ERR] yamux: keepalive failed: i/o deadline reached&#34;}) {
- t.Fatalf(&#34;server log incorect: %v&#34;, serverLogs.logs())
+ if !serverLogs.match([]string{"[ERR] yamux: keepalive failed: i/o deadline reached"}) {
+ t.Fatalf("server log incorect: %v", serverLogs.logs())
}
}
@@ -8753,13 +8753,13 @@ func TestLargeWindow(t *testing.T) {
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
stream2, err := server.Accept()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream2.Close()
@@ -8767,10 +8767,10 @@ func TestLargeWindow(t *testing.T) {
buf := make([]byte, conf.MaxStreamWindowSize)
n, err := stream.Write(buf)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != len(buf) {
- t.Fatalf(&#34;short write: %d&#34;, n)
+ t.Fatalf("short write: %d", n)
}
}
@@ -8789,57 +8789,57 @@ func TestSendData_VeryLarge(t *testing.T) {
var n int64 = 1 * 1024 * 1024 * 1024
var workers int = 16
- wg := &amp;sync.WaitGroup{}
+ wg := &sync.WaitGroup{}
wg.Add(workers * 2)
- for i := 0; i &lt; workers; i&#43;&#43; {
+ for i := 0; i < workers; i++ {
go func() {
defer wg.Done()
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
buf := make([]byte, 4)
_, err = stream.Read(buf)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if !bytes.Equal(buf, []byte{0, 1, 2, 3}) {
- t.Fatalf(&#34;bad header&#34;)
+ t.Fatalf("bad header")
}
recv, err := io.Copy(ioutil.Discard, stream)
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if recv != n {
- t.Fatalf(&#34;bad: %v&#34;, recv)
+ t.Fatalf("bad: %v", recv)
}
}()
}
- for i := 0; i &lt; workers; i&#43;&#43; {
+ for i := 0; i < workers; i++ {
go func() {
defer wg.Done()
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
_, err = stream.Write([]byte{0, 1, 2, 3})
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
- unlimited := &amp;UnlimitedReader{}
+ unlimited := &UnlimitedReader{}
sent, err := io.Copy(stream, io.LimitReader(unlimited, n))
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if sent != n {
- t.Fatalf(&#34;bad: %v&#34;, sent)
+ t.Fatalf("bad: %v", sent)
}
}()
}
@@ -8850,9 +8850,9 @@ func TestSendData_VeryLarge(t *testing.T) {
close(doneCh)
}()
select {
- case &lt;-doneCh:
- case &lt;-time.After(20 * time.Second):
- panic(&#34;timeout&#34;)
+ case <-doneCh:
+ case <-time.After(20 * time.Second):
+ panic("timeout")
}
}
@@ -8863,25 +8863,25 @@ func TestBacklogExceeded_Accept(t *testing.T) {
max := 5 * client.config.AcceptBacklog
go func() {
- for i := 0; i &lt; max; i&#43;&#43; {
+ for i := 0; i < max; i++ {
stream, err := server.Accept()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
}
}()
// Fill the backlog
- for i := 0; i &lt; max; i&#43;&#43; {
+ for i := 0; i < max; i++ {
stream, err := client.Open()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
- if _, err := stream.Write([]byte(&#34;foo&#34;)); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ if _, err := stream.Write([]byte("foo")); err != nil {
+ t.Fatalf("err: %v", err)
}
}
}
@@ -8903,28 +8903,28 @@ func TestSession_WindowUpdateWriteDuringRead(t *testing.T) {
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
n, err := stream.Write(make([]byte, flood))
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if int64(n) != flood {
- t.Fatalf(&#34;short write: %d&#34;, n)
+ t.Fatalf("short write: %d", n)
}
}()
// The client will open a stream, block outbound writes, and then
// listen to the flood from the server, which should time out since
- // it won&#39;t be able to send the window update.
+ // it won't be able to send the window update.
go func() {
defer wg.Done()
stream, err := client.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
@@ -8933,7 +8933,7 @@ func TestSession_WindowUpdateWriteDuringRead(t *testing.T) {
_, err = stream.Read(make([]byte, flood))
if err != ErrConnectionWriteTimeout {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -8959,38 +8959,38 @@ func TestSession_PartialReadWindowUpdate(t *testing.T) {
var err error
wr, err = server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer wr.Close()
if wr.sendWindow != client.config.MaxStreamWindowSize {
- t.Fatalf(&#34;sendWindow: exp=%d, got=%d&#34;, client.config.MaxStreamWindowSize, wr.sendWindow)
+ t.Fatalf("sendWindow: exp=%d, got=%d", client.config.MaxStreamWindowSize, wr.sendWindow)
}
n, err := wr.Write(make([]byte, flood))
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if int64(n) != flood {
- t.Fatalf(&#34;short write: %d&#34;, n)
+ t.Fatalf("short write: %d", n)
}
if wr.sendWindow != 0 {
- t.Fatalf(&#34;sendWindow: exp=%d, got=%d&#34;, 0, wr.sendWindow)
+ t.Fatalf("sendWindow: exp=%d, got=%d", 0, wr.sendWindow)
}
}()
stream, err := client.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
wg.Wait()
- _, err = stream.Read(make([]byte, flood/2&#43;1))
+ _, err = stream.Read(make([]byte, flood/2+1))
- if exp := uint32(flood/2 &#43; 1); wr.sendWindow != exp {
- t.Errorf(&#34;sendWindow: exp=%d, got=%d&#34;, exp, wr.sendWindow)
+ if exp := uint32(flood/2 + 1); wr.sendWindow != exp {
+ t.Errorf("sendWindow: exp=%d, got=%d", exp, wr.sendWindow)
}
}
@@ -9007,19 +9007,19 @@ func TestSession_sendNoWait_Timeout(t *testing.T) {
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
}()
- // The client will open the stream and then block outbound writes, we&#39;ll
+ // The client will open the stream and then block outbound writes, we'll
// probe sendNoWait once it gets into that state.
go func() {
defer wg.Done()
stream, err := client.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
@@ -9035,7 +9035,7 @@ func TestSession_sendNoWait_Timeout(t *testing.T) {
} else if err == ErrConnectionWriteTimeout {
break
} else {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}
}()
@@ -9064,7 +9064,7 @@ func TestSession_PingOfDeath(t *testing.T) {
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
@@ -9078,7 +9078,7 @@ func TestSession_PingOfDeath(t *testing.T) {
} else if err == ErrConnectionWriteTimeout {
break
} else {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}
@@ -9086,14 +9086,14 @@ func TestSession_PingOfDeath(t *testing.T) {
}()
// The client will open a stream and then send the server a ping once it
- // can no longer write. This makes sure the server doesn&#39;t deadlock reads
+ // can no longer write. This makes sure the server doesn't deadlock reads
// while trying to reply to the ping with no ability to write.
go func() {
defer wg.Done()
stream, err := client.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
@@ -9107,7 +9107,7 @@ func TestSession_PingOfDeath(t *testing.T) {
time.Sleep(2 * server.config.ConnectionWriteTimeout)
conn.writeBlocker.Unlock()
if _, err = client.Ping(); err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
}()
@@ -9127,19 +9127,19 @@ func TestSession_ConnectionWriteTimeout(t *testing.T) {
stream, err := server.AcceptStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
}()
- // The client will open the stream and then block outbound writes, we&#39;ll
+ // The client will open the stream and then block outbound writes, we'll
// tee up a write and make sure it eventually times out.
go func() {
defer wg.Done()
stream, err := client.OpenStream()
if err != nil {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
defer stream.Close()
@@ -9147,14 +9147,14 @@ func TestSession_ConnectionWriteTimeout(t *testing.T) {
conn.writeBlocker.Lock()
// Since the write goroutine is blocked then this will return a
- // timeout since it can&#39;t get feedback about whether the write
+ // timeout since it can't get feedback about whether the write
// worked.
- n, err := stream.Write([]byte(&#34;hello&#34;))
+ n, err := stream.Write([]byte("hello"))
if err != ErrConnectionWriteTimeout {
- t.Fatalf(&#34;err: %v&#34;, err)
+ t.Fatalf("err: %v", err)
}
if n != 0 {
- t.Fatalf(&#34;lied about writes: %d&#34;, n)
+ t.Fatalf("lied about writes: %d", n)
}
}()
@@ -9227,7 +9227,7 @@ to the message type. The following flags are supported:
## StreamID Field
The StreamID field is used to identify the logical stream the frame
-is addressing. The client side should use odd ID&#39;s, and the server even.
+is addressing. The client side should use odd ID's, and the server even.
This prevents any collisions. Additionally, the 0 ID is reserved to represent
the session.
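
An illustrative, minimal Go sketch of the ID convention described above (not part of yamux; the idAllocator type and nextStreamID helper are hypothetical, introduced only for this example): each side starts at a different parity and advances by two, so client and server IDs never collide and 0 stays reserved for the session.

package main

import "fmt"

// idAllocator hands out stream IDs for one side of a yamux-style session.
// Clients use odd IDs, servers use even IDs, and ID 0 is reserved for the
// session itself, matching the convention in the spec excerpt above.
type idAllocator struct {
	next uint32
}

// newIDAllocator starts at 1 for clients and 2 for servers so the two
// sides can never produce the same ID.
func newIDAllocator(client bool) *idAllocator {
	if client {
		return &idAllocator{next: 1}
	}
	return &idAllocator{next: 2}
}

// nextStreamID returns the next ID for this side and advances by two,
// preserving the odd/even split.
func (a *idAllocator) nextStreamID() uint32 {
	id := a.next
	a.next += 2
	return id
}

func main() {
	client := newIDAllocator(true)
	server := newIDAllocator(false)
	fmt.Println(client.nextStreamID(), client.nextStreamID()) // 1 3
	fmt.Println(server.nextStreamID(), server.nextStreamID()) // 2 4
}

Running the sketch prints 1 3 for the client side and 2 4 for the server side, which is the collision-free split the spec text describes.
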
@@ -9309,11 +9309,11 @@ stream.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;bytes&#34;
- &#34;io&#34;
- &#34;sync&#34;
- &#34;sync/atomic&#34;
- &#34;time&#34;
+ "bytes"
+ "io"
+ "sync"
+ "sync/atomic"
+ "time"
)
type streamState int
@@ -9366,7 +9366,7 @@ type Stream struct {
// newStream is used to construct a new stream within
// a given session for an ID
func newStream(session *Session, id uint32, state streamState) *Stream {
- s := &amp;Stream{
+ s := &Stream{
id: id,
session: session,
state: state,
@@ -9434,7 +9434,7 @@ START:
return n, err
WAIT:
- var timeout &lt;-chan time.Time
+ var timeout <-chan time.Time
var timer *time.Timer
readDeadline := s.readDeadline.Load().(time.Time)
if !readDeadline.IsZero() {
@@ -9443,12 +9443,12 @@ WAIT:
timeout = timer.C
}
select {
- case &lt;-s.recvNotifyCh:
+ case <-s.recvNotifyCh:
if timer != nil {
timer.Stop()
}
goto START
- case &lt;-timeout:
+ case <-timeout:
return 0, ErrTimeout
}
}
@@ -9458,9 +9458,9 @@ func (s *Stream) Write(b []byte) (n int, err error) {
s.sendLock.Lock()
defer s.sendLock.Unlock()
total := 0
- for total &lt; len(b) {
+ for total < len(b) {
n, err := s.write(b[total:])
- total &#43;= n
+ total += n
if err != nil {
return total, err
}
@@ -9489,7 +9489,7 @@ START:
s.stateLock.Unlock()
// If there is no data available, block
- window := atomic.LoadUint32(&amp;s.sendWindow)
+ window := atomic.LoadUint32(&s.sendWindow)
if window == 0 {
goto WAIT
}
@@ -9508,22 +9508,22 @@ START:
}
// Reduce our send window
- atomic.AddUint32(&amp;s.sendWindow, ^uint32(max-1))
+ atomic.AddUint32(&s.sendWindow, ^uint32(max-1))
// Unlock
return int(max), err
WAIT:
- var timeout &lt;-chan time.Time
+ var timeout <-chan time.Time
writeDeadline := s.writeDeadline.Load().(time.Time)
if !writeDeadline.IsZero() {
delay := writeDeadline.Sub(time.Now())
timeout = time.After(delay)
}
select {
- case &lt;-s.sendNotifyCh:
+ case <-s.sendNotifyCh:
goto START
- case &lt;-timeout:
+ case <-timeout:
return 0, ErrTimeout
}
return 0, nil
@@ -9565,13 +9565,13 @@ func (s *Stream) sendWindowUpdate() error {
flags := s.sendFlags()
// Check if we can omit the update
- if delta &lt; (max/2) &amp;&amp; flags == 0 {
+ if delta < (max/2) && flags == 0 {
s.recvLock.Unlock()
return nil
}
// Update our window
- s.recvWindow &#43;= delta
+ s.recvWindow += delta
s.recvLock.Unlock()
// Send the header
@@ -9619,12 +9619,12 @@ func (s *Stream) Close() error {
case streamClosed:
case streamReset:
default:
- panic(&#34;unhandled state&#34;)
+ panic("unhandled state")
}
s.stateLock.Unlock()
return nil
SEND_CLOSE:
- // This shouldn&#39;t happen (the more realistic scenario to cancel the
+ // This shouldn't happen (the more realistic scenario to cancel the
// timer is via processFlags) but just in case this ever happens, we
// cancel the timer to prevent dangling timers.
if s.closeTimer != nil {
@@ -9633,14 +9633,14 @@ SEND_CLOSE:
}
// If we have a StreamCloseTimeout set we start the timeout timer.
- // We do this only if we&#39;re not already closing the stream since that
+ // We do this only if we're not already closing the stream since that
// means this was a graceful close.
//
// This prevents memory leaks if one side (this side) closes and the
// remote side poorly behaves and never responds with a FIN to complete
// the close. After the specified timeout, we clean our resources up no
// matter what.
- if !closeStream &amp;&amp; s.session.config.StreamCloseTimeout &gt; 0 {
+ if !closeStream && s.session.config.StreamCloseTimeout > 0 {
s.closeTimer = time.AfterFunc(
s.session.config.StreamCloseTimeout, s.closeTimeout)
}
@@ -9697,13 +9697,13 @@ func (s *Stream) processFlags(flags uint16) error {
}
}()
- if flags&amp;flagACK == flagACK {
+ if flags&flagACK == flagACK {
if s.state == streamSYNSent {
s.state = streamEstablished
}
s.session.establishStream(s.id)
}
- if flags&amp;flagFIN == flagFIN {
+ if flags&flagFIN == flagFIN {
switch s.state {
case streamSYNSent:
fallthrough
@@ -9717,11 +9717,11 @@ func (s *Stream) processFlags(flags uint16) error {
closeStream = true
s.notifyWaiting()
default:
- s.session.logger.Printf(&#34;[ERR] yamux: unexpected FIN flag in state %d&#34;, s.state)
+ s.session.logger.Printf("[ERR] yamux: unexpected FIN flag in state %d", s.state)
return ErrUnexpectedFlag
}
}
- if flags&amp;flagRST == flagRST {
+ if flags&flagRST == flagRST {
s.state = streamReset
closeStream = true
s.notifyWaiting()
@@ -9742,7 +9742,7 @@ func (s *Stream) incrSendWindow(hdr header, flags uint16) error {
}
// Increase window, unblock a sender
- atomic.AddUint32(&amp;s.sendWindow, hdr.Length())
+ atomic.AddUint32(&s.sendWindow, hdr.Length())
asyncNotify(s.sendNotifyCh)
return nil
}
@@ -9760,13 +9760,13 @@ func (s *Stream) readData(hdr header, flags uint16, conn io.Reader) error {
}
// Wrap in a limited reader
- conn = &amp;io.LimitedReader{R: conn, N: int64(length)}
+ conn = &io.LimitedReader{R: conn, N: int64(length)}
// Copy into buffer
s.recvLock.Lock()
- if length &gt; s.recvWindow {
- s.session.logger.Printf(&#34;[ERR] yamux: receive window exceeded (stream: %d, remain: %d, recv: %d)&#34;, s.id, s.recvWindow, length)
+ if length > s.recvWindow {
+ s.session.logger.Printf("[ERR] yamux: receive window exceeded (stream: %d, remain: %d, recv: %d)", s.id, s.recvWindow, length)
return ErrRecvWindowExceeded
}
@@ -9776,7 +9776,7 @@ func (s *Stream) readData(hdr header, flags uint16, conn io.Reader) error {
s.recvBuf = bytes.NewBuffer(make([]byte, 0, length))
}
if _, err := io.Copy(s.recvBuf, conn); err != nil {
- s.session.logger.Printf(&#34;[ERR] yamux: Failed to read stream data: %v&#34;, err)
+ s.session.logger.Printf("[ERR] yamux: Failed to read stream data: %v", err)
s.recvLock.Unlock()
return err
}
@@ -9820,7 +9820,7 @@ func (s *Stream) SetWriteDeadline(t time.Time) error {
// the idle memory utilization.
func (s *Stream) Shrink() {
s.recvLock.Lock()
- if s.recvBuf != nil &amp;&amp; s.recvBuf.Len() == 0 {
+ if s.recvBuf != nil && s.recvBuf.Len() == 0 {
s.recvBuf = nil
}
s.recvLock.Unlock()
@@ -9831,12 +9831,12 @@ util.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;sync&#34;
- &#34;time&#34;
+ "sync"
+ "time"
)
var (
- timerPool = &amp;sync.Pool{
+ timerPool = &sync.Pool{
New: func() interface{} {
timer := time.NewTimer(time.Hour * 1e6)
timer.Stop()
@@ -9851,7 +9851,7 @@ func asyncSendErr(ch chan error, err error) {
return
}
select {
- case ch &lt;- err:
+ case ch <- err:
default:
}
}
@@ -9859,14 +9859,14 @@ func asyncSendErr(ch chan error, err error) {
// asyncNotify is used to signal a waiting goroutine
func asyncNotify(ch chan struct{}) {
select {
- case ch &lt;- struct{}{}:
+ case ch <- struct{}{}:
default:
}
}
// min computes the minimum of two values
func min(a, b uint32) uint32 {
- if a &lt; b {
+ if a < b {
return a
}
return b
@@ -9877,24 +9877,24 @@ util_test.go - github.com/hashicorp/yamux
package yamux
import (
- &#34;testing&#34;
+ "testing"
)
func TestAsyncSendErr(t *testing.T) {
ch := make(chan error)
asyncSendErr(ch, ErrTimeout)
select {
- case &lt;-ch:
- t.Fatalf(&#34;should not get&#34;)
+ case <-ch:
+ t.Fatalf("should not get")
default:
}
ch = make(chan error, 1)
asyncSendErr(ch, ErrTimeout)
select {
- case &lt;-ch:
+ case <-ch:
default:
- t.Fatalf(&#34;should get&#34;)
+ t.Fatalf("should get")
}
}
@@ -9902,26 +9902,26 @@ func TestAsyncNotify(t *testing.T) {
ch := make(chan struct{})
asyncNotify(ch)
select {
- case &lt;-ch:
- t.Fatalf(&#34;should not get&#34;)
+ case <-ch:
+ t.Fatalf("should not get")
default:
}
ch = make(chan struct{}, 1)
asyncNotify(ch)
select {
- case &lt;-ch:
+ case <-ch:
default:
- t.Fatalf(&#34;should get&#34;)
+ t.Fatalf("should get")
}
}
func TestMin(t *testing.T) {
if min(1, 2) != 1 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
if min(2, 1) != 1 {
- t.Fatalf(&#34;bad&#34;)
+ t.Fatalf("bad")
}
}
@@ -9938,7 +9938,7 @@ https://github.com/knieriem/markdown
Copyright (c) 2010 Michael Teichgräber
-Michael&#39;s markdown module is a translation of peg-markdown, written
+Michael's markdown module is a translation of peg-markdown, written
by John MacFarlane, into Go:
Copyright (c) 2008 John MacFarlane
@@ -9965,7 +9965,7 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
The MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -9974,7 +9974,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10003,7 +10003,7 @@ are permitted provided that the following conditions are met:
* Neither the name of the Go Authors nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior written permission.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS &#34;AS IS&#34; AND ANY
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -10033,7 +10033,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -10051,7 +10051,7 @@ The MIT License (MIT)
Copyright (c) 2014 Juan Batiz-Benet
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10060,7 +10060,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10087,7 +10087,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -10105,7 +10105,7 @@ MIT License
Copyright (c) 2017 Joseph Kato
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10114,7 +10114,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10127,7 +10127,7 @@ LICENSE - github.com/kelseyhightower/envconfig
Copyright (c) 2013 Kelsey Hightower
Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the &#34;Software&#34;), to deal in
+this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
@@ -10136,7 +10136,7 @@ so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10150,7 +10150,7 @@ Copyright (c) 2017 Kevin Burke.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
-files (the &#34;Software&#34;), to deal in the Software without
+files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
@@ -10160,7 +10160,7 @@ conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND,
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
@@ -10179,7 +10179,7 @@ The MIT License (MIT)
Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10188,7 +10188,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10198,14 +10198,14 @@ SOFTWARE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE.md - github.com/lib/pq
-Copyright (c) 2011-2013, &#39;pq&#39; Contributors
+Copyright (c) 2011-2013, 'pq' Contributors
Portions Copyright (C) 2011 Blake Mizerany
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the &#34;Software&#34;), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - github.com/libgit2/git2go/v31
@@ -10214,7 +10214,7 @@ The MIT License
Copyright (c) 2013 The git2go contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10223,7 +10223,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10233,15 +10233,15 @@ THE SOFTWARE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - github.com/mattn/go-isatty
-Copyright (c) Yasuhiro MATSUMOTO &lt;mattn.jp@gmail.com&gt;
+Copyright (c) Yasuhiro MATSUMOTO <mattn.jp@gmail.com>
MIT License (Expat)
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the &#34;Software&#34;), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - github.com/mattn/go-runewidth
@@ -10250,7 +10250,7 @@ The MIT License (MIT)
Copyright (c) 2016 Yasuhiro Matsumoto
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10259,7 +10259,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10277,38 +10277,38 @@ LICENSE - github.com/matttproud/golang_protobuf_extensions/pbutil
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -10316,21 +10316,21 @@ LICENSE - github.com/matttproud/golang_protobuf_extensions/pbutil
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -10374,7 +10374,7 @@ LICENSE - github.com/matttproud/golang_protobuf_extensions/pbutil
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -10413,7 +10413,7 @@ LICENSE - github.com/matttproud/golang_protobuf_extensions/pbutil
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -10449,24 +10449,24 @@ LICENSE - github.com/matttproud/golang_protobuf_extensions/pbutil
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;{}&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -10482,7 +10482,7 @@ The MIT License (MIT)
Copyright (c) 2013 Mitchell Hashimoto
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10491,7 +10491,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10506,7 +10506,7 @@ The MIT License (MIT)
Copyright (c) 2014-2015 Montana Flynn (https://anonfunction.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10515,7 +10515,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10533,38 +10533,38 @@ LICENSE - github.com/oklog/ulid/v2
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -10572,21 +10572,21 @@ LICENSE - github.com/oklog/ulid/v2
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -10630,7 +10630,7 @@ LICENSE - github.com/oklog/ulid/v2
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -10669,7 +10669,7 @@ LICENSE - github.com/oklog/ulid/v2
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -10705,24 +10705,24 @@ LICENSE - github.com/oklog/ulid/v2
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -10732,7 +10732,7 @@ LICENSE.md - github.com/olekukonko/tablewriter
Copyright (C) 2014 by Oleku Konko
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10741,7 +10741,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10754,7 +10754,7 @@ LICENCE - github.com/olekukonko/ts
Copyright (C) 2014 by Oleku Konko
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -10763,7 +10763,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -10781,38 +10781,38 @@ LICENSE - github.com/opencontainers/runtime-spec/specs-go
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -10820,21 +10820,21 @@ LICENSE - github.com/opencontainers/runtime-spec/specs-go
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -10878,7 +10878,7 @@ LICENSE - github.com/opencontainers/runtime-spec/specs-go
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -10917,7 +10917,7 @@ LICENSE - github.com/opencontainers/runtime-spec/specs-go
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -10952,14 +10952,14 @@ LICENSE - github.com/opencontainers/runtime-spec/specs-go
Copyright 2015 The Linux Foundation.
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -10974,38 +10974,38 @@ LICENSE - github.com/opentracing/opentracing-go
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -11013,21 +11013,21 @@ LICENSE - github.com/opentracing/opentracing-go
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -11071,7 +11071,7 @@ LICENSE - github.com/opentracing/opentracing-go
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -11110,7 +11110,7 @@ LICENSE - github.com/opentracing/opentracing-go
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -11146,24 +11146,24 @@ LICENSE - github.com/opentracing/opentracing-go
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;{}&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2016 The OpenTracing Authors
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -11175,7 +11175,7 @@ The MIT License (MIT)
Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -11184,7 +11184,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -11194,7 +11194,7 @@ SOFTWARE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - github.com/pkg/errors
-Copyright (c) 2015, Dave Cheney &lt;dave@cheney.net&gt;
+Copyright (c) 2015, Dave Cheney <dave@cheney.net>
All rights reserved.
Redistribution and use in source and binary forms, with or without
@@ -11207,7 +11207,7 @@ modification, are permitted provided that the following conditions are met:
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS &#34;AS IS&#34;
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
@@ -11236,8 +11236,8 @@ documentation and/or other materials provided with the distribution.
products derived from this software without specific prior written
permission.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS &#34;AS
-IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -11258,38 +11258,38 @@ LICENSE - github.com/prometheus/client_golang/prometheus
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -11297,21 +11297,21 @@ LICENSE - github.com/prometheus/client_golang/prometheus
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -11355,7 +11355,7 @@ LICENSE - github.com/prometheus/client_golang/prometheus
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -11394,7 +11394,7 @@ LICENSE - github.com/prometheus/client_golang/prometheus
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -11430,24 +11430,24 @@ LICENSE - github.com/prometheus/client_golang/prometheus
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -11468,7 +11468,7 @@ https://github.com/beorn7/perks
Copyright 2013-2015 Blake Mizerany, Björn Rabenstein
See https://github.com/beorn7/perks/blob/master/README.md for license details.
-Go support for Protocol Buffers - Google&#39;s data interchange format
+Go support for Protocol Buffers - Google's data interchange format
http://github.com/golang/protobuf/
Copyright 2010 The Go Authors
See source code for license details.
@@ -11488,38 +11488,38 @@ LICENSE - github.com/prometheus/client_model/go
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -11527,21 +11527,21 @@ LICENSE - github.com/prometheus/client_model/go
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -11585,7 +11585,7 @@ LICENSE - github.com/prometheus/client_model/go
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -11624,7 +11624,7 @@ LICENSE - github.com/prometheus/client_model/go
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -11660,24 +11660,24 @@ LICENSE - github.com/prometheus/client_model/go
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -11700,38 +11700,38 @@ LICENSE - github.com/prometheus/common
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -11739,21 +11739,21 @@ LICENSE - github.com/prometheus/common
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -11797,7 +11797,7 @@ LICENSE - github.com/prometheus/common
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -11836,7 +11836,7 @@ LICENSE - github.com/prometheus/common
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -11872,24 +11872,24 @@ LICENSE - github.com/prometheus/common
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -11907,7 +11907,7 @@ README.txt - github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg
PACKAGE
package goautoneg
-import &#34;bitbucket.org/ww/goautoneg&#34;
+import "bitbucket.org/ww/goautoneg"
HTTP Content-Type Autonegotiation.
@@ -11935,7 +11935,7 @@ met:
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -11982,38 +11982,38 @@ LICENSE - github.com/prometheus/procfs
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -12021,21 +12021,21 @@ LICENSE - github.com/prometheus/procfs
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -12079,7 +12079,7 @@ LICENSE - github.com/prometheus/procfs
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -12118,7 +12118,7 @@ LICENSE - github.com/prometheus/procfs
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -12154,24 +12154,24 @@ LICENSE - github.com/prometheus/procfs
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -12190,10 +12190,10 @@ SoundCloud Ltd. (http://soundcloud.com/).
LICENSE - github.com/rubenv/sql-migrate
MIT License
-Copyright (C) 2014-2019 by Ruben Vermeersch &lt;ruben@rocketeer.be&gt;
+Copyright (C) 2014-2019 by Ruben Vermeersch <ruben@rocketeer.be>
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -12202,7 +12202,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -12214,11 +12214,11 @@ SOFTWARE.
LICENSE - github.com/rubenv/sql-migrate/sqlparse
MIT License
-Copyright (C) 2014-2017 by Ruben Vermeersch &lt;ruben@rocketeer.be&gt;
+Copyright (C) 2014-2017 by Ruben Vermeersch <ruben@rocketeer.be>
Copyright (C) 2012-2014 by Liam Staskawicz
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -12227,7 +12227,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -12243,7 +12243,7 @@ Copyright (c) 2014 Scott Barron
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
-&#39;Software&#39;), to deal in the Software without restriction, including
+'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
@@ -12252,7 +12252,7 @@ the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#39;AS IS&#39;, WITHOUT WARRANTY OF ANY KIND,
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
@@ -12264,33 +12264,33 @@ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
LICENSE.txt - github.com/russross/blackfriday/v2
Blackfriday is distributed under the Simplified BSD License:
-&gt; Copyright © 2011 Russ Ross
-&gt; All rights reserved.
-&gt;
-&gt; Redistribution and use in source and binary forms, with or without
-&gt; modification, are permitted provided that the following conditions
-&gt; are met:
-&gt;
-&gt; 1. Redistributions of source code must retain the above copyright
-&gt; notice, this list of conditions and the following disclaimer.
-&gt;
-&gt; 2. Redistributions in binary form must reproduce the above
-&gt; copyright notice, this list of conditions and the following
-&gt; disclaimer in the documentation and/or other materials provided with
-&gt; the distribution.
-&gt;
-&gt; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&gt; &#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-&gt; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-&gt; FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-&gt; COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-&gt; INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-&gt; BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-&gt; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-&gt; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-&gt; LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-&gt; ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-&gt; POSSIBILITY OF SUCH DAMAGE.
+> Copyright © 2011 Russ Ross
+> All rights reserved.
+>
+> Redistribution and use in source and binary forms, with or without
+> modification, are permitted provided that the following conditions
+> are met:
+>
+> 1. Redistributions of source code must retain the above copyright
+> notice, this list of conditions and the following disclaimer.
+>
+> 2. Redistributions in binary form must reproduce the above
+> copyright notice, this list of conditions and the following
+> disclaimer in the documentation and/or other materials provided with
+> the distribution.
+>
+> THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+> "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+> LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+> FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+> COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+> INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+> BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+> LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+> CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+> LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+> ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+> POSSIBILITY OF SUCH DAMAGE.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - github.com/sebest/xff
@@ -12298,7 +12298,7 @@ Copyright (c) 2015 Sebastien Estienne (sebastien.estienne@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
-&#34;Software&#34;), to deal in the Software without restriction, including
+"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
@@ -12307,7 +12307,7 @@ the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND,
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
@@ -12320,7 +12320,7 @@ LICENSE - github.com/sergi/go-diff/diffmatchpatch
Copyright (c) 2012-2016 The go-diff Authors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the &#34;Software&#34;),
+copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
@@ -12329,7 +12329,7 @@ Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -12345,7 +12345,7 @@ The MIT License (MIT)
Copyright (c) 2017 Ichinose Shogo
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -12354,7 +12354,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -12369,7 +12369,7 @@ MIT License
Copyright (c) 2015 Dmitri Shuralyov
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -12378,7 +12378,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -12393,7 +12393,7 @@ The MIT License (MIT)
Copyright (c) 2014 Simon Eskildsen
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -12402,7 +12402,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -12417,7 +12417,7 @@ MIT License
Copyright (c) 2019 Stephen Gelman
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -12426,7 +12426,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -12441,7 +12441,7 @@ MIT License
Copyright (c) 2012-2020 Mat Ryer, Tyler Bunnell and contributors.
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -12450,7 +12450,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -12468,38 +12468,38 @@ LICENSE - github.com/uber/jaeger-client-go
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -12507,21 +12507,21 @@ LICENSE - github.com/uber/jaeger-client-go
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -12565,7 +12565,7 @@ LICENSE - github.com/uber/jaeger-client-go
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -12604,7 +12604,7 @@ LICENSE - github.com/uber/jaeger-client-go
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -12640,24 +12640,24 @@ LICENSE - github.com/uber/jaeger-client-go
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -12672,38 +12672,38 @@ LICENSE - github.com/uber/jaeger-lib/metrics
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -12711,21 +12711,21 @@ LICENSE - github.com/uber/jaeger-lib/metrics
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -12769,7 +12769,7 @@ LICENSE - github.com/uber/jaeger-lib/metrics
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -12808,7 +12808,7 @@ LICENSE - github.com/uber/jaeger-lib/metrics
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -12844,24 +12844,24 @@ LICENSE - github.com/uber/jaeger-lib/metrics
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -12876,38 +12876,38 @@ LICENSE - github.com/xanzy/ssh-agent
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -12915,21 +12915,21 @@ LICENSE - github.com/xanzy/ssh-agent
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -12973,7 +12973,7 @@ LICENSE - github.com/xanzy/ssh-agent
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -13012,7 +13012,7 @@ LICENSE - github.com/xanzy/ssh-agent
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -13048,24 +13048,24 @@ LICENSE - github.com/xanzy/ssh-agent
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;{}&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -13079,7 +13079,7 @@ MIT License
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
-&#34;Software&#34;), to deal in the Software without restriction, including
+"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
@@ -13088,7 +13088,7 @@ the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND,
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
@@ -13106,38 +13106,38 @@ LICENSE.txt - gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -13145,21 +13145,21 @@ LICENSE.txt - gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -13203,7 +13203,7 @@ LICENSE.txt - gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -13242,7 +13242,7 @@ LICENSE.txt - gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -13279,7 +13279,7 @@ Copyright (c) 2011-2018 GitLab B.V.
With regard to the GitLab Software:
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -13288,7 +13288,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -13307,7 +13307,7 @@ The MIT License (MIT)
Copyright (c) 2016-2017 GitLab B.V.
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -13316,7 +13316,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -13335,38 +13335,38 @@ LICENSE - go.opencensus.io
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -13374,21 +13374,21 @@ LICENSE - go.opencensus.io
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -13432,7 +13432,7 @@ LICENSE - go.opencensus.io
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -13471,7 +13471,7 @@ LICENSE - go.opencensus.io
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -13507,28 +13507,52 @@ LICENSE - go.opencensus.io
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+LICENSE - go.uber.org/goleak
+The MIT License (MIT)
+
+Copyright (c) 2018 Uber Technologies, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LICENSE - golang.org/x/crypto
Copyright (c) 2009 The Go Authors. All rights reserved.
@@ -13547,7 +13571,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -13577,7 +13601,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -13607,7 +13631,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -13637,7 +13661,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -13667,7 +13691,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -13697,7 +13721,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -13727,7 +13751,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -13753,7 +13777,7 @@ modification, are permitted provided that the following conditions are met:
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS &#34;AS IS&#34; AND
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
@@ -13782,7 +13806,7 @@ contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -13804,38 +13828,38 @@ LICENSE - google.golang.org/genproto
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -13843,21 +13867,21 @@ LICENSE - google.golang.org/genproto
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -13901,7 +13925,7 @@ LICENSE - google.golang.org/genproto
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -13940,7 +13964,7 @@ LICENSE - google.golang.org/genproto
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -13976,24 +14000,24 @@ LICENSE - google.golang.org/genproto
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -14009,38 +14033,38 @@ LICENSE - google.golang.org/grpc
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -14048,21 +14072,21 @@ LICENSE - google.golang.org/grpc
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -14106,7 +14130,7 @@ LICENSE - google.golang.org/grpc
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -14145,7 +14169,7 @@ LICENSE - google.golang.org/grpc
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -14181,24 +14205,24 @@ LICENSE - google.golang.org/grpc
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -14207,11 +14231,11 @@ LICENSE - google.golang.org/grpc
LICENSE - gopkg.in/gorp.v1
(The MIT License)
-Copyright (c) 2012 James Cooper &lt;james@bitmechanic.com&gt;
+Copyright (c) 2012 James Cooper <james@bitmechanic.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
-&#39;Software&#39;), to deal in the Software without restriction, including
+'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
@@ -14220,7 +14244,7 @@ the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#39;AS IS&#39;, WITHOUT WARRANTY OF ANY KIND,
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
@@ -14238,38 +14262,38 @@ LICENSE - gopkg.in/jcmturner/aescts.v1
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -14277,21 +14301,21 @@ LICENSE - gopkg.in/jcmturner/aescts.v1
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -14335,7 +14359,7 @@ LICENSE - gopkg.in/jcmturner/aescts.v1
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -14374,7 +14398,7 @@ LICENSE - gopkg.in/jcmturner/aescts.v1
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -14410,24 +14434,24 @@ LICENSE - gopkg.in/jcmturner/aescts.v1
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;{}&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -14442,38 +14466,38 @@ LICENSE - gopkg.in/jcmturner/dnsutils.v1
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -14481,21 +14505,21 @@ LICENSE - gopkg.in/jcmturner/dnsutils.v1
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -14539,7 +14563,7 @@ LICENSE - gopkg.in/jcmturner/dnsutils.v1
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -14578,7 +14602,7 @@ LICENSE - gopkg.in/jcmturner/dnsutils.v1
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -14614,24 +14638,24 @@ LICENSE - gopkg.in/jcmturner/dnsutils.v1
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -14646,38 +14670,38 @@ LICENSE - gopkg.in/jcmturner/gokrb5.v5
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -14685,21 +14709,21 @@ LICENSE - gopkg.in/jcmturner/gokrb5.v5
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -14743,7 +14767,7 @@ LICENSE - gopkg.in/jcmturner/gokrb5.v5
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -14782,7 +14806,7 @@ LICENSE - gopkg.in/jcmturner/gokrb5.v5
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -14818,24 +14842,24 @@ LICENSE - gopkg.in/jcmturner/gokrb5.v5
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;{}&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -14850,38 +14874,38 @@ LICENSE - gopkg.in/jcmturner/rpc.v0/ndr
1. Definitions.
- &#34;License&#34; shall mean the terms and conditions for use, reproduction,
+ "License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
- &#34;Licensor&#34; shall mean the copyright owner or entity authorized by
+ "Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
- &#34;Legal Entity&#34; shall mean the union of the acting entity and all
+ "Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
- &#34;control&#34; means (i) the power, direct or indirect, to cause the
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
- &#34;You&#34; (or &#34;Your&#34;) shall mean an individual or Legal Entity
+ "You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
- &#34;Source&#34; form shall mean the preferred form for making modifications,
+ "Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
- &#34;Object&#34; form shall mean any form resulting from mechanical
+ "Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
- &#34;Work&#34; shall mean the work of authorship, whether in Source or
+ "Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
- &#34;Derivative Works&#34; shall mean any work, whether in Source or Object
+ "Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
@@ -14889,21 +14913,21 @@ LICENSE - gopkg.in/jcmturner/rpc.v0/ndr
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
- &#34;Contribution&#34; shall mean any work of authorship, including
+ "Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, &#34;submitted&#34;
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as &#34;Not a Contribution.&#34;
+ designated in writing by the copyright owner as "Not a Contribution."
- &#34;Contributor&#34; shall mean Licensor and any individual or Legal Entity
+ "Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
@@ -14947,7 +14971,7 @@ LICENSE - gopkg.in/jcmturner/rpc.v0/ndr
excluding those notices that do not pertain to any part of
the Derivative Works; and
- (d) If the Work includes a &#34;NOTICE&#34; text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -14986,7 +15010,7 @@ LICENSE - gopkg.in/jcmturner/rpc.v0/ndr
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an &#34;AS IS&#34; BASIS,
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
@@ -15022,24 +15046,24 @@ LICENSE - gopkg.in/jcmturner/rpc.v0/ndr
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets &#34;[]&#34;
- replaced with your own identifying information. (Don&#39;t include
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
- same &#34;printed page&#34; as the copyright notice for easier
+ same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
- Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+ Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+ distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -15052,7 +15076,7 @@ The MIT License (MIT)
Copyright (c) 2015 Eric Bower
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the &#34;Software&#34;), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@@ -15061,7 +15085,7 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -15085,7 +15109,7 @@ in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-&#34;AS IS&#34; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
@@ -15114,7 +15138,7 @@ Copyright (c) 2006-2010 Kirill Simonov
Copyright (c) 2006-2011 Kirill Simonov
Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the &#34;Software&#34;), to deal in
+this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
@@ -15123,7 +15147,7 @@ so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED &#34;AS IS&#34;, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
@@ -15137,14 +15161,14 @@ All the remaining project files are covered by the Apache license:
Copyright (c) 2011-2019 Canonical Ltd
-Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -15153,14 +15177,14 @@ limitations under the License.
NOTICE - gopkg.in/yaml.v3
Copyright 2011-2016 Canonical Ltd.
-Licensed under the Apache License, Version 2.0 (the &#34;License&#34;);
+Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an &#34;AS IS&#34; BASIS,
+distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
diff --git a/README.md b/README.md
index 0903d1afd..218140c30 100644
--- a/README.md
+++ b/README.md
@@ -43,11 +43,11 @@ GitLab.com, read about our [observability story](doc/observability.md)!
##### Overall
-[![image](https://gitlab.com/gitlab-org/gitaly/uploads/ca7dddd2e23b7f1fb8c0f842c93059ce/gitaly-overview_s.png)](https://dashboards.gitlab.com/d/000000176/gitaly)
+[![image](https://gitlab.com/gitlab-org/gitaly/uploads/c3aa987884d5e78c3567a3a7469ea6c2/overview.png)](https://dashboards.gitlab.com/d/gitaly-main/gitaly-overview)
##### By Feature
-[![image](https://gitlab.com/gitlab-org/gitaly/uploads/048a1facaaf18b4799569150ca7c3cd6/gitaly-features_s.png)](https://dashboards.gitlab.com/d/000000198/gitaly-features-overview)
+[![image](https://gitlab.com/gitlab-org/gitaly/uploads/3e8a5616863fa17c5bf08cb67c1bb385/feature.png)](https://dashboards.gitlab.com/d/000000198/gitaly-features-overview)
## Installation
diff --git a/VERSION b/VERSION
index dcd2e8f8a..16a60ba12 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-13.11.0-rc1
\ No newline at end of file
+13.12.0-rc1
\ No newline at end of file
diff --git a/_support/gitlab-test.git-packed-refs b/_support/gitlab-test.git-packed-refs
index 940754fe4..e8093ee87 100644
--- a/_support/gitlab-test.git-packed-refs
+++ b/_support/gitlab-test.git-packed-refs
@@ -1,4 +1,4 @@
-# pack-refs with: peeled fully-peeled sorted
+# pack-refs with: peeled fully-peeled sorted
e56497bb5f03a90a51293fc6d516788730953899 refs/heads/'test'
1b12f15a11fc6e62177bef08f47bc7b5ce50b141 refs/heads/100%branch
1942eed5cc108b19c7405106e81fa96125d0be22 refs/heads/1942eed5cc108b19c7405106e81fa96125d0be22
@@ -44,6 +44,7 @@ bb5206fee213d983da88c47f9cf4cc6caf9c66dc refs/heads/feature_conflict
ce369011c189f62c815f5971d096b26759bab0d1 refs/heads/flat-path
d25b6d94034242f3930dfcfeb6d8d9aac3583992 refs/heads/flat-path-2
e56497bb5f03a90a51293fc6d516788730953899 refs/heads/flatten-dirs
+9a944d90955aaf45f6d0c88f30e27f8d2c41cec0 refs/heads/gitaly-branches-test
ab2c9622c02288a2bbaaf35d96088cfdff31d9d9 refs/heads/gitaly-diff-stuff
0999bb770f8dc92ab5581cc0b474b3e31a96bf5c refs/heads/gitaly-non-utf8-commit
94bb47ca1297b7b3731ff2a36923640991e9236f refs/heads/gitaly-rename-test
@@ -68,6 +69,7 @@ b83d6e391c22777fca1ed3012fce84f633d7fed0 refs/heads/not-mergéd-branch
fe42f41cdc7ca97ae200fb50a268431add95901a refs/heads/png-lfs
c84ff944ff4529a70788a5e9003c2b7feae29047 refs/heads/rd-add-file-larger-than-1-mb
ca47bfd5e930148c42ed74c3b561a8783e381f7f refs/heads/rebase-encoding-failure-trigger
+842616594688d2351480dfebd67b3d8d15571e6d refs/heads/sha-starting-with-large-number
6101e87e575de14b38b4e1ce180519a813671e10 refs/heads/signed-commits
ed775cc81e5477df30c2abba7b6fdbb5d0baadae refs/heads/smime-signed-commits
ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69 refs/heads/spooky-stuff
diff --git a/_support/noticegen/noticegen.go b/_support/noticegen/noticegen.go
index c54d1df7d..57467cbea 100644
--- a/_support/noticegen/noticegen.go
+++ b/_support/noticegen/noticegen.go
@@ -2,11 +2,11 @@ package main
import (
"flag"
- "html/template"
"io/ioutil"
"log"
"os"
"path/filepath"
+ "text/template"
)
var (
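
The import swap above (html/template replaced by text/template in noticegen) is what produces the large NOTICE hunks earlier in this diff: html/template HTML-escapes every interpolated value, so the generated license text came out with &#34; and &#39; entities, while text/template writes the same strings verbatim. A minimal, self-contained sketch (not part of this change) illustrating the difference:

package main

import (
	htmltemplate "html/template"
	"os"
	texttemplate "text/template"
)

func main() {
	const tmpl = "{{.}}\n"
	data := `THE SOFTWARE IS PROVIDED "AS IS"`

	// html/template HTML-escapes interpolated values, which is how the old
	// NOTICE ended up full of &#34; and &#39; entities.
	_ = htmltemplate.Must(htmltemplate.New("h").Parse(tmpl)).Execute(os.Stdout, data)

	// text/template emits the value verbatim, matching the "+" lines in the
	// NOTICE hunks above.
	_ = texttemplate.Must(texttemplate.New("t").Parse(tmpl)).Execute(os.Stdout, data)
}
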
diff --git a/_support/test-boot b/_support/test-boot
index ca19b118f..3c8638637 100755
--- a/_support/test-boot
+++ b/_support/test-boot
@@ -7,8 +7,9 @@ ADDR = 'socket'.freeze
def main(gitaly_dir)
gitaly_dir = File.realpath(gitaly_dir)
+ bin_dir = File.join(gitaly_dir, '_build', 'bin')
- version = IO.popen("#{File.join(gitaly_dir, 'gitaly')} -version").read.delete_prefix('Gitaly, version ').strip
+ version = IO.popen("#{File.join(bin_dir, 'gitaly')} -version").read.delete_prefix('Gitaly, version ').strip
version_from_file = IO.read(File.join(gitaly_dir, 'VERSION')).strip
# Use start_with? instead of == because the version output could use git describe, if it is a source install
@@ -22,7 +23,7 @@ def main(gitaly_dir)
File.write('config.toml', <<~CONFIG
socket_path = "#{ADDR}"
- bin_dir = "#{gitaly_dir}"
+ bin_dir = "#{bin_dir}"
[[storage]]
name = "default"
@@ -44,7 +45,7 @@ def main(gitaly_dir)
begin
start = Time.now
- pid = spawn(File.join(gitaly_dir, 'gitaly'), 'config.toml')
+ pid = spawn(File.join(bin_dir, 'gitaly'), 'config.toml')
wait_connect
puts
puts "\n\nconnection established after #{Time.now - start} seconds\n\n"
diff --git a/changelogs/unreleased/gitaly-backup-restore.yml b/changelogs/unreleased/gitaly-backup-restore.yml
new file mode 100644
index 000000000..ff2fcfb67
--- /dev/null
+++ b/changelogs/unreleased/gitaly-backup-restore.yml
@@ -0,0 +1,5 @@
+---
+title: 'gitaly-backup: Restore repositories as per backup.rake'
+merge_request: 3383
+author:
+type: added
diff --git a/changelogs/unreleased/pks-git-fetch-fsck-objects.yml b/changelogs/unreleased/pks-git-fetch-fsck-objects.yml
new file mode 100644
index 000000000..463024a92
--- /dev/null
+++ b/changelogs/unreleased/pks-git-fetch-fsck-objects.yml
@@ -0,0 +1,5 @@
+---
+title: 'git: Always check fetched objects for consistency'
+merge_request: 3458
+author:
+type: security
diff --git a/changelogs/unreleased/pks-gitlab-internal-api-latency.yml b/changelogs/unreleased/pks-gitlab-internal-api-latency.yml
new file mode 100644
index 000000000..fc57d7799
--- /dev/null
+++ b/changelogs/unreleased/pks-gitlab-internal-api-latency.yml
@@ -0,0 +1,5 @@
+---
+title: 'gitlab: Implement metric to measure latency of API calls'
+merge_request: 3409
+author:
+type: added
diff --git a/changelogs/unreleased/pks-lfs-pointers-latency.yml b/changelogs/unreleased/pks-lfs-pointers-latency.yml
new file mode 100644
index 000000000..f56bebcb6
--- /dev/null
+++ b/changelogs/unreleased/pks-lfs-pointers-latency.yml
@@ -0,0 +1,5 @@
+---
+title: 'blob: Improve latency and memory consumption for LFS pointers'
+merge_request: 3483
+author:
+type: performance
diff --git a/changelogs/unreleased/pks-makefile-git-profile.yml b/changelogs/unreleased/pks-makefile-git-profile.yml
new file mode 100644
index 000000000..a113117bc
--- /dev/null
+++ b/changelogs/unreleased/pks-makefile-git-profile.yml
@@ -0,0 +1,5 @@
+---
+title: 'Makefile: Unset PROFILE envvar before building git'
+merge_request: 3414
+author:
+type: fixed
diff --git a/changelogs/unreleased/pks-remote-find-root-ref-inmemory-remote.yml b/changelogs/unreleased/pks-remote-find-root-ref-inmemory-remote.yml
new file mode 100644
index 000000000..3fe24b4d0
--- /dev/null
+++ b/changelogs/unreleased/pks-remote-find-root-ref-inmemory-remote.yml
@@ -0,0 +1,5 @@
+---
+title: 'remote: Add RemoteUrl parameter to FindRemoteRootRef'
+merge_request: 3412
+author:
+type: added
diff --git a/changelogs/unreleased/pks-remotes-voting.yml b/changelogs/unreleased/pks-remotes-voting.yml
new file mode 100644
index 000000000..fffe31aed
--- /dev/null
+++ b/changelogs/unreleased/pks-remotes-voting.yml
@@ -0,0 +1,5 @@
+---
+title: 'remote: Vote when adding and removing remotes'
+merge_request: 3508
+author:
+type: added
diff --git a/changelogs/unreleased/pks-resolve-conflicts-drop-ruby-implementation.yml b/changelogs/unreleased/pks-resolve-conflicts-drop-ruby-implementation.yml
new file mode 100644
index 000000000..ea400bfc9
--- /dev/null
+++ b/changelogs/unreleased/pks-resolve-conflicts-drop-ruby-implementation.yml
@@ -0,0 +1,5 @@
+---
+title: 'conflicts: Drop ResolveConflicts feature flag'
+merge_request: 3410
+author:
+type: performance
diff --git a/changelogs/unreleased/pks-ssh-receive-pack-voting.yml b/changelogs/unreleased/pks-ssh-receive-pack-voting.yml
new file mode 100644
index 000000000..12ad4186f
--- /dev/null
+++ b/changelogs/unreleased/pks-ssh-receive-pack-voting.yml
@@ -0,0 +1,5 @@
+---
+title: 'ssh: Fix secondaries being out-of-date if all refs are rejected'
+merge_request: 3455
+author:
+type: fixed
diff --git a/changelogs/unreleased/remove_ff_gitaly_go_user_revert.yml b/changelogs/unreleased/remove_ff_gitaly_go_user_revert.yml
new file mode 100644
index 000000000..4c8633b75
--- /dev/null
+++ b/changelogs/unreleased/remove_ff_gitaly_go_user_revert.yml
@@ -0,0 +1,5 @@
+---
+title: Remove gitaly feature flag gitaly_go_user_revert
+merge_request: 3516
+author:
+type: changed
diff --git a/changelogs/unreleased/tc-default-enable-go-rebase.yml b/changelogs/unreleased/tc-default-enable-go-rebase.yml
new file mode 100644
index 000000000..77c782fb1
--- /dev/null
+++ b/changelogs/unreleased/tc-default-enable-go-rebase.yml
@@ -0,0 +1,5 @@
+---
+title: 'featureflag: Activate Rebase implementation in Go'
+merge_request: 3484
+author:
+type: performance
diff --git a/changelogs/unreleased/user_revert_default.yml b/changelogs/unreleased/user_revert_default.yml
new file mode 100644
index 000000000..c62e4e35f
--- /dev/null
+++ b/changelogs/unreleased/user_revert_default.yml
@@ -0,0 +1,5 @@
+---
+title: Use the go implementation of UserRevert by default
+merge_request: 3438
+author:
+type: changed
diff --git a/changelogs/unreleased/wc-no-housekeeping-cleanup.yml b/changelogs/unreleased/wc-no-housekeeping-cleanup.yml
new file mode 100644
index 000000000..e2da5d9b8
--- /dev/null
+++ b/changelogs/unreleased/wc-no-housekeeping-cleanup.yml
@@ -0,0 +1,5 @@
+---
+title: Don't run housekeeping in Cleanup RPC
+merge_request: 3502
+author:
+type: fixed
diff --git a/changelogs/unreleased/zj-remove-wiki-find-file.yml b/changelogs/unreleased/zj-remove-wiki-find-file.yml
new file mode 100644
index 000000000..3465c2f1c
--- /dev/null
+++ b/changelogs/unreleased/zj-remove-wiki-find-file.yml
@@ -0,0 +1,5 @@
+---
+title: 'wiki: Remove FindFile RPC'
+merge_request: 3454
+author:
+type: removed
diff --git a/cmd/gitaly-backup/create.go b/cmd/gitaly-backup/create.go
index 49aac2be8..ea80eb32a 100644
--- a/cmd/gitaly-backup/create.go
+++ b/cmd/gitaly-backup/create.go
@@ -3,10 +3,10 @@ package main
import (
"context"
"encoding/json"
- "errors"
"flag"
"fmt"
"io"
+ "runtime"
log "github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/internal/backup"
@@ -23,16 +23,23 @@ type serverRepository struct {
type createSubcommand struct {
backupPath string
+ parallel int
}
func (cmd *createSubcommand) Flags(fs *flag.FlagSet) {
fs.StringVar(&cmd.backupPath, "path", "", "repository backup path")
+ fs.IntVar(&cmd.parallel, "parallel", runtime.NumCPU(), "maximum number of parallel backups")
}
func (cmd *createSubcommand) Run(ctx context.Context, stdin io.Reader, stdout io.Writer) error {
fsBackup := backup.NewFilesystem(cmd.backupPath)
- var failed int
+ var pipeline backup.CreatePipeline
+ pipeline = backup.NewPipeline(log.StandardLogger(), fsBackup)
+ if cmd.parallel > 0 {
+ pipeline = backup.NewParallelCreatePipeline(pipeline, cmd.parallel)
+ }
+
decoder := json.NewDecoder(stdin)
for {
var sr serverRepository
@@ -41,32 +48,19 @@ func (cmd *createSubcommand) Run(ctx context.Context, stdin io.Reader, stdout io
} else if err != nil {
return fmt.Errorf("create: %w", err)
}
- repoLog := log.WithFields(log.Fields{
- "storage_name": sr.StorageName,
- "relative_path": sr.RelativePath,
- "gl_project_path": sr.GlProjectPath,
- })
repo := gitalypb.Repository{
StorageName: sr.StorageName,
RelativePath: sr.RelativePath,
GlProjectPath: sr.GlProjectPath,
}
- repoLog.Info("started backup")
- if err := fsBackup.BackupRepository(ctx, sr.ServerInfo, &repo); err != nil {
- if errors.Is(err, backup.ErrSkipped) {
- repoLog.Warn("skipped backup")
- } else {
- repoLog.WithError(err).Error("backup failed")
- failed++
- }
- continue
- }
-
- repoLog.Info("completed backup")
+ pipeline.Create(ctx, &backup.CreateRequest{
+ Server: sr.ServerInfo,
+ Repository: &repo,
+ })
}
- if failed > 0 {
- return fmt.Errorf("create: %d failures encountered", failed)
+ if err := pipeline.Done(); err != nil {
+ return fmt.Errorf("create: %w", err)
}
return nil
}
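
With this rewrite, Run no longer counts failures inline: each per-repository error is expected to be absorbed by the pipeline, and the accumulated result is surfaced once through pipeline.Done(), which is why the test expectation below changes to "create: pipeline: 1 failures encountered". A rough sketch of that aggregation contract (illustrative only; the real CreatePipeline types live in internal/backup and are not shown in full in this hunk):

package main

import (
	"context"
	"fmt"
)

// createRequest stands in for backup.CreateRequest; the field is illustrative.
type createRequest struct{ relativePath string }

// sketchPipeline mimics the contract implied by the new Run loop: Create never
// aborts the stream, Done reports accumulated failures once.
type sketchPipeline struct{ failed int }

func (p *sketchPipeline) Create(ctx context.Context, req *createRequest) {
	if err := backupOne(ctx, req); err != nil {
		// Log and count, but keep going; remaining repositories still get backed up.
		fmt.Printf("backup failed for %q: %v\n", req.relativePath, err)
		p.failed++
	}
}

func (p *sketchPipeline) Done() error {
	if p.failed > 0 {
		return fmt.Errorf("pipeline: %d failures encountered", p.failed)
	}
	return nil
}

func backupOne(_ context.Context, req *createRequest) error {
	if req.relativePath == "" {
		return fmt.Errorf("repository not found")
	}
	return nil // pretend a bundle was written
}

func main() {
	ctx := context.Background()
	p := &sketchPipeline{}
	p.Create(ctx, &createRequest{relativePath: "group/project.git"})
	p.Create(ctx, &createRequest{relativePath: ""}) // simulated failure
	fmt.Println(p.Done())                           // reported once, as Run does via %w wrapping
}
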
diff --git a/cmd/gitaly-backup/create_test.go b/cmd/gitaly-backup/create_test.go
index e14944dda..d26d37b11 100644
--- a/cmd/gitaly-backup/create_test.go
+++ b/cmd/gitaly-backup/create_test.go
@@ -28,7 +28,7 @@ func TestCreateSubcommand(t *testing.T) {
var repos []*gitalypb.Repository
for i := 0; i < 5; i++ {
- repo, _, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], fmt.Sprintf("repo-%d", i))
+ repo, _, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], fmt.Sprintf("repo-%d", i))
repos = append(repos, repo)
}
@@ -58,7 +58,7 @@ func TestCreateSubcommand(t *testing.T) {
require.NoError(t, fs.Parse([]string{"-path", path}))
require.EqualError(t,
cmd.Run(context.Background(), &stdin, ioutil.Discard),
- "create: 1 failures encountered")
+ "create: pipeline: 1 failures encountered")
for _, repo := range repos {
bundlePath := filepath.Join(path, repo.RelativePath+".bundle")
diff --git a/cmd/gitaly-backup/main.go b/cmd/gitaly-backup/main.go
index 12cf21396..57173ca31 100644
--- a/cmd/gitaly-backup/main.go
+++ b/cmd/gitaly-backup/main.go
@@ -16,7 +16,8 @@ type subcmd interface {
}
var subcommands = map[string]subcmd{
- "create": &createSubcommand{},
+ "create": &createSubcommand{},
+ "restore": &restoreSubcommand{},
}
func main() {
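
With `restore` registered alongside `create`, `gitaly-backup` dispatches on its first argument through the `subcmd` interface shown in this hunk. A hedged sketch of that dispatch follows; the `dispatch` helper and the `noop` stub are hypothetical and only approximate the shape implied by the diff.

```go
// Hypothetical sketch of the subcommand dispatch implied by the map above.
// The subcmd interface shape is approximated from this diff, not copied
// from the gitaly-backup sources.
package main

import (
	"context"
	"flag"
	"fmt"
	"io"
	"os"
)

type subcmd interface {
	Flags(fs *flag.FlagSet)
	Run(ctx context.Context, stdin io.Reader, stdout io.Writer) error
}

// noop is a stand-in implementation used only to make the sketch runnable.
type noop struct{}

func (noop) Flags(*flag.FlagSet)                             {}
func (noop) Run(context.Context, io.Reader, io.Writer) error { return nil }

func dispatch(ctx context.Context, subcommands map[string]subcmd, args []string) error {
	if len(args) < 2 {
		return fmt.Errorf("usage: %s <create|restore> [flags]", args[0])
	}
	cmd, ok := subcommands[args[1]]
	if !ok {
		return fmt.Errorf("unknown subcommand: %q", args[1])
	}
	fs := flag.NewFlagSet(args[1], flag.ExitOnError)
	cmd.Flags(fs)
	if err := fs.Parse(args[2:]); err != nil {
		return err
	}
	return cmd.Run(ctx, os.Stdin, os.Stdout)
}

func main() {
	subcommands := map[string]subcmd{"create": noop{}, "restore": noop{}}
	if err := dispatch(context.Background(), subcommands, os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```
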
diff --git a/cmd/gitaly-backup/restore.go b/cmd/gitaly-backup/restore.go
new file mode 100644
index 000000000..a7b8a3e10
--- /dev/null
+++ b/cmd/gitaly-backup/restore.go
@@ -0,0 +1,62 @@
+package main
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+
+ log "github.com/sirupsen/logrus"
+ "gitlab.com/gitlab-org/gitaly/internal/backup"
+ "gitlab.com/gitlab-org/gitaly/internal/storage"
+ "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+)
+
+type restoreRequest struct {
+ storage.ServerInfo
+ StorageName string `json:"storage_name"`
+ RelativePath string `json:"relative_path"`
+ GlProjectPath string `json:"gl_project_path"`
+ AlwaysCreate bool `json:"always_create"`
+}
+
+type restoreSubcommand struct {
+ backupPath string
+}
+
+func (cmd *restoreSubcommand) Flags(fs *flag.FlagSet) {
+ fs.StringVar(&cmd.backupPath, "path", "", "repository backup path")
+}
+
+func (cmd *restoreSubcommand) Run(ctx context.Context, stdin io.Reader, stdout io.Writer) error {
+ fsBackup := backup.NewFilesystem(cmd.backupPath)
+ pipeline := backup.NewPipeline(log.StandardLogger(), fsBackup)
+
+ decoder := json.NewDecoder(stdin)
+ for {
+ var req restoreRequest
+ if err := decoder.Decode(&req); errors.Is(err, io.EOF) {
+ break
+ } else if err != nil {
+ return fmt.Errorf("restore: %w", err)
+ }
+
+ repo := gitalypb.Repository{
+ StorageName: req.StorageName,
+ RelativePath: req.RelativePath,
+ GlProjectPath: req.GlProjectPath,
+ }
+ pipeline.Restore(ctx, &backup.RestoreRequest{
+ Server: req.ServerInfo,
+ Repository: &repo,
+ AlwaysCreate: req.AlwaysCreate,
+ })
+ }
+
+ if err := pipeline.Done(); err != nil {
+ return fmt.Errorf("restore: %w", err)
+ }
+ return nil
+}
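
The new restore subcommand consumes a stream of newline-delimited JSON objects on stdin, one per repository, matching the `restoreRequest` fields above. The snippet below sketches how such a stream could be produced with `encoding/json`; the address, token, and repository values are placeholders, not real configuration.

```go
// Sketch of feeding gitaly-backup restore: one JSON document per repository,
// streamed on stdin. All concrete values below are placeholders.
package main

import (
	"encoding/json"
	"os"
)

type restoreRequest struct {
	Address       string `json:"address"`
	Token         string `json:"token"`
	StorageName   string `json:"storage_name"`
	RelativePath  string `json:"relative_path"`
	GlProjectPath string `json:"gl_project_path"`
	AlwaysCreate  bool   `json:"always_create"`
}

func main() {
	enc := json.NewEncoder(os.Stdout)
	for _, req := range []restoreRequest{
		{
			Address:       "unix:///var/opt/gitlab/gitaly/gitaly.socket", // placeholder
			Token:         "secret",                                      // placeholder
			StorageName:   "default",
			RelativePath:  "@hashed/aa/bb/example-repo.git", // placeholder path
			GlProjectPath: "group/project",
			AlwaysCreate:  true,
		},
	} {
		// Each Encode call emits one newline-terminated JSON document,
		// matching the json.Decoder loop in the restore subcommand.
		if err := enc.Encode(req); err != nil {
			panic(err)
		}
	}
}
```
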
diff --git a/cmd/gitaly-backup/restore_test.go b/cmd/gitaly-backup/restore_test.go
new file mode 100644
index 000000000..015cce792
--- /dev/null
+++ b/cmd/gitaly-backup/restore_test.go
@@ -0,0 +1,77 @@
+package main
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service/setup"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+)
+
+func TestRestoreSubcommand(t *testing.T) {
+ cfg := testcfg.Build(t)
+ testhelper.ConfigureGitalyHooksBin(t, cfg)
+
+ gitalyAddr := testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll)
+
+ path := testhelper.TempDir(t)
+
+ existingRepo, existRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "existing_repo")
+ existingRepoBundlePath := filepath.Join(path, existingRepo.RelativePath+".bundle")
+ gittest.Exec(t, cfg, "-C", existRepoPath, "bundle", "create", existingRepoBundlePath, "--all")
+
+ repos := []*gitalypb.Repository{existingRepo}
+ for i := 0; i < 2; i++ {
+ repo := gittest.InitRepoDir(t, cfg.Storages[0].Path, fmt.Sprintf("repo-%d", i))
+ repoBundlePath := filepath.Join(path, repo.RelativePath+".bundle")
+ testhelper.CopyFile(t, existingRepoBundlePath, repoBundlePath)
+ repos = append(repos, repo)
+ }
+
+ var stdin bytes.Buffer
+
+ encoder := json.NewEncoder(&stdin)
+ for _, repo := range repos {
+ require.NoError(t, encoder.Encode(map[string]string{
+ "address": gitalyAddr,
+ "token": cfg.Auth.Token,
+ "storage_name": repo.StorageName,
+ "relative_path": repo.RelativePath,
+ "gl_project_path": repo.GlProjectPath,
+ }))
+ }
+
+ require.NoError(t, encoder.Encode(map[string]string{
+ "address": "invalid",
+ "token": "invalid",
+ }))
+
+ cmd := restoreSubcommand{}
+
+ fs := flag.NewFlagSet("restore", flag.ContinueOnError)
+ cmd.Flags(fs)
+
+ require.NoError(t, fs.Parse([]string{"-path", path}))
+ require.EqualError(t,
+ cmd.Run(context.Background(), &stdin, ioutil.Discard),
+ "restore: pipeline: 1 failures encountered")
+
+ for _, repo := range repos {
+ repoPath := filepath.Join(cfg.Storages[0].Path, repo.RelativePath)
+ bundlePath := filepath.Join(path, repo.RelativePath+".bundle")
+
+ output := gittest.Exec(t, cfg, "-C", repoPath, "bundle", "verify", bundlePath)
+ require.Contains(t, string(output), "The bundle records a complete history")
+ }
+}
diff --git a/cmd/gitaly-git2go/main.go b/cmd/gitaly-git2go/main.go
index ab7e1dfe2..5256b5390 100644
--- a/cmd/gitaly-git2go/main.go
+++ b/cmd/gitaly-git2go/main.go
@@ -23,6 +23,7 @@ var subcommands = map[string]subcmd{
"commit": commitSubcommand{},
"conflicts": &conflicts.Subcommand{},
"merge": &mergeSubcommand{},
+ "rebase": &rebaseSubcommand{},
"revert": &revertSubcommand{},
"resolve": &resolveSubcommand{},
"submodule": &submoduleSubcommand{},
diff --git a/cmd/gitaly-git2go/merge_test.go b/cmd/gitaly-git2go/merge_test.go
index efa767498..7ef15366f 100644
--- a/cmd/gitaly-git2go/merge_test.go
+++ b/cmd/gitaly-git2go/merge_test.go
@@ -237,7 +237,7 @@ func TestMerge_recursive(t *testing.T) {
cfg := testcfg.Build(t)
testhelper.ConfigureGitalyGit2GoBin(t, cfg)
- _, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ _, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer cleanup()
base := cmdtesthelper.BuildCommit(t, repoPath, nil, map[string]string{"base": "base\n"})
diff --git a/cmd/gitaly-git2go/rebase.go b/cmd/gitaly-git2go/rebase.go
new file mode 100644
index 000000000..9bb13f2a6
--- /dev/null
+++ b/cmd/gitaly-git2go/rebase.go
@@ -0,0 +1,142 @@
+// +build static,system_libgit2
+
+package main
+
+import (
+ "context"
+ "encoding/gob"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+
+ git "github.com/libgit2/git2go/v31"
+ "gitlab.com/gitlab-org/gitaly/internal/git2go"
+)
+
+type rebaseSubcommand struct{}
+
+func (cmd *rebaseSubcommand) Flags() *flag.FlagSet {
+ return flag.NewFlagSet("rebase", flag.ExitOnError)
+}
+
+func (cmd *rebaseSubcommand) Run(ctx context.Context, r io.Reader, w io.Writer) error {
+ var request git2go.RebaseCommand
+ if err := gob.NewDecoder(r).Decode(&request); err != nil {
+ return err
+ }
+
+ commitID, err := cmd.rebase(ctx, &request)
+ return gob.NewEncoder(w).Encode(git2go.Result{
+ CommitID: commitID,
+ Error: git2go.SerializableError(err),
+ })
+}
+
+func (cmd *rebaseSubcommand) verify(ctx context.Context, r *git2go.RebaseCommand) error {
+ if r.Repository == "" {
+ return errors.New("missing repository")
+ }
+ if r.Committer.Name == "" {
+ return errors.New("missing committer name")
+ }
+ if r.Committer.Email == "" {
+ return errors.New("missing committer email")
+ }
+ if r.BranchName == "" {
+ return errors.New("missing branch name")
+ }
+ if r.UpstreamRevision == "" {
+ return errors.New("missing upstream revision")
+ }
+ return nil
+}
+
+func (cmd *rebaseSubcommand) rebase(ctx context.Context, request *git2go.RebaseCommand) (string, error) {
+ if err := cmd.verify(ctx, request); err != nil {
+ return "", err
+ }
+
+ repo, err := git.OpenRepository(request.Repository)
+ if err != nil {
+ return "", fmt.Errorf("open repository: %w", err)
+ }
+
+ opts, err := git.DefaultRebaseOptions()
+ if err != nil {
+ return "", fmt.Errorf("get rebase options: %w", err)
+ }
+ opts.InMemory = 1
+
+ branch, err := repo.AnnotatedCommitFromRevspec(fmt.Sprintf("refs/heads/%s", request.BranchName))
+ if err != nil {
+ return "", fmt.Errorf("look up branch %q: %w", request.BranchName, err)
+ }
+
+ ontoOid, err := git.NewOid(request.UpstreamRevision)
+ if err != nil {
+ return "", fmt.Errorf("parse upstream revision %q: %w", request.UpstreamRevision, err)
+ }
+
+ onto, err := repo.LookupAnnotatedCommit(ontoOid)
+ if err != nil {
+ return "", fmt.Errorf("look up upstream revision %q: %w", request.UpstreamRevision, err)
+ }
+
+ mergeBase, err := repo.MergeBase(onto.Id(), branch.Id())
+ if err != nil {
+ return "", fmt.Errorf("find merge base: %w", err)
+ }
+
+ if mergeBase.Equal(onto.Id()) {
+ // Branch is zero commits behind, so do not rebase
+ return branch.Id().String(), nil
+ }
+
+ if mergeBase.Equal(branch.Id()) {
+ // Branch is merged, so fast-forward to upstream
+ return onto.Id().String(), nil
+ }
+
+ mergeCommit, err := repo.LookupAnnotatedCommit(mergeBase)
+ if err != nil {
+ return "", fmt.Errorf("look up merge base: %w", err)
+ }
+
+ rebase, err := repo.InitRebase(branch, mergeCommit, onto, &opts)
+ if err != nil {
+ return "", fmt.Errorf("initiate rebase: %w", err)
+ }
+
+ committer := git.Signature(request.Committer)
+ var oid *git.Oid
+ for {
+ op, err := rebase.Next()
+ if git.IsErrorCode(err, git.ErrIterOver) {
+ break
+ } else if err != nil {
+ return "", fmt.Errorf("rebase iterate: %w", err)
+ }
+
+ commit, err := repo.LookupCommit(op.Id)
+ if err != nil {
+ return "", fmt.Errorf("lookup commit: %w", err)
+ }
+
+ oid = op.Id.Copy()
+ err = rebase.Commit(oid, nil, &committer, commit.Message())
+ if err != nil {
+ return "", fmt.Errorf("commit %q: %w", op.Id.String(), err)
+ }
+ }
+
+ if oid == nil {
+ return branch.Id().String(), nil
+ }
+
+ if err = rebase.Finish(); err != nil {
+ return "", fmt.Errorf("finish rebase: %w", err)
+ }
+
+ return oid.String(), nil
+}
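
The new `gitaly-git2go rebase` subcommand reads a single gob-encoded `git2go.RebaseCommand` from stdin and writes one gob-encoded result back, as its `Run` method above shows. The sketch below illustrates that framing with simplified stand-in types; it is not the real `git2go` package, and the repository path and revision are placeholders.

```go
// Sketch of the gob framing the rebase subcommand expects on stdin and the
// single result it writes back on stdout. These types are simplified
// stand-ins for git2go.RebaseCommand and git2go.Result.
package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
)

type signature struct {
	Name, Email string
}

type rebaseCommand struct {
	Repository       string
	Committer        signature
	BranchName       string
	UpstreamRevision string
}

func main() {
	// The Gitaly parent process would encode the command like this before
	// spawning gitaly-git2go with the buffer attached to its stdin.
	var stdin bytes.Buffer
	if err := gob.NewEncoder(&stdin).Encode(rebaseCommand{
		Repository:       "/path/to/repo.git", // placeholder path
		Committer:        signature{Name: "Foo", Email: "foo@example.com"},
		BranchName:       "feature",
		UpstreamRevision: "1e292f8fedd741b75372e19097c76d327140c312",
	}); err != nil {
		panic(err)
	}

	// Decoding demonstrates the symmetric half performed by the subcommand;
	// the child replies with one gob-encoded result containing the rebased
	// commit ID or a serialized error.
	var decoded rebaseCommand
	if err := gob.NewDecoder(&stdin).Decode(&decoded); err != nil {
		panic(err)
	}
	fmt.Printf("rebase %q onto %s\n", decoded.BranchName, decoded.UpstreamRevision)
}
```
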
diff --git a/cmd/gitaly-git2go/rebase_test.go b/cmd/gitaly-git2go/rebase_test.go
new file mode 100644
index 000000000..a169b4a1f
--- /dev/null
+++ b/cmd/gitaly-git2go/rebase_test.go
@@ -0,0 +1,203 @@
+// +build static,system_libgit2
+
+package main
+
+import (
+ "testing"
+ "time"
+
+ git "github.com/libgit2/git2go/v31"
+ "github.com/stretchr/testify/require"
+ cmdtesthelper "gitlab.com/gitlab-org/gitaly/cmd/gitaly-git2go/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/internal/git2go"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+)
+
+var (
+ masterRevision = "1e292f8fedd741b75372e19097c76d327140c312"
+)
+
+func TestRebase_validation(t *testing.T) {
+ cfg, _, repoPath := testcfg.BuildWithRepo(t)
+ testhelper.ConfigureGitalyGit2GoBin(t, cfg)
+ committer := git2go.NewSignature("Foo", "foo@example.com", time.Now())
+
+ testcases := []struct {
+ desc string
+ request git2go.RebaseCommand
+ expectedErr string
+ }{
+ {
+ desc: "no arguments",
+ expectedErr: "rebase: missing repository",
+ },
+ {
+ desc: "missing repository",
+ request: git2go.RebaseCommand{Committer: committer, BranchName: "feature", UpstreamRevision: masterRevision},
+ expectedErr: "rebase: missing repository",
+ },
+ {
+ desc: "missing committer name",
+ request: git2go.RebaseCommand{Repository: repoPath, Committer: git2go.Signature{Email: "foo@example.com"}, BranchName: "feature", UpstreamRevision: masterRevision},
+ expectedErr: "rebase: missing committer name",
+ },
+ {
+ desc: "missing committer email",
+ request: git2go.RebaseCommand{Repository: repoPath, Committer: git2go.Signature{Name: "Foo"}, BranchName: "feature", UpstreamRevision: masterRevision},
+ expectedErr: "rebase: missing committer email",
+ },
+ {
+ desc: "missing branch name",
+ request: git2go.RebaseCommand{Repository: repoPath, Committer: committer, UpstreamRevision: masterRevision},
+ expectedErr: "rebase: missing branch name",
+ },
+ {
+ desc: "missing upstream branch",
+ request: git2go.RebaseCommand{Repository: repoPath, Committer: committer, BranchName: "feature"},
+ expectedErr: "rebase: missing upstream revision",
+ },
+ }
+ for _, tc := range testcases {
+ t.Run(tc.desc, func(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ _, err := tc.request.Run(ctx, cfg)
+ require.EqualError(t, err, tc.expectedErr)
+ })
+ }
+}
+
+func TestRebase_rebase(t *testing.T) {
+ testcases := []struct {
+ desc string
+ branch string
+ commitsAhead int
+ setupRepo func(testing.TB, *git.Repository)
+ expected string
+ expectedErr string
+ }{
+ {
+ desc: "Single commit rebase",
+ branch: "gitaly-rename-test",
+ commitsAhead: 1,
+ expected: "a08ed4bc45f9e686db93c5d0519f63d7b537270c",
+ },
+ {
+ desc: "Multiple commits",
+ branch: "csv",
+ commitsAhead: 5,
+ expected: "2f8365edc69d3683e22c4209ae9641642d84dd4a",
+ },
+ {
+ desc: "Branch zero commits behind",
+ branch: "sha-starting-with-large-number",
+ commitsAhead: 1,
+ expected: "842616594688d2351480dfebd67b3d8d15571e6d",
+ },
+ {
+ desc: "Merged branch",
+ branch: "branch-merged",
+ expected: masterRevision,
+ },
+ {
+ desc: "Partially merged branch",
+ branch: "branch-merged-plus-one",
+ setupRepo: func(t testing.TB, repo *git.Repository) {
+ head, err := lookupCommit(repo, "branch-merged")
+ require.NoError(t, err)
+
+ other, err := lookupCommit(repo, "gitaly-rename-test")
+ require.NoError(t, err)
+ tree, err := other.Tree()
+ require.NoError(t, err)
+ newOid, err := repo.CreateCommitFromIds("refs/heads/branch-merged-plus-one", &cmdtesthelper.DefaultAuthor, &cmdtesthelper.DefaultAuthor, "Message", tree.Object.Id(), head.Object.Id())
+ require.NoError(t, err)
+ require.Equal(t, "5da601ef10e314884bbade9d5b063be37579ccf9", newOid.String())
+ },
+ commitsAhead: 1,
+ expected: "591b29084164bcc58fa4fb851a3c409290b17bfe",
+ },
+ {
+ desc: "With upstream merged into",
+ branch: "csv-plus-merge",
+ setupRepo: func(t testing.TB, repo *git.Repository) {
+ ours, err := lookupCommit(repo, "csv")
+ require.NoError(t, err)
+ theirs, err := lookupCommit(repo, "b83d6e391c22777fca1ed3012fce84f633d7fed0")
+ require.NoError(t, err)
+
+ index, err := repo.MergeCommits(ours, theirs, nil)
+ require.NoError(t, err)
+ tree, err := index.WriteTreeTo(repo)
+ require.NoError(t, err)
+
+ newOid, err := repo.CreateCommitFromIds("refs/heads/csv-plus-merge", &cmdtesthelper.DefaultAuthor, &cmdtesthelper.DefaultAuthor, "Message", tree, ours.Object.Id(), theirs.Object.Id())
+ require.NoError(t, err)
+ require.Equal(t, "5cfe4a597b54c8f2b7ae85212f67599a1492009c", newOid.String())
+ },
+ commitsAhead: 5, // Same as "Multiple commits"
+ expected: "2f8365edc69d3683e22c4209ae9641642d84dd4a",
+ },
+ {
+ desc: "Rebase with conflict",
+ branch: "rebase-encoding-failure-trigger",
+ expectedErr: "rebase: commit \"eb8f5fb9523b868cef583e09d4bf70b99d2dd404\": conflicts have not been resolved",
+ },
+ {
+ desc: "Orphaned branch",
+ branch: "orphaned-branch",
+ expectedErr: "rebase: find merge base: no merge base found",
+ },
+ }
+
+ for _, tc := range testcases {
+ t.Run(tc.desc, func(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ committer := git2go.NewSignature(string(gittest.TestUser.Name),
+ string(gittest.TestUser.Email),
+ time.Date(2021, 3, 1, 13, 45, 50, 0, time.FixedZone("", +2*60*60)))
+
+ cfg, _, repoPath := testcfg.BuildWithRepo(t)
+ testhelper.ConfigureGitalyGit2GoBin(t, cfg)
+
+ repo, err := git.OpenRepository(repoPath)
+ require.NoError(t, err)
+
+ if tc.setupRepo != nil {
+ tc.setupRepo(t, repo)
+ }
+
+ request := git2go.RebaseCommand{
+ Repository: repoPath,
+ Committer: committer,
+ BranchName: tc.branch,
+ UpstreamRevision: masterRevision,
+ }
+
+ response, err := request.Run(ctx, cfg)
+ if tc.expectedErr != "" {
+ require.EqualError(t, err, tc.expectedErr)
+ } else {
+ require.NoError(t, err)
+
+ result := response.String()
+ require.Equal(t, tc.expected, result)
+
+ commit, err := lookupCommit(repo, result)
+ require.NoError(t, err)
+
+ for i := tc.commitsAhead; i > 0; i-- {
+ commit = commit.Parent(0)
+ }
+ masterCommit, err := lookupCommit(repo, masterRevision)
+ require.NoError(t, err)
+ require.Equal(t, masterCommit, commit)
+ }
+ })
+ }
+}
diff --git a/cmd/gitaly-git2go/submodule_test.go b/cmd/gitaly-git2go/submodule_test.go
index 48a8bf8e2..005b6780d 100644
--- a/cmd/gitaly-git2go/submodule_test.go
+++ b/cmd/gitaly-git2go/submodule_test.go
@@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/lstree"
"gitlab.com/gitlab-org/gitaly/internal/git2go"
@@ -27,8 +28,8 @@ func TestSubmodule(t *testing.T) {
{
desc: "Update submodule",
command: git2go.SubmoduleCommand{
- AuthorName: string(testhelper.TestUser.Name),
- AuthorMail: string(testhelper.TestUser.Email),
+ AuthorName: string(gittest.TestUser.Name),
+ AuthorMail: string(gittest.TestUser.Email),
Message: string(commitMessage),
CommitSHA: "41fa1bc9e0f0630ced6a8a211d60c2af425ecc2d",
Submodule: "gitlab-grack",
@@ -38,8 +39,8 @@ func TestSubmodule(t *testing.T) {
{
desc: "Update submodule inside folder",
command: git2go.SubmoduleCommand{
- AuthorName: string(testhelper.TestUser.Name),
- AuthorMail: string(testhelper.TestUser.Email),
+ AuthorName: string(gittest.TestUser.Name),
+ AuthorMail: string(gittest.TestUser.Email),
Message: string(commitMessage),
CommitSHA: "e25eda1fece24ac7a03624ed1320f82396f35bd8",
Submodule: "test_inside_folder/another_folder/six",
@@ -49,8 +50,8 @@ func TestSubmodule(t *testing.T) {
{
desc: "Invalid branch",
command: git2go.SubmoduleCommand{
- AuthorName: string(testhelper.TestUser.Name),
- AuthorMail: string(testhelper.TestUser.Email),
+ AuthorName: string(gittest.TestUser.Name),
+ AuthorMail: string(gittest.TestUser.Email),
Message: string(commitMessage),
CommitSHA: "e25eda1fece24ac7a03624ed1320f82396f35bd8",
Submodule: "test_inside_folder/another_folder/six",
@@ -61,8 +62,8 @@ func TestSubmodule(t *testing.T) {
{
desc: "Invalid submodule",
command: git2go.SubmoduleCommand{
- AuthorName: string(testhelper.TestUser.Name),
- AuthorMail: string(testhelper.TestUser.Email),
+ AuthorName: string(gittest.TestUser.Name),
+ AuthorMail: string(gittest.TestUser.Email),
Message: string(commitMessage),
CommitSHA: "e25eda1fece24ac7a03624ed1320f82396f35bd8",
Submodule: "non-existent-submodule",
@@ -73,8 +74,8 @@ func TestSubmodule(t *testing.T) {
{
desc: "Duplicate reference",
command: git2go.SubmoduleCommand{
- AuthorName: string(testhelper.TestUser.Name),
- AuthorMail: string(testhelper.TestUser.Email),
+ AuthorName: string(gittest.TestUser.Name),
+ AuthorMail: string(gittest.TestUser.Email),
Message: string(commitMessage),
CommitSHA: "409f37c4f05865e4fb208c771485f211a22c4c2d",
Submodule: "six",
@@ -88,7 +89,7 @@ func TestSubmodule(t *testing.T) {
t.Run(tc.desc, func(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
testhelper.ConfigureGitalyGit2GoBin(t, cfg)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
tc.command.Repository = repoPath
@@ -105,14 +106,13 @@ func TestSubmodule(t *testing.T) {
commit, err := repo.ReadCommit(ctx, git.Revision(response.CommitID))
require.NoError(t, err)
- require.Equal(t, commit.Author.Email, testhelper.TestUser.Email)
- require.Equal(t, commit.Committer.Email, testhelper.TestUser.Email)
+ require.Equal(t, commit.Author.Email, gittest.TestUser.Email)
+ require.Equal(t, commit.Committer.Email, gittest.TestUser.Email)
require.Equal(t, commit.Subject, commitMessage)
- entry := testhelper.MustRunCommand(
+ entry := gittest.Exec(
t,
- nil,
- "git",
+ cfg,
"-C",
repoPath,
"ls-tree",
diff --git a/cmd/gitaly-hooks/hooks.go b/cmd/gitaly-hooks/hooks.go
index ffd61ead9..b83c995eb 100644
--- a/cmd/gitaly-hooks/hooks.go
+++ b/cmd/gitaly-hooks/hooks.go
@@ -15,7 +15,9 @@ import (
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/prometheus"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
gitalylog "gitlab.com/gitlab-org/gitaly/internal/log"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/stream"
@@ -60,19 +62,28 @@ var (
func main() {
logger = gitalylog.NewHookLogger()
- if len(os.Args) < 2 {
- logger.Fatalf("requires hook name. args: %v", os.Args)
+ returnCode, err := run(os.Args)
+ if err != nil {
+ logger.Fatalf("%s", err)
}
- subCmd := os.Args[1]
+ os.Exit(returnCode)
+}
+
+func run(args []string) (int, error) {
+ if len(args) < 2 {
+ return 0, fmt.Errorf("requires hook name. args: %v", args)
+ }
+
+ subCmd := args[1]
if subCmd == "check" {
logrus.SetLevel(logrus.ErrorLevel)
- if len(os.Args) != 3 {
+ if len(args) != 3 {
log.Fatal(errors.New("no configuration file path provided invoke with: gitaly-hooks check <config_path>"))
}
- configPath := os.Args[2]
+ configPath := args[2]
fmt.Print("Checking GitLab API access: ")
info, err := check(configPath)
@@ -87,7 +98,8 @@ func main() {
fmt.Printf("GitLab Api version: %s\n", info.APIVersion)
fmt.Printf("Redis reachable for GitLab: %t\n", info.RedisReachable)
fmt.Println("OK")
- os.Exit(0)
+
+ return 0, nil
}
ctx, cancel := context.WithCancel(context.Background())
@@ -101,33 +113,35 @@ func main() {
payload, err := git.HooksPayloadFromEnv(os.Environ())
if err != nil {
- logger.Fatalf("error when getting hooks payload: %v", err)
+ return 0, fmt.Errorf("error when getting hooks payload: %v", err)
}
hookCommand, ok := hooksBySubcommand[subCmd]
if !ok {
- logger.Fatalf("subcommand name invalid: %q", subCmd)
+ return 0, fmt.Errorf("subcommand name invalid: %q", subCmd)
}
// If the hook wasn't requested, then we simply skip executing any
// logic.
if !payload.IsHookRequested(hookCommand.hookType) {
- os.Exit(0)
+ return 0, nil
}
conn, err := dialGitaly(payload)
if err != nil {
- logger.Fatalf("error when connecting to gitaly: %v", err)
+ return 0, fmt.Errorf("error when connecting to gitaly: %v", err)
}
+ defer conn.Close()
+
hookClient := gitalypb.NewHookServiceClient(conn)
ctx = featureflag.OutgoingWithRaw(ctx, payload.FeatureFlags)
- returnCode, err := hookCommand.exec(ctx, payload, hookClient, os.Args)
+ returnCode, err := hookCommand.exec(ctx, payload, hookClient, args)
if err != nil {
- logger.Fatal(err)
+ return 0, err
}
- os.Exit(returnCode)
+ return returnCode, nil
}
func noopSender(c chan error) {}
@@ -169,7 +183,7 @@ func sendFunc(reqWriter io.Writer, stream grpc.ClientStream, stdin io.Reader) fu
}
}
-func check(configPath string) (*hook.CheckInfo, error) {
+func check(configPath string) (*gitlab.CheckInfo, error) {
cfgFile, err := os.Open(configPath)
if err != nil {
return nil, fmt.Errorf("failed to open config file: %w", err)
@@ -181,7 +195,7 @@ func check(configPath string) (*hook.CheckInfo, error) {
return nil, err
}
- gitlabAPI, err := hook.NewGitlabAPI(cfg.Gitlab, cfg.TLS)
+ gitlabAPI, err := gitlab.NewHTTPClient(cfg.Gitlab, cfg.TLS, prometheus.Config{})
if err != nil {
return nil, err
}
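
This refactor funnels the hook entrypoint through `run(args)` so that deferred cleanup (such as `conn.Close()`) executes before the process exits, and `main` remains the only place that calls `os.Exit`. A minimal sketch of the pattern, with the hook dispatch elided:

```go
// Minimal sketch of the "run returns an exit code" pattern adopted above:
// deferred cleanup inside run still executes because os.Exit is only called
// from main after run has returned.
package main

import (
	"fmt"
	"log"
	"os"
)

func run(args []string) (int, error) {
	if len(args) < 2 {
		return 0, fmt.Errorf("requires hook name. args: %v", args)
	}

	cleanup := func() { /* e.g. close the Gitaly connection */ }
	defer cleanup() // runs before main ever calls os.Exit

	// ... dispatch to the requested hook and collect its exit status ...
	return 0, nil
}

func main() {
	code, err := run(os.Args)
	if err != nil {
		log.Fatalf("%s", err)
	}
	os.Exit(code)
}
```
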
diff --git a/cmd/gitaly-hooks/hooks_test.go b/cmd/gitaly-hooks/hooks_test.go
index a79baf910..b41fe48ee 100644
--- a/cmd/gitaly-hooks/hooks_test.go
+++ b/cmd/gitaly-hooks/hooks_test.go
@@ -5,7 +5,6 @@ import (
"context"
"encoding/json"
"fmt"
- "io/ioutil"
"os"
"os/exec"
"path"
@@ -16,23 +15,24 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config/auth"
internallog "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/log"
- gitalyhook "gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/prometheus"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/hook"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
gitalylog "gitlab.com/gitlab-org/gitaly/internal/log"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
- "google.golang.org/grpc/reflection"
+ "google.golang.org/grpc"
)
type glHookValues struct {
@@ -171,11 +171,10 @@ func testHooksPrePostReceive(t *testing.T, cfg config.Cfg, repo *gitalypb.Reposi
t.Run(fmt.Sprintf("hookName: %s", hookName), func(t *testing.T) {
customHookOutputPath := gittest.WriteEnvToCustomHook(t, repoPath, hookName)
- gitlabAPI, err := gitalyhook.NewGitlabAPI(cfg.Gitlab, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(cfg.Gitlab, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
- stop := runHookServiceServerWithAPI(t, cfg, gitlabAPI)
- defer stop()
+ runHookServiceWithGitlabClient(t, cfg, gitlabClient)
var stderr, stdout bytes.Buffer
stdin := bytes.NewBuffer([]byte(changes))
@@ -260,8 +259,7 @@ func TestHooksUpdate(t *testing.T) {
cfg.Gitlab.SecretFile = testhelper.WriteShellSecretFile(t, cfg.GitlabShell.Dir, "the wrong token")
- stop := runHookServiceServer(t, cfg)
- defer stop()
+ runHookServiceServer(t, cfg)
testHooksUpdate(t, cfg, glHookValues{
GLID: glID,
@@ -271,7 +269,7 @@ func TestHooksUpdate(t *testing.T) {
}
func testHooksUpdate(t *testing.T, cfg config.Cfg, glValues glHookValues) {
- repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
refval, oldval, newval := "refval", strings.Repeat("a", 40), strings.Repeat("b", 40)
@@ -308,8 +306,7 @@ open('%s', 'w') { |f| f.puts(JSON.dump(ARGV)) }
var inputs []string
- b, err := ioutil.ReadFile(customHookArgsPath)
- require.NoError(t, err)
+ b := testhelper.MustReadFile(t, customHookArgsPath)
require.NoError(t, json.Unmarshal(b, &inputs))
require.Equal(t, []string{refval, oldval, newval}, inputs)
@@ -350,7 +347,7 @@ func TestHooksPostReceiveFailed(t *testing.T) {
cfg.Gitlab.URL = serverURL
cfg.Gitlab.SecretFile = testhelper.WriteShellSecretFile(t, cfg.GitlabShell.Dir, secretToken)
- gitlabAPI, err := gitalyhook.NewGitlabAPI(cfg.Gitlab, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(cfg.Gitlab, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
customHookOutputPath := gittest.WriteEnvToCustomHook(t, repoPath, "post-receive")
@@ -395,18 +392,17 @@ func TestHooksPostReceiveFailed(t *testing.T) {
for _, tc := range testcases {
t.Run(tc.desc, func(t *testing.T) {
- stop := runHookServiceServerWithAPI(t, cfg, gitlabAPI)
- defer stop()
+ runHookServiceWithGitlabClient(t, cfg, gitlabClient)
hooksPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1,
Node: "node",
Primary: tc.primary,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
@@ -464,11 +460,10 @@ func TestHooksNotAllowed(t *testing.T) {
customHookOutputPath := gittest.WriteEnvToCustomHook(t, repoPath, "post-receive")
- gitlabAPI, err := gitalyhook.NewGitlabAPI(cfg.Gitlab, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(cfg.Gitlab, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
- stop := runHookServiceServerWithAPI(t, cfg, gitlabAPI)
- defer stop()
+ runHookServiceWithGitlabClient(t, cfg, gitlabClient)
var stderr, stdout bytes.Buffer
@@ -576,8 +571,8 @@ func TestCheckBadCreds(t *testing.T) {
require.Regexp(t, `Checking GitLab API access: .* level=error msg="Internal API error" .* error="authorization failed" method=GET status=401 url="http://127.0.0.1:[0-9]+/api/v4/internal/check"\nFAIL`, stdout.String())
}
-func runHookServiceServer(t *testing.T, cfg config.Cfg) func() {
- return runHookServiceServerWithAPI(t, cfg, gitalyhook.GitlabAPIStub)
+func runHookServiceServer(t *testing.T, cfg config.Cfg) {
+ runHookServiceWithGitlabClient(t, cfg, gitlab.NewMockClient())
}
type featureFlagAsserter struct {
@@ -615,20 +610,12 @@ func (svc featureFlagAsserter) PackObjectsHook(stream gitalypb.HookService_PackO
return svc.wrapped.PackObjectsHook(stream)
}
-func runHookServiceServerWithAPI(t *testing.T, cfg config.Cfg, gitlabAPI gitalyhook.GitlabAPI) func() {
- registry := backchannel.NewRegistry()
- txManager := transaction.NewManager(cfg, registry)
- hookManager := gitalyhook.NewManager(config.NewLocator(cfg), txManager, gitlabAPI, cfg)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
- server := testhelper.NewServerWithAuth(t, nil, nil, cfg.Auth.Token, registry, testhelper.WithInternalSocket(cfg))
-
- gitalypb.RegisterHookServiceServer(server.GrpcServer(), featureFlagAsserter{
- t: t, wrapped: hook.NewServer(cfg, hookManager, gitCmdFactory),
- })
- reflection.Register(server.GrpcServer())
- server.Start(t)
-
- return server.Stop
+func runHookServiceWithGitlabClient(t *testing.T, cfg config.Cfg, gitlabClient gitlab.Client) {
+ testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterHookServiceServer(srv, featureFlagAsserter{
+ t: t, wrapped: hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()),
+ })
+ }, testserver.WithGitLabClient(gitlabClient))
}
func requireContainsOnce(t *testing.T, s string, contains string) {
@@ -690,7 +677,7 @@ func TestGitalyHooksPackObjects(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- defer runHookServiceServer(t, cfg)()
+ runHookServiceServer(t, cfg)
tempDir := testhelper.TempDir(t)
diff --git a/cmd/gitaly-lfs-smudge/lfs_smudge.go b/cmd/gitaly-lfs-smudge/lfs_smudge.go
index 01a16847d..c132f15d9 100644
--- a/cmd/gitaly-lfs-smudge/lfs_smudge.go
+++ b/cmd/gitaly-lfs-smudge/lfs_smudge.go
@@ -11,7 +11,8 @@ import (
"github.com/git-lfs/git-lfs/lfs"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/prometheus"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
gitalylog "gitlab.com/gitlab-org/gitaly/internal/log"
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/labkit/tracing"
@@ -78,7 +79,7 @@ func handleSmudge(to io.Writer, from io.Reader, config configProvider) (io.Reade
WithField("gitaly_tls_config", tlsCfg).
Debug("loaded GitLab API config")
- client, err := hook.NewGitlabNetClient(glCfg, tlsCfg)
+ client, err := gitlab.NewHTTPClient(glCfg, tlsCfg, prometheus.Config{})
if err != nil {
return contents, err
}
diff --git a/cmd/gitaly-lfs-smudge/lfs_smudge_test.go b/cmd/gitaly-lfs-smudge/lfs_smudge_test.go
index d87c2b1fe..5d22576e1 100644
--- a/cmd/gitaly-lfs-smudge/lfs_smudge_test.go
+++ b/cmd/gitaly-lfs-smudge/lfs_smudge_test.go
@@ -3,7 +3,6 @@ package main
import (
"bytes"
"encoding/json"
- "io/ioutil"
"net/http"
"os"
"path/filepath"
@@ -33,8 +32,8 @@ size 177735`
glRepository = "project-1"
secretToken = "topsecret"
testData = "hello world"
- certPath = "../../internal/gitaly/hook/testdata/certs/server.crt"
- keyPath = "../../internal/gitaly/hook/testdata/certs/server.key"
+ certPath = "../../internal/gitlab/testdata/certs/server.crt"
+ keyPath = "../../internal/gitlab/testdata/certs/server.key"
)
var (
@@ -134,7 +133,7 @@ func TestSuccessfulLfsSmudge(t *testing.T) {
logFilename := filepath.Join(tmpDir, "gitaly_lfs_smudge.log")
require.FileExists(t, logFilename)
- data, err := ioutil.ReadFile(logFilename)
+ data := testhelper.MustReadFile(t, logFilename)
require.NoError(t, err)
d := string(data)
@@ -256,8 +255,7 @@ func TestUnsuccessfulLfsSmudge(t *testing.T) {
logFilename := filepath.Join(tmpDir, "gitaly_lfs_smudge.log")
require.FileExists(t, logFilename)
- data, err := ioutil.ReadFile(logFilename)
- require.NoError(t, err)
+ data := testhelper.MustReadFile(t, logFilename)
if tc.expectedLogMessage != "" {
require.Contains(t, string(data), tc.expectedLogMessage)
diff --git a/cmd/gitaly-ssh/auth_test.go b/cmd/gitaly-ssh/auth_test.go
index f1b1d218b..046f7f753 100644
--- a/cmd/gitaly-ssh/auth_test.go
+++ b/cmd/gitaly-ssh/auth_test.go
@@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
+ "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
@@ -21,6 +22,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/setup"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
@@ -147,9 +149,10 @@ func runServer(t *testing.T, secure bool, cfg config.Cfg, connectionType string,
locator := config.NewLocator(cfg)
registry := backchannel.NewRegistry()
txManager := transaction.NewManager(cfg, registry)
- hookManager := hook.NewManager(locator, txManager, hook.GitlabAPIStub, cfg)
+ hookManager := hook.NewManager(locator, txManager, gitlab.NewMockClient(), cfg)
gitCmdFactory := git.NewExecCommandFactory(cfg)
- srv, err := server.New(secure, cfg, testhelper.DiscardTestEntry(t), registry)
+ diskCache := cache.New(cfg, locator)
+ srv, err := server.New(secure, cfg, testhelper.DiscardTestEntry(t), registry, diskCache)
require.NoError(t, err)
setup.RegisterAll(srv, &service.Dependencies{
Cfg: cfg,
diff --git a/cmd/gitaly-ssh/upload_pack_test.go b/cmd/gitaly-ssh/upload_pack_test.go
index 145586daf..9c86bd6e0 100644
--- a/cmd/gitaly-ssh/upload_pack_test.go
+++ b/cmd/gitaly-ssh/upload_pack_test.go
@@ -10,6 +10,8 @@ import (
"github.com/golang/protobuf/jsonpb"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/updateref"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
@@ -39,15 +41,16 @@ func TestVisibilityOfHiddenRefs(t *testing.T) {
keepAroundRef := fmt.Sprintf("%s/%s", keepAroundNamespace, existingSha)
gitCmdFactory := git.NewExecCommandFactory(cfg)
- updater, err := updateref.New(ctx, cfg, gitCmdFactory, repo)
+ localRepo := localrepo.NewTestRepo(t, cfg, repo)
+ updater, err := updateref.New(ctx, cfg, localRepo)
require.NoError(t, err)
require.NoError(t, updater.Create(git.ReferenceName(keepAroundRef), existingSha))
require.NoError(t, updater.Wait())
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "transfer.hideRefs", keepAroundNamespace)
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "transfer.hideRefs", keepAroundNamespace)
- output := testhelper.MustRunCommand(t, nil, "git", "ls-remote", repoPath, keepAroundNamespace)
+ output := gittest.Exec(t, cfg, "ls-remote", repoPath, keepAroundNamespace)
require.Empty(t, output, "there should be no keep-around refs in normal ls-remote output")
wd, err := os.Getwd()
diff --git a/cmd/gitaly/main.go b/cmd/gitaly/main.go
index f0d30a347..aa23bbdcd 100644
--- a/cmd/gitaly/main.go
+++ b/cmd/gitaly/main.go
@@ -13,8 +13,10 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/bootstrap"
"gitlab.com/gitlab-org/gitaly/internal/bootstrap/starter"
+ "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/cgroups"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config/sentry"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
@@ -24,12 +26,14 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/setup"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
glog "gitlab.com/gitlab-org/gitaly/internal/log"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/internal/tempdir"
"gitlab.com/gitlab-org/gitaly/internal/version"
"gitlab.com/gitlab-org/labkit/monitoring"
"gitlab.com/gitlab-org/labkit/tracing"
+ "google.golang.org/grpc"
)
var (
@@ -147,12 +151,13 @@ func run(cfg config.Cfg) error {
if config.SkipHooks() {
log.Warn("skipping GitLab API client creation since hooks are bypassed via GITALY_TESTING_NO_GIT_HOOKS")
} else {
- gitlabAPI, err := hook.NewGitlabAPI(cfg.Gitlab, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(cfg.Gitlab, cfg.TLS, cfg.Prometheus)
if err != nil {
return fmt.Errorf("could not create GitLab API client: %w", err)
}
+ prometheus.MustRegister(gitlabClient)
- hm := hook.NewManager(locator, transactionManager, gitlabAPI, cfg)
+ hm := hook.NewManager(locator, transactionManager, gitlabClient, cfg)
hookManager = hm
}
@@ -166,7 +171,16 @@ func run(cfg config.Cfg) error {
gitCmdFactory := git.NewExecCommandFactory(cfg)
prometheus.MustRegister(gitCmdFactory)
- gitalyServerFactory := server.NewGitalyServerFactory(cfg, registry)
+ catfileCache := catfile.NewCache(cfg)
+ prometheus.MustRegister(catfileCache)
+
+ diskCache := cache.New(cfg, locator)
+ prometheus.MustRegister(diskCache)
+ if err := diskCache.StartWalkers(); err != nil {
+ return fmt.Errorf("disk cache walkers: %w", err)
+ }
+
+ gitalyServerFactory := server.NewGitalyServerFactory(cfg, glog.Default(), registry, diskCache)
defer gitalyServerFactory.Stop()
ling, err := linguist.New(cfg)
@@ -183,19 +197,28 @@ func run(cfg config.Cfg) error {
defer rubySrv.Stop()
for _, c := range []starter.Config{
- {starter.Unix, cfg.SocketPath},
- {starter.Unix, cfg.GitalyInternalSocketPath()},
- {starter.TCP, cfg.ListenAddr},
- {starter.TLS, cfg.TLSListenAddr},
+ {Name: starter.Unix, Addr: cfg.SocketPath, HandoverOnUpgrade: true},
+ {Name: starter.Unix, Addr: cfg.GitalyInternalSocketPath(), HandoverOnUpgrade: false},
+ {Name: starter.TCP, Addr: cfg.ListenAddr, HandoverOnUpgrade: true},
+ {Name: starter.TLS, Addr: cfg.TLSListenAddr, HandoverOnUpgrade: true},
} {
if c.Addr == "" {
continue
}
- srv, err := gitalyServerFactory.Create(c.IsSecure())
- if err != nil {
- return fmt.Errorf("create gRPC server: %w", err)
+ var srv *grpc.Server
+ if c.HandoverOnUpgrade {
+ srv, err = gitalyServerFactory.CreateExternal(c.IsSecure())
+ if err != nil {
+ return fmt.Errorf("create external gRPC server: %w", err)
+ }
+ } else {
+ srv, err = gitalyServerFactory.CreateInternal()
+ if err != nil {
+ return fmt.Errorf("create internal gRPC server: %w", err)
+ }
}
+
setup.RegisterAll(srv, &service.Dependencies{
Cfg: cfg,
RubyServer: rubySrv,
@@ -205,6 +228,8 @@ func run(cfg config.Cfg) error {
ClientPool: conns,
GitCmdFactory: gitCmdFactory,
Linguist: ling,
+ CatfileCache: catfileCache,
+ DiskCache: diskCache,
})
b.RegisterStarter(starter.New(c, srv))
}
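
Each listener is now tagged with `HandoverOnUpgrade`, and the loop above picks `CreateExternal` for listeners that take part in the zero-downtime upgrade handover and `CreateInternal` for the internal socket. The sketch below restates that selection with stand-in types; the `serverFactory` and `listenerConfig` names and the addresses are illustrative, not the real `starter` and `server` packages.

```go
// Sketch of the per-listener server selection introduced above: external
// listeners participate in the upgrade handover, the internal socket does not.
package main

import "fmt"

type listenerConfig struct {
	Name              string
	Addr              string
	HandoverOnUpgrade bool
}

type serverFactory struct{}

func (serverFactory) CreateExternal(secure bool) (string, error) { return "external", nil }
func (serverFactory) CreateInternal() (string, error)            { return "internal", nil }

func main() {
	factory := serverFactory{}
	for _, c := range []listenerConfig{
		{Name: "unix", Addr: "/run/gitaly.socket", HandoverOnUpgrade: true},          // placeholder
		{Name: "unix", Addr: "/run/gitaly-internal.socket", HandoverOnUpgrade: false}, // placeholder
		{Name: "tcp", Addr: "localhost:9999", HandoverOnUpgrade: true},                // placeholder
	} {
		if c.Addr == "" {
			continue
		}
		var (
			srv string
			err error
		)
		if c.HandoverOnUpgrade {
			srv, err = factory.CreateExternal(c.Name == "tls")
		} else {
			srv, err = factory.CreateInternal()
		}
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s listener %s -> %s server\n", c.Name, c.Addr, srv)
	}
}
```
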
diff --git a/cmd/praefect/main.go b/cmd/praefect/main.go
index 74abb96c8..3f8ee04d1 100644
--- a/cmd/praefect/main.go
+++ b/cmd/praefect/main.go
@@ -322,11 +322,14 @@ func run(cfgs []starter.Config, conf config.Config) error {
healthChecker = hm
elector := nodes.NewPerRepositoryElector(logger, db, hm)
- go func() {
- if err := elector.Run(ctx, hm.Updated()); err != nil {
- logger.WithError(err).Error("primary elector exited")
- }
- }()
+
+ if conf.Failover.Enabled {
+ go func() {
+ if err := elector.Run(ctx, hm.Updated()); err != nil {
+ logger.WithError(err).Error("primary elector exited")
+ }
+ }()
+ }
primaryGetter = elector
assignmentStore = datastore.NewAssignmentStore(db, conf.StorageNames())
@@ -341,6 +344,11 @@ func run(cfgs []starter.Config, conf config.Config) error {
conf.DefaultReplicationFactors(),
)
} else {
+ if conf.Failover.Enabled {
+ logger.WithField("election_strategy", conf.Failover.ElectionStrategy).Warn(
+ "Deprecated election stategy in use, migrate to repository specific primary nodes following https://docs.gitlab.com/ee/administration/gitaly/praefect.html#migrate-to-repository-specific-primary-gitaly-nodes. The other election strategies are scheduled for removal in GitLab 14.0.")
+ }
+
nodeMgr, err := nodes.NewManager(logger, conf, db, csg, nodeLatencyHistogram, protoregistry.GitalyProtoPreregistered, errTracker, clientHandshaker)
if err != nil {
return err
@@ -518,6 +526,7 @@ func getStarterConfigs(conf config.Config) ([]starter.Config, error) {
}
addrConf = starter.Config{Name: schema, Addr: addr}
}
+ addrConf.HandoverOnUpgrade = true
if _, found := unique[addrConf.Addr]; found {
return nil, fmt.Errorf("same address can't be used for different schemas %q", addr)
diff --git a/cmd/praefect/main_test.go b/cmd/praefect/main_test.go
index e8b811902..e28bb04b5 100644
--- a/cmd/praefect/main_test.go
+++ b/cmd/praefect/main_test.go
@@ -112,16 +112,19 @@ func TestGetStarterConfigs(t *testing.T) {
},
exp: []starter.Config{
{
- Name: starter.TCP,
- Addr: "127.0.0.1:2306",
+ Name: starter.TCP,
+ Addr: "127.0.0.1:2306",
+ HandoverOnUpgrade: true,
},
{
- Name: starter.TLS,
- Addr: "127.0.0.1:2307",
+ Name: starter.TLS,
+ Addr: "127.0.0.1:2307",
+ HandoverOnUpgrade: true,
},
{
- Name: starter.Unix,
- Addr: "/socket/path",
+ Name: starter.Unix,
+ Addr: "/socket/path",
+ HandoverOnUpgrade: true,
},
},
},
@@ -134,16 +137,19 @@ func TestGetStarterConfigs(t *testing.T) {
},
exp: []starter.Config{
{
- Name: starter.TCP,
- Addr: "127.0.0.1:2306",
+ Name: starter.TCP,
+ Addr: "127.0.0.1:2306",
+ HandoverOnUpgrade: true,
},
{
- Name: starter.TLS,
- Addr: "127.0.0.1:2307",
+ Name: starter.TLS,
+ Addr: "127.0.0.1:2307",
+ HandoverOnUpgrade: true,
},
{
- Name: starter.Unix,
- Addr: "/socket/path",
+ Name: starter.Unix,
+ Addr: "/socket/path",
+ HandoverOnUpgrade: true,
},
},
},
@@ -156,16 +162,19 @@ func TestGetStarterConfigs(t *testing.T) {
},
exp: []starter.Config{
{
- Name: starter.TCP,
- Addr: "127.0.0.1:2306",
+ Name: starter.TCP,
+ Addr: "127.0.0.1:2306",
+ HandoverOnUpgrade: true,
},
{
- Name: starter.TLS,
- Addr: "127.0.0.1:2307",
+ Name: starter.TLS,
+ Addr: "127.0.0.1:2307",
+ HandoverOnUpgrade: true,
},
{
- Name: starter.Unix,
- Addr: "/socket/path",
+ Name: starter.Unix,
+ Addr: "/socket/path",
+ HandoverOnUpgrade: true,
},
},
},
@@ -178,16 +187,19 @@ func TestGetStarterConfigs(t *testing.T) {
},
exp: []starter.Config{
{
- Name: starter.TCP,
- Addr: "127.0.0.1:2306",
+ Name: starter.TCP,
+ Addr: "127.0.0.1:2306",
+ HandoverOnUpgrade: true,
},
{
- Name: starter.TLS,
- Addr: "127.0.0.1:2307",
+ Name: starter.TLS,
+ Addr: "127.0.0.1:2307",
+ HandoverOnUpgrade: true,
},
{
- Name: starter.Unix,
- Addr: "/socket/path",
+ Name: starter.Unix,
+ Addr: "/socket/path",
+ HandoverOnUpgrade: true,
},
},
},
diff --git a/cmd/praefect/subcmd_reconcile.go b/cmd/praefect/subcmd_reconcile.go
index 1f9a62435..20734b48d 100644
--- a/cmd/praefect/subcmd_reconcile.go
+++ b/cmd/praefect/subcmd_reconcile.go
@@ -38,6 +38,8 @@ func (s *reconcileSubcommand) FlagSet() *flag.FlagSet {
}
func (s *reconcileSubcommand) Exec(flags *flag.FlagSet, conf config.Config) error {
+ logger.Warn("The reconcile sub-command has been deprecated in GitLab 13.12 and is scheduled for removal in GitLab 14.0. Use the automatic reconciler instead: https://docs.gitlab.com/ee/administration/gitaly/praefect.html#automatic-reconciliation")
+
nr := nodeReconciler{
conf: conf,
virtualStorage: s.virtual,
diff --git a/config.toml.example b/config.toml.example
index 223e91d4e..eaa23fd8e 100644
--- a/config.toml.example
+++ b/config.toml.example
@@ -5,7 +5,7 @@
socket_path = "/home/git/gitlab/tmp/sockets/private/gitaly.socket"
# The directory where Gitaly's executables are stored
-bin_dir = "/home/git/gitaly"
+bin_dir = "/home/git/gitaly/_build/bin"
# # Optional: listen on a TCP socket. This is insecure (no authentication)
# listen_addr = "localhost:9999"
diff --git a/danger/assignees/Dangerfile b/danger/assignees/Dangerfile
index 611ad76d3..b1534736f 100644
--- a/danger/assignees/Dangerfile
+++ b/danger/assignees/Dangerfile
@@ -11,7 +11,7 @@ if gitlab.mr_json['assignees'].none?
TXT
end
-suggestions = (REVIEWERS - [gitlab.mr_author]).sample(2, random: Random.new(gitlab.mr_json['iid']))
+suggestions = (GITALY_TEAM - [gitlab.mr_author]).sample(2, random: Random.new(gitlab.mr_json['iid']))
case suggestions.size
when 0
diff --git a/danger/changelog/Dangerfile b/danger/changelog/Dangerfile
index d2facce07..1a7029768 100644
--- a/danger/changelog/Dangerfile
+++ b/danger/changelog/Dangerfile
@@ -29,6 +29,28 @@ merge_request: %<mr_iid>s
#{SEE_DOC}
SUGGEST_COMMENT
+CATEGORIES = YAML
+ .load_file(File.expand_path('../../.gitlab/changelog_config.yml', __dir__))
+ .fetch('categories')
+ .keys
+ .freeze
+
+def check_changelog_trailer(commit)
+ trailer = commit.message.match(/^Changelog:\s*(?<category>.+)$/)
+
+ return :missing if trailer.nil? || trailer[:category].nil?
+
+ category = trailer[:category]
+
+ return :valid if CATEGORIES.include?(category)
+
+ self.fail(
+ "Commit #{commit.sha} uses an invalid changelog category: #{category}"
+ )
+
+ :invalid
+end
+
def check_changelog(path)
raw_file = File.read(path)
yaml = YAML.safe_load(raw_file)
@@ -75,4 +97,40 @@ elsif changelog_needed
format(CREATE_CHANGELOG_MESSAGE, mr_iid: gitlab.mr_json["iid"], mr_title: mr_title, labels: presented_no_changelog_labels)
end
+if changelog_needed
+ checked = 0
+
+ git.commits.each do |commit|
+ case check_changelog_trailer(commit)
+ when :valid, :invalid
+ checked += 1
+ end
+ end
+
+ if checked == 0
+ message <<~MSG
+ We are in the process of rolling out a new workflow for adding changelog entries. This new workflow uses Git commit subjects and Git trailers to generate changelogs. This new approach will soon replace the current YAML based approach.
+
+ To ease the transition process, we recommend you start using both the old and new approach in parallel. This is not required at this time, but will make it easier to transition to the new approach in the future. To do so, pick the commit that should go in the changelog and add a `Changelog` trailer to it. For example:
+
+ ```
+ This is my commit's subject line
+
+ This is the optional commit body.
+
+ Changelog: added
+ ```
+
+ The value of the `Changelog` trailer should be one of the following: added, fixed, changed, deprecated, removed, security, performance, other.
+
+ For more information, take a look at the following resources:
+
+ - `https://gitlab.com/gitlab-com/gl-infra/delivery/-/issues/1564`
+ - https://docs.gitlab.com/ee/api/repositories.html#generate-changelog-data
+
+ If you'd like to see the new approach in action, take a look at the commits in [the Omnibus repository](https://gitlab.com/gitlab-org/omnibus-gitlab/-/commits/master).
+ MSG
+ end
+end
+
# vim: ft=ruby
diff --git a/doc/DESIGN.md b/doc/DESIGN.md
index b41786f9d..515e3210a 100644
--- a/doc/DESIGN.md
+++ b/doc/DESIGN.md
@@ -1,5 +1,41 @@
-## Reason
+## Reason
+### Git Characteristics That Make Horizontal Scaling Difficult
+
+Git's fundamental behaviors are similar to those of relational database engines, and Git is difficult to scale horizontally for the same reasons that serverless databases are challenging to build and cannot handle all existing relational database workloads.
+
+Gitaly is a layer that brings horizontal scaling and higher availability to massively scaled Git operations through a variety of optimizations: disk locality, caching the results of intensive operations (like git pack-objects), coordination between multiple nodes, cluster synchronization, and sharding.
+
+> **Note:** While Gitaly is designed to help Git scale horizontally, Gitaly's internal operations depend on the standard open source release of the Git client, which it invokes during Git operations, so some of Git's limitations still pass through to Gitaly. The same is true of any server system that does not have a layer like Gitaly - but in such cases there is no ability to provide any horizontal scaling support at all.
+#### Git Architectural Characteristics and Assumptions
+
+- **Stateful, Atomic, ACID Transactions** (“database synonymous” workload with regard to memory / CPU / disk IO).
+- **"Process Atomic" Transactions** - requires one commit to be coordinated by one and only one Git process.
+- **Atomic Storage** - assumes that operations of a single git command write to a single storage end-point.
+- **Storage channel speeds** - assumes low latency, high bandwidth storage access (near bus speeds).
+- **ACID Isolation** - by design Git allows concurrent update access to the same repository as much as possible; in the area of updating Git refs, record locking is necessary and is implemented by Git.
+- **Wide ranging burst memory / CPU / disk IO requirements** - assumes significant available memory headroom for operations that intensify depending on the content size.
+
+#### Specific Git Workload Characteristics That Make Remote File Systems and Containerization of Gitaly Challenging
+
+**IMPORTANT:** The above characteristics and assumptions combined with specific Git workloads create challenging compute characteristics - high burst CPU utilization, high burst memory utilization and high burst storage channel utilization. Bursts in these compute needs are based on Git usage patterns - how much content, how dense (e.g. binaries) and how often.
+
+These workload characteristics are not fundamentally predictable across the portfolio of source code that a given GitLab server may need to store. Large monorepos might exist at companies with few employees. Binary storage - while not considered an ideal use of a Git repository - is common in some industry segments or project types. This means that architecting a GitLab instance with built-in Git headroom limitations causes unexpected limitations for the specific Git usage patterns of the people using the instance.
+
+These are some of the most challenging workloads for Git:
+- Large scale, busy monorepos (commit volume is high and packs for full clones are very large).
+- High commit volume on a single repository (commit volume is high and packs for full clones are very frequent).
+- Binaries stored in the Git object database. (In GitLab, Git LFS can be redirected to PaaS storage.)
+- Full history cloning - due to packfile creation requirements.
+
+The above workload factors compound together when a given workload has more than one characteristic.
+#### Effects on Horizontal Compute Architecture
+- The memory burstiness profile of Git makes it (and therefore Gitaly) very challenging to reliably containerize, because container systems enforce strict memory limits. Exceeding these limits causes significant operational instability and/or termination by the container runtime.
+- The disk IO burstiness profile of Git makes it (and therefore Gitaly) very challenging to use remote file systems with reliability and integrity (e.g. NFS - including PaaS versions). This was, in fact, the first design reason for Gitaly - to avoid having the Git binary operate on remote storage.
+- The CPU burstiness profile of Git (and therefore Gitaly) also makes it challenging to reliably containerize.
+
+These are the challenges that imply an application layer is needed to help Git scale horizontally in any scaled implementation - not just GitLab. GitLab has built this layer and continues to chip away at (iterate on) all of the above challenges in this innovative layer.
+### Evidence To Back Building a New Horizontal Layer to Scale Git
For GitLab.com the [git access is slow](https://gitlab.com/gitlab-com/infrastructure/issues/351).
When looking at `Rugged::Repository.new` performance data we can see that our P99 spikes up to 30 wall seconds, while the CPU time keeps in the realm of the 15 milliseconds. Pointing at filesystem access as the culprit.
@@ -17,9 +53,6 @@ Gitaly will make our situation better in a few steps:
1. Move the git operations from the app to the file/git server with git rpc (routing git access over JSON HTTP calls)
1. Use Git ketch to allow active-active (push to a local server), and distributed read operations (read from a secondary). This is far in the future, we might also use a distributed key value store instead. See the [active-active issue](https://gitlab.com/gitlab-org/gitlab-ee/issues/1381). Until we are active active we can just use persistent storage on the cloud to shard, this eliminates the need for redundancy.
-
-
-
## Scope
To maintain the focus of the project, the following subjects are out-of-scope for the moment:
@@ -74,4 +107,4 @@ All design decisions should be added here.
1. By default all Go packages in the Gitaly repository use the `/internal` directory, unless we explicitly want to export something. The only exception is the `/cmd` directory for executables.
1. GitLab requests should use as few Gitaly gRPC calls as possible. This means it is OK to move GitLab application logic into Gitaly when it saves us gRPC round trips.
1. Defining new gRPC calls is cheap. It is better to define a new 'high level' gRPC call and save gRPC round trips than to chain / combine 'low level' gRPC calls.
-1. Why is Gitaly written in Go? At the time the project started the only practical options were Ruby and Go. We expected to be able to handle more traffic with fewer resources if we used Go. Today (Q3 2019), part of Gitaly is written in Ruby. On the particular Gitaly server that hosts gitlab-org/gitlab-ce, we have a pool of gitaly-ruby processes using a total 20GB of RSS and handling 5 requests per second. The single Gitaly Go process on that machine uses less than 3GB of memory and handles 90 requests per second.
+1. Why is Gitaly written in Go? At the time the project started the only practical options were Ruby and Go. We expected to be able to handle more traffic with fewer resources if we used Go. Today (Q3 2019), part of Gitaly is written in Ruby. On the particular Gitaly server that hosts gitlab-org/gitlab-ce, we have a pool of gitaly-ruby processes using a total 20GB of RSS and handling 5 requests per second. The single Gitaly Go process on that machine uses less than 3GB of memory and handles 90 requests per second.
\ No newline at end of file
diff --git a/doc/virtual_storage.md b/doc/virtual_storage.md
index d68db29e4..bbada3c14 100644
--- a/doc/virtual_storage.md
+++ b/doc/virtual_storage.md
@@ -11,10 +11,25 @@ Praefect records the expected state of each repository within a virtual storage
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | 5 |
The `repositories` table has three columns: [^1]
-1. `virtual_storage` indicates which virtual storage the repository belongs in.
-1. `relative_path` indicates where the repository should be stored on a physical storage.
+1. `virtual_storage` indicates which virtual storage the repository belongs in.
+1. `relative_path` indicates where the repository should be stored on a physical storage.
1. `generation` is monotonically increasing version number that is incremented on each mutator call to the repository.
+The `repository_assignments` table records which physical storages are supposed to contain a replica of a repository.
+
+| virtual_storage | relative_path | storage |
+|-----------------|------------------------------------------------------------------------------------|----------|
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 |
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 |
+
+The number of assigned storages each repository has indicates its desired replication factor. Each record contains:
+
+- The `(virtual_storage, relative_path)` tuple to uniquely identify a repository.
+- The `storage` that is assigned to host a replica.
+
+If there are no assignments for a repository, every physical storage is considered to be assigned. This preserves the
+previous behavior of replicating a repository on every physical storage.
+
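To make the relationship concrete, here is a minimal Go sketch of how the desired replication factor falls out of the assignment records described above. The in-memory types and values are hypothetical; Praefect itself derives this from the database.

```go
package main

import "fmt"

// assignment mirrors one row of the repository_assignments table shown above.
type assignment struct {
	virtualStorage, relativePath, storage string
}

// replicationFactor is the number of storages assigned to the repository; when a
// repository has no assignments at all, every configured storage counts as assigned.
func replicationFactor(assignments []assignment, virtualStorage, relativePath string, configured []string) int {
	count := 0
	for _, a := range assignments {
		if a.virtualStorage == virtualStorage && a.relativePath == relativePath {
			count++
		}
	}
	if count == 0 {
		return len(configured)
	}
	return count
}

func main() {
	assignments := []assignment{
		{"default", "@hashed/aa/bb/example.git", "gitaly-1"},
		{"default", "@hashed/aa/bb/example.git", "gitaly-2"},
	}
	// Prints 2: the repository is assigned to two of the three configured storages.
	fmt.Println(replicationFactor(assignments, "default", "@hashed/aa/bb/example.git",
		[]string{"gitaly-1", "gitaly-2", "gitaly-3"}))
}
```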
Praefect tracks the current state of a repository on each physical storage in the `storage_repositories` table:
| virtual_storage | relative_path | storage | generation |
@@ -24,8 +39,8 @@ Praefect tracks the current state of a repository on each physical storage in th
The `storage_repositories` table has four columns:
1. `virtual_storage` indicates which virtual storage the repository belongs in.
-1. `relative_path` indicates where the repository should be stored on a physical storage.
-1. `storage` indicates which physical storage this record belongs to.
+1. `relative_path` indicates where the repository should be stored on a physical storage.
+1. `storage` indicates which physical storage this record belongs to.
1. `generation` indicates the minimum generation of the repository on this storage.
While similar to `storage_repositories`, the `repositories` table is needed to infer whether a repository was deleted or is waiting to be replicated to a physical storage. The records in the `repositories` table additionally act as repository-specific locks which should be acquired on updates to synchronize access to `storage_repositories`.
@@ -43,19 +58,18 @@ In both cases either all or some secondaries are left outdated. Praefect schedul
## Identifying Inconsistencies
-Praefect identifies inconsistencies in the storage cluster by cross-referencing the expected state in the `repositories` with the actual state of the physical storages in `storage_repositories`.
+Praefect identifies inconsistencies in the storage cluster by cross-referencing the expected state in the `repositories` and `repository_assignments` tables with the actual state of the physical storages in `storage_repositories`.
-Expected state of physical storages can be attained by cross joining the configured physical storages with the expected repositories of the virtual storage in the `repositories` table. It's important to use configured storages as some physical storages might have been added to or removed from the virtual storage.
+The expected state of the physical storages can be derived by cross joining the configured physical storages with the expected repositories of the virtual storage in the `repositories` table. It's important to use the configured storages as some physical storages might have been added to or removed from the virtual storage.
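As a rough illustration of that cross join, the sketch below drives such a query from Go via `database/sql`. The table and column names come from the descriptions above, but the function, the `lib/pq` driver choice, and the exact SQL are illustrative only and are not Praefect's real implementation.

```go
package praefectsketch

import (
	"context"
	"database/sql"

	"github.com/lib/pq" // assumed driver helper for passing array parameters
)

// findOutdatedReplicas returns, for one virtual storage, every repository and the
// configured storages whose replica of it is missing or behind the expected
// generation recorded in the repositories table.
func findOutdatedReplicas(ctx context.Context, db *sql.DB, virtualStorage string, configuredStorages []string) (map[string][]string, error) {
	rows, err := db.QueryContext(ctx, `
		SELECT r.relative_path, s.storage
		FROM repositories AS r
		CROSS JOIN unnest($2::text[]) AS s(storage)
		LEFT JOIN storage_repositories AS sr
		       ON sr.virtual_storage = r.virtual_storage
		      AND sr.relative_path   = r.relative_path
		      AND sr.storage         = s.storage
		WHERE r.virtual_storage = $1
		  AND (sr.generation IS NULL OR sr.generation < r.generation)
	`, virtualStorage, pq.Array(configuredStorages))
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	outdated := map[string][]string{} // relative path -> storages needing an update
	for rows.Next() {
		var relativePath, storage string
		if err := rows.Scan(&relativePath, &storage); err != nil {
			return nil, err
		}
		outdated[relativePath] = append(outdated[relativePath], storage)
	}
	return outdated, rows.Err()
}
```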
-Some possible inconsistencies are listed below. Each of the scenarios assume a virtual storage called `default` with a primary storage `gitaly-1` and a secondary storage `gitaly-2`.
+Possible inconsistencies and their reconciliations are listed below. Each of the scenarios assumes a virtual storage called `default` with a primary storage `gitaly-1` and a secondary storage `gitaly-2`.
### Missing Repository
-Praefect expects a repository to be replicated to every physical storage within virtual storage. However, a physical storage might be missing an expected repository. This might be due to the following reasons:
-
-#### New Repository
+Praefect expects an up-to-date copy of a repository to be present on every assigned physical storage. However, a physical storage might be missing a replica. This can be due to two reasons:
-A repository was just created. The primary `gitaly-1` contains the new repository but it has not yet been replicated to the secondary `gitaly-2`. This might be a temporary situation while the secondary is waiting to replicate the changes.
+- A repository was just created. The primary `gitaly-1` contains the new repository but it has not yet been replicated to the secondary `gitaly-2`. This might be a temporary situation while the secondary is waiting to replicate the changes.
+- A physical storage was assigned as a new host for the repository. The assignment has been recorded for `gitaly-2` but the repository has not yet been replicated to the storage.
`repositories`:
@@ -63,28 +77,20 @@ A repository was just created. The primary `gitaly-1` contains the new repositor
|-----------------|------------------------------------------------------------------------------------|------------|
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | 0 |
-`storage_repositories`:
-
-| virtual_storage | relative_path | storage | generation |
-|-----------------|------------------------------------------------------------------------------------|----------|------------|
-| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 | 0 |
-
+`repository_assignments`:
-#### New Physical Storage
-Assume a new physical storage called `gitaly-3` was added to the virtual storage. Brand new physical storage is empty and would be missing every expected repository.
-
-`repositories`:
-
-| virtual_storage | relative_path | generation |
+| virtual_storage | relative_path | storage |
|-----------------|------------------------------------------------------------------------------------|------------|
-| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | 0 |
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 |
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 |
`storage_repositories`:
| virtual_storage | relative_path | storage | generation |
|-----------------|------------------------------------------------------------------------------------|----------|------------|
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 | 0 |
-| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 | 0 |
+
+To fix the inconsistency, the reconciler schedules `update`-type jobs to the storages that are missing the repository, sourcing them from random healthy storages with up-to-date replicas.
### Outdated Repository
@@ -101,6 +107,13 @@ In the case below, `gitaly-2` has an outdated version of the repository as its g
|-----------------|------------------------------------------------------------------------------------|------------|
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | 2 |
+`repository_assignments`:
+
+| virtual_storage | relative_path | storage |
+|-----------------|------------------------------------------------------------------------------------|------------|
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 |
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 |
+
`storage_repositories`:
| virtual_storage | relative_path | storage | generation |
@@ -108,6 +121,8 @@ In the case below, `gitaly-2` has an outdated version of the repository as its g
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 | 2 |
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 | 0 |
+To fix the inconsistency, the reconciler schedules `update`-type jobs to the outdated storages, sourcing them from random healthy storages with up-to-date replicas.
+
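For illustration only, here is a small Go sketch of the per-repository decision described in the two scenarios above. The types and function are hypothetical; the real reconciler works directly against the Praefect database and its replication job queue.

```go
package praefectsketch

import "math/rand"

// scheduleUpdateJobs picks the targets and the source of `update`-type replication
// jobs for one repository: targets are assigned storages whose replica is missing
// or behind the expected generation, and the source is a random healthy storage
// that already holds an up-to-date replica.
func scheduleUpdateJobs(expectedGeneration int64, assigned []string, healthy map[string]bool, generations map[string]int64) (source string, targets []string) {
	var upToDate []string
	for storage, generation := range generations {
		if healthy[storage] && generation == expectedGeneration {
			upToDate = append(upToDate, storage)
		}
	}
	if len(upToDate) == 0 {
		// Nothing to replicate from; the repository cannot be repaired right now.
		return "", nil
	}
	source = upToDate[rand.Intn(len(upToDate))]

	for _, storage := range assigned {
		generation, ok := generations[storage]
		if !ok || generation < expectedGeneration {
			targets = append(targets, storage)
		}
	}
	return source, targets
}
```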
### Unexpected Repository
#### Deleted Repository
@@ -121,10 +136,42 @@ A physical storage might contain a repository that is not expected be present on
| virtual_storage | relative_path | generation |
|-----------------|------------------------------------------------------------------------------------|------------|
+`repository_assignments`:
+
+| virtual_storage | relative_path | storage |
+|-----------------|------------------------------------------------------------------------------------|------------|
+
+`storage_repositories`:
+
+| virtual_storage | relative_path | storage | generation |
+|-----------------|------------------------------------------------------------------------------------|----------|------------|
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 | 2 |
+
+Praefect's reconciler doesn't fix the inconsistency at this time. A fix is tracked in https://gitlab.com/gitlab-org/gitaly/-/issues/3480.
+
+### Unassigned Replica
+
+A physical storage might contain a replica of a repository even if it is not assigned to host it. This can happen if the storage was previously
+assigned to host the repository but was later unassigned. Praefect doesn't keep unassigned copies up to date via transactions or replication jobs.
+Below, `gitaly-2` has been unassigned but still contains a replica of the repository.
+
+`repositories`:
+
+| virtual_storage | relative_path | generation |
+|-----------------|------------------------------------------------------------------------------------|------------|
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | 2 |
+
+`repository_assignments`:
+
+| virtual_storage | relative_path | storage |
+|-----------------|------------------------------------------------------------------------------------|------------|
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 |
+
`storage_repositories`:
| virtual_storage | relative_path | storage | generation |
|-----------------|------------------------------------------------------------------------------------|----------|------------|
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 | 2 |
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 | 2 |
### Removed Physical Storage
@@ -133,12 +180,22 @@ When a physical storage is removed from the virtual storage configuration, it le
In the example below, assume that `gitaly-2` has been removed from the configuration. `gitaly-2` had the most up-to-date version of a repository in the virtual storage. The expected state of the virtual storage in the `repositories` table still records the latest generation. Repositories that were not up to date with the removed physical storage would be considered outdated.
+Assignments of unconfigured storages are ignored as well. This means a repository's replication factor decreases when an assigned storage is removed
+from the virtual storage. Below, the repository's replication factor is `1` as `gitaly-2` has been removed from the configuration.
+
`repositories`:
| virtual_storage | relative_path | generation |
|-----------------|------------------------------------------------------------------------------------|------------|
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | 3 |
+`repository_assignments`:
+
+| virtual_storage | relative_path | storage |
+|-----------------|------------------------------------------------------------------------------------|------------|
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 |
+| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 |
+
`storage_repositories`:
| virtual_storage | relative_path | storage | generation |
@@ -146,6 +203,9 @@ In the example below, assume that `gitaly-2` has been removed from the configura
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-1 | 2 |
| default | @hashed/5f/9c/5f9c4ab08cac7457e9111a30e4664920607ea2c115a1433d7be98e97e64244ca.git | gitaly-2 | 3 |
+The reconciler treats storages that are assigned but no longer configured as still assigned. This means it won't schedule `delete_replica` jobs to any assigned storage before the assignments
+of the removed storages are manually removed.
+
## Known Problems
1. When a primary is demoted, it might be in the process of accepting a write. If there is a concurrent write to the new primary, one of the writes is going to be lost as the primary increments its generation even if it was not on the latest one. This issue and the proposed solution are tracked in [#2969](https://gitlab.com/gitlab-org/gitaly/-/issues/2969).
diff --git a/internal/backup/backup.go b/internal/backup/backup.go
index ad04c8d0a..f08166fe8 100644
--- a/internal/backup/backup.go
+++ b/internal/backup/backup.go
@@ -34,31 +34,73 @@ func NewFilesystem(path string) *Filesystem {
}
}
-// BackupRepository creates a repository backup on a local filesystem
-func (fs *Filesystem) BackupRepository(ctx context.Context, server storage.ServerInfo, repo *gitalypb.Repository) error {
- if isEmpty, err := fs.isEmpty(ctx, server, repo); err != nil {
- return fmt.Errorf("backup: %w", err)
+// CreateRequest is the request to create a backup
+type CreateRequest struct {
+ Server storage.ServerInfo
+ Repository *gitalypb.Repository
+}
+
+// Create creates a repository backup on a local filesystem
+func (fs *Filesystem) Create(ctx context.Context, req *CreateRequest) error {
+ if isEmpty, err := fs.isEmpty(ctx, req.Server, req.Repository); err != nil {
+ return fmt.Errorf("filesystem: %w", err)
} else if isEmpty {
return ErrSkipped
}
- backupPath := strings.TrimSuffix(filepath.Join(fs.path, repo.RelativePath), ".git")
+ backupPath := strings.TrimSuffix(filepath.Join(fs.path, req.Repository.RelativePath), ".git")
bundlePath := backupPath + ".bundle"
customHooksPath := filepath.Join(backupPath, "custom_hooks.tar")
- if err := os.MkdirAll(backupPath, os.ModePerm); err != nil {
- return fmt.Errorf("backup: %w", err)
+ if err := os.MkdirAll(backupPath, 0700); err != nil {
+ return fmt.Errorf("filesystem: %w", err)
}
- if err := fs.writeBundle(ctx, bundlePath, server, repo); err != nil {
- return fmt.Errorf("backup: write bundle: %w", err)
+ if err := fs.writeBundle(ctx, bundlePath, req.Server, req.Repository); err != nil {
+ return fmt.Errorf("filesystem: write bundle: %w", err)
}
- if err := fs.writeCustomHooks(ctx, customHooksPath, server, repo); err != nil {
- return fmt.Errorf("backup: write custom hooks: %w", err)
+ if err := fs.writeCustomHooks(ctx, customHooksPath, req.Server, req.Repository); err != nil {
+ return fmt.Errorf("filesystem: write custom hooks: %w", err)
}
return nil
}
+// RestoreRequest is the request to restore from a backup
+type RestoreRequest struct {
+ Server storage.ServerInfo
+ Repository *gitalypb.Repository
+ AlwaysCreate bool
+}
+
+// Restore restores a repository from a backup on a local filesystem
+func (fs *Filesystem) Restore(ctx context.Context, req *RestoreRequest) error {
+ backupPath := strings.TrimSuffix(filepath.Join(fs.path, req.Repository.RelativePath), ".git")
+ bundlePath := backupPath + ".bundle"
+ customHooksPath := filepath.Join(backupPath, "custom_hooks.tar")
+
+ if err := fs.removeRepository(ctx, req.Server, req.Repository); err != nil {
+ return fmt.Errorf("filesystem: %w", err)
+ }
+ if err := fs.restoreBundle(ctx, bundlePath, req.Server, req.Repository); err != nil {
+ // For compatibility with existing backups we need to always create the
+ // repository even if there's no bundle for project repositories
+ // (not wiki or snippet repositories). Gitaly does not know which
+ // repository is which type so here we accept a parameter to tell us
+ // to employ this behaviour.
+ if req.AlwaysCreate && errors.Is(err, ErrSkipped) {
+ if err := fs.createRepository(ctx, req.Server, req.Repository); err != nil {
+ return fmt.Errorf("filesystem: %w", err)
+ }
+ } else {
+ return fmt.Errorf("filesystem: %w", err)
+ }
+ }
+ if err := fs.restoreCustomHooks(ctx, customHooksPath, req.Server, req.Repository); err != nil {
+ return fmt.Errorf("filesystem: %w", err)
+ }
+ return nil
+}
+
func (fs *Filesystem) isEmpty(ctx context.Context, server storage.ServerInfo, repo *gitalypb.Repository) (bool, error) {
repoClient, err := fs.newRepoClient(ctx, server)
if err != nil {
@@ -74,6 +116,28 @@ func (fs *Filesystem) isEmpty(ctx context.Context, server storage.ServerInfo, re
return !hasLocalBranches.GetValue(), nil
}
+func (fs *Filesystem) removeRepository(ctx context.Context, server storage.ServerInfo, repo *gitalypb.Repository) error {
+ repoClient, err := fs.newRepoClient(ctx, server)
+ if err != nil {
+ return fmt.Errorf("remove repository: %w", err)
+ }
+ if _, err := repoClient.RemoveRepository(ctx, &gitalypb.RemoveRepositoryRequest{Repository: repo}); err != nil {
+ return fmt.Errorf("remove repository: %w", err)
+ }
+ return nil
+}
+
+func (fs *Filesystem) createRepository(ctx context.Context, server storage.ServerInfo, repo *gitalypb.Repository) error {
+ repoClient, err := fs.newRepoClient(ctx, server)
+ if err != nil {
+ return fmt.Errorf("create repository: %w", err)
+ }
+ if _, err := repoClient.CreateRepository(ctx, &gitalypb.CreateRepositoryRequest{Repository: repo}); err != nil {
+ return fmt.Errorf("create repository: %w", err)
+ }
+ return nil
+}
+
func (fs *Filesystem) writeBundle(ctx context.Context, path string, server storage.ServerInfo, repo *gitalypb.Repository) error {
repoClient, err := fs.newRepoClient(ctx, server)
if err != nil {
@@ -90,6 +154,45 @@ func (fs *Filesystem) writeBundle(ctx context.Context, path string, server stora
return writeFile(path, bundle)
}
+func (fs *Filesystem) restoreBundle(ctx context.Context, path string, server storage.ServerInfo, repo *gitalypb.Repository) error {
+ f, err := os.Open(path)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return fmt.Errorf("%w: bundle does not exist: %q", ErrSkipped, path)
+ }
+ return fmt.Errorf("restore bundle: %w", err)
+ }
+ defer f.Close()
+
+ repoClient, err := fs.newRepoClient(ctx, server)
+ if err != nil {
+ return fmt.Errorf("restore bundle: %q: %w", path, err)
+ }
+ stream, err := repoClient.CreateRepositoryFromBundle(ctx)
+ if err != nil {
+ return fmt.Errorf("restore bundle: %q: %w", path, err)
+ }
+ request := &gitalypb.CreateRepositoryFromBundleRequest{Repository: repo}
+ bundle := streamio.NewWriter(func(p []byte) error {
+ request.Data = p
+ if err := stream.Send(request); err != nil {
+ return err
+ }
+
+ // Only set `Repository` on the first `Send` of the stream
+ request = &gitalypb.CreateRepositoryFromBundleRequest{}
+
+ return nil
+ })
+ if _, err := io.Copy(bundle, f); err != nil {
+ return fmt.Errorf("restore bundle: %q: %w", path, err)
+ }
+ if _, err = stream.CloseAndRecv(); err != nil {
+ return fmt.Errorf("restore bundle: %q: %w", path, err)
+ }
+ return nil
+}
+
func (fs *Filesystem) writeCustomHooks(ctx context.Context, path string, server storage.ServerInfo, repo *gitalypb.Repository) error {
repoClient, err := fs.newRepoClient(ctx, server)
if err != nil {
@@ -109,6 +212,46 @@ func (fs *Filesystem) writeCustomHooks(ctx context.Context, path string, server
return nil
}
+func (fs *Filesystem) restoreCustomHooks(ctx context.Context, path string, server storage.ServerInfo, repo *gitalypb.Repository) error {
+ f, err := os.Open(path)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil
+ }
+ return fmt.Errorf("restore custom hooks: %w", err)
+ }
+ defer f.Close()
+
+ repoClient, err := fs.newRepoClient(ctx, server)
+ if err != nil {
+ return fmt.Errorf("restore custom hooks, %q: %w", path, err)
+ }
+ stream, err := repoClient.RestoreCustomHooks(ctx)
+ if err != nil {
+ return fmt.Errorf("restore custom hooks, %q: %w", path, err)
+ }
+
+ request := &gitalypb.RestoreCustomHooksRequest{Repository: repo}
+ bundle := streamio.NewWriter(func(p []byte) error {
+ request.Data = p
+ if err := stream.Send(request); err != nil {
+ return err
+ }
+
+ // Only set `Repository` on the first `Send` of the stream
+ request = &gitalypb.RestoreCustomHooksRequest{}
+
+ return nil
+ })
+ if _, err := io.Copy(bundle, f); err != nil {
+ return fmt.Errorf("restore custom hooks, %q: %w", path, err)
+ }
+ if _, err = stream.CloseAndRecv(); err != nil {
+ return fmt.Errorf("restore custom hooks, %q: %w", path, err)
+ }
+ return nil
+}
+
func (fs *Filesystem) newRepoClient(ctx context.Context, server storage.ServerInfo) (gitalypb.RepositoryServiceClient, error) {
conn, err := fs.conns.Dial(ctx, server.Address, server.Token)
if err != nil {
@@ -119,7 +262,7 @@ func (fs *Filesystem) newRepoClient(ctx context.Context, server storage.ServerIn
}
func writeFile(path string, r io.Reader) (returnErr error) {
- f, err := os.Create(path)
+ f, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
return fmt.Errorf("write file: %w", err)
}
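Taken together, the new `Create`/`Restore` API in backup.go could be driven roughly as below. This is a hedged sketch that would have to live inside the Gitaly module (the `internal/backup` package is not importable from outside it); the address, token and repository values are placeholders.

```go
package main

import (
	"context"
	"errors"
	"log"

	"gitlab.com/gitlab-org/gitaly/internal/backup"
	"gitlab.com/gitlab-org/gitaly/internal/storage"
	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

func main() {
	ctx := context.Background()

	fsBackup := backup.NewFilesystem("/var/opt/gitlab/backups")
	server := storage.ServerInfo{Address: "tcp://gitaly.internal:8075", Token: "secret"}
	repo := &gitalypb.Repository{StorageName: "default", RelativePath: "@hashed/aa/bb/example.git"}

	// Create writes <path>/<relative path>.bundle plus custom_hooks.tar, skipping
	// repositories without any local branches.
	if err := fsBackup.Create(ctx, &backup.CreateRequest{Server: server, Repository: repo}); err != nil {
		if errors.Is(err, backup.ErrSkipped) {
			log.Print("repository is empty, backup skipped")
		} else {
			log.Fatalf("create: %v", err)
		}
	}

	// Restore removes the existing repository, recreates it from the bundle and
	// re-applies custom hooks. AlwaysCreate recreates project repositories even
	// when no bundle exists, for compatibility with older backups.
	if err := fsBackup.Restore(ctx, &backup.RestoreRequest{Server: server, Repository: repo, AlwaysCreate: true}); err != nil {
		log.Fatalf("restore: %v", err)
	}
}
```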
diff --git a/internal/backup/backup_test.go b/internal/backup/backup_test.go
index ce85f235f..26d1a1034 100644
--- a/internal/backup/backup_test.go
+++ b/internal/backup/backup_test.go
@@ -1,6 +1,7 @@
package backup
import (
+ "errors"
"io/ioutil"
"os"
"path/filepath"
@@ -16,19 +17,19 @@ import (
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
-func TestFilesystem_BackupRepository(t *testing.T) {
+func TestFilesystem_Create(t *testing.T) {
cfg := testcfg.Build(t)
gitalyAddr := testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll)
path := testhelper.TempDir(t)
- hooksRepo, hooksRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "hooks")
+ hooksRepo, hooksRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "hooks")
require.NoError(t, os.Mkdir(filepath.Join(hooksRepoPath, "custom_hooks"), os.ModePerm))
require.NoError(t, ioutil.WriteFile(filepath.Join(hooksRepoPath, "custom_hooks/pre-commit.sample"), []byte("Some hooks"), os.ModePerm))
- noHooksRepo, _, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "no-hooks")
- emptyRepo, _, _ := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ noHooksRepo, _, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "no-hooks")
+ emptyRepo, _, _ := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
nonexistentRepo := *emptyRepo
nonexistentRepo.RelativePath = "nonexistent"
@@ -75,7 +76,10 @@ func TestFilesystem_BackupRepository(t *testing.T) {
defer cancel()
fsBackup := NewFilesystem(path)
- err := fsBackup.BackupRepository(ctx, storage.ServerInfo{Address: gitalyAddr, Token: cfg.Auth.Token}, tc.repo)
+ err := fsBackup.Create(ctx, &CreateRequest{
+ Server: storage.ServerInfo{Address: gitalyAddr, Token: cfg.Auth.Token},
+ Repository: tc.repo,
+ })
if tc.err == nil {
require.NoError(t, err)
} else {
@@ -85,7 +89,15 @@ func TestFilesystem_BackupRepository(t *testing.T) {
if tc.createsBundle {
require.FileExists(t, bundlePath)
- output := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "bundle", "verify", bundlePath)
+ dirInfo, err := os.Stat(filepath.Dir(bundlePath))
+ require.NoError(t, err)
+ require.Equal(t, os.FileMode(0700), dirInfo.Mode().Perm(), "expecting restricted directory permissions")
+
+ bundleInfo, err := os.Stat(bundlePath)
+ require.NoError(t, err)
+ require.Equal(t, os.FileMode(0600), bundleInfo.Mode().Perm(), "expecting restricted file permissions")
+
+ output := gittest.Exec(t, cfg, "-C", repoPath, "bundle", "verify", bundlePath)
require.Contains(t, string(output), "The bundle records a complete history")
} else {
require.NoFileExists(t, bundlePath)
@@ -99,3 +111,92 @@ func TestFilesystem_BackupRepository(t *testing.T) {
})
}
}
+
+func TestFilesystem_Restore(t *testing.T) {
+ cfg := testcfg.Build(t)
+ testhelper.ConfigureGitalyHooksBin(t, cfg)
+
+ gitalyAddr := testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll)
+
+ path := testhelper.TempDir(t)
+
+ existingRepo, existRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "existing_repo")
+ existingRepoPath := filepath.Join(path, existingRepo.RelativePath)
+ existingRepoBundlePath := existingRepoPath + ".bundle"
+ existingRepoCustomHooksPath := filepath.Join(existingRepoPath, "custom_hooks.tar")
+ require.NoError(t, os.MkdirAll(existingRepoPath, os.ModePerm))
+
+ gittest.Exec(t, cfg, "-C", existRepoPath, "bundle", "create", existingRepoBundlePath, "--all")
+ testhelper.CopyFile(t, "../gitaly/service/repository/testdata/custom_hooks.tar", existingRepoCustomHooksPath)
+
+ newRepo := gittest.InitRepoDir(t, cfg.Storages[0].Path, "new_repo")
+ newRepoBundlePath := filepath.Join(path, newRepo.RelativePath+".bundle")
+ testhelper.CopyFile(t, existingRepoBundlePath, newRepoBundlePath)
+
+ missingBundleRepo := gittest.InitRepoDir(t, cfg.Storages[0].Path, "missing_bundle")
+ missingBundleRepoAlwaysCreate := gittest.InitRepoDir(t, cfg.Storages[0].Path, "missing_bundle_always_create")
+
+ for _, tc := range []struct {
+ desc string
+ repo *gitalypb.Repository
+ alwaysCreate bool
+ expectedPaths []string
+ expectedErrAs error
+ expectVerify bool
+ }{
+ {
+ desc: "new repo, without hooks",
+ repo: newRepo,
+ expectVerify: true,
+ },
+ {
+ desc: "existing repo, with hooks",
+ repo: existingRepo,
+ expectedPaths: []string{
+ "custom_hooks/pre-commit.sample",
+ "custom_hooks/prepare-commit-msg.sample",
+ "custom_hooks/pre-push.sample",
+ },
+ expectVerify: true,
+ },
+ {
+ desc: "missing bundle",
+ repo: missingBundleRepo,
+ expectedErrAs: ErrSkipped,
+ },
+ {
+ desc: "missing bundle, always create",
+ repo: missingBundleRepoAlwaysCreate,
+ alwaysCreate: true,
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ repoPath := filepath.Join(cfg.Storages[0].Path, tc.repo.RelativePath)
+ bundlePath := filepath.Join(path, tc.repo.RelativePath+".bundle")
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ fsBackup := NewFilesystem(path)
+ err := fsBackup.Restore(ctx, &RestoreRequest{
+ Server: storage.ServerInfo{Address: gitalyAddr, Token: cfg.Auth.Token},
+ Repository: tc.repo,
+ AlwaysCreate: tc.alwaysCreate,
+ })
+ if tc.expectedErrAs != nil {
+ require.True(t, errors.Is(err, tc.expectedErrAs), err.Error())
+ } else {
+ require.NoError(t, err)
+ }
+
+ if tc.expectVerify {
+ output := gittest.Exec(t, cfg, "-C", repoPath, "bundle", "verify", bundlePath)
+ require.Contains(t, string(output), "The bundle records a complete history")
+ }
+
+ for _, p := range tc.expectedPaths {
+ require.FileExists(t, filepath.Join(repoPath, p))
+ }
+ })
+ }
+}
diff --git a/internal/backup/pipeline.go b/internal/backup/pipeline.go
new file mode 100644
index 000000000..329faa0b5
--- /dev/null
+++ b/internal/backup/pipeline.go
@@ -0,0 +1,167 @@
+package backup
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "sync"
+ "sync/atomic"
+
+ "github.com/sirupsen/logrus"
+ "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+)
+
+// Strategy used to create/restore backups
+type Strategy interface {
+ Create(context.Context, *CreateRequest) error
+ Restore(context.Context, *RestoreRequest) error
+}
+
+// CreatePipeline is a pipeline that only handles creating backups
+type CreatePipeline interface {
+ Create(context.Context, *CreateRequest)
+ Done() error
+}
+
+// Pipeline handles a series of requests to create/restore backups. Pipeline
+// encapsulates error handling for the caller.
+type Pipeline struct {
+ log logrus.FieldLogger
+ strategy Strategy
+ failed int64
+}
+
+// NewPipeline creates a new pipeline
+func NewPipeline(log logrus.FieldLogger, strategy Strategy) *Pipeline {
+ return &Pipeline{
+ log: log,
+ strategy: strategy,
+ }
+}
+
+// Create requests that a repository backup be created
+func (p *Pipeline) Create(ctx context.Context, req *CreateRequest) {
+ repoLog := p.repoLogger(req.Repository)
+ repoLog.Info("started backup")
+
+ if err := p.strategy.Create(ctx, req); err != nil {
+ if errors.Is(err, ErrSkipped) {
+ repoLog.WithError(err).Warn("skipped backup")
+ } else {
+ repoLog.WithError(err).Error("backup failed")
+ atomic.AddInt64(&p.failed, 1)
+ }
+ return
+ }
+
+ repoLog.Info("completed backup")
+}
+
+// Restore requests that a repository be restored from backup
+func (p *Pipeline) Restore(ctx context.Context, req *RestoreRequest) {
+ repoLog := p.repoLogger(req.Repository)
+ repoLog.Info("started restore")
+
+ if err := p.strategy.Restore(ctx, req); err != nil {
+ if errors.Is(err, ErrSkipped) {
+ repoLog.WithError(err).Warn("skipped restore")
+ } else {
+ repoLog.WithError(err).Error("restore failed")
+ atomic.AddInt64(&p.failed, 1)
+ }
+ return
+ }
+
+ repoLog.Info("completed restore")
+}
+
+// Done indicates that the pipeline is complete and returns any accumulated errors
+func (p *Pipeline) Done() error {
+ if p.failed > 0 {
+ return fmt.Errorf("pipeline: %d failures encountered", p.failed)
+ }
+ return nil
+}
+
+func (p *Pipeline) repoLogger(repo *gitalypb.Repository) logrus.FieldLogger {
+ return p.log.WithFields(logrus.Fields{
+ "storage_name": repo.StorageName,
+ "relative_path": repo.RelativePath,
+ "gl_project_path": repo.GlProjectPath,
+ })
+}
+
+// ParallelCreatePipeline is a pipeline that creates backups in parallel
+type ParallelCreatePipeline struct {
+ next CreatePipeline
+ n int
+
+ workersOnce sync.Once
+ wg sync.WaitGroup
+ done chan struct{}
+ requests chan *CreateRequest
+
+ mu sync.Mutex
+ err error
+}
+
+// NewParallelCreatePipeline creates a new ParallelCreatePipeline where `next`
+// is the pipeline called to create the backups and `n` is the number of
+// parallel backups that will run.
+func NewParallelCreatePipeline(next CreatePipeline, n int) *ParallelCreatePipeline {
+ return &ParallelCreatePipeline{
+ next: next,
+ n: n,
+ done: make(chan struct{}),
+ requests: make(chan *CreateRequest),
+ }
+}
+
+// Create queues a call to `next.Create` which will be run in parallel
+func (p *ParallelCreatePipeline) Create(ctx context.Context, req *CreateRequest) {
+ p.workersOnce.Do(p.startWorkers)
+
+ select {
+ case <-ctx.Done():
+ p.setErr(ctx.Err())
+ case p.requests <- req:
+ }
+}
+
+// Done waits for any in progress calls to `Create` to complete then reports any accumulated errors
+func (p *ParallelCreatePipeline) Done() error {
+ close(p.done)
+ p.wg.Wait()
+ if err := p.next.Done(); err != nil {
+ return err
+ }
+ return p.err
+}
+
+func (p *ParallelCreatePipeline) startWorkers() {
+ for i := 0; i < p.n; i++ {
+ p.wg.Add(1)
+ go p.worker()
+ }
+}
+
+func (p *ParallelCreatePipeline) worker() {
+ defer p.wg.Done()
+ for {
+ select {
+ case <-p.done:
+ return
+ case req := <-p.requests:
+ p.next.Create(context.TODO(), req)
+ }
+ }
+}
+
+func (p *ParallelCreatePipeline) setErr(err error) {
+ p.mu.Lock()
+ defer p.mu.Unlock()
+ if p.err != nil {
+ return
+ }
+ p.err = err
+}
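A hedged sketch of how the new pipeline types compose; it mirrors the way a backup command might wire them up, but the repository list, server info, and worker count here are placeholders.

```go
package main

import (
	"context"
	"log"

	"github.com/sirupsen/logrus"
	"gitlab.com/gitlab-org/gitaly/internal/backup"
	"gitlab.com/gitlab-org/gitaly/internal/storage"
	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

func main() {
	ctx := context.Background()

	server := storage.ServerInfo{Address: "tcp://gitaly.internal:8075", Token: "secret"}
	repos := []*gitalypb.Repository{
		{StorageName: "default", RelativePath: "@hashed/aa/bb/example-1.git"},
		{StorageName: "default", RelativePath: "@hashed/cc/dd/example-2.git"},
	}

	// Filesystem implements Strategy; Pipeline adds logging and failure counting,
	// and ParallelCreatePipeline fans the Create calls out over two workers.
	strategy := backup.NewFilesystem("/var/opt/gitlab/backups")
	var pipeline backup.CreatePipeline = backup.NewPipeline(logrus.StandardLogger(), strategy)
	pipeline = backup.NewParallelCreatePipeline(pipeline, 2)

	for _, repo := range repos {
		pipeline.Create(ctx, &backup.CreateRequest{Server: server, Repository: repo})
	}

	// Done waits for in-flight backups and reports how many of them failed.
	if err := pipeline.Done(); err != nil {
		log.Fatal(err)
	}
}
```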
diff --git a/internal/backup/pipeline_test.go b/internal/backup/pipeline_test.go
new file mode 100644
index 000000000..f23a57d53
--- /dev/null
+++ b/internal/backup/pipeline_test.go
@@ -0,0 +1,109 @@
+package backup
+
+import (
+ "context"
+ "testing"
+
+ "github.com/sirupsen/logrus"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+)
+
+func TestPipeline_Create(t *testing.T) {
+ testPipelineCreate(t, func(strategy Strategy) CreatePipeline {
+ return NewPipeline(logrus.StandardLogger(), strategy)
+ })
+}
+
+func TestPipeline_Restore(t *testing.T) {
+ strategy := MockStrategy{
+ RestoreFunc: func(_ context.Context, req *RestoreRequest) error {
+ switch req.Repository.StorageName {
+ case "normal":
+ return nil
+ case "skip":
+ return ErrSkipped
+ case "error":
+ return assert.AnError
+ }
+ require.Failf(t, "unexpected call to Restore", "StorageName = %q", req.Repository.StorageName)
+ return nil
+ },
+ }
+ p := NewPipeline(logrus.StandardLogger(), strategy)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ requests := []RestoreRequest{
+ {Repository: &gitalypb.Repository{StorageName: "normal"}},
+ {Repository: &gitalypb.Repository{StorageName: "skip"}},
+ {Repository: &gitalypb.Repository{StorageName: "error"}},
+ }
+ for _, req := range requests {
+ p.Restore(ctx, &req)
+ }
+ err := p.Done()
+ require.EqualError(t, err, "pipeline: 1 failures encountered")
+}
+
+func TestParallelCreatePipeline(t *testing.T) {
+ testPipelineCreate(t, func(strategy Strategy) CreatePipeline {
+ return NewParallelCreatePipeline(NewPipeline(logrus.StandardLogger(), strategy), 2)
+ })
+}
+
+type MockStrategy struct {
+ CreateFunc func(context.Context, *CreateRequest) error
+ RestoreFunc func(context.Context, *RestoreRequest) error
+}
+
+func (s MockStrategy) Create(ctx context.Context, req *CreateRequest) error {
+ if s.CreateFunc != nil {
+ return s.CreateFunc(ctx, req)
+ }
+ return nil
+}
+
+func (s MockStrategy) Restore(ctx context.Context, req *RestoreRequest) error {
+ if s.RestoreFunc != nil {
+ return s.RestoreFunc(ctx, req)
+ }
+ return nil
+}
+
+func testPipelineCreate(t *testing.T, init func(Strategy) CreatePipeline) {
+ t.Run("strategy errors", func(t *testing.T) {
+ strategy := MockStrategy{
+ CreateFunc: func(_ context.Context, req *CreateRequest) error {
+ switch req.Repository.StorageName {
+ case "normal":
+ return nil
+ case "skip":
+ return ErrSkipped
+ case "error":
+ return assert.AnError
+ }
+ require.Failf(t, "unexpected call to Create", "StorageName = %q", req.Repository.StorageName)
+ return nil
+ },
+ }
+ p := init(strategy)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ requests := []CreateRequest{
+ {Repository: &gitalypb.Repository{StorageName: "normal"}},
+ {Repository: &gitalypb.Repository{StorageName: "skip"}},
+ {Repository: &gitalypb.Repository{StorageName: "error"}},
+ }
+ for i := range requests {
+ p.Create(ctx, &requests[i])
+ }
+ err := p.Done()
+ require.EqualError(t, err, "pipeline: 1 failures encountered")
+ })
+}
diff --git a/internal/bootstrap/starter/starter.go b/internal/bootstrap/starter/starter.go
index 7a0e0cd4d..8099121d3 100644
--- a/internal/bootstrap/starter/starter.go
+++ b/internal/bootstrap/starter/starter.go
@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"net"
+ "os"
"strings"
"github.com/sirupsen/logrus"
@@ -77,6 +78,11 @@ func verifySchema(schema string) error {
// Config represents a network type, and address
type Config struct {
Name, Addr string
+ // HandoverOnUpgrade indicates whether the socket should be handed over to the new
+ // process during an upgrade. If the socket is not handed over, it should be be unique
+ // to avoid colliding with the old process' socket. If the socket is a Unix socket, a
+ // possible existing file at the path is removed.
+ HandoverOnUpgrade bool
}
// Endpoint returns fully qualified address.
@@ -105,7 +111,18 @@ type Server interface {
// New creates a new bootstrap.Starter from a config and a GracefulStoppableServer
func New(cfg Config, server Server) bootstrap.Starter {
- return func(listen bootstrap.ListenFunc, errCh chan<- error) error {
+ return func(listenWithHandover bootstrap.ListenFunc, errCh chan<- error) error {
+ listen := listenWithHandover
+ if !cfg.HandoverOnUpgrade {
+ if cfg.Name == Unix {
+ if err := os.Remove(cfg.Addr); err != nil && !os.IsNotExist(err) {
+ return fmt.Errorf("remove previous socket file: %w", err)
+ }
+ }
+
+ listen = net.Listen
+ }
+
l, err := listen(cfg.family(), cfg.Addr)
if err != nil {
return err
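As an illustration of the new flag (a sketch with hypothetical socket paths and server values, not code from this change): the externally reachable socket is handed over across upgrades and reuses the inherited listener, while a per-process internal socket opts out so `New` removes any stale Unix socket file and listens afresh.

```go
package startersketch

import (
	"gitlab.com/gitlab-org/gitaly/internal/bootstrap"
	"gitlab.com/gitlab-org/gitaly/internal/bootstrap/starter"
)

// buildStarters shows the effect of HandoverOnUpgrade: the first socket survives
// an upgrade via listener handover; the second one never collides with the old
// process because it is re-created from scratch on every start.
func buildStarters(external, internal starter.Server) []bootstrap.Starter {
	return []bootstrap.Starter{
		starter.New(starter.Config{Name: starter.Unix, Addr: "/run/gitaly/gitaly.socket", HandoverOnUpgrade: true}, external),
		starter.New(starter.Config{Name: starter.Unix, Addr: "/run/gitaly/gitaly-internal.socket", HandoverOnUpgrade: false}, internal),
	}
}
```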
diff --git a/internal/cache/cache.go b/internal/cache/cache.go
new file mode 100644
index 000000000..907a3c457
--- /dev/null
+++ b/internal/cache/cache.go
@@ -0,0 +1,360 @@
+package cache
+
+import (
+ "context"
+ "errors"
+ "io"
+ "os"
+ "path/filepath"
+ "sync"
+
+ "github.com/golang/protobuf/proto"
+ "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
+ "github.com/prometheus/client_golang/prometheus"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/safe"
+ "gitlab.com/gitlab-org/gitaly/internal/storage"
+ "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+)
+
+// maps a cache path to the number of active writers
+type activeFiles struct {
+ *sync.Mutex
+ m map[string]int
+}
+
+// trackFile returns a function that indicates if the current
+// writing of a file is the last known one, which
+// would indicate the current write is the "winner".
+func (af activeFiles) trackFile(path string) func() bool {
+ af.Lock()
+ defer af.Unlock()
+
+ af.m[path]++
+
+ return func() bool {
+ af.Lock()
+ defer af.Unlock()
+
+ af.m[path]--
+
+ winner := af.m[path] == 0
+ if winner {
+ delete(af.m, path) // reclaim memory
+ }
+
+ return winner
+ }
+}
+
+type cacheConfig struct {
+ disableMoveAndClear bool // only used to disable move and clear in tests
+ disableWalker bool // only used to disable object walker in tests
+}
+
+// Option is an option for the cache.
+type Option func(*cacheConfig)
+
+// withDisabledMoveAndClear disables the initial move and cleanup of preexisting cache directories.
+// This option is only for test purposes.
+func withDisabledMoveAndClear() Option {
+ return func(cfg *cacheConfig) {
+ cfg.disableMoveAndClear = true
+ }
+}
+
+// withDisabledWalker disables the cache walker which cleans up the cache asynchronously. This
+// option is only for test purposes.
+func withDisabledWalker() Option {
+ return func(cfg *cacheConfig) {
+ cfg.disableWalker = true
+ }
+}
+
+// Cache stores and retrieves byte streams for repository related RPCs
+type Cache struct {
+ storages []config.Storage
+ keyer leaseKeyer
+ af activeFiles
+ cacheConfig cacheConfig
+
+ requestTotals prometheus.Counter
+ missTotals prometheus.Counter
+ bytesStoredtotals prometheus.Counter
+ bytesFetchedtotals prometheus.Counter
+ bytesLoserTotals prometheus.Counter
+ errTotal *prometheus.CounterVec
+ walkerCheckTotal prometheus.Counter
+ walkerRemovalTotal prometheus.Counter
+ walkerErrorTotal prometheus.Counter
+ walkerEmptyDirTotal prometheus.Counter
+ walkerEmptyDirRemovalTotal prometheus.Counter
+}
+
+// New will create a new Cache with the given Keyer.
+func New(cfg config.Cfg, locator storage.Locator, opts ...Option) *Cache {
+ var cacheConfig cacheConfig
+ for _, opt := range opts {
+ opt(&cacheConfig)
+ }
+
+ cache := &Cache{
+ storages: cfg.Storages,
+ af: activeFiles{
+ Mutex: &sync.Mutex{},
+ m: map[string]int{},
+ },
+ cacheConfig: cacheConfig,
+
+ requestTotals: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_requests_total",
+ Help: "Total number of disk cache requests",
+ },
+ ),
+ missTotals: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_miss_total",
+ Help: "Total number of disk cache misses",
+ },
+ ),
+ bytesStoredtotals: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_bytes_stored_total",
+ Help: "Total number of disk cache bytes stored",
+ },
+ ),
+ bytesFetchedtotals: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_bytes_fetched_total",
+ Help: "Total number of disk cache bytes fetched",
+ },
+ ),
+ bytesLoserTotals: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_bytes_loser_total",
+ Help: "Total number of disk cache bytes from losing writes",
+ },
+ ),
+ errTotal: prometheus.NewCounterVec(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_errors_total",
+ Help: "Total number of errors encountered by disk cache",
+ },
+ []string{"error"},
+ ),
+ walkerCheckTotal: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_walker_check_total",
+ Help: "Total number of events during diskcache filesystem walks",
+ },
+ ),
+ walkerRemovalTotal: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_walker_removal_total",
+ Help: "Total number of events during diskcache filesystem walks",
+ },
+ ),
+ walkerErrorTotal: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_walker_error_total",
+ Help: "Total number of errors during diskcache filesystem walks",
+ },
+ ),
+ walkerEmptyDirTotal: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_walker_empty_dir_total",
+ Help: "Total number of empty directories encountered",
+ },
+ ),
+ walkerEmptyDirRemovalTotal: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_diskcache_walker_empty_dir_removal_total",
+ Help: "Total number of empty directories removed",
+ },
+ ),
+ }
+ cache.keyer = newLeaseKeyer(locator, cache.countErr)
+
+ return cache
+}
+
+// Describe is used to describe Prometheus metrics.
+func (c *Cache) Describe(descs chan<- *prometheus.Desc) {
+ prometheus.DescribeByCollect(c, descs)
+}
+
+// Collect is used to collect Prometheus metrics.
+func (c *Cache) Collect(metrics chan<- prometheus.Metric) {
+ c.requestTotals.Collect(metrics)
+ c.missTotals.Collect(metrics)
+ c.bytesStoredtotals.Collect(metrics)
+ c.bytesFetchedtotals.Collect(metrics)
+ c.bytesLoserTotals.Collect(metrics)
+ c.errTotal.Collect(metrics)
+ c.walkerRemovalTotal.Collect(metrics)
+ c.walkerErrorTotal.Collect(metrics)
+ c.walkerEmptyDirTotal.Collect(metrics)
+ c.walkerEmptyDirRemovalTotal.Collect(metrics)
+}
+
+func (c *Cache) countErr(err error) error {
+ switch err {
+ case ErrMissingLeaseFile:
+ c.errTotal.WithLabelValues("ErrMissingLeaseFile").Inc()
+ case ErrPendingExists:
+ c.errTotal.WithLabelValues("ErrPendingExists").Inc()
+ }
+ return err
+}
+
+// ErrReqNotFound indicates the request does not exist within the repo digest
+var ErrReqNotFound = errors.New("request digest not found within repo namespace")
+
+// GetStream will fetch the cached stream for a request. It is the
+// responsibility of the caller to close the stream when done.
+func (c *Cache) GetStream(ctx context.Context, repo *gitalypb.Repository, req proto.Message) (_ io.ReadCloser, err error) {
+ defer func() {
+ if err != nil {
+ c.missTotals.Inc()
+ }
+ }()
+
+ c.requestTotals.Inc()
+
+ respPath, err := c.KeyPath(ctx, repo, req)
+ switch {
+ case os.IsNotExist(err):
+ return nil, ErrReqNotFound
+ case err == nil:
+ break
+ default:
+ return nil, err
+ }
+
+ ctxlogrus.Extract(ctx).
+ WithField("stream_path", respPath).
+ Info("getting stream")
+
+ respF, err := os.Open(respPath)
+ switch {
+ case os.IsNotExist(err):
+ return nil, ErrReqNotFound
+ case err == nil:
+ break
+ default:
+ return nil, err
+ }
+
+ return instrumentedReadCloser{
+ ReadCloser: respF,
+ counter: c.bytesFetchedtotals,
+ }, nil
+}
+
+type instrumentedReadCloser struct {
+ io.ReadCloser
+ counter prometheus.Counter
+}
+
+func (irc instrumentedReadCloser) Read(p []byte) (n int, err error) {
+ n, err = irc.ReadCloser.Read(p)
+ irc.counter.Add(float64(n))
+ return
+}
+
+// PutStream will store a stream in a repo-namespace keyed by the digest of the
+// request protobuf message.
+func (c *Cache) PutStream(ctx context.Context, repo *gitalypb.Repository, req proto.Message, src io.Reader) error {
+ reqPath, err := c.KeyPath(ctx, repo, req)
+ if err != nil {
+ return err
+ }
+
+ ctxlogrus.Extract(ctx).
+ WithField("stream_path", reqPath).
+ Info("putting stream")
+
+ var n int64
+ isWinner := c.af.trackFile(reqPath)
+ defer func() {
+ if !isWinner() {
+ c.bytesLoserTotals.Add(float64(n))
+ }
+ }()
+
+ if err := os.MkdirAll(filepath.Dir(reqPath), 0755); err != nil {
+ return err
+ }
+
+ sf, err := safe.CreateFileWriter(reqPath)
+ if err != nil {
+ return err
+ }
+ defer sf.Close()
+
+ n, err = io.Copy(sf, src)
+ if err != nil {
+ return err
+ }
+ c.bytesStoredtotals.Add(float64(n))
+
+ if err := sf.Commit(); err != nil {
+ c.errTotal.WithLabelValues("ErrSafefileCommit").Inc()
+ return err
+ }
+
+ return nil
+}
+
+// KeyPath returns the cache path for the given request.
+func (c *Cache) KeyPath(ctx context.Context, repo *gitalypb.Repository, req proto.Message) (string, error) {
+ return c.keyer.keyPath(ctx, repo, req)
+}
+
+// StartLease will mark the repository as being in an indeterministic state. This is typically used
+// when modifying the repo, since the cache is not stable until after the modification is complete.
+// A lease object will be returned that allows the caller to signal the end of the lease.
+func (c *Cache) StartLease(repo *gitalypb.Repository) (LeaseEnder, error) {
+ pendingPath, err := c.keyer.newPendingLease(repo)
+ if err != nil {
+ return lease{}, err
+ }
+
+ return lease{
+ pendingPath: pendingPath,
+ repo: repo,
+ keyer: c.keyer,
+ countErr: c.countErr,
+ }, nil
+}
+
+// LeaseEnder allows the caller to indicate when a lease is no longer needed
+type LeaseEnder interface {
+ EndLease(context.Context) error
+}
+
+type lease struct {
+ pendingPath string
+ repo *gitalypb.Repository
+ keyer leaseKeyer
+ countErr func(error) error
+}
+
+// EndLease will end the lease by removing the pending lease file and updating
+// the key file with the current lease ID.
+func (l lease) EndLease(ctx context.Context) error {
+ _, err := l.keyer.updateLatest(ctx, l.repo)
+ if err != nil {
+ return err
+ }
+
+ if err := os.Remove(l.pendingPath); err != nil {
+ if os.IsNotExist(err) {
+ return l.countErr(ErrMissingLeaseFile)
+ }
+ return err
+ }
+
+ return nil
+}
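A hedged usage sketch for the new `Cache` type. The helper functions and their arguments are illustrative only; note that `keyPath` requires the gRPC method to be present in the context, which the server's cache middleware is assumed to arrange.

```go
package main

import (
	"context"
	"errors"
	"io"

	"github.com/golang/protobuf/proto"
	"gitlab.com/gitlab-org/gitaly/internal/cache"
	"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

func newCache(cfg config.Cfg) *cache.Cache {
	return cache.New(cfg, config.NewLocator(cfg))
}

// cachedResponse serves a response stream from the disk cache, computing and
// storing it on a miss.
func cachedResponse(ctx context.Context, c *cache.Cache, repo *gitalypb.Repository, req proto.Message, compute func() (io.Reader, error)) (io.ReadCloser, error) {
	stream, err := c.GetStream(ctx, repo, req)
	switch {
	case err == nil:
		return stream, nil // cache hit
	case errors.Is(err, cache.ErrReqNotFound):
		// cache miss: fall through, compute and store the response
	default:
		return nil, err
	}

	resp, err := compute()
	if err != nil {
		return nil, err
	}
	if err := c.PutStream(ctx, repo, req, resp); err != nil {
		return nil, err
	}
	return c.GetStream(ctx, repo, req)
}

// invalidate brackets a repository mutation with a lease so concurrent readers
// see ErrPendingExists instead of stale cache entries.
func invalidate(ctx context.Context, c *cache.Cache, repo *gitalypb.Repository, mutate func() error) error {
	lease, err := c.StartLease(repo)
	if err != nil {
		return err
	}

	mutateErr := mutate()

	// End the lease even when the mutation failed so the repository's cache does
	// not stay pending indefinitely.
	if err := lease.EndLease(ctx); err != nil {
		return err
	}
	return mutateErr
}

func main() {}
```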
diff --git a/internal/cache/cachedb_test.go b/internal/cache/cache_test.go
index 2be2d70f8..fe775b14b 100644
--- a/internal/cache/cachedb_test.go
+++ b/internal/cache/cache_test.go
@@ -1,17 +1,16 @@
-package cache_test
+package cache
import (
"context"
"io"
"io/ioutil"
- "os"
"strings"
"sync"
"testing"
"time"
+ promtest "github.com/prometheus/client_golang/prometheus/testutil"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
@@ -20,31 +19,20 @@ import (
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
-func TestMain(m *testing.M) {
- os.Exit(testMain(m))
-}
-
-func testMain(m *testing.M) int {
- defer testhelper.MustHaveNoChildProcess()
- cleanup := testhelper.Configure()
- defer cleanup()
- return m.Run()
-}
-
func TestStreamDBNaiveKeyer(t *testing.T) {
cfg := testcfg.Build(t)
- testRepo1, _, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "repository-1")
- testRepo2, _, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "repository-2")
+ testRepo1, _, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "repository-1")
+ testRepo2, _, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "repository-2")
- keyer := cache.NewLeaseKeyer(config.NewLocator(cfg))
+ locator := config.NewLocator(cfg)
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
defer cancel()
ctx = testhelper.SetCtxGrpcMethod(ctx, "InfoRefsUploadPack")
- db := cache.NewStreamDB(cache.NewLeaseKeyer(config.NewLocator(cfg)))
+ cache := New(cfg, locator)
req1 := &gitalypb.InfoRefsRequest{
Repository: testRepo1,
@@ -54,12 +42,12 @@ func TestStreamDBNaiveKeyer(t *testing.T) {
}
expectGetMiss := func(req *gitalypb.InfoRefsRequest) {
- _, err := db.GetStream(ctx, req.Repository, req)
- require.Equal(t, cache.ErrReqNotFound, err)
+ _, err := cache.GetStream(ctx, req.Repository, req)
+ require.Equal(t, ErrReqNotFound, err)
}
expectGetHit := func(expectStr string, req *gitalypb.InfoRefsRequest) {
- actualStream, err := db.GetStream(ctx, req.Repository, req)
+ actualStream, err := cache.GetStream(ctx, req.Repository, req)
require.NoError(t, err)
actualBytes, err := ioutil.ReadAll(actualStream)
require.NoError(t, err)
@@ -67,14 +55,14 @@ func TestStreamDBNaiveKeyer(t *testing.T) {
}
invalidationEvent := func(repo *gitalypb.Repository) {
- lease, err := keyer.StartLease(repo)
+ lease, err := cache.StartLease(repo)
require.NoError(t, err)
// imagine repo being modified here
require.NoError(t, lease.EndLease(ctx))
}
storeAndRetrieve := func(expectStr string, req *gitalypb.InfoRefsRequest) {
- require.NoError(t, db.PutStream(ctx, req.Repository, req, strings.NewReader(expectStr)))
+ require.NoError(t, cache.PutStream(ctx, req.Repository, req, strings.NewReader(expectStr)))
expectGetHit(expectStr, req)
}
@@ -95,7 +83,7 @@ func TestStreamDBNaiveKeyer(t *testing.T) {
// store new value for same cache value but at new generation
expectStream2 := "not what you were looking for"
- require.NoError(t, db.PutStream(ctx, req1.Repository, req1, strings.NewReader(expectStream2)))
+ require.NoError(t, cache.PutStream(ctx, req1.Repository, req1, strings.NewReader(expectStream2)))
expectGetHit(expectStream2, req1)
// enabled feature flags affect caching
@@ -106,21 +94,21 @@ func TestStreamDBNaiveKeyer(t *testing.T) {
expectGetHit(expectStream2, req1)
// start critical section without closing
- repo1Lease, err := keyer.StartLease(req1.Repository)
+ repo1Lease, err := cache.StartLease(req1.Repository)
require.NoError(t, err)
// accessing repo cache with open critical section should fail
- _, err = db.GetStream(ctx, req1.Repository, req1)
- require.Equal(t, err, cache.ErrPendingExists)
- err = db.PutStream(ctx, req1.Repository, req1, strings.NewReader(repo1contents))
- require.Equal(t, err, cache.ErrPendingExists)
+ _, err = cache.GetStream(ctx, req1.Repository, req1)
+ require.Equal(t, err, ErrPendingExists)
+ err = cache.PutStream(ctx, req1.Repository, req1, strings.NewReader(repo1contents))
+ require.Equal(t, err, ErrPendingExists)
expectGetHit(repo2contents, req2) // other repo caches should be unaffected
// opening and closing a new critical zone doesn't resolve the issue
invalidationEvent(req1.Repository)
- _, err = db.GetStream(ctx, req1.Repository, req1)
- require.Equal(t, err, cache.ErrPendingExists)
+ _, err = cache.GetStream(ctx, req1.Repository, req1)
+ require.Equal(t, err, ErrPendingExists)
// only completing/removing the pending generation file will allow access
require.NoError(t, repo1Lease.EndLease(ctx))
@@ -128,7 +116,7 @@ func TestStreamDBNaiveKeyer(t *testing.T) {
// creating a lease on a repo that doesn't exist yet should succeed
req1.Repository.RelativePath += "-does-not-exist"
- _, err = keyer.StartLease(req1.Repository)
+ _, err = cache.StartLease(req1.Repository)
require.NoError(t, err)
}
@@ -138,7 +126,8 @@ func TestLoserCount(t *testing.T) {
cfgBuilder := testcfg.NewGitalyCfgBuilder(testcfg.WithStorages("storage-1", "storage-2"))
cfg := cfgBuilder.Build(t)
- db := cache.NewStreamDB(cache.NewLeaseKeyer(config.NewLocator(cfg)))
+ locator := config.NewLocator(cfg)
+ cache := New(cfg, locator)
req := &gitalypb.InfoRefsRequest{
Repository: &gitalypb.Repository{
@@ -156,20 +145,20 @@ func TestLoserCount(t *testing.T) {
// Run streams concurrently for the same repo and request
for _, l := range leashes {
- go func(l chan struct{}) { errQ <- db.PutStream(ctx, req.Repository, req, leashedReader{l, wg}) }(l)
+ go func(l chan struct{}) { errQ <- cache.PutStream(ctx, req.Repository, req, leashedReader{l, wg}) }(l)
l <- struct{}{}
}
wg.Wait()
- start := cache.ExportMockLoserBytes.Count()
+ start := int(promtest.ToFloat64(cache.bytesLoserTotals))
for _, l := range leashes {
close(l)
require.NoError(t, <-errQ)
}
- require.Equal(t, start+len(leashes)-1, cache.ExportMockLoserBytes.Count())
+ require.Equal(t, start+len(leashes)-1, int(promtest.ToFloat64(cache.bytesLoserTotals)))
}
type leashedReader struct {
diff --git a/internal/cache/cachedb.go b/internal/cache/cachedb.go
deleted file mode 100644
index a70724596..000000000
--- a/internal/cache/cachedb.go
+++ /dev/null
@@ -1,157 +0,0 @@
-package cache
-
-import (
- "context"
- "errors"
- "io"
- "os"
- "path/filepath"
- "sync"
-
- "github.com/golang/protobuf/proto"
- "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
- "gitlab.com/gitlab-org/gitaly/internal/safe"
- "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
-)
-
-// maps a cache path to the number of active writers
-type activeFiles struct {
- *sync.Mutex
- m map[string]int
-}
-
-// trackFile returns a function that indicates if the current
-// writing of a file is the last known one, which
-// would indicate the current write is the "winner".
-func (af activeFiles) trackFile(path string) func() bool {
- af.Lock()
- defer af.Unlock()
-
- af.m[path]++
-
- return func() bool {
- af.Lock()
- defer af.Unlock()
-
- af.m[path]--
-
- winner := af.m[path] == 0
- if winner {
- delete(af.m, path) // reclaim memory
- }
-
- return winner
- }
-}
-
-// StreamDB stores and retrieves byte streams for repository related RPCs
-type StreamDB struct {
- ck Keyer
- af activeFiles
-}
-
-// NewStreamDB will open the stream database at the specified file path.
-func NewStreamDB(ck Keyer) *StreamDB {
- return &StreamDB{
- ck: ck,
- af: activeFiles{
- Mutex: &sync.Mutex{},
- m: map[string]int{},
- },
- }
-}
-
-// ErrReqNotFound indicates the request does not exist within the repo digest
-var ErrReqNotFound = errors.New("request digest not found within repo namespace")
-
-// GetStream will fetch the cached stream for a request. It is the
-// responsibility of the caller to close the stream when done.
-func (sdb *StreamDB) GetStream(ctx context.Context, repo *gitalypb.Repository, req proto.Message) (_ io.ReadCloser, err error) {
- defer func() {
- if err != nil {
- countMiss()
- }
- }()
-
- countRequest()
-
- respPath, err := sdb.ck.KeyPath(ctx, repo, req)
- switch {
- case os.IsNotExist(err):
- return nil, ErrReqNotFound
- case err == nil:
- break
- default:
- return nil, err
- }
-
- ctxlogrus.Extract(ctx).
- WithField("stream_path", respPath).
- Info("getting stream")
-
- respF, err := os.Open(respPath)
- switch {
- case os.IsNotExist(err):
- return nil, ErrReqNotFound
- case err == nil:
- break
- default:
- return nil, err
- }
-
- return instrumentedReadCloser{respF}, nil
-}
-
-type instrumentedReadCloser struct {
- io.ReadCloser
-}
-
-func (irc instrumentedReadCloser) Read(p []byte) (n int, err error) {
- n, err = irc.ReadCloser.Read(p)
- countReadBytes(float64(n))
- return
-}
-
-// PutStream will store a stream in a repo-namespace keyed by the digest of the
-// request protobuf message.
-func (sdb *StreamDB) PutStream(ctx context.Context, repo *gitalypb.Repository, req proto.Message, src io.Reader) error {
- reqPath, err := sdb.ck.KeyPath(ctx, repo, req)
- if err != nil {
- return err
- }
-
- ctxlogrus.Extract(ctx).
- WithField("stream_path", reqPath).
- Info("putting stream")
-
- var n int64
- isWinner := sdb.af.trackFile(reqPath)
- defer func() {
- if !isWinner() {
- countLoserBytes(float64(n))
- }
- }()
-
- if err := os.MkdirAll(filepath.Dir(reqPath), 0755); err != nil {
- return err
- }
-
- sf, err := safe.CreateFileWriter(reqPath)
- if err != nil {
- return err
- }
- defer sf.Close()
-
- n, err = io.Copy(sf, src)
- if err != nil {
- return err
- }
- countWriteBytes(float64(n))
-
- if err := sf.Commit(); err != nil {
- errTotal.WithLabelValues("ErrSafefileCommit").Inc()
- return err
- }
-
- return nil
-}
diff --git a/internal/cache/export_test.go b/internal/cache/export_test.go
deleted file mode 100644
index 68d39d738..000000000
--- a/internal/cache/export_test.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package cache
-
-import "sync"
-
-var (
- ExportMockRemovalCounter = &MockCounter{}
- ExportMockCheckCounter = &MockCounter{}
- ExportMockLoserBytes = &MockCounter{}
-
- ExportDisableMoveAndClear = &disableMoveAndClear
- ExportDisableWalker = &disableWalker
-)
-
-// MockCounter is a mocked counter used for the testing.
-type MockCounter struct {
- sync.RWMutex
- count int
-}
-
-// Add increments counter on the n.
-func (mc *MockCounter) Add(n int) {
- mc.Lock()
- mc.count += n
- mc.Unlock()
-}
-
-// Count returns total value of the increments.
-func (mc *MockCounter) Count() int {
- mc.RLock()
- defer mc.RUnlock()
- return mc.count
-}
-
-// Reset resets the counter to zero.
-func (mc *MockCounter) Reset() {
- mc.Lock()
- mc.count = 0
- mc.Unlock()
-}
-
-func init() {
- // override counter functions with our mocked version
- countWalkRemoval = func() { ExportMockRemovalCounter.Add(1) }
- countWalkCheck = func() { ExportMockCheckCounter.Add(1) }
- countLoserBytes = func(n float64) { ExportMockLoserBytes.Add(int(n)) }
-}
diff --git a/internal/cache/keyer.go b/internal/cache/keyer.go
index 9aeb2e5e9..78ef2b6ed 100644
--- a/internal/cache/keyer.go
+++ b/internal/cache/keyer.go
@@ -39,56 +39,24 @@ var (
ErrPendingExists = errors.New("one or more cache generations are pending transition for the current repository")
)
-// Keyer abstracts how to obtain a unique file path key for a request at a
-// specific generation of the cache. The key path will magically update as new
-// critical sections are declared. An error will be returned if the repo's cache
-// has any open critical sections.
-type Keyer interface {
- // KeyPath will return a key filepath for the provided request. If an error
- // is returned, the cache should not be used.
- KeyPath(context.Context, *gitalypb.Repository, proto.Message) (string, error)
-}
-
-// LeaseKeyer will try to return a key path for the current generation of
+// leaseKeyer will try to return a key path for the current generation of
// the repo's cache. It uses a strategy that avoids file locks in favor of
-// atomically created/renamed files. Read more about LeaseKeyer's design:
+// atomically created/renamed files. Read more about leaseKeyer's design:
// https://gitlab.com/gitlab-org/gitaly/issues/1745
-type LeaseKeyer struct {
- locator storage.Locator
+type leaseKeyer struct {
+ locator storage.Locator
+ countErr func(error) error
}
-// NewLeaseKeyer initializes a new LeaseKeyer
-func NewLeaseKeyer(locator storage.Locator) LeaseKeyer {
- return LeaseKeyer{
- locator: locator,
+// newLeaseKeyer initializes a new leaseKeyer
+func newLeaseKeyer(locator storage.Locator, countErr func(error) error) leaseKeyer {
+ return leaseKeyer{
+ locator: locator,
+ countErr: countErr,
}
}
-type lease struct {
- pendingPath string
- repo *gitalypb.Repository
- keyer LeaseKeyer
-}
-
-// EndLease will end the lease by removing the pending lease file and updating
-// the key file with the current lease ID.
-func (l lease) EndLease(ctx context.Context) error {
- _, err := l.keyer.updateLatest(ctx, l.repo)
- if err != nil {
- return err
- }
-
- if err := os.Remove(l.pendingPath); err != nil {
- if os.IsNotExist(err) {
- return countErr(ErrMissingLeaseFile)
- }
- return err
- }
-
- return nil
-}
-
-func (keyer LeaseKeyer) updateLatest(ctx context.Context, repo *gitalypb.Repository) (string, error) {
+func (keyer leaseKeyer) updateLatest(ctx context.Context, repo *gitalypb.Repository) (string, error) {
repoStatePath, err := keyer.getRepoStatePath(repo)
if err != nil {
return "", err
@@ -125,35 +93,13 @@ func (keyer LeaseKeyer) updateLatest(ctx context.Context, repo *gitalypb.Reposit
return nextGenID, nil
}
-// LeaseEnder allows the caller to indicate when a lease is no longer needed
-type LeaseEnder interface {
- EndLease(context.Context) error
-}
-
-// StartLease will mark the repository as being in an indeterministic state.
-// This is typically used when modifying the repo, since the cache is not
-// stable until after the modification is complete. A lease object will be
-// returned that allows the caller to signal the end of the lease.
-func (keyer LeaseKeyer) StartLease(repo *gitalypb.Repository) (LeaseEnder, error) {
- pendingPath, err := keyer.newPendingLease(repo)
- if err != nil {
- return lease{}, err
- }
-
- return lease{
- pendingPath: pendingPath,
- repo: repo,
- keyer: keyer,
- }, nil
-}
-
// staleAge is how old we consider a pending file to be stale before removal
const staleAge = time.Hour
-// KeyPath will attempt to return the unique keypath for a request in the
+// keyPath will attempt to return the unique keypath for a request in the
// specified repo for the current generation. The context must contain the gRPC
// method in its values.
-func (keyer LeaseKeyer) KeyPath(ctx context.Context, repo *gitalypb.Repository, req proto.Message) (string, error) {
+func (keyer leaseKeyer) keyPath(ctx context.Context, repo *gitalypb.Repository, req proto.Message) (string, error) {
pending, err := keyer.currentLeases(repo)
if err != nil {
return "", err
@@ -179,7 +125,7 @@ func (keyer LeaseKeyer) KeyPath(ctx context.Context, repo *gitalypb.Repository,
}
if anyValidPending {
- return "", countErr(ErrPendingExists)
+ return "", keyer.countErr(ErrPendingExists)
}
genID, err := keyer.currentGenID(ctx, repo)
@@ -207,7 +153,7 @@ func radixPath(root, key string) (string, error) {
return filepath.Join(root, key[0:2], key[2:]), nil
}
-func (keyer LeaseKeyer) newPendingLease(repo *gitalypb.Repository) (string, error) {
+func (keyer leaseKeyer) newPendingLease(repo *gitalypb.Repository) (string, error) {
repoStatePath, err := keyer.getRepoStatePath(repo)
if err != nil {
return "", err
@@ -237,7 +183,7 @@ func (keyer LeaseKeyer) newPendingLease(repo *gitalypb.Repository) (string, erro
}
// cacheDir is $STORAGE/+gitaly/cache
-func (keyer LeaseKeyer) cacheDir(repo *gitalypb.Repository) (string, error) {
+func (keyer leaseKeyer) cacheDir(repo *gitalypb.Repository) (string, error) {
storagePath, err := keyer.locator.GetStorageByName(repo.StorageName)
if err != nil {
return "", fmt.Errorf("storage not found for %v", repo)
@@ -246,7 +192,7 @@ func (keyer LeaseKeyer) cacheDir(repo *gitalypb.Repository) (string, error) {
return tempdir.AppendCacheDir(storagePath), nil
}
-func (keyer LeaseKeyer) getRepoStatePath(repo *gitalypb.Repository) (string, error) {
+func (keyer leaseKeyer) getRepoStatePath(repo *gitalypb.Repository) (string, error) {
storagePath, err := keyer.locator.GetStorageByName(repo.StorageName)
if err != nil {
return "", fmt.Errorf("getRepoStatePath: storage not found for %v", repo)
@@ -266,7 +212,7 @@ func (keyer LeaseKeyer) getRepoStatePath(repo *gitalypb.Repository) (string, err
return filepath.Join(stateDir, relativePath), nil
}
-func (keyer LeaseKeyer) currentLeases(repo *gitalypb.Repository) ([]os.FileInfo, error) {
+func (keyer leaseKeyer) currentLeases(repo *gitalypb.Repository) ([]os.FileInfo, error) {
repoStatePath, err := keyer.getRepoStatePath(repo)
if err != nil {
return nil, err
@@ -286,7 +232,7 @@ func (keyer LeaseKeyer) currentLeases(repo *gitalypb.Repository) ([]os.FileInfo,
return pendings, nil
}
-func (keyer LeaseKeyer) currentGenID(ctx context.Context, repo *gitalypb.Repository) (string, error) {
+func (keyer leaseKeyer) currentGenID(ctx context.Context, repo *gitalypb.Repository) (string, error) {
repoStatePath, err := keyer.getRepoStatePath(repo)
if err != nil {
return "", err
diff --git a/internal/cache/prometheus.go b/internal/cache/prometheus.go
deleted file mode 100644
index f277b40b2..000000000
--- a/internal/cache/prometheus.go
+++ /dev/null
@@ -1,99 +0,0 @@
-package cache
-
-import (
- "github.com/prometheus/client_golang/prometheus"
- "github.com/prometheus/client_golang/prometheus/promauto"
-)
-
-var (
- requestTotals = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_requests_total",
- Help: "Total number of disk cache requests",
- },
- )
- missTotals = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_miss_total",
- Help: "Total number of disk cache misses",
- },
- )
- bytesStoredtotals = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_bytes_stored_total",
- Help: "Total number of disk cache bytes stored",
- },
- )
- bytesFetchedtotals = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_bytes_fetched_total",
- Help: "Total number of disk cache bytes fetched",
- },
- )
- bytesLoserTotals = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_bytes_loser_total",
- Help: "Total number of disk cache bytes from losing writes",
- },
- )
- errTotal = promauto.NewCounterVec(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_errors_total",
- Help: "Total number of errors encountered by disk cache",
- },
- []string{"error"},
- )
- walkerCheckTotal = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_walker_check_total",
- Help: "Total number of events during diskcache filesystem walks",
- },
- )
- walkerRemovalTotal = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_walker_removal_total",
- Help: "Total number of events during diskcache filesystem walks",
- },
- )
- walkerErrorTotal = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_walker_error_total",
- Help: "Total number of errors during diskcache filesystem walks",
- },
- )
- walkerEmptyDirTotal = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_walker_empty_dir_total",
- Help: "Total number of empty directories encountered",
- },
- )
- walkerEmptyDirRemovalTotal = promauto.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_diskcache_walker_empty_dir_removal_total",
- Help: "Total number of empty directories removed",
- },
- )
-)
-
-func countErr(err error) error {
- switch err {
- case ErrMissingLeaseFile:
- errTotal.WithLabelValues("ErrMissingLeaseFile").Inc()
- case ErrPendingExists:
- errTotal.WithLabelValues("ErrPendingExists").Inc()
- }
- return err
-}
-
-var (
- countRequest = func() { requestTotals.Inc() }
- countMiss = func() { missTotals.Inc() }
- countWriteBytes = func(n float64) { bytesStoredtotals.Add(n) }
- countReadBytes = func(n float64) { bytesFetchedtotals.Add(n) }
- countLoserBytes = func(n float64) { bytesLoserTotals.Add(n) }
- countWalkRemoval = func() { walkerRemovalTotal.Inc() }
- countWalkCheck = func() { walkerCheckTotal.Inc() }
- countWalkError = func() { walkerErrorTotal.Inc() }
- countEmptyDir = func() { walkerEmptyDirTotal.Inc() }
- countEmptyDirRemoval = func() { walkerEmptyDirRemovalTotal.Inc() }
-)
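With prometheus.go deleted, the package-level promauto metrics above evidently move onto the cache instance (the walker changes below increment fields such as c.walkerRemovalTotal). Here is a hedged sketch of instance-scoped counters built with the same client_golang calls; the struct and constructor names are assumptions for illustration, not the real Cache fields:

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

// cacheMetrics shows per-instance counters in place of the deleted
// package-level promauto globals; field names are illustrative.
type cacheMetrics struct {
	requestTotals prometheus.Counter
	errTotal      *prometheus.CounterVec
}

func newCacheMetrics() cacheMetrics {
	return cacheMetrics{
		requestTotals: prometheus.NewCounter(prometheus.CounterOpts{
			Name: "gitaly_diskcache_requests_total",
			Help: "Total number of disk cache requests",
		}),
		errTotal: prometheus.NewCounterVec(prometheus.CounterOpts{
			Name: "gitaly_diskcache_errors_total",
			Help: "Total number of errors encountered by disk cache",
		}, []string{"error"}),
	}
}

func main() {
	m := newCacheMetrics()
	m.requestTotals.Inc()
	m.errTotal.WithLabelValues("ErrPendingExists").Inc()
	fmt.Println("counters incremented")
}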
diff --git a/internal/cache/testhelper_test.go b/internal/cache/testhelper_test.go
new file mode 100644
index 000000000..4302cc22a
--- /dev/null
+++ b/internal/cache/testhelper_test.go
@@ -0,0 +1,19 @@
+package cache
+
+import (
+ "os"
+ "testing"
+
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+)
+
+func TestMain(m *testing.M) {
+ os.Exit(testMain(m))
+}
+
+func testMain(m *testing.M) int {
+ defer testhelper.MustHaveNoChildProcess()
+ cleanup := testhelper.Configure()
+ defer cleanup()
+ return m.Run()
+}
diff --git a/internal/cache/walker.go b/internal/cache/walker.go
index 503379192..43c07e9a1 100644
--- a/internal/cache/walker.go
+++ b/internal/cache/walker.go
@@ -13,29 +13,28 @@ import (
"github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/internal/dontpanic"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/log"
"gitlab.com/gitlab-org/gitaly/internal/tempdir"
)
-func logWalkErr(err error, path, msg string) {
- countWalkError()
+func (c *Cache) logWalkErr(err error, path, msg string) {
+ c.walkerErrorTotal.Inc()
log.Default().
WithField("path", path).
WithError(err).
Warn(msg)
}
-func cleanWalk(path string) error {
+func (c *Cache) cleanWalk(path string) error {
defer time.Sleep(100 * time.Microsecond) // relieve pressure
- countWalkCheck()
+ c.walkerCheckTotal.Inc()
entries, err := ioutil.ReadDir(path)
if err != nil {
if os.IsNotExist(err) {
return nil
}
- logWalkErr(err, path, "unable to stat directory")
+ c.logWalkErr(err, path, "unable to stat directory")
return err
}
@@ -43,13 +42,13 @@ func cleanWalk(path string) error {
ePath := filepath.Join(path, e.Name())
if e.IsDir() {
- if err := cleanWalk(ePath); err != nil {
+ if err := c.cleanWalk(ePath); err != nil {
return err
}
continue
}
- countWalkCheck()
+ c.walkerCheckTotal.Inc()
if time.Since(e.ModTime()) < staleAge {
continue // still fresh
}
@@ -59,10 +58,10 @@ func cleanWalk(path string) error {
if os.IsNotExist(err) {
continue
}
- logWalkErr(err, ePath, "unable to remove file")
+ c.logWalkErr(err, ePath, "unable to remove file")
return err
}
- countWalkRemoval()
+ c.walkerRemovalTotal.Inc()
}
files, err := ioutil.ReadDir(path)
@@ -70,21 +69,21 @@ func cleanWalk(path string) error {
if os.IsNotExist(err) {
return nil
}
- logWalkErr(err, path, "unable to stat directory after walk")
+ c.logWalkErr(err, path, "unable to stat directory after walk")
return err
}
if len(files) == 0 {
- countEmptyDir()
+ c.walkerEmptyDirTotal.Inc()
if err := os.Remove(path); err != nil {
if os.IsNotExist(err) {
return nil
}
- logWalkErr(err, path, "unable to remove empty directory")
+ c.logWalkErr(err, path, "unable to remove empty directory")
return err
}
- countEmptyDirRemoval()
- countWalkRemoval()
+ c.walkerEmptyDirRemovalTotal.Inc()
+ c.walkerRemovalTotal.Inc()
}
return nil
@@ -92,37 +91,32 @@ func cleanWalk(path string) error {
const cleanWalkFrequency = 10 * time.Minute
-func walkLoop(walkPath string) {
+func (c *Cache) walkLoop(walkPath string) {
logger := logrus.WithField("path", walkPath)
logger.Infof("Starting file walker for %s", walkPath)
walkTick := time.NewTicker(cleanWalkFrequency)
dontpanic.GoForever(time.Minute, func() {
- if err := cleanWalk(walkPath); err != nil {
+ if err := c.cleanWalk(walkPath); err != nil {
logger.Error(err)
}
<-walkTick.C
})
}
-func startCleanWalker(storagePath string) {
- if disableWalker {
+func (c *Cache) startCleanWalker(storagePath string) {
+ if c.cacheConfig.disableWalker {
return
}
- walkLoop(tempdir.AppendCacheDir(storagePath))
- walkLoop(tempdir.AppendStateDir(storagePath))
+ c.walkLoop(tempdir.AppendCacheDir(storagePath))
+ c.walkLoop(tempdir.AppendStateDir(storagePath))
}
-var (
- disableMoveAndClear bool // only used to disable move and clear in tests
- disableWalker bool // only used to disable object walker in tests
-)
-
// moveAndClear will move the cache to the storage location's
// temporary folder, and then remove its contents asynchronously
-func moveAndClear(storagePath string) error {
- if disableMoveAndClear {
+func (c *Cache) moveAndClear(storagePath string) error {
+ if c.cacheConfig.disableMoveAndClear {
return nil
}
@@ -165,19 +159,20 @@ func moveAndClear(storagePath string) error {
return nil
}
-func init() {
- config.RegisterHook(func(cfg *config.Cfg) error {
- pathSet := map[string]struct{}{}
- for _, storage := range cfg.Storages {
- pathSet[storage.Path] = struct{}{}
- }
+// StartWalkers starts the cache walker goroutines. Before starting them, it tries to clean up
+// any preexisting cache directories.
+func (c *Cache) StartWalkers() error {
+ pathSet := map[string]struct{}{}
+ for _, storage := range c.storages {
+ pathSet[storage.Path] = struct{}{}
+ }
- for sPath := range pathSet {
- if err := moveAndClear(sPath); err != nil {
- return err
- }
- startCleanWalker(sPath)
+ for sPath := range pathSet {
+ if err := c.moveAndClear(sPath); err != nil {
+ return err
}
- return nil
- })
+ c.startCleanWalker(sPath)
+ }
+
+ return nil
}
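StartWalkers replaces the former config.RegisterHook: the caller now constructs a Cache and starts the stale-file walkers explicitly. As a rough, self-contained illustration of what a single walker pass does, here is a simplified stand-in for cleanWalk; it is not the Gitaly implementation and omits the metrics, logging, and pressure-relieving sleep seen above:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"time"
)

const staleAge = time.Hour

// cleanWalk removes regular files older than staleAge under root and then
// prunes directories that ended up empty, similar in spirit to the method above.
func cleanWalk(root string) error {
	entries, err := os.ReadDir(root)
	if err != nil {
		if os.IsNotExist(err) {
			return nil
		}
		return err
	}
	for _, e := range entries {
		p := filepath.Join(root, e.Name())
		if e.IsDir() {
			if err := cleanWalk(p); err != nil {
				return err
			}
			continue
		}
		info, err := e.Info()
		if err != nil {
			continue
		}
		if time.Since(info.ModTime()) >= staleAge {
			_ = os.Remove(p) // stale entry, best-effort removal
		}
	}
	if rest, err := os.ReadDir(root); err == nil && len(rest) == 0 {
		_ = os.Remove(root) // directory is empty after the walk
	}
	return nil
}

func main() {
	fmt.Println(cleanWalk("/path/that/does/not/exist")) // prints <nil>
}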
diff --git a/internal/cache/walker_internal_test.go b/internal/cache/walker_internal_test.go
deleted file mode 100644
index a09d75563..000000000
--- a/internal/cache/walker_internal_test.go
+++ /dev/null
@@ -1,64 +0,0 @@
-package cache
-
-import (
- "io/ioutil"
- "os"
- "os/exec"
- "path/filepath"
- "strings"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper"
-)
-
-func TestCleanWalkDirNotExists(t *testing.T) {
- err := cleanWalk("/path/that/does/not/exist")
- assert.NoError(t, err, "cleanWalk returned an error for a non existing directory")
-}
-
-func TestCleanWalkEmptyDirs(t *testing.T) {
- tmp := testhelper.TempDir(t)
-
- for _, tt := range []struct {
- path string
- stale bool
- }{
- {path: "a/b/c/"},
- {path: "a/b/c/1", stale: true},
- {path: "a/b/c/2", stale: true},
- {path: "a/b/d/"},
- {path: "e/"},
- {path: "e/1"},
- {path: "f/"},
- } {
- p := filepath.Join(tmp, tt.path)
- if strings.HasSuffix(tt.path, "/") {
- require.NoError(t, os.MkdirAll(p, 0755))
- } else {
- require.NoError(t, ioutil.WriteFile(p, nil, 0655))
- if tt.stale {
- require.NoError(t, os.Chtimes(p, time.Now(), time.Now().Add(-time.Hour)))
- }
- }
- }
-
- require.NoError(t, cleanWalk(tmp))
-
- actual := findFiles(t, tmp)
- expect := `.
-./e
-./e/1
-`
- require.Equal(t, expect, actual)
-}
-
-func findFiles(t testing.TB, path string) string {
- cmd := exec.Command("find", ".")
- cmd.Dir = path
- out, err := cmd.Output()
- require.NoError(t, err)
- return string(out)
-}
diff --git a/internal/cache/walker_test.go b/internal/cache/walker_test.go
index 1c7139ba0..fae4df576 100644
--- a/internal/cache/walker_test.go
+++ b/internal/cache/walker_test.go
@@ -1,15 +1,18 @@
-package cache_test
+package cache
import (
"io/ioutil"
"os"
+ "os/exec"
"path/filepath"
+ "strings"
"testing"
"time"
+ promtest "github.com/prometheus/client_golang/prometheus/testutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/cache"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/tempdir"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
@@ -20,8 +23,6 @@ func TestDiskCacheObjectWalker(t *testing.T) {
var shouldExist, shouldNotExist []string
- cache.ExportMockRemovalCounter.Reset()
-
for _, tt := range []struct {
name string
age time.Duration
@@ -50,22 +51,18 @@ func TestDiskCacheObjectWalker(t *testing.T) {
}
}
- // disable the initial move-and-clear function since we are only
- // evaluating the walker
- *cache.ExportDisableMoveAndClear = true
- defer func() { *cache.ExportDisableMoveAndClear = false }()
-
- require.NoError(t, cfg.Validate()) // triggers walker
+ locator := config.NewLocator(cfg)
+ cache := New(cfg, locator, withDisabledMoveAndClear())
+ require.NoError(t, cache.StartWalkers())
- pollCountersUntil(t, 4)
+ pollCountersUntil(t, cache, 4)
for _, p := range shouldExist {
assert.FileExists(t, p)
}
for _, p := range shouldNotExist {
- _, err := os.Stat(p)
- require.True(t, os.IsNotExist(err), "expected %s not to exist", p)
+ require.NoFileExists(t, p)
}
}
@@ -78,23 +75,18 @@ func TestDiskCacheInitialClear(t *testing.T) {
require.NoError(t, os.MkdirAll(filepath.Dir(canary), 0755))
require.NoError(t, ioutil.WriteFile(canary, []byte("chirp chirp"), 0755))
- // disable the background walkers since we are only
- // evaluating the initial move-and-clear function
- *cache.ExportDisableWalker = true
- defer func() { *cache.ExportDisableWalker = false }()
-
- // validation will run cache walker hook which synchronously
- // runs the move-and-clear function
- require.NoError(t, cfg.Validate())
+ locator := config.NewLocator(cfg)
+ cache := New(cfg, locator, withDisabledWalker())
+ require.NoError(t, cache.StartWalkers())
- testhelper.AssertPathNotExists(t, canary)
+ require.NoFileExists(t, canary)
}
-func pollCountersUntil(t testing.TB, expectRemovals int) {
+func pollCountersUntil(t testing.TB, cache *Cache, expectRemovals int) {
// poll injected mock prometheus counters until expected events occur
timeout := time.After(time.Second)
for {
- count := cache.ExportMockRemovalCounter.Count()
+ count := int(promtest.ToFloat64(cache.walkerRemovalTotal))
select {
case <-timeout:
t.Fatalf(
@@ -110,3 +102,59 @@ func pollCountersUntil(t testing.TB, expectRemovals int) {
time.Sleep(time.Millisecond)
}
}
+
+func TestCleanWalkDirNotExists(t *testing.T) {
+ cfg := testcfg.Build(t)
+
+ cache := New(cfg, config.NewLocator(cfg))
+
+ err := cache.cleanWalk("/path/that/does/not/exist")
+ assert.NoError(t, err, "cleanWalk returned an error for a non existing directory")
+}
+
+func TestCleanWalkEmptyDirs(t *testing.T) {
+ tmp := testhelper.TempDir(t)
+
+ for _, tt := range []struct {
+ path string
+ stale bool
+ }{
+ {path: "a/b/c/"},
+ {path: "a/b/c/1", stale: true},
+ {path: "a/b/c/2", stale: true},
+ {path: "a/b/d/"},
+ {path: "e/"},
+ {path: "e/1"},
+ {path: "f/"},
+ } {
+ p := filepath.Join(tmp, tt.path)
+ if strings.HasSuffix(tt.path, "/") {
+ require.NoError(t, os.MkdirAll(p, 0755))
+ } else {
+ require.NoError(t, ioutil.WriteFile(p, nil, 0655))
+ if tt.stale {
+ require.NoError(t, os.Chtimes(p, time.Now(), time.Now().Add(-time.Hour)))
+ }
+ }
+ }
+
+ cfg := testcfg.Build(t)
+ cache := New(cfg, config.NewLocator(cfg))
+
+ require.NoError(t, cache.cleanWalk(tmp))
+
+ actual := findFiles(t, tmp)
+ expect := `.
+./e
+./e/1
+`
+ require.Equal(t, expect, actual)
+}
+
+func findFiles(t testing.TB, path string) string {
+ cmd := exec.Command("find", ".")
+ cmd.Dir = path
+ out, err := cmd.Output()
+ require.NoError(t, err)
+ return string(out)
+}
diff --git a/internal/cgroups/v1_linux_test.go b/internal/cgroups/v1_linux_test.go
index 5fcadfc64..9903c202b 100644
--- a/internal/cgroups/v1_linux_test.go
+++ b/internal/cgroups/v1_linux_test.go
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"hash/crc32"
- "io/ioutil"
"os"
"os/exec"
"path/filepath"
@@ -15,6 +14,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config/cgroups"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
)
func defaultCgroupsConfig() cgroups.Config {
@@ -123,8 +123,5 @@ func readCgroupFile(t *testing.T, path string) []byte {
// so we can read the file.
require.NoError(t, os.Chmod(path, 0666))
- content, err := ioutil.ReadFile(path)
- require.NoError(t, err)
-
- return content
+ return testhelper.MustReadFile(t, path)
}
diff --git a/internal/command/command.go b/internal/command/command.go
index 3d4e361c8..5dd4813cc 100644
--- a/internal/command/command.go
+++ b/internal/command/command.go
@@ -1,7 +1,6 @@
package command
import (
- "bufio"
"context"
"errors"
"fmt"
@@ -17,14 +16,9 @@ import (
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"github.com/opentracing/opentracing-go"
"github.com/sirupsen/logrus"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/labkit/tracing"
)
-const (
- escapedNewline = `\n`
-)
-
// GitEnv contains the ENV variables for git commands
var GitEnv = []string{
// Force english locale for consistency on the output messages
@@ -60,11 +54,11 @@ var exportedEnvVars = []string{
var envInjector = tracing.NewEnvInjector()
const (
- // MaxStderrBytes is at most how many bytes will be written to stderr
- MaxStderrBytes = 10000 // 10kb
- // StderrBufferSize is the buffer size we use for the reader that reads from
- // the stderr stream of the command
- StderrBufferSize = 4096
+ // maxStderrBytes is the maximum number of stderr bytes that will be captured
+ maxStderrBytes = 10000 // 10kb
+ // maxStderrLineLength is the maximum number of bytes retained from a single
+ // stderr line. Lines exceeding this limit are truncated.
+ maxStderrLineLength = 4096
)
// Command encapsulates a running exec.Cmd. The embedded exec.Cmd is
@@ -73,8 +67,7 @@ const (
type Command struct {
reader io.Reader
writer io.WriteCloser
- stderrCloser io.WriteCloser
- stderrDone chan struct{}
+ stderrBuffer *stderrBuffer
cmd *exec.Cmd
context context.Context
startTime time.Time
@@ -137,18 +130,6 @@ func WaitAllDone() {
type contextWithoutDonePanic string
-// noopWriteCloser has a noop Close(). The reason for this is so we can close any WriteClosers that get
-// passed into writeLines. We need this for WriteClosers such as the Logrus writer, which has a
-// goroutine that is stopped by the runtime https://github.com/sirupsen/logrus/blob/master/writer.go#L51.
-// Unless we explicitly close it, go test will complain that logs are being written to after the Test exits.
-type noopWriteCloser struct {
- io.Writer
-}
-
-func (n *noopWriteCloser) Close() error {
- return nil
-}
-
// New creates a Command from an exec.Cmd. On success, the Command
// contains a running subprocess. When ctx is canceled the embedded
// process will be terminated and reaped automatically.
@@ -167,7 +148,7 @@ func New(ctx context.Context, cmd *exec.Cmd, stdin io.Reader, stdout, stderr io.
span, ctx := opentracing.StartSpanFromContext(
ctx,
cmd.Path,
- opentracing.Tag{"args", strings.Join(cmd.Args, " ")},
+ opentracing.Tag{Key: "args", Value: strings.Join(cmd.Args, " ")},
)
putToken, err := getSpawnToken(ctx)
@@ -186,11 +167,10 @@ func New(ctx context.Context, cmd *exec.Cmd, stdin io.Reader, stdout, stderr io.
}()
command := &Command{
- cmd: cmd,
- startTime: time.Now(),
- context: ctx,
- stderrDone: make(chan struct{}),
- span: span,
+ cmd: cmd,
+ startTime: time.Now(),
+ context: ctx,
+ span: span,
}
// Explicitly set the environment for the command
@@ -230,14 +210,15 @@ func New(ctx context.Context, cmd *exec.Cmd, stdin io.Reader, stdout, stderr io.
}
if stderr != nil {
- command.stderrCloser = &noopWriteCloser{stderr}
- close(command.stderrDone)
+ cmd.Stderr = stderr
} else {
- command.stderrCloser = escapeNewlineWriter(ctxlogrus.Extract(ctx).WriterLevel(logrus.ErrorLevel), command.stderrDone, MaxStderrBytes)
+ command.stderrBuffer, err = newStderrBuffer(maxStderrBytes, maxStderrLineLength, []byte("\n"))
+ if err != nil {
+ return nil, fmt.Errorf("GitCommand: failed to create stderr buffer: %v", err)
+ }
+ cmd.Stderr = command.stderrBuffer
}
- cmd.Stderr = command.stderrCloser
-
if err := cmd.Start(); err != nil {
return nil, fmt.Errorf("GitCommand: start %v: %v", cmd.Args, err)
}
@@ -280,71 +261,6 @@ func AllowedEnvironment(envs []string) []string {
return filtered
}
-func escapeNewlineWriter(outbound io.WriteCloser, done chan struct{}, maxBytes int) io.WriteCloser {
- r, w := io.Pipe()
-
- go writeLines(outbound, r, done, maxBytes)
-
- return w
-}
-
-func writeLines(writer io.WriteCloser, reader io.Reader, done chan struct{}, maxBytes int) {
- var bytesWritten int
-
- bufReader := bufio.NewReaderSize(reader, StderrBufferSize)
-
- var err error
- var b []byte
- var isPrefix, discardRestOfLine bool
-
- for err == nil {
- b, isPrefix, err = bufReader.ReadLine()
-
- if discardRestOfLine {
- _, _ = ioutil.Discard.Write(b)
- // if isPrefix = false, that means the reader has gotten to the end
- // of the line. We want to read the first chunk of the next line
- if !isPrefix {
- discardRestOfLine = false
- }
- continue
- }
-
- // if we've reached the max, discard
- if bytesWritten+len(escapedNewline) >= maxBytes {
- _, _ = ioutil.Discard.Write(b)
- continue
- }
-
- // only write up to the max
- if len(b)+bytesWritten+len(escapedNewline) >= maxBytes {
- b = b[:maxBytes-bytesWritten-len(escapedNewline)]
- }
-
- // prepend an escaped newline
- if bytesWritten > 0 {
- b = append([]byte(escapedNewline), b...)
- }
-
- n, _ := writer.Write(b)
- bytesWritten += n
-
- // if isPrefix, it means the line is too long so we want to discard the rest
- if isPrefix {
- discardRestOfLine = true
- }
- }
-
- // read the rest so the command doesn't get blocked
- if err != io.EOF {
- logrus.WithError(err).Error("error while reading from Writer")
- io.Copy(ioutil.Discard, reader)
- }
-
- writer.Close()
- done <- struct{}{}
-}
-
// This function should never be called directly, use Wait().
func (c *Command) wait() {
if c.writer != nil {
@@ -359,21 +275,9 @@ func (c *Command) wait() {
c.waitError = c.cmd.Wait()
- exitCode := 0
- if c.waitError != nil {
- if exitStatus, ok := ExitStatus(c.waitError); ok {
- exitCode = exitStatus
- }
- }
-
inFlightCommandGauge.Dec()
- c.logProcessComplete(c.context, exitCode)
- if w := c.stderrCloser; w != nil {
- w.Close()
- }
-
- <-c.stderrDone
+ c.logProcessComplete()
}
// ExitStatus will return the exit-code from an error returned by Wait().
@@ -391,7 +295,15 @@ func ExitStatus(err error) (int, bool) {
return waitStatus.ExitStatus(), true
}
-func (c *Command) logProcessComplete(ctx context.Context, exitCode int) {
+func (c *Command) logProcessComplete() {
+ exitCode := 0
+ if c.waitError != nil {
+ if exitStatus, ok := ExitStatus(c.waitError); ok {
+ exitCode = exitStatus
+ }
+ }
+
+ ctx := c.context
cmd := c.cmd
systemTime := cmd.ProcessState.SystemTime()
@@ -418,8 +330,11 @@ func (c *Command) logProcessComplete(ctx context.Context, exitCode int) {
}
entry.Debug("spawn complete")
+ if c.stderrBuffer != nil && c.stderrBuffer.Len() > 0 {
+ entry.Error(c.stderrBuffer.String())
+ }
- if stats := StatsFromContext(ctx); stats != nil && featureflag.IsEnabled(ctx, featureflag.LogCommandStats) {
+ if stats := StatsFromContext(ctx); stats != nil {
stats.RecordSum("command.count", 1)
stats.RecordSum("command.system_time_ms", int(systemTime.Seconds()*1000))
stats.RecordSum("command.user_time_ms", int(userTime.Seconds()*1000))
@@ -482,15 +397,3 @@ func (c *Command) Env() []string {
func (c *Command) Pid() int {
return c.cmd.Process.Pid
}
-
-// suppressedContext suppresses cancellation or expiration of the context.
-type suppressedContext struct{ context.Context }
-
-func (suppressedContext) Deadline() (deadline time.Time, ok bool) { return time.Time{}, false }
-
-func (suppressedContext) Done() <-chan struct{} { return nil }
-
-func (suppressedContext) Err() error { return nil }
-
-// SuppressCancellation returns a context that suppresses cancellation or expiration of the parent context.
-func SuppressCancellation(ctx context.Context) context.Context { return suppressedContext{ctx} }
diff --git a/internal/command/command_test.go b/internal/command/command_test.go
index 74c250a6b..4c2bcaec4 100644
--- a/internal/command/command_test.go
+++ b/internal/command/command_test.go
@@ -1,7 +1,6 @@
package command
import (
- "bufio"
"bytes"
"context"
"fmt"
@@ -17,26 +16,11 @@ import (
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+ "go.uber.org/goleak"
)
-func TestNewCommandTZEnv(t *testing.T) {
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- oldTZ := os.Getenv("TZ")
- defer func() {
- require.NoError(t, os.Setenv("TZ", oldTZ))
- }()
-
- require.NoError(t, os.Setenv("TZ", "foobar"))
-
- buff := &bytes.Buffer{}
- cmd, err := New(ctx, exec.Command("env"), nil, buff, nil)
-
- require.NoError(t, err)
- require.NoError(t, cmd.Wait())
-
- require.Contains(t, strings.Split(buff.String(), "\n"), "TZ=foobar")
+func TestMain(m *testing.M) {
+ goleak.VerifyTestMain(m)
}
func TestNewCommandExtraEnv(t *testing.T) {
@@ -53,7 +37,7 @@ func TestNewCommandExtraEnv(t *testing.T) {
require.Contains(t, strings.Split(buff.String(), "\n"), extraVar)
}
-func TestNewCommandProxyEnv(t *testing.T) {
+func TestNewCommandExportedEnv(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -62,6 +46,42 @@ func TestNewCommandProxyEnv(t *testing.T) {
value string
}{
{
+ key: "HOME",
+ value: "/home/git",
+ },
+ {
+ key: "PATH",
+ value: "/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin",
+ },
+ {
+ key: "LD_LIBRARY_PATH",
+ value: "/path/to/your/lib",
+ },
+ {
+ key: "TZ",
+ value: "foobar",
+ },
+ {
+ key: "GIT_TRACE",
+ value: "true",
+ },
+ {
+ key: "GIT_TRACE_PACK_ACCESS",
+ value: "true",
+ },
+ {
+ key: "GIT_TRACE_PACKET",
+ value: "true",
+ },
+ {
+ key: "GIT_TRACE_PERFORMANCE",
+ value: "true",
+ },
+ {
+ key: "GIT_TRACE_SETUP",
+ value: "true",
+ },
+ {
key: "all_proxy",
value: "http://localhost:4000",
},
@@ -93,18 +113,53 @@ func TestNewCommandProxyEnv(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.key, func(t *testing.T) {
- extraVar := fmt.Sprintf("%s=%s", tc.key, tc.value)
- buff := &bytes.Buffer{}
- cmd, err := New(ctx, exec.Command("/usr/bin/env"), nil, buff, nil, extraVar)
+ oldValue, exists := os.LookupEnv(tc.key)
+ defer func() {
+ if !exists {
+ require.NoError(t, os.Unsetenv(tc.key))
+ return
+ }
+ require.NoError(t, os.Setenv(tc.key, oldValue))
+ }()
+ require.NoError(t, os.Setenv(tc.key, tc.value))
+ buff := &bytes.Buffer{}
+ cmd, err := New(ctx, exec.Command("/usr/bin/env"), nil, buff, nil)
require.NoError(t, err)
require.NoError(t, cmd.Wait())
- require.Contains(t, strings.Split(buff.String(), "\n"), extraVar)
+ expectedEnv := fmt.Sprintf("%s=%s", tc.key, tc.value)
+ require.Contains(t, strings.Split(buff.String(), "\n"), expectedEnv)
})
}
}
+func TestNewCommandUnexportedEnv(t *testing.T) {
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ unexportedEnvKey, unexportedEnvVal := "GITALY_UNEXPORTED_ENV", "foobar"
+
+ oldValue, exists := os.LookupEnv(unexportedEnvKey)
+ defer func() {
+ if !exists {
+ require.NoError(t, os.Unsetenv(unexportedEnvKey))
+ return
+ }
+ require.NoError(t, os.Setenv(unexportedEnvKey, oldValue))
+ }()
+
+ require.NoError(t, os.Setenv(unexportedEnvKey, unexportedEnvVal))
+
+ buff := &bytes.Buffer{}
+ cmd, err := New(ctx, exec.Command("/usr/bin/env"), nil, buff, nil)
+
+ require.NoError(t, err)
+ require.NoError(t, cmd.Wait())
+
+ require.NotContains(t, strings.Split(buff.String(), "\n"), fmt.Sprintf("%s=%s", unexportedEnvKey, unexportedEnvVal))
+}
+
func TestRejectEmptyContextDone(t *testing.T) {
defer func() {
p := recover()
@@ -220,146 +275,114 @@ func TestNewCommandNullInArg(t *testing.T) {
require.EqualError(t, err, `detected null byte in command argument "hello\x00world"`)
}
-func TestCommandStdErr(t *testing.T) {
+func TestNewNonExistent(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- var stdout bytes.Buffer
+ cmd, err := New(ctx, exec.Command("command-non-existent"), nil, nil, nil)
+ require.Nil(t, cmd)
+ require.Error(t, err)
+}
- expectedMessage := `hello world\\nhello world\\nhello world\\nhello world\\nhello world\\n`
+func TestCommandStdErr(t *testing.T) {
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
- r, w := io.Pipe()
- defer r.Close()
- defer w.Close()
+ var stdout, stderr bytes.Buffer
+ expectedMessage := `hello world\nhello world\nhello world\nhello world\nhello world\n`
logger := logrus.New()
- logger.SetOutput(w)
+ logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_script.sh"), nil, &stdout, nil)
require.NoError(t, err)
-
require.Error(t, cmd.Wait())
- assert.Empty(t, stdout.Bytes())
- b := bufio.NewReader(r)
- line, err := b.ReadString('\n')
- require.NoError(t, err)
- require.Equal(t, expectedMessage, extractMessage(line))
+ assert.Empty(t, stdout.Bytes())
+ require.Equal(t, expectedMessage, extractMessage(stderr.String()))
}
func TestCommandStdErrLargeOutput(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- var stdout bytes.Buffer
- r, w := io.Pipe()
- defer r.Close()
- defer w.Close()
+ var stdout, stderr bytes.Buffer
logger := logrus.New()
- logger.SetOutput(w)
+ logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_many_lines.sh"), nil, &stdout, nil)
require.NoError(t, err)
-
require.Error(t, cmd.Wait())
- assert.Empty(t, stdout.Bytes())
-
- b := bufio.NewReader(r)
- line, err := b.ReadString('\n')
- require.NoError(t, err)
- // the logrus printer prints with %q, so with an escaped newline it will add an extra \ escape to the
- // output. So for the test we can take out the extra \ since it was logrus that added it, not the command
- // https://github.com/sirupsen/logrus/blob/master/text_formatter.go#L324
- msg := strings.Replace(extractMessage(line), `\\n`, `\n`, -1)
- require.LessOrEqual(t, len(msg), MaxStderrBytes)
+ assert.Empty(t, stdout.Bytes())
+ msg := strings.ReplaceAll(extractMessage(stderr.String()), "\\n", "\n")
+ require.LessOrEqual(t, len(msg), maxStderrBytes)
}
func TestCommandStdErrBinaryNullBytes(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- var stdout bytes.Buffer
-
- r, w := io.Pipe()
- defer r.Close()
- defer w.Close()
+ var stdout, stderr bytes.Buffer
logger := logrus.New()
- logger.SetOutput(w)
+ logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_binary_null.sh"), nil, &stdout, nil)
require.NoError(t, err)
-
require.Error(t, cmd.Wait())
- assert.Empty(t, stdout.Bytes())
- b := bufio.NewReader(r)
- line, err := b.ReadString('\n')
- require.NoError(t, err)
- require.NotEmpty(t, extractMessage(line))
+ assert.Empty(t, stdout.Bytes())
+ msg := strings.SplitN(extractMessage(stderr.String()), "\\n", 2)[0]
+ require.Equal(t, strings.Repeat("\\x00", maxStderrLineLength), msg)
}
func TestCommandStdErrLongLine(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- var stdout bytes.Buffer
- r, w := io.Pipe()
- defer r.Close()
- defer w.Close()
+ var stdout, stderr bytes.Buffer
logger := logrus.New()
- logger.SetOutput(w)
+ logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_repeat_a.sh"), nil, &stdout, nil)
require.NoError(t, err)
-
require.Error(t, cmd.Wait())
- assert.Empty(t, stdout.Bytes())
- b := bufio.NewReader(r)
- line, err := b.ReadString('\n')
- require.NoError(t, err)
- require.Contains(t, line, fmt.Sprintf(`%s\\n%s`, strings.Repeat("a", StderrBufferSize), strings.Repeat("b", StderrBufferSize)))
+ assert.Empty(t, stdout.Bytes())
+ require.Contains(t, stderr.String(), fmt.Sprintf("%s\\n%s", strings.Repeat("a", maxStderrLineLength), strings.Repeat("b", maxStderrLineLength)))
}
func TestCommandStdErrMaxBytes(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- var stdout bytes.Buffer
- r, w := io.Pipe()
- defer r.Close()
- defer w.Close()
+ var stdout, stderr bytes.Buffer
logger := logrus.New()
- logger.SetOutput(w)
+ logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_max_bytes_edge_case.sh"), nil, &stdout, nil)
require.NoError(t, err)
-
require.Error(t, cmd.Wait())
- assert.Empty(t, stdout.Bytes())
- b := bufio.NewReader(r)
- line, err := b.ReadString('\n')
- require.NoError(t, err)
- require.NotEmpty(t, extractMessage(line))
+ assert.Empty(t, stdout.Bytes())
+ require.Equal(t, maxStderrBytes, len(strings.ReplaceAll(extractMessage(stderr.String()), "\\n", "\n")))
}
-var logMsgRegex = regexp.MustCompile(`msg="(.+)"`)
+var logMsgRegex = regexp.MustCompile(`msg="(.+?)"`)
func extractMessage(logMessage string) string {
subMatches := logMsgRegex.FindStringSubmatch(logMessage)
@@ -369,87 +392,3 @@ func extractMessage(logMessage string) string {
return subMatches[1]
}
-
-func TestUncancellableContext(t *testing.T) {
- t.Run("cancellation", func(t *testing.T) {
- parent, cancel := context.WithCancel(context.Background())
- ctx := SuppressCancellation(parent)
-
- cancel()
- require.Equal(t, context.Canceled, parent.Err(), "sanity check: context should be cancelled")
-
- require.Nil(t, ctx.Err(), "cancellation of the parent shouldn't propagate via Err")
- select {
- case <-ctx.Done():
- require.FailNow(t, "cancellation of the parent shouldn't propagate via Done")
- default:
- }
- })
-
- t.Run("timeout", func(t *testing.T) {
- parent, cancel := context.WithTimeout(context.Background(), time.Nanosecond)
- defer cancel()
-
- ctx := SuppressCancellation(parent)
-
- time.Sleep(time.Millisecond)
- require.Equal(t, context.DeadlineExceeded, parent.Err(), "sanity check: context should be expired after awaiting")
-
- require.Nil(t, ctx.Err(), "timeout on the parent shouldn't propagate via Err")
- select {
- case <-ctx.Done():
- require.FailNow(t, "timeout on the parent shouldn't propagate via Done")
- default:
- }
- _, ok := ctx.Deadline()
- require.False(t, ok, "no deadline should be set")
- })
-
- t.Run("re-cancellation", func(t *testing.T) {
- parent, cancelParent := context.WithCancel(context.Background())
- ctx := SuppressCancellation(parent)
- child, cancelChild := context.WithCancel(ctx)
- defer cancelChild()
-
- cancelParent()
- select {
- case <-child.Done():
- require.FailNow(t, "uncancellable context should suppress cancellation on the parent")
- default:
- // all good
- }
-
- cancelChild()
- require.Equal(t, context.Canceled, child.Err(), "context derived from cancellable could be cancelled")
-
- select {
- case <-child.Done():
- // all good
- default:
- require.FailNow(t, "child context should be canceled despite if parent is uncancellable")
- }
- })
-
- t.Run("context values are preserved", func(t *testing.T) {
- type ctxKey string
- k1 := ctxKey("1")
- k2 := ctxKey("2")
-
- parent, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- parent = context.WithValue(parent, k1, 1)
- parent = context.WithValue(parent, k2, "two")
-
- ctx := SuppressCancellation(parent)
-
- require.Equal(t, 1, ctx.Value(k1))
- require.Equal(t, "two", ctx.Value(k2))
-
- cancel()
- require.Equal(t, context.Canceled, parent.Err(), "sanity check: context should be cancelled")
-
- require.Equal(t, 1, ctx.Value(k1), "should be accessible after parent context cancellation")
- require.Equal(t, "two", ctx.Value(k2))
- })
-}
diff --git a/internal/command/stderrbuffer.go b/internal/command/stderrbuffer.go
new file mode 100644
index 000000000..1673b5be4
--- /dev/null
+++ b/internal/command/stderrbuffer.go
@@ -0,0 +1,101 @@
+package command
+
+import (
+ "bytes"
+ "fmt"
+)
+
+const delimiter = '\n'
+
+// stderrBuffer implements io.Writer and buffers output subject to a total
+// buffer size limit and a per-line length limit.
+// Bytes are truncated once `bufLimit` or `lineLimit` is exceeded. Write
+// always reports the full length of the input and returns a nil error.
+type stderrBuffer struct {
+ buf []byte
+ bufLimit int
+ lineLimit int
+ lineSep []byte
+
+ currentLineLength int
+}
+
+func newStderrBuffer(bufLimit, lineLimit int, lineSep []byte) (*stderrBuffer, error) {
+ if bufLimit < 0 || lineLimit < 0 {
+ return nil, fmt.Errorf("invalid limit")
+ }
+ res := &stderrBuffer{
+ bufLimit: bufLimit,
+ lineLimit: lineLimit,
+ lineSep: lineSep,
+ }
+ if len(res.lineSep) == 0 {
+ // use default '\n' for linesep if not specified
+ res.lineSep = []byte{'\n'}
+ }
+ res.buf = make([]byte, 0, res.lineLimit)
+ return res, nil
+}
+
+func (b *stderrBuffer) Write(p []byte) (int, error) {
+ if b.bufLimit <= 0 || b.lineLimit <= 0 {
+ return len(p), nil
+ }
+ // The loop below scans `p` for newlines while respecting lineLimit and bufLimit.
+ // During an iteration:
+ // 1. if a newline is found, buffer the found line (or the last part of a line)
+ //    and move the cursor past it for the next search
+ // 2. if no newline is found, buffer the rest of `p` and move the cursor to the end
+ s := 0 // search start index
+ for s < len(p) && len(b.buf) < b.bufLimit {
+ var part []byte
+ var foundNewLine bool
+ if i := bytes.IndexByte(p[s:], delimiter); i >= 0 {
+ i += s
+ part = p[s:i] // final '\n' not included
+ s = i + 1
+ foundNewLine = true
+ } else {
+ // no newline found, so buffer the rest of `p`
+ part = p[s:]
+ s = len(p)
+ }
+
+ // respect both the per-line length limit and the total buffer limit
+ part = part[:min(len(part), b.lineLimit-b.currentLineLength, b.bufLimit-len(b.buf))]
+ b.buf = append(b.buf, part...)
+
+ if foundNewLine {
+ // a newline was found, so append the line separator for the current line
+ // and reset currentLineLength
+ b.currentLineLength = 0
+ if len(b.buf)+len(b.lineSep) <= b.bufLimit {
+ b.buf = append(b.buf, b.lineSep...)
+ } else {
+ // no space left for the separator
+ break
+ }
+ } else {
+ b.currentLineLength += len(part)
+ }
+ }
+ return len(p), nil
+}
+
+func (b *stderrBuffer) Len() int {
+ return len(b.buf)
+}
+
+func (b *stderrBuffer) String() string {
+ return string(b.buf)
+}
+
+func min(first int, candidates ...int) int {
+ res := first
+ for _, val := range candidates {
+ if val < res {
+ res = val
+ }
+ }
+ return res
+}
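A short usage sketch may help show how the two limits of the new buffer interact. Because newStderrBuffer is unexported, this example assumes it sits next to the buffer in package command; the limits of 30 bytes total and 10 bytes per line are chosen only to make the truncation visible:

package command

import "fmt"

// Example_stderrBuffer is an illustrative sketch, not part of this change.
func Example_stderrBuffer() {
	buf, err := newStderrBuffer(30, 10, nil) // nil falls back to "\n" as the separator
	if err != nil {
		panic(err)
	}
	// Writes always report the full input length; bytes beyond the line or
	// buffer limits are silently dropped.
	_, _ = buf.Write([]byte("a long line that will be truncated\n"))
	_, _ = buf.Write([]byte("short\n"))
	fmt.Printf("%q\n", buf.String())
	// Output: "a long lin\nshort\n"
}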
diff --git a/internal/command/stderrbuffer_test.go b/internal/command/stderrbuffer_test.go
new file mode 100644
index 000000000..49da19cc3
--- /dev/null
+++ b/internal/command/stderrbuffer_test.go
@@ -0,0 +1,126 @@
+package command
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestStderrBufferSingleWrite(t *testing.T) {
+ tests := []struct {
+ name string
+ input string
+ expectedOutput string
+ }{
+ {
+ name: "case empty input",
+ input: "",
+ expectedOutput: "",
+ },
+ {
+ name: "case single short line with delimiter",
+ input: "12345\n",
+ expectedOutput: "12345\n",
+ },
+ {
+ name: "case single short line without delimiter",
+ input: "12345",
+ expectedOutput: "12345",
+ },
+ {
+ name: "case single long line with delimiter",
+ input: "12345678901234567890\n",
+ expectedOutput: "1234567890\n",
+ },
+ {
+ name: "case single long line without delimiter",
+ input: "12345678901234567890",
+ expectedOutput: "1234567890",
+ },
+ {
+ name: "case multi lines not exceeding line limit",
+ input: "123\n1234\n12345",
+ expectedOutput: "123\n1234\n12345",
+ },
+ {
+ name: "case multi lines exceeding line limit",
+ input: "123\n12345678901234567890\n12345",
+ expectedOutput: "123\n1234567890\n12345",
+ },
+ {
+ name: "case multi lines exceeding buf limit",
+ input: "1234567890\n1234567890\n1234567890\n1234567890",
+ expectedOutput: "1234567890\n1234567890\n12345678",
+ },
+ {
+ name: "case multi lines exceeding line limit and buf limit",
+ input: "12345678901234567890\n12345678901234567890\n12345678901234567890\nn12345678901234567890",
+ expectedOutput: "1234567890\n1234567890\n12345678",
+ },
+ {
+ name: "case multi lines with blank lines",
+ input: "1234567890\n\n\n\n\n1234567890\n1234567890",
+ expectedOutput: "1234567890\n\n\n\n\n1234567890\n1234",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ buf, err := newStderrBuffer(30, 10, nil)
+ require.NoError(t, err)
+
+ n, err := buf.Write([]byte(tt.input))
+ require.NoError(t, err)
+ require.Equal(t, len(tt.input), n)
+ require.Equal(t, tt.expectedOutput, buf.String())
+ })
+ }
+}
+
+func TestStderrBufferMultiWrite(t *testing.T) {
+ tests := []struct {
+ name string
+ input []string
+ expectedOutput string
+ }{
+ {
+ name: "case write not exceeding limit",
+ input: []string{"12345\n123", "45\n12345\n"},
+ expectedOutput: "12345\n12345\n12345\n",
+ },
+ {
+ name: "case write exceeding line limit between two writes",
+ input: []string{"12345\n12345", "678901234567890\n12345\n"},
+ expectedOutput: "12345\n1234567890\n12345\n",
+ },
+ {
+ name: "case write exceeding limit for second write",
+ input: []string{"1234567890\n1234567890\n12345", "67890\n1234567890\n"},
+ expectedOutput: "1234567890\n1234567890\n12345678",
+ },
+ {
+ name: "case write exceeding limit for first write",
+ input: []string{"1234567890\n1234567890\n1234567890\n1234567890\n", "1234567890\n1234567890\n"},
+ expectedOutput: "1234567890\n1234567890\n12345678",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ buf, err := newStderrBuffer(30, 10, nil)
+ require.NoError(t, err)
+
+ for _, chunk := range tt.input {
+ n, err := buf.Write([]byte(chunk))
+ require.Equal(t, len(chunk), n)
+ require.NoError(t, err)
+ }
+ require.Equal(t, tt.expectedOutput, buf.String())
+ })
+ }
+}
+
+func TestStderrBufferWithInvalidLimit(t *testing.T) {
+ bufInvalidLimit, err := newStderrBuffer(100, -1, nil)
+ require.Nil(t, bufInvalidLimit)
+ require.Error(t, err)
+}
diff --git a/internal/command/testdata/stderr_binary_null.sh b/internal/command/testdata/stderr_binary_null.sh
index 8b7a7f6e7..e8e1d0baa 100755
--- a/internal/command/testdata/stderr_binary_null.sh
+++ b/internal/command/testdata/stderr_binary_null.sh
@@ -1,4 +1,4 @@
#!/bin/bash
-dd if=/dev/zero bs=1000 count=1000 >&2;
-exit 1; \ No newline at end of file
+dd if=/dev/zero bs=1000 count=1000 >&2
+exit 1
diff --git a/internal/command/testdata/stderr_many_lines.sh b/internal/command/testdata/stderr_many_lines.sh
index 7a346328f..e8b51b646 100755
--- a/internal/command/testdata/stderr_many_lines.sh
+++ b/internal/command/testdata/stderr_many_lines.sh
@@ -1,4 +1,9 @@
#!/bin/bash
-let x=0; while [ $x -lt 100010 ]; do let x=x+1; printf '%06d zzzzzzzzzz\n' $x >&2 ; done
-exit 1; \ No newline at end of file
+let x=0
+while [ $x -lt 100010 ]
+do
+ let x=x+1
+ printf '%06d zzzzzzzzzz\n' $x >&2
+done
+exit 1
diff --git a/internal/command/testdata/stderr_max_bytes_edge_case.sh b/internal/command/testdata/stderr_max_bytes_edge_case.sh
index a53c7ecaa..6e1454966 100755
--- a/internal/command/testdata/stderr_max_bytes_edge_case.sh
+++ b/internal/command/testdata/stderr_max_bytes_edge_case.sh
@@ -11,10 +11,10 @@
# The first 3 lines sum up to 9999 bytes written, since we write a 2-byte escaped `\n` for each \n we see.
# The 4th line can be any data.
-cat >&2 << EOF
-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
-a
-EOF
-exit 1; \ No newline at end of file
+printf 'a%.0s' {1..3333} >&2
+printf '\n' >&2
+printf 'a%.0s' {1..3331} >&2
+printf '\n' >&2
+printf 'a%.0s' {1..3331} >&2
+printf '\na\n' >&2
+exit 1
diff --git a/internal/command/testdata/stderr_repeat_a.sh b/internal/command/testdata/stderr_repeat_a.sh
index 0c967e730..8463929ae 100755
--- a/internal/command/testdata/stderr_repeat_a.sh
+++ b/internal/command/testdata/stderr_repeat_a.sh
@@ -3,4 +3,4 @@
printf 'a%.0s' {1..8192} >&2
printf '\n' >&2
printf 'b%.0s' {1..8192} >&2
-exit 1;
\ No newline at end of file
+exit 1
diff --git a/internal/command/testdata/stderr_script.sh b/internal/command/testdata/stderr_script.sh
index 807d9860e..57abd97d0 100755
--- a/internal/command/testdata/stderr_script.sh
+++ b/internal/command/testdata/stderr_script.sh
@@ -2,6 +2,6 @@
for i in {1..5}
do
- echo 'hello world' 1>&2
+ echo 'hello world' 1>&2
done
-exit 1
\ No newline at end of file
+exit 1
diff --git a/internal/git/catfile/batch.go b/internal/git/catfile/batch.go
index a59f74973..e5a651f56 100644
--- a/internal/git/catfile/batch.go
+++ b/internal/git/catfile/batch.go
@@ -7,38 +7,6 @@ import (
"github.com/opentracing/opentracing-go"
"github.com/prometheus/client_golang/prometheus"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/repository"
- "gitlab.com/gitlab-org/gitaly/internal/metadata"
-)
-
-var catfileCacheCounter = prometheus.NewCounterVec(
- prometheus.CounterOpts{
- Name: "gitaly_catfile_cache_total",
- Help: "Counter of catfile cache hit/miss",
- },
- []string{"type"},
-)
-
-var currentCatfileProcesses = prometheus.NewGauge(
- prometheus.GaugeOpts{
- Name: "gitaly_catfile_processes",
- Help: "Gauge of active catfile processes",
- },
-)
-
-var totalCatfileProcesses = prometheus.NewCounter(
- prometheus.CounterOpts{
- Name: "gitaly_catfile_processes_total",
- Help: "Counter of catfile processes",
- },
-)
-
-var catfileLookupCounter = prometheus.NewCounterVec(
- prometheus.CounterOpts{
- Name: "gitaly_catfile_lookups_total",
- Help: "Git catfile lookups by object type",
- },
- []string{"type"},
)
const (
@@ -46,13 +14,6 @@ const (
SessionIDField = "gitaly-session-id"
)
-func init() {
- prometheus.MustRegister(catfileCacheCounter)
- prometheus.MustRegister(currentCatfileProcesses)
- prometheus.MustRegister(totalCatfileProcesses)
- prometheus.MustRegister(catfileLookupCounter)
-}
-
// Batch abstracts 'git cat-file --batch' and 'git cat-file --batch-check'.
// It lets you retrieve object metadata and raw objects from a Git repo.
//
@@ -136,69 +97,11 @@ func (c *batch) isClosed() bool {
return c.closed
}
-// New returns a new Batch instance. It is important that ctx gets canceled
-// somewhere, because if it doesn't the cat-file processes spawned by
-// New() never terminate.
-func New(ctx context.Context, gitCmdFactory git.CommandFactory, repo repository.GitRepo) (Batch, error) {
- if ctx.Done() == nil {
- panic("empty ctx.Done() in catfile.Batch.New()")
- }
-
- sessionID := metadata.GetValue(ctx, SessionIDField)
- if sessionID == "" {
- c, err := newBatch(ctx, gitCmdFactory, repo)
- if err != nil {
- return nil, err
- }
- return newInstrumentedBatch(c), err
- }
-
- cacheKey := newCacheKey(sessionID, repo)
- requestDone := ctx.Done()
-
- if c, ok := cache.Checkout(cacheKey); ok {
- go returnToCacheWhenDone(requestDone, cache, cacheKey, c)
- return newInstrumentedBatch(c), nil
- }
-
- // if we are using caching, create a fresh context for the new batch
- // and initialize the new batch with a cache key and cancel function
- cacheCtx, cacheCancel := context.WithCancel(context.Background())
- c, err := newBatch(cacheCtx, gitCmdFactory, repo)
- if err != nil {
- cacheCancel()
- return nil, err
- }
-
- c.cancel = cacheCancel
- go returnToCacheWhenDone(requestDone, cache, cacheKey, c)
-
- return newInstrumentedBatch(c), nil
-}
-
-func returnToCacheWhenDone(done <-chan struct{}, bc *batchCache, cacheKey key, c *batch) {
- <-done
-
- if c == nil || c.isClosed() {
- return
- }
-
- if c.hasUnreadData() {
- catfileCacheCounter.WithLabelValues("dirty").Inc()
- c.Close()
- return
- }
-
- bc.Add(cacheKey, c)
-}
-
-var injectSpawnErrors = false
-
type simulatedBatchSpawnError struct{}
func (simulatedBatchSpawnError) Error() string { return "simulated spawn error" }
-func newBatch(ctx context.Context, gitCmdFactory git.CommandFactory, repo repository.GitRepo) (_ *batch, err error) {
+func (bc *BatchCache) newBatch(ctx context.Context, repo git.RepositoryExecutor) (_ *batch, err error) {
ctx, cancel := context.WithCancel(ctx)
defer func() {
if err != nil {
@@ -206,12 +109,12 @@ func newBatch(ctx context.Context, gitCmdFactory git.CommandFactory, repo reposi
}
}()
- batchProcess, err := newBatchProcess(ctx, gitCmdFactory, repo)
+ batchProcess, err := bc.newBatchProcess(ctx, repo)
if err != nil {
return nil, err
}
- batchCheckProcess, err := newBatchCheckProcess(ctx, gitCmdFactory, repo)
+ batchCheckProcess, err := bc.newBatchCheckProcess(ctx, repo)
if err != nil {
return nil, err
}
@@ -219,19 +122,20 @@ func newBatch(ctx context.Context, gitCmdFactory git.CommandFactory, repo reposi
return &batch{batchProcess: batchProcess, batchCheckProcess: batchCheckProcess}, nil
}
-func newInstrumentedBatch(c Batch) Batch {
- return &instrumentedBatch{c}
+func newInstrumentedBatch(c Batch, catfileLookupCounter *prometheus.CounterVec) Batch {
+ return &instrumentedBatch{c, catfileLookupCounter}
}
type instrumentedBatch struct {
Batch
+ catfileLookupCounter *prometheus.CounterVec
}
func (ib *instrumentedBatch) Info(ctx context.Context, revision git.Revision) (*ObjectInfo, error) {
span, ctx := opentracing.StartSpanFromContext(ctx, "Batch.Info", opentracing.Tag{"revision", revision})
defer span.Finish()
- catfileLookupCounter.WithLabelValues("info").Inc()
+ ib.catfileLookupCounter.WithLabelValues("info").Inc()
return ib.Batch.Info(ctx, revision)
}
@@ -240,7 +144,7 @@ func (ib *instrumentedBatch) Tree(ctx context.Context, revision git.Revision) (*
span, ctx := opentracing.StartSpanFromContext(ctx, "Batch.Tree", opentracing.Tag{"revision", revision})
defer span.Finish()
- catfileLookupCounter.WithLabelValues("tree").Inc()
+ ib.catfileLookupCounter.WithLabelValues("tree").Inc()
return ib.Batch.Tree(ctx, revision)
}
@@ -249,7 +153,7 @@ func (ib *instrumentedBatch) Commit(ctx context.Context, revision git.Revision)
span, ctx := opentracing.StartSpanFromContext(ctx, "Batch.Commit", opentracing.Tag{"revision", revision})
defer span.Finish()
- catfileLookupCounter.WithLabelValues("commit").Inc()
+ ib.catfileLookupCounter.WithLabelValues("commit").Inc()
return ib.Batch.Commit(ctx, revision)
}
@@ -258,7 +162,7 @@ func (ib *instrumentedBatch) Blob(ctx context.Context, revision git.Revision) (*
span, ctx := opentracing.StartSpanFromContext(ctx, "Batch.Blob", opentracing.Tag{"revision", revision})
defer span.Finish()
- catfileLookupCounter.WithLabelValues("blob").Inc()
+ ib.catfileLookupCounter.WithLabelValues("blob").Inc()
return ib.Batch.Blob(ctx, revision)
}
@@ -267,7 +171,7 @@ func (ib *instrumentedBatch) Tag(ctx context.Context, revision git.Revision) (*O
span, ctx := opentracing.StartSpanFromContext(ctx, "Batch.Tag", opentracing.Tag{"revision", revision})
defer span.Finish()
- catfileLookupCounter.WithLabelValues("tag").Inc()
+ ib.catfileLookupCounter.WithLabelValues("tag").Inc()
return ib.Batch.Tag(ctx, revision)
}
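
The hunk above removes the package-level Prometheus collectors and their init()-time registration; the counters now live on BatchCache and get handed to the types that need them, such as instrumentedBatch. A minimal, self-contained sketch of the same injection pattern, with purely illustrative names that are not part of this change:

package main

import "github.com/prometheus/client_golang/prometheus"

// lookupCache owns its metrics instead of relying on package-level globals
// registered via init(), mirroring what this commit does to BatchCache.
type lookupCache struct {
	lookups *prometheus.CounterVec
}

func newLookupCache() *lookupCache {
	return &lookupCache{
		lookups: prometheus.NewCounterVec(
			prometheus.CounterOpts{
				Name: "example_lookups_total",
				Help: "Lookups by object type",
			},
			[]string{"type"},
		),
	}
}

// Describe and Collect turn the type into a prometheus.Collector, so
// registration becomes an explicit caller decision instead of an import side effect.
func (c *lookupCache) Describe(descs chan<- *prometheus.Desc) {
	prometheus.DescribeByCollect(c, descs)
}

func (c *lookupCache) Collect(metrics chan<- prometheus.Metric) {
	c.lookups.Collect(metrics)
}

func main() {
	c := newLookupCache()
	prometheus.MustRegister(c)
	c.lookups.WithLabelValues("commit").Inc()
}
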
diff --git a/internal/git/catfile/batch_cache.go b/internal/git/catfile/batch_cache.go
index 4e92954d5..08af66b4d 100644
--- a/internal/git/catfile/batch_cache.go
+++ b/internal/git/catfile/batch_cache.go
@@ -1,19 +1,21 @@
package catfile
import (
+ "context"
"strings"
"sync"
"time"
"github.com/prometheus/client_golang/prometheus"
- "github.com/prometheus/client_golang/prometheus/promauto"
+ "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/metadata"
)
const (
- // DefaultBatchfileTTL is the default ttl for batch files to live in the cache
- DefaultBatchfileTTL = 10 * time.Second
+ // defaultBatchfileTTL is the default ttl for batch files to live in the cache
+ defaultBatchfileTTL = 10 * time.Second
defaultEvictionInterval = 1 * time.Second
@@ -21,20 +23,13 @@ const (
defaultMaxLen = 100
)
-var catfileCacheMembers = promauto.NewGauge(
- prometheus.GaugeOpts{
- Name: "gitaly_catfile_cache_members",
- Help: "Gauge of catfile cache members",
- },
-)
-
-var cache *batchCache
-
-func init() {
- config.RegisterHook(func(cfg *config.Cfg) error {
- cache = newCache(DefaultBatchfileTTL, cfg.Git.CatfileCacheSize)
- return nil
- })
+// Cache is a cache for git-cat-file(1) processes.
+type Cache interface {
+ // BatchProcess either creates a new git-cat-file(1) process or returns a cached one for
+ // the given repository.
+ BatchProcess(context.Context, git.RepositoryExecutor) (Batch, error)
+ // Evict evicts all cached processes from the cache.
+ Evict()
}
func newCacheKey(sessionID string, repo repository.GitRepo) key {
@@ -61,11 +56,11 @@ type entry struct {
expiry time.Time
}
-// batchCache entries always get added to the back of the list. If the
+// BatchCache entries always get added to the back of the list. If the
// list gets too long, we evict entries from the front of the list. When
// an entry gets added it gets an expiry time based on a fixed TTL. A
// monitor goroutine periodically evicts expired entries.
-type batchCache struct {
+type BatchCache struct {
entries []*entry
sync.Mutex
@@ -74,42 +69,153 @@ type batchCache struct {
// ttl is the fixed ttl for cache entries
ttl time.Duration
+
+ // injectSpawnErrors is used for testing purposes only. If set to true, then spawned batch
+ // processes will simulate spawn errors.
+ injectSpawnErrors bool
+
+ catfileCacheCounter *prometheus.CounterVec
+ currentCatfileProcesses prometheus.Gauge
+ totalCatfileProcesses prometheus.Counter
+ catfileLookupCounter *prometheus.CounterVec
+ catfileCacheMembers prometheus.Gauge
}
-func newCache(ttl time.Duration, maxLen int) *batchCache {
- return newCacheWithRefresh(ttl, maxLen, defaultEvictionInterval)
+// NewCache creates a new catfile process cache.
+func NewCache(cfg config.Cfg) *BatchCache {
+ return newCache(defaultBatchfileTTL, cfg.Git.CatfileCacheSize, defaultEvictionInterval)
}
-func newCacheWithRefresh(ttl time.Duration, maxLen int, refreshInterval time.Duration) *batchCache {
+func newCache(ttl time.Duration, maxLen int, refreshInterval time.Duration) *BatchCache {
if maxLen <= 0 {
maxLen = defaultMaxLen
}
- bc := &batchCache{
+ bc := &BatchCache{
maxLen: maxLen,
ttl: ttl,
+ catfileCacheCounter: prometheus.NewCounterVec(
+ prometheus.CounterOpts{
+ Name: "gitaly_catfile_cache_total",
+ Help: "Counter of catfile cache hit/miss",
+ },
+ []string{"type"},
+ ),
+ currentCatfileProcesses: prometheus.NewGauge(
+ prometheus.GaugeOpts{
+ Name: "gitaly_catfile_processes",
+ Help: "Gauge of active catfile processes",
+ },
+ ),
+ totalCatfileProcesses: prometheus.NewCounter(
+ prometheus.CounterOpts{
+ Name: "gitaly_catfile_processes_total",
+ Help: "Counter of catfile processes",
+ },
+ ),
+ catfileLookupCounter: prometheus.NewCounterVec(
+ prometheus.CounterOpts{
+ Name: "gitaly_catfile_lookups_total",
+ Help: "Git catfile lookups by object type",
+ },
+ []string{"type"},
+ ),
+ catfileCacheMembers: prometheus.NewGauge(
+ prometheus.GaugeOpts{
+ Name: "gitaly_catfile_cache_members",
+ Help: "Gauge of catfile cache members",
+ },
+ ),
}
go bc.monitor(refreshInterval)
return bc
}
-func (bc *batchCache) monitor(refreshInterval time.Duration) {
+// Describe describes all metrics exposed by BatchCache.
+func (bc *BatchCache) Describe(descs chan<- *prometheus.Desc) {
+ prometheus.DescribeByCollect(bc, descs)
+}
+
+// Collect collects all metrics exposed by BatchCache.
+func (bc *BatchCache) Collect(metrics chan<- prometheus.Metric) {
+ bc.catfileCacheCounter.Collect(metrics)
+ bc.currentCatfileProcesses.Collect(metrics)
+ bc.totalCatfileProcesses.Collect(metrics)
+ bc.catfileLookupCounter.Collect(metrics)
+ bc.catfileCacheMembers.Collect(metrics)
+}
+
+func (bc *BatchCache) monitor(refreshInterval time.Duration) {
ticker := time.NewTicker(refreshInterval)
for range ticker.C {
- bc.EnforceTTL(time.Now())
+ bc.enforceTTL(time.Now())
}
}
-// Add adds a key, value pair to bc. If there are too many keys in bc
-// already Add will evict old keys until the length is OK again.
-func (bc *batchCache) Add(k key, b *batch) {
+// BatchProcess creates a new Batch process for the given repository.
+func (bc *BatchCache) BatchProcess(ctx context.Context, repo git.RepositoryExecutor) (Batch, error) {
+ if ctx.Done() == nil {
+ panic("empty ctx.Done() in catfile.Batch.New()")
+ }
+
+ sessionID := metadata.GetValue(ctx, SessionIDField)
+ if sessionID == "" {
+ c, err := bc.newBatch(ctx, repo)
+ if err != nil {
+ return nil, err
+ }
+ return newInstrumentedBatch(c, bc.catfileLookupCounter), err
+ }
+
+ cacheKey := newCacheKey(sessionID, repo)
+ requestDone := ctx.Done()
+
+ if c, ok := bc.checkout(cacheKey); ok {
+ go bc.returnWhenDone(requestDone, cacheKey, c)
+ return newInstrumentedBatch(c, bc.catfileLookupCounter), nil
+ }
+
+ // if we are using caching, create a fresh context for the new batch
+	// and initialize the new batch with a cache key and cancel function
+ cacheCtx, cacheCancel := context.WithCancel(context.Background())
+ c, err := bc.newBatch(cacheCtx, repo)
+ if err != nil {
+ cacheCancel()
+ return nil, err
+ }
+
+ c.cancel = cacheCancel
+ go bc.returnWhenDone(requestDone, cacheKey, c)
+
+ return newInstrumentedBatch(c, bc.catfileLookupCounter), nil
+}
+
+func (bc *BatchCache) returnWhenDone(done <-chan struct{}, cacheKey key, c *batch) {
+ <-done
+
+ if c == nil || c.isClosed() {
+ return
+ }
+
+ if c.hasUnreadData() {
+ bc.catfileCacheCounter.WithLabelValues("dirty").Inc()
+ c.Close()
+ return
+ }
+
+ bc.add(cacheKey, c)
+}
+
+// add adds a key, value pair to bc. If there are too many keys in bc
+// already, add will evict old keys until the length is OK again.
+func (bc *BatchCache) add(k key, b *batch) {
bc.Lock()
defer bc.Unlock()
if i, ok := bc.lookup(k); ok {
- catfileCacheCounter.WithLabelValues("duplicate").Inc()
+ bc.catfileCacheCounter.WithLabelValues("duplicate").Inc()
bc.delete(i, true)
}
@@ -120,34 +226,34 @@ func (bc *batchCache) Add(k key, b *batch) {
bc.evictHead()
}
- catfileCacheMembers.Set(float64(bc.len()))
+ bc.catfileCacheMembers.Set(float64(bc.len()))
}
-func (bc *batchCache) head() *entry { return bc.entries[0] }
-func (bc *batchCache) evictHead() { bc.delete(0, true) }
-func (bc *batchCache) len() int { return len(bc.entries) }
+func (bc *BatchCache) head() *entry { return bc.entries[0] }
+func (bc *BatchCache) evictHead() { bc.delete(0, true) }
+func (bc *BatchCache) len() int { return len(bc.entries) }
-// Checkout removes a value from bc. After use the caller can re-add the value with bc.Add.
-func (bc *batchCache) Checkout(k key) (*batch, bool) {
+// checkout removes a value from bc. After use the caller can re-add the value with bc.add.
+func (bc *BatchCache) checkout(k key) (*batch, bool) {
bc.Lock()
defer bc.Unlock()
i, ok := bc.lookup(k)
if !ok {
- catfileCacheCounter.WithLabelValues("miss").Inc()
+ bc.catfileCacheCounter.WithLabelValues("miss").Inc()
return nil, false
}
- catfileCacheCounter.WithLabelValues("hit").Inc()
+ bc.catfileCacheCounter.WithLabelValues("hit").Inc()
ent := bc.entries[i]
bc.delete(i, false)
return ent.value, true
}
-// EnforceTTL evicts all entries older than now, assuming the entry
+// enforceTTL evicts all entries older than now, assuming the entry
// expiry times are increasing.
-func (bc *batchCache) EnforceTTL(now time.Time) {
+func (bc *BatchCache) enforceTTL(now time.Time) {
bc.Lock()
defer bc.Unlock()
@@ -156,7 +262,8 @@ func (bc *batchCache) EnforceTTL(now time.Time) {
}
}
-func (bc *batchCache) EvictAll() {
+// Evict evicts all cached processes from the cache.
+func (bc *BatchCache) Evict() {
bc.Lock()
defer bc.Unlock()
@@ -165,12 +272,7 @@ func (bc *batchCache) EvictAll() {
}
}
-// ExpireAll is used to expire all of the batches in the cache
-func ExpireAll() {
- cache.EvictAll()
-}
-
-func (bc *batchCache) lookup(k key) (int, bool) {
+func (bc *BatchCache) lookup(k key) (int, bool) {
for i, ent := range bc.entries {
if ent.key == k {
return i, true
@@ -180,7 +282,7 @@ func (bc *batchCache) lookup(k key) (int, bool) {
return -1, false
}
-func (bc *batchCache) delete(i int, wantClose bool) {
+func (bc *BatchCache) delete(i int, wantClose bool) {
ent := bc.entries[i]
if wantClose {
@@ -188,5 +290,5 @@ func (bc *batchCache) delete(i int, wantClose bool) {
}
bc.entries = append(bc.entries[:i], bc.entries[i+1:]...)
- catfileCacheMembers.Set(float64(bc.len()))
+ bc.catfileCacheMembers.Set(float64(bc.len()))
}
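
With the package-level cache and its config.RegisterHook gone, callers construct and wire the cache themselves. A hedged usage sketch as it would sit inside the Gitaly module; cfg and executor are assumed to be provided by the caller, and the revision and error handling are illustrative:

package example

import (
	"context"

	"github.com/prometheus/client_golang/prometheus"
	"gitlab.com/gitlab-org/gitaly/internal/git"
	"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
	"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
)

func useCatfileCache(cfg config.Cfg, executor git.RepositoryExecutor) error {
	cache := catfile.NewCache(cfg)
	// BatchCache implements prometheus.Collector via Describe/Collect.
	prometheus.MustRegister(cache)

	// BatchProcess panics when ctx.Done() is nil, so use a cancellable context.
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	batch, err := cache.BatchProcess(ctx, executor)
	if err != nil {
		return err
	}

	if _, err := batch.Info(ctx, "HEAD"); err != nil {
		return err
	}

	// Drop all cached cat-file processes, e.g. on shutdown.
	cache.Evict()
	return nil
}
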
diff --git a/internal/git/catfile/batch_cache_test.go b/internal/git/catfile/batch_cache_test.go
index 4d22a898c..c797e396a 100644
--- a/internal/git/catfile/batch_cache_test.go
+++ b/internal/git/catfile/batch_cache_test.go
@@ -10,25 +10,25 @@ import (
func TestCacheAdd(t *testing.T) {
const maxLen = 3
- bc := newCache(time.Hour, maxLen)
+ bc := newCache(time.Hour, maxLen, defaultEvictionInterval)
key0 := testKey(0)
value0 := testValue()
- bc.Add(key0, value0)
+ bc.add(key0, value0)
requireCacheValid(t, bc)
key1 := testKey(1)
- bc.Add(key1, testValue())
+ bc.add(key1, testValue())
requireCacheValid(t, bc)
key2 := testKey(2)
- bc.Add(key2, testValue())
+ bc.add(key2, testValue())
requireCacheValid(t, bc)
// Because maxLen is 3, and key0 is oldest, we expect that adding key3
// will kick out key0.
key3 := testKey(3)
- bc.Add(key3, testValue())
+ bc.add(key3, testValue())
requireCacheValid(t, bc)
require.Equal(t, maxLen, bc.len(), "length should be maxLen")
@@ -37,21 +37,21 @@ func TestCacheAdd(t *testing.T) {
}
func TestCacheAddTwice(t *testing.T) {
- bc := newCache(time.Hour, 10)
+ bc := newCache(time.Hour, 10, defaultEvictionInterval)
key0 := testKey(0)
value0 := testValue()
- bc.Add(key0, value0)
+ bc.add(key0, value0)
requireCacheValid(t, bc)
key1 := testKey(1)
- bc.Add(key1, testValue())
+ bc.add(key1, testValue())
requireCacheValid(t, bc)
require.Equal(t, key0, bc.head().key, "key0 should be oldest key")
value2 := testValue()
- bc.Add(key0, value2)
+ bc.add(key0, value2)
requireCacheValid(t, bc)
require.Equal(t, key1, bc.head().key, "key1 should be oldest key")
@@ -61,18 +61,18 @@ func TestCacheAddTwice(t *testing.T) {
}
func TestCacheCheckout(t *testing.T) {
- bc := newCache(time.Hour, 10)
+ bc := newCache(time.Hour, 10, defaultEvictionInterval)
key0 := testKey(0)
value0 := testValue()
- bc.Add(key0, value0)
+ bc.add(key0, value0)
- v, ok := bc.Checkout(key{sessionID: "foo"})
+ v, ok := bc.checkout(key{sessionID: "foo"})
requireCacheValid(t, bc)
require.Nil(t, v, "expect nil value when key not found")
require.False(t, ok, "ok flag")
- v, ok = bc.Checkout(key0)
+ v, ok = bc.checkout(key0)
requireCacheValid(t, bc)
require.Equal(t, value0, v)
@@ -80,42 +80,42 @@ func TestCacheCheckout(t *testing.T) {
require.False(t, v.isClosed(), "value should not be closed after checkout")
- v, ok = bc.Checkout(key0)
+ v, ok = bc.checkout(key0)
require.False(t, ok, "ok flag after second checkout")
require.Nil(t, v, "value from second checkout")
}
func TestCacheEnforceTTL(t *testing.T) {
ttl := time.Hour
- bc := newCache(ttl, 10)
+ bc := newCache(ttl, 10, defaultEvictionInterval)
sleep := func() { time.Sleep(2 * time.Millisecond) }
key0 := testKey(0)
value0 := testValue()
- bc.Add(key0, value0)
+ bc.add(key0, value0)
sleep()
key1 := testKey(1)
value1 := testValue()
- bc.Add(key1, value1)
+ bc.add(key1, value1)
sleep()
cutoff := time.Now().Add(ttl)
sleep()
key2 := testKey(2)
- bc.Add(key2, testValue())
+ bc.add(key2, testValue())
sleep()
key3 := testKey(3)
- bc.Add(key3, testValue())
+ bc.add(key3, testValue())
sleep()
requireCacheValid(t, bc)
// We expect this cutoff to cause eviction of key0 and key1 but no other keys.
- bc.EnforceTTL(cutoff)
+ bc.enforceTTL(cutoff)
requireCacheValid(t, bc)
@@ -125,7 +125,7 @@ func TestCacheEnforceTTL(t *testing.T) {
require.Equal(t, []key{key2, key3}, keys(bc), "remaining keys after EnforceTTL")
- bc.EnforceTTL(cutoff)
+ bc.enforceTTL(cutoff)
requireCacheValid(t, bc)
require.Equal(t, []key{key2, key3}, keys(bc), "remaining keys after second EnforceTTL")
@@ -134,11 +134,11 @@ func TestCacheEnforceTTL(t *testing.T) {
func TestAutoExpiry(t *testing.T) {
ttl := 5 * time.Millisecond
refresh := 1 * time.Millisecond
- bc := newCacheWithRefresh(ttl, 10, refresh)
+ bc := newCache(ttl, 10, refresh)
key0 := testKey(0)
value0 := testValue()
- bc.Add(key0, value0)
+ bc.add(key0, value0)
requireCacheValid(t, bc)
require.Contains(t, keys(bc), key0, "key should still be in map")
@@ -157,7 +157,7 @@ func TestAutoExpiry(t *testing.T) {
require.True(t, value0.isClosed(), "value should be closed after eviction")
}
-func requireCacheValid(t *testing.T, bc *batchCache) {
+func requireCacheValid(t *testing.T, bc *BatchCache) {
bc.Lock()
defer bc.Unlock()
@@ -171,7 +171,7 @@ func testValue() *batch { return &batch{} }
func testKey(i int) key { return key{sessionID: fmt.Sprintf("key-%d", i)} }
-func keys(bc *batchCache) []key {
+func keys(bc *BatchCache) []key {
bc.Lock()
defer bc.Unlock()
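
Because newCache now takes the eviction interval directly, expiry tests can use aggressive values without a separate newCacheWithRefresh constructor. A compressed sketch of the pattern the updated TestAutoExpiry relies on, with illustrative durations:

package catfile

import (
	"testing"
	"time"

	"github.com/stretchr/testify/require"
)

func TestEvictionSketch(t *testing.T) {
	// A 5ms TTL with a 1ms monitor interval: entries expire almost
	// immediately without the test calling enforceTTL itself.
	bc := newCache(5*time.Millisecond, 10, time.Millisecond)
	bc.add(testKey(0), testValue())

	require.Eventually(t, func() bool {
		bc.Lock()
		defer bc.Unlock()
		return len(bc.entries) == 0
	}, time.Second, time.Millisecond)
}
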
diff --git a/internal/git/catfile/batch_check_process.go b/internal/git/catfile/batch_check_process.go
index 2d1d7d637..a3158a75d 100644
--- a/internal/git/catfile/batch_check_process.go
+++ b/internal/git/catfile/batch_check_process.go
@@ -9,7 +9,6 @@ import (
"github.com/opentracing/opentracing-go"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/labkit/correlation"
)
@@ -20,18 +19,18 @@ type batchCheckProcess struct {
sync.Mutex
}
-func newBatchCheckProcess(ctx context.Context, gitCmdFactory git.CommandFactory, repo repository.GitRepo) (*batchCheckProcess, error) {
- bc := &batchCheckProcess{}
+func (bc *BatchCache) newBatchCheckProcess(ctx context.Context, repo git.RepositoryExecutor) (*batchCheckProcess, error) {
+ process := &batchCheckProcess{}
var stdinReader io.Reader
- stdinReader, bc.w = io.Pipe()
+ stdinReader, process.w = io.Pipe()
// batch processes are long-lived and reused across RPCs,
// so we de-correlate the process from the RPC
ctx = correlation.ContextWithCorrelation(ctx, "")
ctx = opentracing.ContextWithSpan(ctx, nil)
- batchCmd, err := gitCmdFactory.New(ctx, repo,
+ batchCmd, err := repo.Exec(ctx,
git.SubCmd{
Name: "cat-file",
Flags: []git.Option{
@@ -44,19 +43,19 @@ func newBatchCheckProcess(ctx context.Context, gitCmdFactory git.CommandFactory,
return nil, err
}
- bc.r = bufio.NewReader(batchCmd)
+ process.r = bufio.NewReader(batchCmd)
go func() {
<-ctx.Done()
// This is crucial to prevent leaking file descriptors.
- bc.w.Close()
+ process.w.Close()
}()
- if injectSpawnErrors {
+ if bc.injectSpawnErrors {
// Testing only: intentionally leak process
return nil, &simulatedBatchSpawnError{}
}
- return bc, nil
+ return process, nil
}
func (bc *batchCheckProcess) info(revision git.Revision) (*ObjectInfo, error) {
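
The renamed constructor keeps the existing trick for long-lived processes: before spawning, the correlation ID and tracing span of the originating RPC are stripped from the context, since a cached cat-file process outlives that request. Isolated into a sketch with an illustrative correlation ID:

package main

import (
	"context"
	"fmt"

	"github.com/opentracing/opentracing-go"
	"gitlab.com/gitlab-org/labkit/correlation"
)

// detachFromRequest mirrors the two calls in newBatchCheckProcess above.
func detachFromRequest(ctx context.Context) context.Context {
	ctx = correlation.ContextWithCorrelation(ctx, "")
	ctx = opentracing.ContextWithSpan(ctx, nil)
	return ctx
}

func main() {
	ctx := correlation.ContextWithCorrelation(context.Background(), "request-1234")
	fmt.Println(correlation.ExtractFromContext(ctx))                    // "request-1234"
	fmt.Println(correlation.ExtractFromContext(detachFromRequest(ctx))) // ""
}
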
diff --git a/internal/git/catfile/batch_process.go b/internal/git/catfile/batch_process.go
index f34292cb1..1f014a13e 100644
--- a/internal/git/catfile/batch_process.go
+++ b/internal/git/catfile/batch_process.go
@@ -10,7 +10,6 @@ import (
"github.com/opentracing/opentracing-go"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/labkit/correlation"
)
@@ -33,8 +32,8 @@ type batchProcess struct {
sync.Mutex
}
-func newBatchProcess(ctx context.Context, gitCmdFactory git.CommandFactory, repo repository.GitRepo) (*batchProcess, error) {
- totalCatfileProcesses.Inc()
+func (bc *BatchCache) newBatchProcess(ctx context.Context, repo git.RepositoryExecutor) (*batchProcess, error) {
+ bc.totalCatfileProcesses.Inc()
b := &batchProcess{}
var stdinReader io.Reader
@@ -45,7 +44,7 @@ func newBatchProcess(ctx context.Context, gitCmdFactory git.CommandFactory, repo
ctx = correlation.ContextWithCorrelation(ctx, "")
ctx = opentracing.ContextWithSpan(ctx, nil)
- batchCmd, err := gitCmdFactory.New(ctx, repo,
+ batchCmd, err := repo.Exec(ctx,
git.SubCmd{
Name: "cat-file",
Flags: []git.Option{
@@ -60,15 +59,15 @@ func newBatchProcess(ctx context.Context, gitCmdFactory git.CommandFactory, repo
b.r = bufio.NewReader(batchCmd)
- currentCatfileProcesses.Inc()
+ bc.currentCatfileProcesses.Inc()
go func() {
<-ctx.Done()
// This Close() is crucial to prevent leaking file descriptors.
b.w.Close()
- currentCatfileProcesses.Dec()
+ bc.currentCatfileProcesses.Dec()
}()
- if injectSpawnErrors {
+ if bc.injectSpawnErrors {
// Testing only: intentionally leak process
return nil, &simulatedBatchSpawnError{}
}
diff --git a/internal/git/catfile/batch_test.go b/internal/git/catfile/batch_test.go
index 570e878d5..d0a2766d8 100644
--- a/internal/git/catfile/batch_test.go
+++ b/internal/git/catfile/batch_test.go
@@ -14,6 +14,8 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
@@ -21,22 +23,43 @@ import (
"google.golang.org/grpc/metadata"
)
-func setupBatch(t *testing.T, ctx context.Context) Batch {
+type repoExecutor struct {
+ repository.GitRepo
+ gitCmdFactory git.CommandFactory
+}
+
+func (e *repoExecutor) Exec(ctx context.Context, cmd git.Cmd, opts ...git.CmdOpt) (*command.Command, error) {
+ return e.gitCmdFactory.New(ctx, e.GitRepo, cmd, opts...)
+}
+
+func (e *repoExecutor) ExecAndWait(ctx context.Context, cmd git.Cmd, opts ...git.CmdOpt) error {
+ command, err := e.Exec(ctx, cmd, opts...)
+ if err != nil {
+ return err
+ }
+ return command.Wait()
+}
+
+func setupBatch(t *testing.T, ctx context.Context) (config.Cfg, Batch, *gitalypb.Repository) {
t.Helper()
cfg, repo, _ := testcfg.BuildWithRepo(t)
+ repoExecutor := &repoExecutor{
+ GitRepo: repo, gitCmdFactory: git.NewExecCommandFactory(cfg),
+ }
- c, err := New(ctx, git.NewExecCommandFactory(cfg), repo)
+ cache := newCache(1*time.Hour, 1000, defaultEvictionInterval)
+ batch, err := cache.BatchProcess(ctx, repoExecutor)
require.NoError(t, err)
- return c
+ return cfg, batch, repo
}
func TestInfo(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- c := setupBatch(t, ctx)
+ _, c, _ := setupBatch(t, ctx)
testCases := []struct {
desc string
@@ -68,10 +91,9 @@ func TestBlob(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- c := setupBatch(t, ctx)
+ _, c, _ := setupBatch(t, ctx)
- gitignoreBytes, err := ioutil.ReadFile("testdata/blob-dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82")
- require.NoError(t, err)
+ gitignoreBytes := testhelper.MustReadFile(t, "testdata/blob-dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82")
testCases := []struct {
desc string
@@ -131,10 +153,9 @@ func TestCommit(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- c := setupBatch(t, ctx)
+ _, c, _ := setupBatch(t, ctx)
- commitBytes, err := ioutil.ReadFile("testdata/commit-e63f41fe459e62e1228fcef60d7189127aeba95a")
- require.NoError(t, err)
+ commitBytes := testhelper.MustReadFile(t, "testdata/commit-e63f41fe459e62e1228fcef60d7189127aeba95a")
testCases := []struct {
desc string
@@ -165,10 +186,9 @@ func TestTag(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- c := setupBatch(t, ctx)
+ _, c, _ := setupBatch(t, ctx)
- tagBytes, err := ioutil.ReadFile("testdata/tag-a509fa67c27202a2bc9dd5e014b4af7e6063ac76")
- require.NoError(t, err)
+ tagBytes := testhelper.MustReadFile(t, "testdata/tag-a509fa67c27202a2bc9dd5e014b4af7e6063ac76")
testCases := []struct {
desc string
@@ -228,10 +248,9 @@ func TestTree(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- c := setupBatch(t, ctx)
+ _, c, _ := setupBatch(t, ctx)
- treeBytes, err := ioutil.ReadFile("testdata/tree-7e2f26d033ee47cd0745649d1a28277c56197921")
- require.NoError(t, err)
+ treeBytes := testhelper.MustReadFile(t, "testdata/tree-7e2f26d033ee47cd0745649d1a28277c56197921")
testCases := []struct {
desc string
@@ -291,11 +310,10 @@ func TestRepeatedCalls(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- c := setupBatch(t, ctx)
+ _, c, _ := setupBatch(t, ctx)
treeOid := git.Revision("7e2f26d033ee47cd0745649d1a28277c56197921")
- treeBytes, err := ioutil.ReadFile("testdata/tree-7e2f26d033ee47cd0745649d1a28277c56197921")
- require.NoError(t, err)
+ treeBytes := testhelper.MustReadFile(t, "testdata/tree-7e2f26d033ee47cd0745649d1a28277c56197921")
tree1Obj, err := c.Tree(ctx, treeOid)
require.NoError(t, err)
@@ -330,14 +348,12 @@ func TestRepeatedCalls(t *testing.T) {
}
func TestSpawnFailure(t *testing.T) {
- defer func() { injectSpawnErrors = false }()
-
- // reset global cache
- defer func(old *batchCache) { cache = old }(cache)
+ cfg, testRepo, _ := testcfg.BuildWithRepo(t)
+ testRepoExecutor := &repoExecutor{
+ GitRepo: testRepo, gitCmdFactory: git.NewExecCommandFactory(cfg),
+ }
- // Use very high values to effectively disable auto-expiry
- cache = newCache(1*time.Hour, 1000)
- defer cache.EvictAll()
+ cache := newCache(1*time.Hour, 1000, defaultEvictionInterval)
require.True(
t,
@@ -349,12 +365,7 @@ func TestSpawnFailure(t *testing.T) {
ctx1, cancel1 := testhelper.Context()
defer cancel1()
- cfg, testRepo, _ := testcfg.BuildWithRepo(t)
-
- gitCmdFactory := git.NewExecCommandFactory(cfg)
-
- injectSpawnErrors = false
- _, err := catfileWithFreshSessionID(ctx1, gitCmdFactory, testRepo)
+ _, err := catfileWithFreshSessionID(ctx1, cache, testRepoExecutor)
require.NoError(t, err, "catfile spawn should succeed in normal circumstances")
require.Equal(t, 2, numGitChildren(t), "there should be 2 git child processes")
@@ -369,7 +380,7 @@ func TestSpawnFailure(t *testing.T) {
require.Equal(t, 2, numGitChildren(t), "there should still be 2 git child processes")
- cache.EvictAll()
+ cache.Evict()
require.Equal(t, 0, cacheSize(cache), "the cache should be empty now")
require.True(
@@ -381,8 +392,8 @@ func TestSpawnFailure(t *testing.T) {
ctx2, cancel2 := testhelper.Context()
defer cancel2()
- injectSpawnErrors = true
- _, err = catfileWithFreshSessionID(ctx2, gitCmdFactory, testRepo)
+ cache.injectSpawnErrors = true
+ _, err = catfileWithFreshSessionID(ctx2, cache, testRepoExecutor)
require.Error(t, err, "expect simulated error")
require.IsType(t, &simulatedBatchSpawnError{}, err)
@@ -393,7 +404,7 @@ func TestSpawnFailure(t *testing.T) {
)
}
-func catfileWithFreshSessionID(ctx context.Context, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository) (Batch, error) {
+func catfileWithFreshSessionID(ctx context.Context, cache Cache, repo git.RepositoryExecutor) (Batch, error) {
id, err := text.RandomHex(4)
if err != nil {
return nil, err
@@ -403,7 +414,7 @@ func catfileWithFreshSessionID(ctx context.Context, gitCmdFactory git.CommandFac
SessionIDField: id,
})
- return New(metadata.NewIncomingContext(ctx, md), gitCmdFactory, repo)
+ return cache.BatchProcess(metadata.NewIncomingContext(ctx, md), repo)
}
func waitTrue(callback func() bool) bool {
@@ -431,7 +442,7 @@ func numGitChildren(t *testing.T) int {
return bytes.Count(out, []byte("\n"))
}
-func cacheSize(bc *batchCache) int {
+func cacheSize(bc *BatchCache) int {
bc.Lock()
defer bc.Unlock()
return bc.len()
diff --git a/internal/git/log/commit.go b/internal/git/catfile/commit.go
index 49e28812f..5b53e5545 100644
--- a/internal/git/log/commit.go
+++ b/internal/git/catfile/commit.go
@@ -1,4 +1,4 @@
-package log
+package catfile
import (
"bufio"
@@ -12,15 +12,14 @@ import (
"github.com/golang/protobuf/ptypes/timestamp"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/git/trailerparser"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
-// GetCommitCatfile looks up a commit by revision using an existing catfile.Batch instance.
-func GetCommitCatfile(ctx context.Context, c catfile.Batch, revision git.Revision) (*gitalypb.GitCommit, error) {
+// GetCommit looks up a commit by revision using an existing Batch instance.
+func GetCommit(ctx context.Context, c Batch, revision git.Revision) (*gitalypb.GitCommit, error) {
obj, err := c.Commit(ctx, revision+"^{commit}")
if err != nil {
return nil, err
@@ -29,10 +28,10 @@ func GetCommitCatfile(ctx context.Context, c catfile.Batch, revision git.Revisio
return parseRawCommit(obj.Reader, &obj.ObjectInfo)
}
-// GetCommitCatfileWithTrailers looks up a commit by revision using an existing
-// catfile.Batch instance, and includes Git trailers in the returned commit.
-func GetCommitCatfileWithTrailers(ctx context.Context, gitCmdFactory git.CommandFactory, repo repository.GitRepo, c catfile.Batch, revision git.Revision) (*gitalypb.GitCommit, error) {
- commit, err := GetCommitCatfile(ctx, c, revision)
+// GetCommitWithTrailers looks up a commit by revision using an existing Batch instance, and
+// includes Git trailers in the returned commit.
+func GetCommitWithTrailers(ctx context.Context, gitCmdFactory git.CommandFactory, repo repository.GitRepo, c Batch, revision git.Revision) (*gitalypb.GitCommit, error) {
+ commit, err := GetCommit(ctx, c, revision)
if err != nil {
return nil, err
@@ -69,7 +68,7 @@ func GetCommitCatfileWithTrailers(ctx context.Context, gitCmdFactory git.Command
}
// GetCommitMessage looks up a commit message and returns it in its entirety.
-func GetCommitMessage(ctx context.Context, c catfile.Batch, repo repository.GitRepo, revision git.Revision) ([]byte, error) {
+func GetCommitMessage(ctx context.Context, c Batch, repo repository.GitRepo, revision git.Revision) ([]byte, error) {
obj, err := c.Commit(ctx, revision+"^{commit}")
if err != nil {
return nil, err
@@ -82,7 +81,7 @@ func GetCommitMessage(ctx context.Context, c catfile.Batch, repo repository.GitR
return body, nil
}
-func parseRawCommit(r io.Reader, info *catfile.ObjectInfo) (*gitalypb.GitCommit, error) {
+func parseRawCommit(r io.Reader, info *ObjectInfo) (*gitalypb.GitCommit, error) {
header, body, err := splitRawCommit(r)
if err != nil {
return nil, err
@@ -107,7 +106,7 @@ func splitRawCommit(r io.Reader) ([]byte, []byte, error) {
return header, body, nil
}
-func buildCommit(header, body []byte, info *catfile.ObjectInfo) (*gitalypb.GitCommit, error) {
+func buildCommit(header, body []byte, info *ObjectInfo) (*gitalypb.GitCommit, error) {
commit := &gitalypb.GitCommit{
Id: info.Oid.String(),
BodySize: int64(len(body)),
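
The move from internal/git/log into internal/git/catfile shortens the names: GetCommitCatfile becomes catfile.GetCommit on the same Batch interface, and GetCommitCatfileWithTrailers becomes GetCommitWithTrailers. A minimal caller, assuming the batch comes from BatchCache.BatchProcess:

package example

import (
	"context"
	"fmt"

	"gitlab.com/gitlab-org/gitaly/internal/git"
	"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
)

func lookupCommit(ctx context.Context, batch catfile.Batch, revision git.Revision) error {
	commit, err := catfile.GetCommit(ctx, batch, revision)
	if err != nil {
		return err
	}

	fmt.Printf("%s %s\n", commit.GetId(), commit.GetSubject())
	return nil
}
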
diff --git a/internal/git/log/commit_test.go b/internal/git/catfile/commit_test.go
index 918308dde..ffb3c5a9e 100644
--- a/internal/git/log/commit_test.go
+++ b/internal/git/catfile/commit_test.go
@@ -1,46 +1,19 @@
-package log
+package catfile
import (
"bytes"
- "context"
- "os"
"testing"
"github.com/golang/protobuf/ptypes/timestamp"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/metadata"
)
-func TestMain(m *testing.M) {
- os.Exit(testMain(m))
-}
-
-func testMain(m *testing.M) int {
- defer testhelper.MustHaveNoChildProcess()
- cleanup := testhelper.Configure()
- defer cleanup()
- return m.Run()
-}
-
-func setupBatch(t *testing.T, ctx context.Context) (config.Cfg, catfile.Batch, *gitalypb.Repository) {
- t.Helper()
-
- cfg, repo, _ := testcfg.BuildWithRepo(t)
-
- c, err := catfile.New(ctx, git.NewExecCommandFactory(cfg), repo)
- require.NoError(t, err)
-
- return cfg, c, repo
-}
-
func TestParseRawCommit(t *testing.T) {
- info := &catfile.ObjectInfo{
+ info := &ObjectInfo{
Oid: "a984dfa4dee018c6d5f5f57ffec0d0e22763df16",
Type: "commit",
}
@@ -132,7 +105,7 @@ func TestParseRawCommit(t *testing.T) {
}
}
-func TestGetCommitCatfile(t *testing.T) {
+func TestGetCommit(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
@@ -167,7 +140,7 @@ func TestGetCommitCatfile(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- c, err := GetCommitCatfile(ctx, c, git.Revision(tc.revision))
+ c, err := GetCommit(ctx, c, git.Revision(tc.revision))
if tc.errStr == "" {
require.NoError(t, err)
@@ -179,7 +152,7 @@ func TestGetCommitCatfile(t *testing.T) {
}
}
-func TestGetCommitCatfileWithTrailers(t *testing.T) {
+func TestGetCommitWithTrailers(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
@@ -187,7 +160,7 @@ func TestGetCommitCatfileWithTrailers(t *testing.T) {
ctx = metadata.NewIncomingContext(ctx, metadata.MD{})
- commit, err := GetCommitCatfileWithTrailers(ctx, git.NewExecCommandFactory(cfg), testRepo, c, "5937ac0a7beb003549fc5fd26fc247adbce4a52e")
+ commit, err := GetCommitWithTrailers(ctx, git.NewExecCommandFactory(cfg), testRepo, c, "5937ac0a7beb003549fc5fd26fc247adbce4a52e")
require.NoError(t, err)
diff --git a/internal/git/log/tag.go b/internal/git/catfile/tag.go
index 153eb09e7..a11a934dd 100644
--- a/internal/git/log/tag.go
+++ b/internal/git/catfile/tag.go
@@ -1,4 +1,4 @@
-package log
+package catfile
import (
"bufio"
@@ -10,7 +10,6 @@ import (
"strings"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -20,12 +19,12 @@ const (
MaxTagReferenceDepth = 10
)
-// GetTagCatfile looks up a commit by tagID using an existing catfile.Batch instance.
-// When 'trim' is 'true', the tag message will be trimmed to fit in a gRPC message.
-// When 'trimRightNewLine' is 'true', the tag message will be trimmed to remove all '\n' characters from right.
-// note: we pass in the tagName because the tag name from refs/tags may be different
-// than the name found in the actual tag object. We want to use the tagName found in refs/tags
-func GetTagCatfile(ctx context.Context, c catfile.Batch, tagID git.Revision, tagName string, trimLen, trimRightNewLine bool) (*gitalypb.Tag, error) {
+// GetTag looks up a commit by tagID using an existing catfile.Batch instance. When 'trim' is
+// 'true', the tag message will be trimmed to fit in a gRPC message. When 'trimRightNewLine' is
+// 'true', the tag message will be trimmed to remove all '\n' characters from the right. Note: we pass
+// in the tagName because the tag name from refs/tags may be different than the name found in the
+// actual tag object. We want to use the tagName found in refs/tags
+func GetTag(ctx context.Context, c Batch, tagID git.Revision, tagName string, trimLen, trimRightNewLine bool) (*gitalypb.Tag, error) {
tagObj, err := c.Tag(ctx, tagID)
if err != nil {
return nil, err
@@ -94,7 +93,7 @@ func splitRawTag(r io.Reader, trimRightNewLine bool) (*tagHeader, []byte, error)
return &header, body, nil
}
-func buildAnnotatedTag(ctx context.Context, b catfile.Batch, tagID, name string, header *tagHeader, body []byte, trimLen, trimRightNewLine bool) (*gitalypb.Tag, error) {
+func buildAnnotatedTag(ctx context.Context, b Batch, tagID, name string, header *tagHeader, body []byte, trimLen, trimRightNewLine bool) (*gitalypb.Tag, error) {
tag := &gitalypb.Tag{
Id: tagID,
Name: []byte(name),
@@ -109,7 +108,7 @@ func buildAnnotatedTag(ctx context.Context, b catfile.Batch, tagID, name string,
var err error
switch header.tagType {
case "commit":
- tag.TargetCommit, err = GetCommitCatfile(ctx, b, git.Revision(header.oid))
+ tag.TargetCommit, err = GetCommit(ctx, b, git.Revision(header.oid))
if err != nil {
return nil, fmt.Errorf("buildAnnotatedTag error when getting target commit: %v", err)
}
@@ -143,7 +142,7 @@ func buildAnnotatedTag(ctx context.Context, b catfile.Batch, tagID, name string,
// This matches the original behavior in the ruby implementation.
// We also protect against circular tag references. Even though this is not possible in git,
// we still want to protect against an infinite loop
-func dereferenceTag(ctx context.Context, b catfile.Batch, oid git.Revision) (*gitalypb.GitCommit, error) {
+func dereferenceTag(ctx context.Context, b Batch, oid git.Revision) (*gitalypb.GitCommit, error) {
for depth := 0; depth < MaxTagReferenceDepth; depth++ {
i, err := b.Info(ctx, oid)
if err != nil {
@@ -165,7 +164,7 @@ func dereferenceTag(ctx context.Context, b catfile.Batch, oid git.Revision) (*gi
oid = git.Revision(header.oid)
continue
case "commit":
- return GetCommitCatfile(ctx, b, oid)
+ return GetCommit(ctx, b, oid)
default: // This current tag points to a tree or a blob
return nil, nil
}
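
GetTagCatfile likewise becomes catfile.GetTag with an unchanged signature, and nested tags are still peeled for at most MaxTagReferenceDepth levels. A hedged usage sketch; the trim flags and the printed fields are illustrative:

package example

import (
	"context"
	"fmt"

	"gitlab.com/gitlab-org/gitaly/internal/git"
	"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
)

func lookupTag(ctx context.Context, batch catfile.Batch, tagID git.Revision, tagName string) error {
	// trimLen and trimRightNewLine are both enabled here; callers that need
	// the full message would pass false instead.
	tag, err := catfile.GetTag(ctx, batch, tagID, tagName, true, true)
	if err != nil {
		return err
	}

	// TargetCommit is only set when the tag (eventually) points at a commit.
	fmt.Printf("%s -> %s\n", tag.GetName(), tag.GetTargetCommit().GetId())
	return nil
}
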
diff --git a/internal/git/log/tag_test.go b/internal/git/catfile/tag_test.go
index 8065816a9..4a6b7d3cd 100644
--- a/internal/git/log/tag_test.go
+++ b/internal/git/catfile/tag_test.go
@@ -1,4 +1,4 @@
-package log
+package catfile
import (
"bytes"
@@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
)
@@ -50,9 +51,9 @@ func TestGetTag(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.tagName, func(t *testing.T) {
- tagID := testhelper.CreateTag(t, testRepoPath, testCase.tagName, testCase.rev, &testhelper.CreateTagOpts{Message: testCase.message})
+ tagID := gittest.CreateTag(t, cfg, testRepoPath, testCase.tagName, testCase.rev, &gittest.CreateTagOpts{Message: testCase.message})
- tag, err := GetTagCatfile(ctx, c, git.Revision(tagID), testCase.tagName, testCase.trim, true)
+ tag, err := GetTag(ctx, c, git.Revision(tagID), testCase.tagName, testCase.trim, true)
require.NoError(t, err)
if testCase.trim && len(testCase.message) >= helper.MaxCommitOrTagMessageSize {
testCase.message = testCase.message[:helper.MaxCommitOrTagMessageSize]
diff --git a/internal/git/command_description.go b/internal/git/command_description.go
index 5e26d5d7d..faed63717 100644
--- a/internal/git/command_description.go
+++ b/internal/git/command_description.go
@@ -2,6 +2,7 @@ package git
import (
"fmt"
+ "log"
"strings"
)
@@ -67,6 +68,13 @@ var commandDescriptions = map[string]commandDescription{
},
"fetch": {
flags: 0,
+
+ opts: []GlobalOption{
+ // When fetching objects from an untrusted source, we want to always assert
+ // that all objects are valid.
+ ConfigPair{Key: "fetch.fsckObjects", Value: "true"},
+ ConfigPair{Key: "fetch.fsck.badTimezone", Value: "ignore"},
+ },
},
"for-each-ref": {
flags: scNoRefUpdates | scNoEndOfOptions,
@@ -138,6 +146,10 @@ var commandDescriptions = map[string]commandDescription{
// command ends with a "#". The end result is that Git runs `/bin/sh -c 'exit 0 # /path/to/pool.git`.
ConfigPair{Key: "core.alternateRefsCommand", Value: "exit 0 #"},
+ // When receiving objects from an untrusted source, we want to always assert
+ // that all objects are valid.
+ ConfigPair{Key: "receive.fsckObjects", Value: "true"},
+
// In the past, there was a bug in git that caused users to
// create commits with invalid timezones. As a result, some
// histories contain commits that do not match the spec. As we
@@ -149,6 +161,16 @@ var commandDescriptions = map[string]commandDescription{
// Make git-receive-pack(1) advertise the push options
// capability to clients.
ConfigPair{Key: "receive.advertisePushOptions", Value: "true"},
+
+ // Hide several reference spaces from being displayed on pushes. This has
+ // two outcomes: first, we reduce the initial ref advertisement and should
+ // speed up pushes for repos which have loads of merge requests, pipelines
+ // and environments. Second, this also prohibits clients to update or delete
+			// and environments. Second, this also prohibits clients from updating or deleting
+ ConfigPair{Key: "receive.hideRefs", Value: "refs/environments/"},
+ ConfigPair{Key: "receive.hideRefs", Value: "refs/keep-around/"},
+ ConfigPair{Key: "receive.hideRefs", Value: "refs/merge-requests/"},
+ ConfigPair{Key: "receive.hideRefs", Value: "refs/pipelines/"},
},
},
"remote": {
@@ -221,6 +243,34 @@ var commandDescriptions = map[string]commandDescription{
},
}
+func init() {
+	// This is the poor man's static assert that all internal ref prefixes are properly hidden
+ // from git-receive-pack(1) such that they cannot be written to when the user pushes.
+ receivePackDesc, ok := commandDescriptions["receive-pack"]
+ if !ok {
+ log.Fatal("could not find command description of git-receive-pack(1)")
+ }
+
+ hiddenRefs := map[string]bool{}
+ for _, opt := range receivePackDesc.opts {
+ configPair, ok := opt.(ConfigPair)
+ if !ok {
+ continue
+ }
+ if configPair.Key != "receive.hideRefs" {
+ continue
+ }
+
+ hiddenRefs[configPair.Value] = true
+ }
+
+ for _, internalRef := range InternalRefPrefixes {
+ if !hiddenRefs[internalRef] {
+ log.Fatalf("command description of receive-pack is missing hidden ref %q", internalRef)
+ }
+ }
+}
+
// mayUpdateRef indicates if a command is known to update references.
// This is useful to determine if a command requires reference hook
// configuration. A non-exhaustive list of commands is consulted to determine if
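
Each new ConfigPair eventually expands into a -c flag on the spawned git command via its GlobalArgs() method, which is how fetch.fsckObjects, receive.fsckObjects and the receive.hideRefs entries take effect without touching any repository configuration. A small sketch from within the Gitaly module; that a pair renders as "-c key=value" is an assumption, only the GlobalArgs() call itself is visible in this diff:

package main

import (
	"fmt"

	"gitlab.com/gitlab-org/gitaly/internal/git"
)

func main() {
	opts := []git.GlobalOption{
		git.ConfigPair{Key: "receive.fsckObjects", Value: "true"},
		git.ConfigPair{Key: "receive.hideRefs", Value: "refs/merge-requests/"},
	}

	for _, opt := range opts {
		args, err := opt.GlobalArgs()
		if err != nil {
			panic(err)
		}
		fmt.Println(args) // expected to print something like [-c receive.fsckObjects=true]
	}
}
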
diff --git a/internal/git/command_factory.go b/internal/git/command_factory.go
index 1168cd4ad..3654fff7b 100644
--- a/internal/git/command_factory.go
+++ b/internal/git/command_factory.go
@@ -189,27 +189,28 @@ func (cf *ExecCommandFactory) combineArgs(ctx context.Context, gitConfig []confi
)
}
- // As global options may cancel out each other, we have a clearly
- // defined order in which globals get applied. The order is similar to
- // how git handles configuration options from most general to most
- // specific. This allows callsites to override options which would
- // otherwise be set up automatically.
+ // As global options may cancel out each other, we have a clearly defined order in which
+ // globals get applied. The order is similar to how git handles configuration options from
+ // most general to most specific. This allows callsites to override options which would
+ // otherwise be set up automatically. The exception to this is configuration specified by
+ // the admin, which always overrides all other items. The following order of precedence
+ // applies:
//
- // 1. Configuration as provided by the admin in Gitaly's config.toml.
- // 2. Globals which get set up by default for all git commands.
- // 3. Globals which get set up by default for a given git command.
- // 4. Globals passed via command options, e.g. as set up by
+ // 1. Globals which get set up by default for all git commands.
+ // 2. Globals which get set up by default for a given git command.
+ // 3. Globals passed via command options, e.g. as set up by
// `WithReftxHook()`.
+ // 4. Configuration as provided by the admin in Gitaly's config.toml.
var combinedGlobals []GlobalOption
+ combinedGlobals = append(combinedGlobals, globalOptions...)
+ combinedGlobals = append(combinedGlobals, commandSpecificOptions...)
+ combinedGlobals = append(combinedGlobals, cc.globals...)
for _, configPair := range gitConfig {
combinedGlobals = append(combinedGlobals, ConfigPair{
Key: configPair.Key,
Value: configPair.Value,
})
}
- combinedGlobals = append(combinedGlobals, globalOptions...)
- combinedGlobals = append(combinedGlobals, commandSpecificOptions...)
- combinedGlobals = append(combinedGlobals, cc.globals...)
for _, global := range combinedGlobals {
globalArgs, err := global.GlobalArgs()
diff --git a/internal/git/command_factory_test.go b/internal/git/command_factory_test.go
index dbcffbf5f..fe958cd71 100644
--- a/internal/git/command_factory_test.go
+++ b/internal/git/command_factory_test.go
@@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
@@ -63,9 +64,8 @@ func TestExecCommandFactory_NewWithDir(t *testing.T) {
t.Run("runs in dir", func(t *testing.T) {
repoPath := testhelper.TempDir(t)
- testhelper.MustRunCommand(t, nil, "git", "init", repoPath)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "--allow-empty",
- "-m", "initial commit")
+ gittest.Exec(t, cfg, "init", repoPath)
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "--allow-empty", "-m", "initial commit")
ctx, cancel := testhelper.Context()
defer cancel()
diff --git a/internal/git/command_options_test.go b/internal/git/command_options_test.go
index 9ccd1d964..5a5ac4545 100644
--- a/internal/git/command_options_test.go
+++ b/internal/git/command_options_test.go
@@ -268,6 +268,32 @@ func TestWithConfig(t *testing.T) {
}
}
+func TestExecCommandFactoryGitalyConfigOverrides(t *testing.T) {
+ var cfg config.Cfg
+ require.NoError(t, cfg.SetGitPath())
+
+ cfg.Git.Config = []config.GitConfig{
+ {Key: "foo.bar", Value: "from-gitaly-config"},
+ }
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ var stdout bytes.Buffer
+ cmd, err := NewExecCommandFactory(cfg).NewWithoutRepo(ctx,
+ SubCmd{
+ Name: "config",
+ Args: []string{"foo.bar"},
+ },
+ WithStdout(&stdout),
+ WithConfig(ConfigPair{Key: "foo.bar", Value: "from-config-option"}),
+ WithConfigEnv(ConfigPair{Key: "foo.bar", Value: "from-config-env"}),
+ )
+ require.NoError(t, err)
+ require.NoError(t, cmd.Wait())
+ require.Equal(t, "from-gitaly-config\n", stdout.String())
+}
+
func TestWithConfigEnv(t *testing.T) {
var cfg config.Cfg
require.NoError(t, cfg.SetGitPath())
diff --git a/internal/git/gittest/command.go b/internal/git/gittest/command.go
index 7a5d1bb54..e3d98bfcc 100644
--- a/internal/git/gittest/command.go
+++ b/internal/git/gittest/command.go
@@ -12,11 +12,15 @@ import (
// Exec runs a git command and returns the standard output, or fails.
func Exec(t testing.TB, cfg config.Cfg, args ...string) []byte {
+ t.Helper()
+
return run(t, nil, cfg, args...)
}
// ExecStream runs a git command with an input stream and returns the standard output, or fails.
func ExecStream(t testing.TB, cfg config.Cfg, stream io.Reader, args ...string) []byte {
+ t.Helper()
+
return run(t, stream, cfg, args...)
}
diff --git a/internal/git/gittest/commit.go b/internal/git/gittest/commit.go
index 0fb23da2e..8bd960931 100644
--- a/internal/git/gittest/commit.go
+++ b/internal/git/gittest/commit.go
@@ -6,45 +6,97 @@ import (
"os"
"os/exec"
"path/filepath"
- "strings"
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
-// CreateCommitOpts holds extra options for CreateCommit.
-type CreateCommitOpts struct {
- Message string
- ParentID string
-}
-
const (
committerName = "Scrooge McDuck"
committerEmail = "scrooge@mcduck.com"
)
-// CreateCommit makes a new empty commit and updates the named branch to point to it.
-func CreateCommit(t testing.TB, cfg config.Cfg, repoPath, branchName string, opts *CreateCommitOpts) string {
- message := "message"
- // The ID of an arbitrary commit known to exist in the test repository.
- parentID := "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"
+type writeCommitConfig struct {
+ branch string
+ parents []git.ObjectID
+ message string
+ treeEntries []TreeEntry
+}
- if opts != nil {
- if opts.Message != "" {
- message = opts.Message
- }
+// WriteCommitOption is an option which can be passed to WriteCommit.
+type WriteCommitOption func(*writeCommitConfig)
- if opts.ParentID != "" {
- parentID = opts.ParentID
+// WithBranch is an option for WriteCommit which will cause it to update the given branch
+// name to the new commit.
+func WithBranch(branch string) WriteCommitOption {
+ return func(cfg *writeCommitConfig) {
+ cfg.branch = branch
+ }
+}
+
+// WithMessage is an option for WriteCommit which will set the commit message.
+func WithMessage(message string) WriteCommitOption {
+ return func(cfg *writeCommitConfig) {
+ cfg.message = message
+ }
+}
+
+// WithParents is an option for WriteCommit which will set the parent OIDs of the resulting commit.
+func WithParents(parents ...git.ObjectID) WriteCommitOption {
+ return func(cfg *writeCommitConfig) {
+ if parents != nil {
+ cfg.parents = parents
+ } else {
+ // We're explicitly initializing parents here such that we can discern the
+ // case where the commit should be created with no parents.
+ cfg.parents = []git.ObjectID{}
}
}
+}
- // message can be very large, passing it directly in args would blow things up!
+// WithTreeEntries is an option for WriteCommit which will cause it to create a new tree and use it
+// as root tree of the resulting commit.
+func WithTreeEntries(entries ...TreeEntry) WriteCommitOption {
+ return func(cfg *writeCommitConfig) {
+ cfg.treeEntries = entries
+ }
+}
+
+// WriteCommit writes a new commit into the target repository.
+func WriteCommit(t testing.TB, cfg config.Cfg, repoPath string, opts ...WriteCommitOption) git.ObjectID {
+ t.Helper()
+
+ var writeCommitConfig writeCommitConfig
+ for _, opt := range opts {
+ opt(&writeCommitConfig)
+ }
+
+ message := "message"
+ if writeCommitConfig.message != "" {
+ message = writeCommitConfig.message
+ }
stdin := bytes.NewBufferString(message)
+ // The ID of an arbitrary commit known to exist in the test repository.
+ parents := []git.ObjectID{"1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"}
+ if writeCommitConfig.parents != nil {
+ parents = writeCommitConfig.parents
+ }
+
+ var tree string
+ if len(writeCommitConfig.treeEntries) > 0 {
+ tree = WriteTree(t, cfg, repoPath, writeCommitConfig.treeEntries).String()
+ } else if len(parents) == 0 {
+ // If there are no parents, then we set the root tree to the empty tree.
+ tree = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
+ } else {
+ tree = parents[0].String() + "^{tree}"
+ }
+
// Use 'commit-tree' instead of 'commit' because we are in a bare
// repository. What we do here is the same as "commit -m message
// --allow-empty".
@@ -52,13 +104,22 @@ func CreateCommit(t testing.TB, cfg config.Cfg, repoPath, branchName string, opt
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"-C", repoPath,
- "commit-tree", "-F", "-", "-p", parentID, parentID + "^{tree}",
+ "commit-tree", "-F", "-", tree,
+ }
+
+ for _, parent := range parents {
+ commitArgs = append(commitArgs, "-p", parent.String())
+ }
+
+ stdout := ExecStream(t, cfg, stdin, commitArgs...)
+ oid, err := git.NewObjectIDFromHex(text.ChompBytes(stdout))
+ require.NoError(t, err)
+
+ if writeCommitConfig.branch != "" {
+ Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/"+writeCommitConfig.branch, oid.String())
}
- newCommit := ExecStream(t, cfg, stdin, commitArgs...)
- newCommitID := text.ChompBytes(newCommit)
- Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/"+branchName, newCommitID)
- return newCommitID
+ return oid
}
// CreateCommitInAlternateObjectDirectory runs a command such that its created
@@ -90,78 +151,28 @@ func CreateCommitInAlternateObjectDirectory(t testing.TB, gitBin, repoPath, altO
return currentHead[:len(currentHead)-1]
}
-// CommitBlobWithName will create a commit for the specified blob with the
-// specified name. This enables testing situations where the filepath is not
-// possible due to filesystem constraints (e.g. non-UTF characters). The commit
-// ID is returned.
-func CommitBlobWithName(t testing.TB, cfg config.Cfg, testRepoPath, blobID, fileName, commitMessage string) string {
- mktreeIn := strings.NewReader(fmt.Sprintf("100644 blob %s\t%s", blobID, fileName))
- treeID := text.ChompBytes(ExecStream(t, cfg, mktreeIn, "-C", testRepoPath, "mktree"))
-
- return text.ChompBytes(
- Exec(t, cfg,
- "-c", fmt.Sprintf("user.name=%s", committerName),
- "-c", fmt.Sprintf("user.email=%s", committerEmail),
- "-C", testRepoPath, "commit-tree", treeID, "-m", commitMessage),
- )
-}
-
-// CreateCommitOnNewBranch creates a branch and a commit, returning the commit sha and the branch name respectivelyi
-func CreateCommitOnNewBranch(t testing.TB, cfg config.Cfg, repoPath string) (string, string) {
- nonce, err := text.RandomHex(4)
- require.NoError(t, err)
- newBranch := "branch-" + nonce
-
- sha := CreateCommit(t, cfg, repoPath, newBranch, &CreateCommitOpts{
- Message: "a new branch and commit " + nonce,
- })
-
- return sha, newBranch
-}
-
-// authorSortofEqual tests if two `CommitAuthor`s have the same name and email.
-// useful when creating commits in the tests.
-func authorSortofEqual(a, b *gitalypb.CommitAuthor) bool {
- if (a == nil) != (b == nil) {
- return false
- }
- return bytes.Equal(a.GetName(), b.GetName()) &&
- bytes.Equal(a.GetEmail(), b.GetEmail())
+func authorEqualIgnoringDate(t testing.TB, expected *gitalypb.CommitAuthor, actual *gitalypb.CommitAuthor) {
+ t.Helper()
+ require.Equal(t, expected.GetName(), actual.GetName(), "author name does not match")
+	require.Equal(t, expected.GetEmail(), actual.GetEmail(), "author email does not match")
}
-// AuthorsEqual tests if two `CommitAuthor`s are equal
-func AuthorsEqual(a *gitalypb.CommitAuthor, b *gitalypb.CommitAuthor) bool {
- return authorSortofEqual(a, b) &&
- a.GetDate().Seconds == b.GetDate().Seconds
+// AuthorEqual tests if two `CommitAuthor`s are equal.
+func AuthorEqual(t testing.TB, expected *gitalypb.CommitAuthor, actual *gitalypb.CommitAuthor) {
+ t.Helper()
+ authorEqualIgnoringDate(t, expected, actual)
+ require.Equal(t, expected.GetDate().GetSeconds(), actual.GetDate().GetSeconds(),
+ "date does not match")
}
-// GitCommitEqual tests if two `GitCommit`s are equal
-func GitCommitEqual(a, b *gitalypb.GitCommit) error {
- if !authorSortofEqual(a.GetAuthor(), b.GetAuthor()) {
- return fmt.Errorf("author does not match: %v != %v", a.GetAuthor(), b.GetAuthor())
- }
- if !authorSortofEqual(a.GetCommitter(), b.GetCommitter()) {
- return fmt.Errorf("commiter does not match: %v != %v", a.GetCommitter(), b.GetCommitter())
- }
- if !bytes.Equal(a.GetBody(), b.GetBody()) {
- return fmt.Errorf("body differs: %q != %q", a.GetBody(), b.GetBody())
- }
- if !bytes.Equal(a.GetSubject(), b.GetSubject()) {
- return fmt.Errorf("subject differs: %q != %q", a.GetSubject(), b.GetSubject())
- }
- if strings.Compare(a.GetId(), b.GetId()) != 0 {
- return fmt.Errorf("id does not match: %q != %q", a.GetId(), b.GetId())
- }
- if len(a.GetParentIds()) != len(b.GetParentIds()) {
- return fmt.Errorf("ParentId does not match: %v != %v", a.GetParentIds(), b.GetParentIds())
- }
-
- for i, pid := range a.GetParentIds() {
- pid2 := b.GetParentIds()[i]
- if strings.Compare(pid, pid2) != 0 {
- return fmt.Errorf("parent id mismatch: %v != %v", pid, pid2)
- }
- }
+// CommitEqual tests if two `GitCommit`s are equal
+func CommitEqual(t testing.TB, expected, actual *gitalypb.GitCommit) {
+ t.Helper()
- return nil
+ authorEqualIgnoringDate(t, expected.GetAuthor(), actual.GetAuthor())
+ authorEqualIgnoringDate(t, expected.GetCommitter(), actual.GetCommitter())
+ require.Equal(t, expected.GetBody(), actual.GetBody(), "body does not match")
+ require.Equal(t, expected.GetSubject(), actual.GetSubject(), "subject does not match")
+ require.Equal(t, expected.GetId(), actual.GetId(), "object ID does not match")
+ require.Equal(t, expected.GetParentIds(), actual.GetParentIds(), "parent IDs do not match")
}
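The removed CreateCommit, CreateCommitOnNewBranch and CommitBlobWithName helpers are superseded by the option-based WriteCommit above. A minimal sketch of a migrated call site, assuming only the WithMessage, WithBranch and WithTreeEntries options introduced in this diff (cfg and repoPath come from the usual test setup):

	// Sketch: write a commit with a custom message and a single file, and
	// point refs/heads/my-branch at the result.
	oid := WriteCommit(t, cfg, repoPath,
		WithMessage("a new branch and commit"),
		WithBranch("my-branch"),
		WithTreeEntries(TreeEntry{Mode: "100644", Path: "file", Content: "content"}),
	)
	require.NotEmpty(t, oid)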
diff --git a/internal/git/gittest/commit_test.go b/internal/git/gittest/commit_test.go
new file mode 100644
index 000000000..60f4bce66
--- /dev/null
+++ b/internal/git/gittest/commit_test.go
@@ -0,0 +1,172 @@
+package gittest
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+)
+
+func TestWriteCommit(t *testing.T) {
+ cfg, repoProto, repoPath := setup(t)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ batchCache := catfile.NewCache(cfg)
+ batch, err := batchCache.BatchProcess(ctx, repo)
+ require.NoError(t, err)
+
+ defaultCommitter := &gitalypb.CommitAuthor{
+ Name: []byte(committerName),
+ Email: []byte(committerEmail),
+ }
+ defaultParentID := "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"
+
+ revisions := map[git.Revision]git.ObjectID{
+ "refs/heads/master": "",
+ "refs/heads/master~": "",
+ }
+ for revision := range revisions {
+ oid, err := repo.ResolveRevision(ctx, revision)
+ require.NoError(t, err)
+ revisions[revision] = oid
+ }
+
+ for _, tc := range []struct {
+ desc string
+ opts []WriteCommitOption
+ expectedCommit *gitalypb.GitCommit
+ expectedTreeEntries []TreeEntry
+ expectedRevUpdate git.Revision
+ }{
+ {
+ desc: "no options",
+ expectedCommit: &gitalypb.GitCommit{
+ Author: defaultCommitter,
+ Committer: defaultCommitter,
+ Subject: []byte("message"),
+ Body: []byte("message"),
+ Id: "cab056fb7bfc5a4d024c2c5b9b445b80f212fdcd",
+ ParentIds: []string{
+ defaultParentID,
+ },
+ },
+ },
+ {
+ desc: "with commit message",
+ opts: []WriteCommitOption{
+ WithMessage("my custom message\n\nfoobar\n"),
+ },
+ expectedCommit: &gitalypb.GitCommit{
+ Author: defaultCommitter,
+ Committer: defaultCommitter,
+ Subject: []byte("my custom message"),
+ Body: []byte("my custom message\n\nfoobar\n"),
+ Id: "7b7e8876f7df27ab99e46678acbf9ae3d29264ba",
+ ParentIds: []string{
+ defaultParentID,
+ },
+ },
+ },
+ {
+ desc: "with no parents",
+ opts: []WriteCommitOption{
+ WithParents(),
+ },
+ expectedCommit: &gitalypb.GitCommit{
+ Author: defaultCommitter,
+ Committer: defaultCommitter,
+ Subject: []byte("message"),
+ Body: []byte("message"),
+ Id: "549090fbeacc6607bc70648d3ba554c355e670c5",
+ ParentIds: nil,
+ },
+ },
+ {
+ desc: "with multiple parents",
+ opts: []WriteCommitOption{
+ WithParents(revisions["refs/heads/master"], revisions["refs/heads/master~"]),
+ },
+ expectedCommit: &gitalypb.GitCommit{
+ Author: defaultCommitter,
+ Committer: defaultCommitter,
+ Subject: []byte("message"),
+ Body: []byte("message"),
+ Id: "650084693e5ca9c0b05a21fc5ac21ad1805c758b",
+ ParentIds: []string{
+ revisions["refs/heads/master"].String(),
+ revisions["refs/heads/master~"].String(),
+ },
+ },
+ },
+ {
+ desc: "with branch",
+ opts: []WriteCommitOption{
+ WithBranch("foo"),
+ },
+ expectedCommit: &gitalypb.GitCommit{
+ Author: defaultCommitter,
+ Committer: defaultCommitter,
+ Subject: []byte("message"),
+ Body: []byte("message"),
+ Id: "cab056fb7bfc5a4d024c2c5b9b445b80f212fdcd",
+ ParentIds: []string{
+ defaultParentID,
+ },
+ },
+ expectedRevUpdate: "refs/heads/foo",
+ },
+ {
+ desc: "with tree entry",
+ opts: []WriteCommitOption{
+ WithTreeEntries(TreeEntry{
+ Content: "foobar",
+ Mode: "100644",
+ Path: "file",
+ }),
+ },
+ expectedCommit: &gitalypb.GitCommit{
+ Author: defaultCommitter,
+ Committer: defaultCommitter,
+ Subject: []byte("message"),
+ Body: []byte("message"),
+ Id: "12da4907ed3331f4991ba6817317a3a90801288e",
+ ParentIds: []string{
+ defaultParentID,
+ },
+ },
+ expectedTreeEntries: []TreeEntry{
+ {
+ Content: "foobar",
+ Mode: "100644",
+ Path: "file",
+ },
+ },
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ oid := WriteCommit(t, cfg, repoPath, tc.opts...)
+
+ commit, err := catfile.GetCommit(ctx, batch, oid.Revision())
+ require.NoError(t, err)
+
+ CommitEqual(t, tc.expectedCommit, commit)
+
+ if tc.expectedTreeEntries != nil {
+ RequireTree(t, cfg, repoPath, oid.String(), tc.expectedTreeEntries)
+ }
+
+ if tc.expectedRevUpdate != "" {
+ updatedOID, err := repo.ResolveRevision(ctx, tc.expectedRevUpdate)
+ require.NoError(t, err)
+ require.Equal(t, oid, updatedOID)
+ }
+ })
+ }
+}
diff --git a/internal/git/gittest/delta_islands.go b/internal/git/gittest/delta_islands.go
index 4d1aab9e6..f48c43d6d 100644
--- a/internal/git/gittest/delta_islands.go
+++ b/internal/git/gittest/delta_islands.go
@@ -1,9 +1,7 @@
package gittest
import (
- "bytes"
"crypto/rand"
- "fmt"
"io"
"io/ioutil"
"strings"
@@ -12,12 +10,12 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
)
// TestDeltaIslands is based on the tests in
// https://github.com/git/git/blob/master/t/t5320-delta-islands.sh .
-func TestDeltaIslands(t *testing.T, repoPath string, repack func() error) {
+func TestDeltaIslands(t *testing.T, cfg config.Cfg, repoPath string, repack func() error) {
// Create blobs that we expect Git to use delta compression on.
blob1, err := ioutil.ReadAll(io.LimitReader(rand.Reader, 100000))
require.NoError(t, err)
@@ -27,49 +25,48 @@ func TestDeltaIslands(t *testing.T, repoPath string, repack func() error) {
// Assume Git prefers the largest blob as the delta base.
badBlob := append(blob2, "\nbad blob"...)
- blob1ID := commitBlob(t, repoPath, "refs/heads/branch1", blob1)
- blob2ID := commitBlob(t, repoPath, "refs/tags/tag2", blob2)
+ blob1ID := commitBlob(t, cfg, repoPath, "refs/heads/branch1", blob1)
+ blob2ID := commitBlob(t, cfg, repoPath, "refs/tags/tag2", blob2)
// The bad blob will only be reachable via a non-standard ref. Because of
// that it should be excluded from delta chains in the main island.
- badBlobID := commitBlob(t, repoPath, "refs/bad/ref3", badBlob)
+ badBlobID := commitBlob(t, cfg, repoPath, "refs/bad/ref3", badBlob)
	// So far we have created blobs and commits but they will be in loose
// object files; we want them to be delta compressed. Run repack to make
// that happen.
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "repack", "-ad")
+ Exec(t, cfg, "-C", repoPath, "repack", "-ad")
- assert.Equal(t, badBlobID, deltaBase(t, repoPath, blob1ID), "expect blob 1 delta base to be bad blob after test setup")
- assert.Equal(t, badBlobID, deltaBase(t, repoPath, blob2ID), "expect blob 2 delta base to be bad blob after test setup")
+ assert.Equal(t, badBlobID, deltaBase(t, cfg, repoPath, blob1ID), "expect blob 1 delta base to be bad blob after test setup")
+ assert.Equal(t, badBlobID, deltaBase(t, cfg, repoPath, blob2ID), "expect blob 2 delta base to be bad blob after test setup")
require.NoError(t, repack(), "repack after delta island setup")
- assert.Equal(t, blob2ID, deltaBase(t, repoPath, blob1ID), "blob 1 delta base should be blob 2 after repack")
+ assert.Equal(t, blob2ID, deltaBase(t, cfg, repoPath, blob1ID), "blob 1 delta base should be blob 2 after repack")
// blob2 is the bigger of the two so it should be the delta base
- assert.Equal(t, git.ZeroOID.String(), deltaBase(t, repoPath, blob2ID), "blob 2 should not be delta compressed after repack")
+ assert.Equal(t, git.ZeroOID.String(), deltaBase(t, cfg, repoPath, blob2ID), "blob 2 should not be delta compressed after repack")
}
-func commitBlob(t *testing.T, repoPath, ref string, content []byte) string {
- hashObjectOut := testhelper.MustRunCommand(t, bytes.NewReader(content), "git", "-C", repoPath, "hash-object", "-w", "--stdin")
- blobID := chompToString(hashObjectOut)
-
- treeSpec := fmt.Sprintf("100644 blob %s\tfile\n", blobID)
- mktreeOut := testhelper.MustRunCommand(t, strings.NewReader(treeSpec), "git", "-C", repoPath, "mktree")
- treeID := chompToString(mktreeOut)
+func commitBlob(t *testing.T, cfg config.Cfg, repoPath, ref string, content []byte) string {
+ blobID := WriteBlob(t, cfg, repoPath, content)
// No parent, that means this will be an initial commit. Not very
// realistic but it doesn't matter for delta compression.
- commitTreeOut := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit-tree", "-m", "msg", treeID)
- commitID := chompToString(commitTreeOut)
+ commitID := WriteCommit(t, cfg, repoPath,
+ WithTreeEntries(TreeEntry{
+ Mode: "100644", OID: blobID, Path: "file",
+ }),
+ WithParents(),
+ )
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", ref, commitID)
+ Exec(t, cfg, "-C", repoPath, "update-ref", ref, commitID.String())
- return blobID
+ return blobID.String()
}
-func deltaBase(t *testing.T, repoPath string, blobID string) string {
- catfileOut := testhelper.MustRunCommand(t, strings.NewReader(blobID), "git", "-C", repoPath, "cat-file", "--batch-check=%(deltabase)")
+func deltaBase(t *testing.T, cfg config.Cfg, repoPath string, blobID string) string {
+ catfileOut := ExecStream(t, cfg, strings.NewReader(blobID), "-C", repoPath, "cat-file", "--batch-check=%(deltabase)")
return chompToString(catfileOut)
}
diff --git a/internal/git/gittest/objects.go b/internal/git/gittest/objects.go
index bf364280e..afd4c8090 100644
--- a/internal/git/gittest/objects.go
+++ b/internal/git/gittest/objects.go
@@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper"
)
// GitObjectMustExist is a test assertion that fails unless the git repo in repoPath contains sha
@@ -65,19 +65,19 @@ func getGitDirSize(t testing.TB, repoPath string, subdirs ...string) int64 {
// WriteBlobs writes n distinct blobs into the git repository's object
// database. Each object has the current time in nanoseconds as contents.
-func WriteBlobs(t testing.TB, testRepoPath string, n int) []string {
+func WriteBlobs(t testing.TB, cfg config.Cfg, testRepoPath string, n int) []string {
var blobIDs []string
for i := 0; i < n; i++ {
contents := []byte(strconv.Itoa(time.Now().Nanosecond()))
- blobIDs = append(blobIDs, WriteBlob(t, testRepoPath, contents).String())
+ blobIDs = append(blobIDs, WriteBlob(t, cfg, testRepoPath, contents).String())
}
return blobIDs
}
// WriteBlob writes the given contents as a blob into the repository and returns its OID.
-func WriteBlob(t testing.TB, testRepoPath string, contents []byte) git.ObjectID {
- hex := text.ChompBytes(testhelper.MustRunCommand(t, bytes.NewReader(contents), "git", "-C", testRepoPath, "hash-object", "-w", "--stdin"))
+func WriteBlob(t testing.TB, cfg config.Cfg, testRepoPath string, contents []byte) git.ObjectID {
+ hex := text.ChompBytes(ExecStream(t, cfg, bytes.NewReader(contents), "-C", testRepoPath, "hash-object", "-w", "--stdin"))
oid, err := git.NewObjectIDFromHex(hex)
require.NoError(t, err)
return oid
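WriteBlob and WriteBlobs now take the test configuration used to locate Git. A short usage sketch under that assumption:

	// Sketch: write one blob with fixed contents and three throwaway blobs.
	blobID := WriteBlob(t, cfg, repoPath, []byte("some contents\n"))
	blobIDs := WriteBlobs(t, cfg, repoPath, 3)
	require.NotEmpty(t, blobID.String())
	require.Len(t, blobIDs, 3)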
diff --git a/internal/git/gittest/remote.go b/internal/git/gittest/remote.go
index e4bbd4f9c..6b82665d7 100644
--- a/internal/git/gittest/remote.go
+++ b/internal/git/gittest/remote.go
@@ -4,16 +4,16 @@ import (
"strings"
"testing"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
)
// RemoteExists tests if the repository at repoPath has a Git remote named remoteName.
-func RemoteExists(t testing.TB, repoPath string, remoteName string) bool {
+func RemoteExists(t testing.TB, cfg config.Cfg, repoPath string, remoteName string) bool {
if remoteName == "" {
t.Fatal("empty remote name")
}
- remotes := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote")
+ remotes := Exec(t, cfg, "-C", repoPath, "remote")
for _, r := range strings.Split(string(remotes), "\n") {
if r == remoteName {
return true
diff --git a/internal/git/gittest/repo.go b/internal/git/gittest/repo.go
index e8b1c8113..caa58d664 100644
--- a/internal/git/gittest/repo.go
+++ b/internal/git/gittest/repo.go
@@ -38,24 +38,14 @@ func InitRepoDir(t testing.TB, storagePath, relativePath string) *gitalypb.Repos
}
}
-// InitBareRepo creates a new bare repository
-func InitBareRepo(t testing.TB) (*gitalypb.Repository, string, func()) {
- return initRepoAt(t, true, config.Storage{Name: "default", Path: testhelper.GitlabTestStoragePath()})
-}
-
// InitBareRepoAt creates a new bare repository in the storage
-func InitBareRepoAt(t testing.TB, storage config.Storage) (*gitalypb.Repository, string, func()) {
- return initRepoAt(t, true, storage)
-}
-
-// InitRepoWithWorktree creates a new repository with a worktree
-func InitRepoWithWorktree(t testing.TB) (*gitalypb.Repository, string, func()) {
- return initRepoAt(t, false, config.Storage{Name: "default", Path: testhelper.GitlabTestStoragePath()})
+func InitBareRepoAt(t testing.TB, cfg config.Cfg, storage config.Storage) (*gitalypb.Repository, string, func()) {
+ return initRepoAt(t, cfg, true, storage)
}
// InitRepoWithWorktreeAtStorage creates a new repository with a worktree in the storage
-func InitRepoWithWorktreeAtStorage(t testing.TB, storage config.Storage) (*gitalypb.Repository, string, func()) {
- return initRepoAt(t, false, storage)
+func InitRepoWithWorktreeAtStorage(t testing.TB, cfg config.Cfg, storage config.Storage) (*gitalypb.Repository, string, func()) {
+ return initRepoAt(t, cfg, false, storage)
}
// NewObjectPoolName returns a random pool repository name in format
@@ -86,7 +76,7 @@ func newDiskHash(t testing.TB) string {
return filepath.Join(b[0:2], b[2:4], b)
}
-func initRepoAt(t testing.TB, bare bool, storage config.Storage) (*gitalypb.Repository, string, func()) {
+func initRepoAt(t testing.TB, cfg config.Cfg, bare bool, storage config.Storage) (*gitalypb.Repository, string, func()) {
relativePath := NewRepositoryName(t, bare)
repoPath := filepath.Join(storage.Path, relativePath)
@@ -95,7 +85,7 @@ func initRepoAt(t testing.TB, bare bool, storage config.Storage) (*gitalypb.Repo
args = append(args, "--bare")
}
- testhelper.MustRunCommand(t, nil, "git", append(args, repoPath)...)
+ Exec(t, cfg, append(args, repoPath)...)
repo := InitRepoDir(t, storage.Path, relativePath)
repo.StorageName = storage.Name
@@ -107,33 +97,22 @@ func initRepoAt(t testing.TB, bare bool, storage config.Storage) (*gitalypb.Repo
}
// CloneRepoAtStorageRoot clones a new copy of the test repository under a subdirectory in the storage root.
-func CloneRepoAtStorageRoot(t testing.TB, storageRoot, relativePath string) *gitalypb.Repository {
- repo, _, _ := cloneRepo(t, storageRoot, relativePath, testRepo, true)
+func CloneRepoAtStorageRoot(t testing.TB, cfg config.Cfg, storageRoot, relativePath string) *gitalypb.Repository {
+ repo, _, _ := cloneRepo(t, cfg, storageRoot, relativePath, testRepo, true)
return repo
}
// CloneRepoAtStorage clones a new copy of the test repository under a subdirectory in the storage root.
-func CloneRepoAtStorage(t testing.TB, storage config.Storage, relativePath string) (*gitalypb.Repository, string, testhelper.Cleanup) {
- repo, repoPath, cleanup := cloneRepo(t, storage.Path, relativePath, testRepo, true)
+func CloneRepoAtStorage(t testing.TB, cfg config.Cfg, storage config.Storage, relativePath string) (*gitalypb.Repository, string, testhelper.Cleanup) {
+ repo, repoPath, cleanup := cloneRepo(t, cfg, storage.Path, relativePath, testRepo, true)
repo.StorageName = storage.Name
return repo, repoPath, cleanup
}
-// CloneRepo creates a bare copy of the test repository.
-func CloneRepo(t testing.TB) (repo *gitalypb.Repository, repoPath string, cleanup func()) {
- return cloneRepo(t, testhelper.GitlabTestStoragePath(), NewRepositoryName(t, true), testRepo, true)
-}
-
-// CloneRepoWithWorktree creates a copy of the test repository with a worktree. This is allows you
-// to run normal 'non-bare' Git commands.
-func CloneRepoWithWorktree(t testing.TB) (repo *gitalypb.Repository, repoPath string, cleanup func()) {
- return cloneRepo(t, testhelper.GitlabTestStoragePath(), NewRepositoryName(t, false), testRepo, false)
-}
-
// CloneRepoWithWorktreeAtStorage creates a copy of the test repository with a worktree at the storage you want.
// This allows you to run normal 'non-bare' Git commands.
-func CloneRepoWithWorktreeAtStorage(t testing.TB, storage config.Storage) (*gitalypb.Repository, string, testhelper.Cleanup) {
- repo, repoPath, cleanup := cloneRepo(t, storage.Path, NewRepositoryName(t, false), testRepo, false)
+func CloneRepoWithWorktreeAtStorage(t testing.TB, cfg config.Cfg, storage config.Storage) (*gitalypb.Repository, string, testhelper.Cleanup) {
+ repo, repoPath, cleanup := cloneRepo(t, cfg, storage.Path, NewRepositoryName(t, false), testRepo, false)
repo.StorageName = storage.Name
return repo, repoPath, cleanup
}
@@ -166,7 +145,7 @@ func isValidRepoPath(absolutePath string) bool {
return true
}
-func cloneRepo(t testing.TB, storageRoot, relativePath, repoName string, bare bool) (repo *gitalypb.Repository, repoPath string, cleanup func()) {
+func cloneRepo(t testing.TB, cfg config.Cfg, storageRoot, relativePath, repoName string, bare bool) (repo *gitalypb.Repository, repoPath string, cleanup func()) {
repoPath = filepath.Join(storageRoot, relativePath)
repo = InitRepoDir(t, storageRoot, relativePath)
@@ -178,14 +157,14 @@ func cloneRepo(t testing.TB, storageRoot, relativePath, repoName string, bare bo
repo.RelativePath = filepath.Join(relativePath, ".git")
}
- testhelper.MustRunCommand(t, nil, "git", append(args, testRepositoryPath(t, repoName), repoPath)...)
+ Exec(t, cfg, append(args, testRepositoryPath(t, repoName), repoPath)...)
return repo, repoPath, func() { require.NoError(t, os.RemoveAll(repoPath)) }
}
// CloneBenchRepo creates a bare copy of the benchmarking test repository.
-func CloneBenchRepo(t testing.TB) (repo *gitalypb.Repository, repoPath string, cleanup func()) {
- return cloneRepo(t, testhelper.GitlabTestStoragePath(), NewRepositoryName(t, true),
+func CloneBenchRepo(t testing.TB, cfg config.Cfg) (repo *gitalypb.Repository, repoPath string, cleanup func()) {
+ return cloneRepo(t, cfg, testhelper.GitlabTestStoragePath(), NewRepositoryName(t, true),
"benchmark.git", true)
}
@@ -196,6 +175,6 @@ func AddWorktreeArgs(repoPath, worktreeName string) []string {
}
// AddWorktree creates a worktree in the repository path for tests
-func AddWorktree(t testing.TB, repoPath string, worktreeName string) {
- testhelper.MustRunCommand(t, nil, "git", AddWorktreeArgs(repoPath, worktreeName)...)
+func AddWorktree(t testing.TB, cfg config.Cfg, repoPath string, worktreeName string) {
+ Exec(t, cfg, AddWorktreeArgs(repoPath, worktreeName)...)
}
diff --git a/internal/git/gittest/repository_suite.go b/internal/git/gittest/repository_suite.go
index 8a14cac24..9f1580361 100644
--- a/internal/git/gittest/repository_suite.go
+++ b/internal/git/gittest/repository_suite.go
@@ -36,7 +36,7 @@ func testRepositoryResolveRevision(t *testing.T, cfg config.Cfg, getRepository f
ctx, cancel := testhelper.Context()
defer cancel()
- pbRepo, _, _ := CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ pbRepo, _, _ := CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
for _, tc := range []struct {
desc string
@@ -89,26 +89,20 @@ func testRepositoryHasBranches(t *testing.T, cfg config.Cfg, getRepository func(
ctx, cancel := testhelper.Context()
defer cancel()
- pbRepo, repoPath, cleanup := InitBareRepoAt(t, cfg.Storages[0])
+ pbRepo, repoPath, cleanup := InitBareRepoAt(t, cfg, cfg.Storages[0])
defer cleanup()
repo := getRepository(t, pbRepo)
- emptyCommit := text.ChompBytes(testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath, "commit-tree", git.EmptyTreeOID.String(),
- ))
+ emptyCommit := text.ChompBytes(Exec(t, cfg, "-C", repoPath, "commit-tree", git.EmptyTreeOID.String()))
- testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath, "update-ref", "refs/headsbranch", emptyCommit,
- )
+ Exec(t, cfg, "-C", repoPath, "update-ref", "refs/headsbranch", emptyCommit)
hasBranches, err := repo.HasBranches(ctx)
require.NoError(t, err)
require.False(t, hasBranches)
- testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath, "update-ref", "refs/heads/branch", emptyCommit,
- )
+ Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/branch", emptyCommit)
hasBranches, err = repo.HasBranches(ctx)
require.NoError(t, err)
diff --git a/internal/testhelper/tag.go b/internal/git/gittest/tag.go
index 3b94e84fd..730dbcd7b 100644
--- a/internal/testhelper/tag.go
+++ b/internal/git/gittest/tag.go
@@ -1,10 +1,11 @@
-package testhelper
+package gittest
import (
"bytes"
"fmt"
"testing"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
)
@@ -15,7 +16,7 @@ type CreateTagOpts struct {
}
// CreateTag creates a new tag.
-func CreateTag(t testing.TB, repoPath, tagName, targetID string, opts *CreateTagOpts) string {
+func CreateTag(t testing.TB, cfg config.Cfg, repoPath, tagName, targetID string, opts *CreateTagOpts) string {
var message string
force := false
@@ -47,8 +48,8 @@ func CreateTag(t testing.TB, repoPath, tagName, targetID string, opts *CreateTag
}
args = append(args, tagName, targetID)
- MustRunCommand(t, stdin, "git", args...)
+ ExecStream(t, cfg, stdin, args...)
- tagID := MustRunCommand(t, nil, "git", "-C", repoPath, "show-ref", "-s", tagName)
+ tagID := Exec(t, cfg, "-C", repoPath, "show-ref", "-s", tagName)
return text.ChompBytes(tagID)
}
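CreateTag keeps its behaviour but now also receives the configuration. A hedged sketch of a call site; the Message field on CreateTagOpts and the commitID variable are assumptions for illustration and are not shown in this hunk:

	// Sketch: create an annotated tag pointing at commitID and read back its object ID.
	tagID := CreateTag(t, cfg, repoPath, "v1.0.0", commitID, &CreateTagOpts{
		Message: "release v1.0.0", // assumed field name
	})
	require.NotEmpty(t, tagID)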
diff --git a/internal/git/gittest/testhelper_test.go b/internal/git/gittest/testhelper_test.go
new file mode 100644
index 000000000..01008107b
--- /dev/null
+++ b/internal/git/gittest/testhelper_test.go
@@ -0,0 +1,57 @@
+package gittest
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+)
+
+func TestMain(m *testing.M) {
+ os.Exit(testMain(m))
+}
+
+func testMain(m *testing.M) int {
+ defer testhelper.MustHaveNoChildProcess()
+ cleanup := testhelper.Configure()
+ defer cleanup()
+ return m.Run()
+}
+
+// setup sets up a test configuration and repository. Ideally we'd use our central test helpers to
+// do this, but because of an import cycle we can't.
+func setup(t testing.TB) (config.Cfg, *gitalypb.Repository, string) {
+ t.Helper()
+
+ rootDir := testhelper.TempDir(t)
+
+ var cfg config.Cfg
+
+ cfg.SocketPath = "it is a stub to bypass Validate method"
+
+ cfg.Storages = []config.Storage{
+ {
+ Name: "default",
+ Path: filepath.Join(rootDir, "storage.d"),
+ },
+ }
+ require.NoError(t, os.Mkdir(cfg.Storages[0].Path, 0755))
+
+ cfg.GitlabShell.Dir = filepath.Join(rootDir, "shell.d")
+ require.NoError(t, os.Mkdir(cfg.GitlabShell.Dir, 0755))
+
+ cfg.BinDir = filepath.Join(rootDir, "bin.d")
+ require.NoError(t, os.Mkdir(cfg.BinDir, 0755))
+
+ require.NoError(t, testhelper.ConfigureRuby(&cfg))
+ require.NoError(t, cfg.Validate())
+
+ repo, repoPath, cleanup := CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
+ t.Cleanup(cleanup)
+
+ return cfg, repo, repoPath
+}
diff --git a/internal/git/gittest/tree.go b/internal/git/gittest/tree.go
index c8299274f..f6a93136b 100644
--- a/internal/git/gittest/tree.go
+++ b/internal/git/gittest/tree.go
@@ -2,14 +2,21 @@ package gittest
import (
"bytes"
+ "crypto/sha1"
+ "encoding/hex"
+ "fmt"
"testing"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/helper/text"
)
// TreeEntry represents an entry of a git tree object.
type TreeEntry struct {
+ // OID is the object ID the tree entry refers to.
+ OID git.ObjectID
// Mode is the file mode of the tree entry.
Mode string
// Path is the full path of the tree entry.
@@ -20,26 +27,86 @@ type TreeEntry struct {
// RequireTree looks up the given treeish and asserts that its entries match
// the given expected entries. Tree entries are checked recursively.
-func RequireTree(t testing.TB, repoPath, treeish string, expectedEntries []TreeEntry) {
+func RequireTree(t testing.TB, cfg config.Cfg, repoPath, treeish string, expectedEntries []TreeEntry) {
t.Helper()
+ for i, entry := range expectedEntries {
+ if entry.OID != "" {
+ continue
+ }
+
+ blob := fmt.Sprintf("blob %d\000%s", len(entry.Content), entry.Content)
+ hash := sha1.Sum([]byte(blob))
+ expectedEntries[i].OID = git.ObjectID(hex.EncodeToString(hash[:]))
+ }
+
var actualEntries []TreeEntry
- output := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "ls-tree", "-r", treeish))
+ output := bytes.TrimSpace(Exec(t, cfg, "-C", repoPath, "ls-tree", "-r", treeish))
if len(output) > 0 {
for _, line := range bytes.Split(output, []byte("\n")) {
// Format: <mode> SP <type> SP <object> TAB <file>
tabSplit := bytes.Split(line, []byte("\t"))
+ require.Len(t, tabSplit, 2)
+
spaceSplit := bytes.Split(tabSplit[0], []byte(" "))
+ require.Len(t, spaceSplit, 3)
+
path := string(tabSplit[1])
+
+ objectID, err := git.NewObjectIDFromHex(string(spaceSplit[2]))
+ require.NoError(t, err)
+
actualEntries = append(actualEntries, TreeEntry{
+ OID: objectID,
Mode: string(spaceSplit[0]),
Path: path,
- Content: string(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show", treeish+":"+path)),
+ Content: string(Exec(t, cfg, "-C", repoPath, "show", treeish+":"+path)),
})
}
}
require.Equal(t, expectedEntries, actualEntries)
}
+
+// WriteTree writes a new tree object to the given path. This function does not verify whether OIDs
+// referred to by tree entries actually exist in the repository.
+func WriteTree(t testing.TB, cfg config.Cfg, repoPath string, entries []TreeEntry) git.ObjectID {
+ t.Helper()
+
+ require.NotEmpty(t, entries)
+
+ var tree bytes.Buffer
+ for _, entry := range entries {
+ var entryType string
+ switch entry.Mode {
+ case "100644":
+ entryType = "blob"
+ case "040000":
+ entryType = "tree"
+ default:
+ t.Fatalf("invalid entry type %q", entry.Mode)
+ }
+
+		require.False(t, len(entry.OID) > 0 && len(entry.Content) > 0,
+			"entry cannot have both OID and content")
+ require.False(t, len(entry.OID) == 0 && len(entry.Content) == 0,
+ "entry must have either an OID or content")
+
+ oid := entry.OID
+ if len(entry.Content) > 0 {
+ oid = WriteBlob(t, cfg, repoPath, []byte(entry.Content))
+ }
+
+ formattedEntry := fmt.Sprintf("%s %s %s\t%s\000", entry.Mode, entryType, oid.String(), entry.Path)
+ _, err := tree.WriteString(formattedEntry)
+ require.NoError(t, err)
+ }
+
+ stdout := ExecStream(t, cfg, &tree, "-C", repoPath, "mktree", "-z", "--missing")
+ treeOID, err := git.NewObjectIDFromHex(text.ChompBytes(stdout))
+ require.NoError(t, err)
+
+ return treeOID
+}
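Together, WriteTree and RequireTree let tests build a tree and assert on it without shelling out manually. A minimal sketch using only the API shown above; RequireTree derives the expected blob OID from the content:

	// Sketch: write a tree with a single file and assert its entries recursively.
	treeID := WriteTree(t, cfg, repoPath, []TreeEntry{
		{Mode: "100644", Path: "file", Content: "foobar\n"},
	})
	RequireTree(t, cfg, repoPath, treeID.String(), []TreeEntry{
		{Mode: "100644", Path: "file", Content: "foobar\n"},
	})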
diff --git a/internal/git/gittest/tree_test.go b/internal/git/gittest/tree_test.go
new file mode 100644
index 000000000..5ca015c36
--- /dev/null
+++ b/internal/git/gittest/tree_test.go
@@ -0,0 +1,154 @@
+package gittest
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git"
+)
+
+func TestWriteTree(t *testing.T) {
+ cfg, _, repoPath := setup(t)
+
+ blobID := WriteBlob(t, cfg, repoPath, []byte("foobar\n"))
+ treeID := WriteTree(t, cfg, repoPath, []TreeEntry{
+ {
+ OID: blobID,
+ Mode: "100644",
+ Path: "file",
+ },
+ })
+
+ for _, tc := range []struct {
+ desc string
+ entries []TreeEntry
+ expectedEntries []TreeEntry
+ expectedOID git.ObjectID
+ }{
+ {
+ desc: "entry with blob OID",
+ entries: []TreeEntry{
+ {
+ OID: blobID,
+ Mode: "100644",
+ Path: "file",
+ },
+ },
+ expectedEntries: []TreeEntry{
+ {
+ OID: blobID,
+ Content: "foobar\n",
+ Mode: "100644",
+ Path: "file",
+ },
+ },
+ expectedOID: "54a22f36d78d0ba7964f71ff72c7309edecab857",
+ },
+ {
+ desc: "entry with blob content",
+ entries: []TreeEntry{
+ {
+ Content: "foobar\n",
+ Mode: "100644",
+ Path: "file",
+ },
+ },
+ expectedEntries: []TreeEntry{
+ {
+ OID: "323fae03f4606ea9991df8befbb2fca795e648fa",
+ Content: "foobar\n",
+ Mode: "100644",
+ Path: "file",
+ },
+ },
+ expectedOID: "54a22f36d78d0ba7964f71ff72c7309edecab857",
+ },
+ {
+ desc: "entry with tree OID",
+ entries: []TreeEntry{
+ {
+ OID: treeID,
+ Mode: "040000",
+ Path: "dir",
+ },
+ },
+ expectedEntries: []TreeEntry{
+ {
+ OID: blobID,
+ Content: "foobar\n",
+ Mode: "100644",
+ Path: "dir/file",
+ },
+ },
+ expectedOID: "c69f8fc9c97fcae2a80ba1578c493171984d810a",
+ },
+ {
+ desc: "mixed tree and blob entries",
+ entries: []TreeEntry{
+ {
+ OID: treeID,
+ Mode: "040000",
+ Path: "dir",
+ },
+ {
+ OID: blobID,
+ Mode: "100644",
+ Path: "file1",
+ },
+ {
+ Content: "different content",
+ Mode: "100644",
+ Path: "file2",
+ },
+ },
+ expectedEntries: []TreeEntry{
+ {
+ OID: blobID,
+ Content: "foobar\n",
+ Mode: "100644",
+ Path: "dir/file",
+ },
+ {
+ OID: blobID,
+ Content: "foobar\n",
+ Mode: "100644",
+ Path: "file1",
+ },
+ {
+ OID: "9b62abfb7f69b6d5801a232a9e6c332a10c9cafc",
+ Content: "different content",
+ Mode: "100644",
+ Path: "file2",
+ },
+ },
+ expectedOID: "70a96b29b67eb29344f399c1c4bc0047568e8dba",
+ },
+ {
+			desc: "two entries with nonexistent objects",
+ entries: []TreeEntry{
+ {
+ OID: git.ObjectID(strings.Repeat("1", 40)),
+ Mode: "100644",
+ Path: "file",
+ },
+ {
+ OID: git.ObjectID(strings.Repeat("0", 40)),
+ Mode: "100644",
+ Path: "file",
+ },
+ },
+ expectedOID: "09e7f53dec572807e651fc368d834f9744a5a42c",
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ oid := WriteTree(t, cfg, repoPath, tc.entries)
+
+ if tc.expectedEntries != nil {
+ RequireTree(t, cfg, repoPath, oid.String(), tc.expectedEntries)
+ }
+
+ require.Equal(t, tc.expectedOID, oid)
+ })
+ }
+}
diff --git a/internal/git/gittest/user.go b/internal/git/gittest/user.go
new file mode 100644
index 000000000..77dc7b1d0
--- /dev/null
+++ b/internal/git/gittest/user.go
@@ -0,0 +1,20 @@
+package gittest
+
+import (
+ "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+)
+
+const (
+ // GlID is the ID of the default user.
+ GlID = "user-123"
+)
+
+var (
+ // TestUser is the default user for tests.
+ TestUser = &gitalypb.User{
+ Name: []byte("Jane Doe"),
+ Email: []byte("janedoe@gitlab.com"),
+ GlId: GlID,
+ GlUsername: "janedoe",
+ }
+)
diff --git a/internal/git/hooks_options.go b/internal/git/hooks_options.go
index 84af8d627..e92da3159 100644
--- a/internal/git/hooks_options.go
+++ b/internal/git/hooks_options.go
@@ -11,7 +11,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/log"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -84,7 +84,7 @@ func (cc *cmdCfg) configureHooks(
return errors.New("hooks already configured")
}
- transaction, praefect, err := metadata.TransactionMetadataFromContext(ctx)
+ transaction, praefect, err := txinfo.FromContext(ctx)
if err != nil {
return err
}
diff --git a/internal/git/hooks_payload.go b/internal/git/hooks_payload.go
index f98fffadd..0ba261fae 100644
--- a/internal/git/hooks_payload.go
+++ b/internal/git/hooks_payload.go
@@ -10,7 +10,7 @@ import (
"github.com/golang/protobuf/jsonpb"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -75,10 +75,10 @@ type HooksPayload struct {
// Transaction is used to identify a reference transaction. This is an optional field -- if
// it's not set, no transactional voting will happen.
- Transaction *metadata.Transaction `json:"transaction"`
+ Transaction *txinfo.Transaction `json:"transaction"`
// Praefect is used to identify the Praefect server which is hosting the transaction. This
// field must be set if and only if `Transaction` is.
- Praefect *metadata.PraefectServer `json:"praefect"`
+ Praefect *txinfo.PraefectServer `json:"praefect"`
// ReceiveHooksPayload contains information required when executing
// git-receive-pack.
@@ -110,8 +110,8 @@ type jsonHooksPayload struct {
func NewHooksPayload(
cfg config.Cfg,
repo *gitalypb.Repository,
- tx *metadata.Transaction,
- praefect *metadata.PraefectServer,
+ tx *txinfo.Transaction,
+ praefect *txinfo.PraefectServer,
receiveHooksPayload *ReceiveHooksPayload,
requestedHooks Hook,
featureFlags featureflag.Raw,
@@ -174,7 +174,7 @@ func HooksPayloadFromEnv(envs []string) (HooksPayload, error) {
payload.Repo = &repo
if payload.Transaction != nil && payload.Praefect == nil {
- return HooksPayload{}, metadata.ErrPraefectServerNotFound
+ return HooksPayload{}, txinfo.ErrPraefectServerNotFound
}
// If no git path is set up as part of the serialized hooks payload,
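The move from the praefect metadata package to txinfo only relocates the transaction types; constructing a hooks payload is otherwise unchanged. A hedged sketch of the new import path in use (the ReceivePackHooks constant and the nil receive-pack payload and feature flags are assumptions for illustration):

	// Sketch: transaction and Praefect info now come from the txinfo package.
	tx := txinfo.Transaction{ID: 1234, Node: "primary", Primary: true}
	praefect := txinfo.PraefectServer{ListenAddr: "127.0.0.1:1234"}
	payload := git.NewHooksPayload(cfg, repo, &tx, &praefect, nil, git.ReceivePackHooks, nil)
	_ = payload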
diff --git a/internal/git/hooks_payload_test.go b/internal/git/hooks_payload_test.go
index 243f80df5..e8b7ed486 100644
--- a/internal/git/hooks_payload_test.go
+++ b/internal/git/hooks_payload_test.go
@@ -7,20 +7,20 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
)
func TestHooksPayload(t *testing.T) {
cfg, repo, _ := testcfg.BuildWithRepo(t)
- tx := metadata.Transaction{
+ tx := txinfo.Transaction{
ID: 1234,
Node: "primary",
Primary: true,
}
- praefect := metadata.PraefectServer{
+ praefect := txinfo.PraefectServer{
BackchannelID: 1,
ListenAddr: "127.0.0.1:1234",
TLSListenAddr: "127.0.0.1:4321",
@@ -90,7 +90,7 @@ func TestHooksPayload(t *testing.T) {
require.NoError(t, err)
_, err = git.HooksPayloadFromEnv([]string{env})
- require.Equal(t, err, metadata.ErrPraefectServerNotFound)
+ require.Equal(t, err, txinfo.ErrPraefectServerNotFound)
})
t.Run("receive hooks payload", func(t *testing.T) {
diff --git a/internal/git/housekeeping/housekeeping_test.go b/internal/git/housekeeping/housekeeping_test.go
index d8e3a6e7b..dcf0ad0e0 100644
--- a/internal/git/housekeeping/housekeeping_test.go
+++ b/internal/git/housekeeping/housekeeping_test.go
@@ -201,7 +201,7 @@ func TestPerform(t *testing.T) {
for _, tc := range testcases {
t.Run(tc.name, func(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -289,7 +289,7 @@ func TestPerform_references(t *testing.T) {
for _, tc := range testcases {
t.Run(tc.desc, func(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
for _, ref := range tc.refs {
path := filepath.Join(repoPath, ref.name)
@@ -397,7 +397,7 @@ func TestPerform_emptyRefDirs(t *testing.T) {
for _, tc := range testcases {
t.Run(tc.name, func(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -431,7 +431,7 @@ func testPerformWithSpecificFile(t *testing.T, file string, finder staleFileFind
defer cancel()
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
for _, tc := range []struct {
desc string
@@ -545,7 +545,7 @@ func TestPerform_referenceLocks(t *testing.T) {
} {
t.Run(tc.desc, func(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
for _, e := range tc.entries {
e.create(t, repoPath)
@@ -648,7 +648,7 @@ func TestShouldRemoveTemporaryObject(t *testing.T) {
func TestPerformRepoDoesNotExist(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -660,7 +660,7 @@ func TestPerformRepoDoesNotExist(t *testing.T) {
func TestPerform_UnsetConfiguration(t *testing.T) {
cfg, repoProto, _ := testcfg.BuildWithRepo(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
diff --git a/internal/git/lfs.go b/internal/git/lfs.go
index 78f3aadd9..852d4a9f1 100644
--- a/internal/git/lfs.go
+++ b/internal/git/lfs.go
@@ -10,7 +10,10 @@ var (
lfsSizeRe = regexp.MustCompile(`(?m)^size [0-9]+$`)
)
-// IsLFSPointer checks to see if a blob is an LFS pointer. It returns the raw data of the pointer if it is
+// IsLFSPointer checks to see if a blob is an LFS pointer.
+// TODO: this is incomplete as it does not recognize the pre-release version of LFS blobs that use
+// the "https://hawser.github.com/spec/v1" version string. For compatibility with the Ruby RPC, we
+// leave this as-is for now though.
func IsLFSPointer(b []byte) bool {
// ensure the version exists
if !bytes.HasPrefix(b, []byte("version https://git-lfs.github.com/spec")) {
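For reference, a well-formed pointer that this check accepts looks like the sketch below; the oid and size values are purely illustrative:

	// Sketch: a Git LFS pointer blob with the supported version prefix.
	pointer := []byte("version https://git-lfs.github.com/spec/v1\n" +
		"oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\n" +
		"size 12345\n")
	if git.IsLFSPointer(pointer) {
		// handle the blob as an LFS pointer
	}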
diff --git a/internal/git/localrepo/config_test.go b/internal/git/localrepo/config_test.go
index f945c4e6a..58d2b9949 100644
--- a/internal/git/localrepo/config_test.go
+++ b/internal/git/localrepo/config_test.go
@@ -2,15 +2,14 @@ package localrepo
import (
"errors"
- "io/ioutil"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
@@ -21,19 +20,13 @@ func setupRepoConfig(t *testing.T) (Config, string) {
cfg := testcfg.Build(t)
- repoProto, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repoProto, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
- repo := New(git.NewExecCommandFactory(cfg), repoProto, cfg)
- return repo.Config(), repoPath
-}
-
-func TestRepo_Config(t *testing.T) {
- bareRepo, _, cleanup := gittest.InitBareRepo(t)
- defer cleanup()
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ repo := New(gitCmdFactory, catfile.NewCache(cfg), repoProto, cfg)
- repo := New(nil, bareRepo, config.Cfg{})
- require.Equal(t, Config{repo: repo}, repo.Config())
+ return repo.Config(), repoPath
}
func TestBuildConfigAddOptsFlags(t *testing.T) {
@@ -68,7 +61,7 @@ func TestConfig_Add(t *testing.T) {
t.Run("ok", func(t *testing.T) {
require.NoError(t, repoConfig.Add(ctx, "key.one", "1", git.ConfigAddOpts{}))
- actual := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "key.one"))
+ actual := text.ChompBytes(gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "key.one"))
require.Equal(t, "1", actual)
})
@@ -76,14 +69,14 @@ func TestConfig_Add(t *testing.T) {
require.NoError(t, repoConfig.Add(ctx, "key.two", "2", git.ConfigAddOpts{}))
require.NoError(t, repoConfig.Add(ctx, "key.two", "3", git.ConfigAddOpts{}))
- actual := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--get-all", "key.two"))
+ actual := text.ChompBytes(gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--get-all", "key.two"))
require.Equal(t, "2\n3", actual)
})
t.Run("options are passed", func(t *testing.T) {
require.NoError(t, repoConfig.Add(ctx, "key.three", "3", git.ConfigAddOpts{Type: git.ConfigTypeInt}))
- actual := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--int", "key.three"))
+ actual := text.ChompBytes(gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--int", "key.three"))
require.Equal(t, "3", actual)
})
@@ -160,9 +153,9 @@ func TestConfig_GetRegexp(t *testing.T) {
repoConfig, repoPath := setupRepoConfig(t)
t.Run("ok", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.one", "one")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.two", "2")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.three", "!@#$%^&")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.one", "one")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.two", "2")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.three", "!@#$%^&")
vals, err := repoConfig.GetRegexp(ctx, "^key\\..*o", git.ConfigGetRegexpOpts{})
require.NoError(t, err)
@@ -170,8 +163,8 @@ func TestConfig_GetRegexp(t *testing.T) {
})
t.Run("show origin and scope", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.four", "4")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.five", "five")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.four", "4")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.five", "five")
exp := []git.ConfigPair{
{Key: "key.four", Value: "4", Origin: "file:" + filepath.Join(repoPath, "config"), Scope: "local"},
@@ -190,7 +183,7 @@ func TestConfig_GetRegexp(t *testing.T) {
})
t.Run("bad combination of regexp and type", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.six", "key-six")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.six", "key-six")
_, err := repoConfig.GetRegexp(ctx, "^key\\.six$", git.ConfigGetRegexpOpts{Type: git.ConfigTypeBool})
require.Error(t, err)
@@ -253,8 +246,7 @@ func TestBuildConfigUnsetOptsFlags(t *testing.T) {
func TestConfig_UnsetAll(t *testing.T) {
configContains := func(t *testing.T, repoPath string) func(t *testing.T, val string, contains bool) {
- data, err := ioutil.ReadFile(filepath.Join(repoPath, "config"))
- require.NoError(t, err)
+ data := testhelper.MustReadFile(t, filepath.Join(repoPath, "config"))
require.Contains(t, string(data), "[core]", "config should have core section defined by default")
return func(t *testing.T, val string, contains bool) {
require.Equal(t, contains, strings.Contains(string(data), val))
@@ -267,7 +259,7 @@ func TestConfig_UnsetAll(t *testing.T) {
repoConfig, repoPath := setupRepoConfig(t)
t.Run("unset single value", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.one", "key-one")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.one", "key-one")
require.NoError(t, repoConfig.Unset(ctx, "key.one", git.ConfigUnsetOpts{}))
@@ -276,8 +268,8 @@ func TestConfig_UnsetAll(t *testing.T) {
})
t.Run("unset multiple values", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.two", "key-two-1")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.two", "key-two-2")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.two", "key-two-1")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.two", "key-two-2")
require.NoError(t, repoConfig.Unset(ctx, "key.two", git.ConfigUnsetOpts{All: true}))
@@ -287,8 +279,8 @@ func TestConfig_UnsetAll(t *testing.T) {
})
t.Run("unset single with multiple values", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.two", "key-two-1")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.two", "key-two-2")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.two", "key-two-1")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.two", "key-two-2")
err := repoConfig.Unset(ctx, "key.two", git.ConfigUnsetOpts{})
require.Equal(t, git.ErrNotFound, err)
@@ -299,7 +291,7 @@ func TestConfig_UnsetAll(t *testing.T) {
})
t.Run("config key doesn't exist - is strict (by default)", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.three", "key-three")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.three", "key-three")
err := repoConfig.Unset(ctx, "some.stub", git.ConfigUnsetOpts{})
require.Equal(t, git.ErrNotFound, err)
@@ -309,7 +301,7 @@ func TestConfig_UnsetAll(t *testing.T) {
})
t.Run("config key doesn't exist - not strict", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--add", "key.four", "key-four")
+ gittest.Exec(t, repoConfig.repo.cfg, "-C", repoPath, "config", "--add", "key.four", "key-four")
require.NoError(t, repoConfig.Unset(ctx, "some.stub", git.ConfigUnsetOpts{NotStrict: true}))
diff --git a/internal/git/localrepo/objects.go b/internal/git/localrepo/objects.go
index 312b3f593..e80909edf 100644
--- a/internal/git/localrepo/objects.go
+++ b/internal/git/localrepo/objects.go
@@ -12,7 +12,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -227,20 +226,20 @@ func (repo *Repo) ReadCommit(ctx context.Context, revision git.Revision, opts ..
opt(&cfg)
}
- c, err := catfile.New(ctx, repo.gitCmdFactory, repo)
+ c, err := repo.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return nil, err
}
var commit *gitalypb.GitCommit
if cfg.withTrailers {
- commit, err = log.GetCommitCatfileWithTrailers(ctx, repo.gitCmdFactory, repo, c, revision)
+ commit, err = catfile.GetCommitWithTrailers(ctx, repo.gitCmdFactory, repo, c, revision)
} else {
- commit, err = log.GetCommitCatfile(ctx, c, revision)
+ commit, err = catfile.GetCommit(ctx, c, revision)
}
if err != nil {
- if log.IsNotFound(err) {
+ if catfile.IsNotFound(err) {
return nil, ErrObjectNotFound
}
return nil, err
diff --git a/internal/git/localrepo/objects_test.go b/internal/git/localrepo/objects_test.go
index a25305ba7..e2dec327c 100644
--- a/internal/git/localrepo/objects_test.go
+++ b/internal/git/localrepo/objects_test.go
@@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -30,13 +31,14 @@ func setupRepo(t *testing.T, bare bool) (*Repo, string) {
var repoPath string
var repoCleanUp func()
if bare {
- repoProto, repoPath, repoCleanUp = gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repoProto, repoPath, repoCleanUp = gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
} else {
- repoProto, repoPath, repoCleanUp = gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repoProto, repoPath, repoCleanUp = gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
}
t.Cleanup(repoCleanUp)
- return New(git.NewExecCommandFactory(cfg), repoProto, cfg), repoPath
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ return New(gitCmdFactory, catfile.NewCache(cfg), repoProto, cfg), repoPath
}
type ReaderFunc func([]byte) (int, error)
@@ -208,7 +210,7 @@ func TestRepo_WriteTag(t *testing.T) {
tagObjID, err := repo.WriteTag(ctx, tc.objectID, tc.objectType, tc.tagName, tc.userName, tc.userEmail, tc.tagBody, tc.authorDate)
require.NoError(t, err)
- repoTagObjID := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", tagObjID.String())
+ repoTagObjID := gittest.Exec(t, repo.cfg, "-C", repoPath, "rev-parse", tagObjID.String())
require.Equal(t, text.ChompBytes(repoTagObjID), tagObjID.String())
})
}
diff --git a/internal/git/localrepo/refs_test.go b/internal/git/localrepo/refs_test.go
index cf91c7b65..1a5de4515 100644
--- a/internal/git/localrepo/refs_test.go
+++ b/internal/git/localrepo/refs_test.go
@@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -180,14 +181,14 @@ func TestRepo_GetReferences(t *testing.T) {
{
desc: "all references",
match: func(t *testing.T, refs []git.Reference) {
- require.Len(t, refs, 94)
+ require.Len(t, refs, 96)
},
},
{
desc: "branches",
patterns: []string{"refs/heads/"},
match: func(t *testing.T, refs []git.Reference) {
- require.Len(t, refs, 91)
+ require.Len(t, refs, 93)
},
},
{
@@ -220,9 +221,9 @@ func TestRepo_GetRemoteReferences(t *testing.T) {
const relativePath = "repository-1"
repoPath := filepath.Join(storagePath, relativePath)
- testhelper.MustRunCommand(t, nil, "git", "init", repoPath)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "--allow-empty", "-m", "commit message")
- commit := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "refs/heads/master"))
+ gittest.Exec(t, cfg, "init", repoPath)
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "--allow-empty", "-m", "commit message")
+ commit := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "refs/heads/master"))
for _, cmd := range [][]string{
{"update-ref", "refs/heads/master", commit},
@@ -231,13 +232,15 @@ func TestRepo_GetRemoteReferences(t *testing.T) {
{"symbolic-ref", "refs/heads/symbolic", "refs/heads/master"},
{"update-ref", "refs/remote/remote-name/remote-branch", commit},
} {
- testhelper.MustRunCommand(t, nil, "git", append([]string{"-C", repoPath}, cmd...)...)
+ gittest.Exec(t, cfg, append([]string{"-C", repoPath}, cmd...)...)
}
- annotatedTagOID := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "annotated-tag"))
+ annotatedTagOID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "annotated-tag"))
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
repo := New(
- git.NewExecCommandFactory(cfg),
+ gitCmdFactory,
+ catfile.NewCache(cfg),
&gitalypb.Repository{StorageName: "default", RelativePath: filepath.Join(relativePath, ".git")},
cfg,
)
@@ -287,7 +290,7 @@ func TestRepo_GetBranches(t *testing.T) {
refs, err := repo.GetBranches(ctx)
require.NoError(t, err)
- require.Len(t, refs, 91)
+ require.Len(t, refs, 93)
}
func TestRepo_UpdateRef(t *testing.T) {
@@ -397,8 +400,8 @@ func TestRepo_UpdateRef(t *testing.T) {
for _, tc := range testcases {
t.Run(tc.desc, func(t *testing.T) {
// Re-create repo for each testcase.
- repoProto, _, _ := gittest.CloneRepoAtStorage(t, repo.cfg.Storages[0], t.Name())
- repo := New(repo.gitCmdFactory, repoProto, repo.cfg)
+ repoProto, _, _ := gittest.CloneRepoAtStorage(t, repo.cfg, repo.cfg.Storages[0], t.Name())
+ repo := New(repo.gitCmdFactory, repo.catfileCache, repoProto, repo.cfg)
err := repo.UpdateRef(ctx, git.ReferenceName(tc.ref), tc.newValue, tc.oldValue)
tc.verify(t, repo, err)
})
diff --git a/internal/git/localrepo/remote_test.go b/internal/git/localrepo/remote_test.go
index 14659e816..91fbc6a3f 100644
--- a/internal/git/localrepo/remote_test.go
+++ b/internal/git/localrepo/remote_test.go
@@ -13,6 +13,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
@@ -32,19 +33,20 @@ func setupRepoRemote(t *testing.T, bare bool) (Remote, string) {
var repoPath string
var repoCleanUp func()
if bare {
- repoProto, repoPath, repoCleanUp = gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repoProto, repoPath, repoCleanUp = gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
} else {
- repoProto, repoPath, repoCleanUp = gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repoProto, repoPath, repoCleanUp = gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
}
t.Cleanup(repoCleanUp)
- return New(git.NewExecCommandFactory(cfg), repoProto, cfg).Remote(), repoPath
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ return New(gitCmdFactory, catfile.NewCache(cfg), repoProto, cfg).Remote(), repoPath
}
func TestRepo_Remote(t *testing.T) {
repository := &gitalypb.Repository{StorageName: "stub", RelativePath: "/stub"}
- repo := New(nil, repository, config.Cfg{})
+ repo := New(nil, nil, repository, config.Cfg{})
require.Equal(t, Remote{repo: repo}, repo.Remote())
}
@@ -94,9 +96,9 @@ func TestRemote_Add(t *testing.T) {
remote, repoPath := setupRepoRemote(t, false)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote", "remove", "origin")
+ gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "remote", "remove", "origin")
- _, remoteRepoPath, cleanup := gittest.CloneRepoAtStorage(t, remote.repo.cfg.Storages[0], "repository")
+ _, remoteRepoPath, cleanup := gittest.CloneRepoAtStorage(t, remote.repo.cfg, remote.repo.cfg.Storages[0], "repository")
defer cleanup()
t.Run("invalid argument", func(t *testing.T) {
@@ -130,27 +132,27 @@ func TestRemote_Add(t *testing.T) {
t.Run("fetch", func(t *testing.T) {
require.NoError(t, remote.Add(ctx, "first", remoteRepoPath, git.RemoteAddOpts{Fetch: true}))
- remotes := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote", "--verbose"))
+ remotes := text.ChompBytes(gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "remote", "--verbose"))
require.Equal(t,
"first "+remoteRepoPath+" (fetch)\n"+
"first "+remoteRepoPath+" (push)",
remotes,
)
- latestSHA := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "refs/remotes/first/master"))
+ latestSHA := text.ChompBytes(gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "rev-parse", "refs/remotes/first/master"))
require.Equal(t, "1e292f8fedd741b75372e19097c76d327140c312", latestSHA)
})
t.Run("default branch", func(t *testing.T) {
require.NoError(t, remote.Add(ctx, "second", "http://some.com.git", git.RemoteAddOpts{DefaultBranch: "wip"}))
- defaultRemote := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "symbolic-ref", "refs/remotes/second/HEAD"))
+ defaultRemote := text.ChompBytes(gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "symbolic-ref", "refs/remotes/second/HEAD"))
require.Equal(t, "refs/remotes/second/wip", defaultRemote)
})
t.Run("remote tracking branches", func(t *testing.T) {
require.NoError(t, remote.Add(ctx, "third", "http://some.com.git", git.RemoteAddOpts{RemoteTrackingBranches: []string{"a", "b"}}))
- defaultRemote := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--get-all", "remote.third.fetch"))
+ defaultRemote := text.ChompBytes(gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "config", "--get-all", "remote.third.fetch"))
require.Equal(t, "+refs/heads/a:refs/remotes/third/a\n+refs/heads/b:refs/remotes/third/b", defaultRemote)
})
@@ -169,11 +171,11 @@ func TestRemote_Remove(t *testing.T) {
remote, repoPath := setupRepoRemote(t, true)
t.Run("ok", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote", "add", "first", "http://some.com.git")
+ gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "remote", "add", "first", "http://some.com.git")
require.NoError(t, remote.Remove(ctx, "first"))
- remotes := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote", "--verbose"))
+ remotes := text.ChompBytes(gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "remote", "--verbose"))
require.Empty(t, remotes)
})
@@ -193,17 +195,17 @@ func TestRemote_Remove(t *testing.T) {
remote, repoPath := setupRepoRemote(t, false)
// configure remote as fetch mirror
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "remote.origin.fetch", "+refs/*:refs/*")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "fetch")
+ gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "config", "remote.origin.fetch", "+refs/*:refs/*")
+ gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "fetch")
- masterBeforeRemove := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show-ref", "refs/heads/master")
+ masterBeforeRemove := gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "show-ref", "refs/heads/master")
require.NoError(t, remote.Remove(ctx, "origin"))
- out := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote")
+ out := gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "remote")
require.Len(t, out, 0)
- out = testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show-ref", "refs/heads/master")
+ out = gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "show-ref", "refs/heads/master")
require.Equal(t, masterBeforeRemove, out)
})
}
@@ -280,11 +282,11 @@ func TestRemote_SetURL(t *testing.T) {
})
t.Run("ok", func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote", "add", "first", "file:/"+repoPath)
+ gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "remote", "add", "first", "file:/"+repoPath)
require.NoError(t, remote.SetURL(ctx, "first", "http://some.com.git", git.SetURLOpts{Push: true}))
- remotes := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote", "--verbose"))
+ remotes := text.ChompBytes(gittest.Exec(t, remote.repo.cfg, "-C", repoPath, "remote", "--verbose"))
require.Equal(t,
"first file:/"+repoPath+" (fetch)\n"+
"first http://some.com.git (push)",
@@ -308,7 +310,7 @@ func TestRepo_FetchRemote(t *testing.T) {
initBareWithRemote := func(t *testing.T, remote string) (*Repo, string, testhelper.Cleanup) {
t.Helper()
- testRepo, testRepoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ testRepo, testRepoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
cmd := exec.Command(cfg.Git.BinPath, "-C", testRepoPath, "remote", "add", remote, remoteRepoPath)
err := cmd.Run()
@@ -317,11 +319,11 @@ func TestRepo_FetchRemote(t *testing.T) {
require.NoError(t, err)
}
- return New(remoteCmd.repo.gitCmdFactory, testRepo, cfg), testRepoPath, cleanup
+ return New(remoteCmd.repo.gitCmdFactory, remoteCmd.repo.catfileCache, testRepo, cfg), testRepoPath, cleanup
}
t.Run("invalid name", func(t *testing.T) {
- repo := New(remoteCmd.repo.gitCmdFactory, nil, cfg)
+ repo := New(remoteCmd.repo.gitCmdFactory, remoteCmd.repo.catfileCache, nil, cfg)
err := repo.FetchRemote(ctx, " ", FetchOpts{})
require.True(t, errors.Is(err, git.ErrInvalidArg))
@@ -329,7 +331,7 @@ func TestRepo_FetchRemote(t *testing.T) {
})
t.Run("unknown remote", func(t *testing.T) {
- repo := New(remoteCmd.repo.gitCmdFactory, remoteCmd.repo, cfg)
+ repo := New(remoteCmd.repo.gitCmdFactory, remoteCmd.repo.catfileCache, remoteCmd.repo, cfg)
var stderr bytes.Buffer
err := repo.FetchRemote(ctx, "stub", FetchOpts{Stderr: &stderr})
require.Error(t, err)
@@ -345,8 +347,7 @@ func TestRepo_FetchRemote(t *testing.T) {
require.Empty(t, stderr.String(), "it should not produce output as it is called with --quiet flag by default")
- fetchHeadData, err := ioutil.ReadFile(filepath.Join(testRepoPath, "FETCH_HEAD"))
- require.NoError(t, err, "it should create FETCH_HEAD with info about fetch")
+ fetchHeadData := testhelper.MustReadFile(t, filepath.Join(testRepoPath, "FETCH_HEAD"))
fetchHead := string(fetchHeadData)
require.Contains(t, fetchHead, "e56497bb5f03a90a51293fc6d516788730953899 not-for-merge branch ''test''")
@@ -358,11 +359,11 @@ func TestRepo_FetchRemote(t *testing.T) {
})
t.Run("with env", func(t *testing.T) {
- _, sourceRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name()+"-1")
- testRepo, testRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name()+"-2")
+ _, sourceRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name()+"-1")
+ testRepo, testRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name()+"-2")
- repo := New(remoteCmd.repo.gitCmdFactory, testRepo, cfg)
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "remote", "add", "source", sourceRepoPath)
+ repo := New(remoteCmd.repo.gitCmdFactory, remoteCmd.repo.catfileCache, testRepo, cfg)
+ gittest.Exec(t, cfg, "-C", testRepoPath, "remote", "add", "source", sourceRepoPath)
var stderr bytes.Buffer
require.NoError(t, repo.FetchRemote(ctx, "source", FetchOpts{Stderr: &stderr, Env: []string{"GIT_TRACE=1"}}))
@@ -370,16 +371,16 @@ func TestRepo_FetchRemote(t *testing.T) {
})
t.Run("with globals", func(t *testing.T) {
- _, sourceRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name()+"-1")
- testRepo, testRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name()+"-2")
+ _, sourceRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name()+"-1")
+ testRepo, testRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name()+"-2")
- repo := New(remoteCmd.repo.gitCmdFactory, testRepo, cfg)
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "remote", "add", "source", sourceRepoPath)
+ repo := New(remoteCmd.repo.gitCmdFactory, remoteCmd.repo.catfileCache, testRepo, cfg)
+ gittest.Exec(t, cfg, "-C", testRepoPath, "remote", "add", "source", sourceRepoPath)
require.NoError(t, repo.FetchRemote(ctx, "source", FetchOpts{}))
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "branch", "--track", "testing-fetch-prune", "refs/remotes/source/markdown")
- testhelper.MustRunCommand(t, nil, "git", "-C", sourceRepoPath, "branch", "-D", "markdown")
+ gittest.Exec(t, cfg, "-C", testRepoPath, "branch", "--track", "testing-fetch-prune", "refs/remotes/source/markdown")
+ gittest.Exec(t, cfg, "-C", sourceRepoPath, "branch", "-D", "markdown")
require.NoError(t, repo.FetchRemote(
ctx,
@@ -397,16 +398,16 @@ func TestRepo_FetchRemote(t *testing.T) {
})
t.Run("with prune", func(t *testing.T) {
- _, sourceRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name()+"-1")
- testRepo, testRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name()+"-2")
+ _, sourceRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name()+"-1")
+ testRepo, testRepoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name()+"-2")
- repo := New(remoteCmd.repo.gitCmdFactory, testRepo, cfg)
+ repo := New(remoteCmd.repo.gitCmdFactory, remoteCmd.repo.catfileCache, testRepo, cfg)
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "remote", "add", "source", sourceRepoPath)
+ gittest.Exec(t, cfg, "-C", testRepoPath, "remote", "add", "source", sourceRepoPath)
require.NoError(t, repo.FetchRemote(ctx, "source", FetchOpts{}))
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "branch", "--track", "testing-fetch-prune", "refs/remotes/source/markdown")
- testhelper.MustRunCommand(t, nil, "git", "-C", sourceRepoPath, "branch", "-D", "markdown")
+ gittest.Exec(t, cfg, "-C", testRepoPath, "branch", "--track", "testing-fetch-prune", "refs/remotes/source/markdown")
+ gittest.Exec(t, cfg, "-C", sourceRepoPath, "branch", "-D", "markdown")
require.NoError(t, repo.FetchRemote(ctx, "source", FetchOpts{Prune: true}))
@@ -419,12 +420,12 @@ func TestRepo_FetchRemote(t *testing.T) {
repo, testRepoPath, cleanup := initBareWithRemote(t, "origin")
defer cleanup()
- tagsBefore := testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "tag", "--list")
+ tagsBefore := gittest.Exec(t, cfg, "-C", testRepoPath, "tag", "--list")
require.Empty(t, tagsBefore)
require.NoError(t, repo.FetchRemote(ctx, "origin", FetchOpts{Tags: FetchOptsTagsNone, Force: true}))
- tagsAfter := testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "tag", "--list")
+ tagsAfter := gittest.Exec(t, cfg, "-C", testRepoPath, "tag", "--list")
require.Empty(t, tagsAfter)
containsBranches, err := repo.HasRevision(ctx, git.Revision("'test'"))
@@ -460,7 +461,8 @@ if [ -z ${GIT_SSH_COMMAND+x} ];then rm -f %q ;else echo -n "$GIT_SSH_COMMAND" >
)
cfg.Git.BinPath = gitPath
- sourceRepo := New(git.NewExecCommandFactory(cfg), sourceRepoPb, cfg)
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ sourceRepo := New(gitCmdFactory, catfile.NewCache(cfg), sourceRepoPb, cfg)
for _, tc := range []struct {
desc string
@@ -497,14 +499,15 @@ if [ -z ${GIT_SSH_COMMAND+x} ];then rm -f %q ;else echo -n "$GIT_SSH_COMMAND" >
require.NoError(t, err)
require.NoError(t, sourceRepo.Push(ctx, pushRepoPath, []string{"refs/*"}, PushOptions{}))
- divergedMaster := gittest.CreateCommit(t, cfg, pushRepoPath, "master", &gittest.CreateCommitOpts{
- ParentID: sourceMaster.Target,
- })
+ divergedMaster := gittest.WriteCommit(t, cfg, pushRepoPath,
+ gittest.WithBranch("master"),
+ gittest.WithParents(git.ObjectID(sourceMaster.Target)),
+ )
master, err := repo.GetReference(ctx, "refs/heads/master")
require.NoError(t, err)
- require.Equal(t, master.Target, divergedMaster)
+ require.Equal(t, master.Target, divergedMaster.String())
},
},
{
@@ -524,8 +527,9 @@ if [ -z ${GIT_SSH_COMMAND+x} ];then rm -f %q ;else echo -n "$GIT_SSH_COMMAND" >
},
} {
t.Run(tc.desc, func(t *testing.T) {
- pushRepoPb, pushRepoPath, _ := gittest.InitBareRepoAt(t, cfg.Storages[0])
- pushRepo := New(git.NewExecCommandFactory(cfg), pushRepoPb, cfg)
+ pushRepoPb, pushRepoPath, _ := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ pushRepo := New(gitCmdFactory, catfile.NewCache(cfg), pushRepoPb, cfg)
if tc.setupPushRepo != nil {
tc.setupPushRepo(t, pushRepo)
diff --git a/internal/git/localrepo/repo.go b/internal/git/localrepo/repo.go
index a51365d52..8de30f9dd 100644
--- a/internal/git/localrepo/repo.go
+++ b/internal/git/localrepo/repo.go
@@ -3,9 +3,11 @@ package localrepo
import (
"context"
"fmt"
+ "testing"
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/storage"
@@ -17,18 +19,27 @@ type Repo struct {
gitCmdFactory git.CommandFactory
cfg config.Cfg
locator storage.Locator
+ catfileCache catfile.Cache
}
// New creates a new Repo from its protobuf representation.
-func New(gitCmdFactory git.CommandFactory, repo repository.GitRepo, cfg config.Cfg) *Repo {
+func New(gitCmdFactory git.CommandFactory, catfileCache catfile.Cache, repo repository.GitRepo, cfg config.Cfg) *Repo {
return &Repo{
GitRepo: repo,
cfg: cfg,
gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
locator: config.NewLocator(cfg),
}
}
+// NewTestRepo constructs a Repo. It is intended as a helper for tests and assembles its
+// dependencies ad-hoc from the given config.
+func NewTestRepo(t testing.TB, cfg config.Cfg, repo repository.GitRepo) *Repo {
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ return New(gitCmdFactory, catfile.NewCache(cfg), repo, cfg)
+}
+
// Path returns the on-disk path of the repository.
func (repo *Repo) Path() (string, error) {
return repo.locator.GetRepoPath(repo)
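The call-site churn throughout this diff stems from localrepo.New gaining a catfile.Cache parameter, plus the new NewTestRepo helper. A minimal sketch of the two construction paths, assuming only the signatures visible in this diff (repoProto and cfg are illustrative placeholders):

	// Explicit construction, as production callers do it:
	gitCmdFactory := git.NewExecCommandFactory(cfg)
	repo := localrepo.New(gitCmdFactory, catfile.NewCache(cfg), repoProto, cfg)

	// Test-only shortcut that assembles the same dependencies from cfg:
	testRepo := localrepo.NewTestRepo(t, cfg, repoProto)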
diff --git a/internal/git/localrepo/repo_test.go b/internal/git/localrepo/repo_test.go
index 67da2ac34..4a5ab58e1 100644
--- a/internal/git/localrepo/repo_test.go
+++ b/internal/git/localrepo/repo_test.go
@@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
@@ -18,14 +19,16 @@ func TestRepo(t *testing.T) {
gittest.TestRepository(t, cfg, func(t testing.TB, pbRepo *gitalypb.Repository) git.Repository {
t.Helper()
- return New(git.NewExecCommandFactory(cfg), pbRepo, cfg)
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ return New(gitCmdFactory, catfile.NewCache(cfg), pbRepo, cfg)
})
}
func TestRepo_Path(t *testing.T) {
t.Run("valid repository", func(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ repo := New(gitCmdFactory, catfile.NewCache(cfg), repoProto, cfg)
path, err := repo.Path()
require.NoError(t, err)
@@ -34,7 +37,8 @@ func TestRepo_Path(t *testing.T) {
t.Run("deleted repository", func(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ repo := New(gitCmdFactory, catfile.NewCache(cfg), repoProto, cfg)
require.NoError(t, os.RemoveAll(repoPath))
@@ -44,7 +48,8 @@ func TestRepo_Path(t *testing.T) {
t.Run("non-git repository", func(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- repo := New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+ repo := New(gitCmdFactory, catfile.NewCache(cfg), repoProto, cfg)
// Recreate the repository as a simple empty directory to simulate
// that the repository is in a partially-created state.
diff --git a/internal/git/log/last_commit.go b/internal/git/log/last_commit.go
index 28f74af4b..49bdecef5 100644
--- a/internal/git/log/last_commit.go
+++ b/internal/git/log/last_commit.go
@@ -29,7 +29,7 @@ func LastCommitForPath(ctx context.Context, gitCmdFactory git.CommandFactory, ba
return nil, err
}
- return GetCommitCatfile(ctx, batch, git.Revision(text.ChompBytes(commitID)))
+ return catfile.GetCommit(ctx, batch, git.Revision(text.ChompBytes(commitID)))
}
// GitLogCommand returns a Command that executes git log with the given the arguments
diff --git a/internal/git/log/log.go b/internal/git/log/parser.go
index 3987071a9..2b3a24f0a 100644
--- a/internal/git/log/log.go
+++ b/internal/git/log/parser.go
@@ -8,7 +8,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -23,9 +22,9 @@ type Parser struct {
c catfile.Batch
}
-// NewLogParser returns a new Parser
-func NewLogParser(ctx context.Context, gitCmdFactory git.CommandFactory, repo repository.GitRepo, src io.Reader) (*Parser, error) {
- c, err := catfile.New(ctx, gitCmdFactory, repo)
+// NewParser returns a new Parser
+func NewParser(ctx context.Context, catfileCache catfile.Cache, repo git.RepositoryExecutor, src io.Reader) (*Parser, error) {
+ c, err := catfileCache.BatchProcess(ctx, repo)
if err != nil {
return nil, err
}
@@ -48,7 +47,7 @@ func (parser *Parser) Parse(ctx context.Context) bool {
commitID := parser.scanner.Text()
- commit, err := GetCommitCatfile(ctx, parser.c, git.Revision(commitID))
+ commit, err := catfile.GetCommit(ctx, parser.c, git.Revision(commitID))
if err != nil {
parser.err = err
return false
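With the rename to NewParser, the log parser no longer spawns its own cat-file process but borrows one from the injected catfile.Cache. A hedged caller sketch, using only the signature added above (catfileCache, repo and src are placeholders; src is expected to yield one commit ID per line):

	parser, err := log.NewParser(ctx, catfileCache, repo, src)
	if err != nil {
		return err
	}
	// Parse advances to the next commit ID read from src and looks the
	// commit up via the shared cat-file batch; it returns false on EOF
	// or on error.
	for parser.Parse(ctx) {
		// consume the parsed commit here
	}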
diff --git a/internal/git/objectpool/clone_test.go b/internal/git/objectpool/clone_test.go
index 78d52dcd0..4b48e1142 100644
--- a/internal/git/objectpool/clone_test.go
+++ b/internal/git/objectpool/clone_test.go
@@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -18,9 +19,18 @@ func setupObjectPool(t *testing.T) (*ObjectPool, *gitalypb.Repository) {
t.Helper()
cfg, repo, _ := testcfg.BuildWithRepo(t)
+ gitCommandFactory := git.NewExecCommandFactory(cfg)
- pool, err := NewObjectPool(cfg, config.NewLocator(cfg), git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := NewObjectPool(
+ cfg,
+ config.NewLocator(cfg),
+ gitCommandFactory,
+ catfile.NewCache(cfg),
+ repo.GetStorageName(),
+ gittest.NewObjectPoolName(t),
+ )
require.NoError(t, err)
+
t.Cleanup(func() {
if err := pool.Remove(context.TODO()); err != nil {
panic(err)
diff --git a/internal/git/objectpool/fetch.go b/internal/git/objectpool/fetch.go
index 0299e9239..6d65fb22f 100644
--- a/internal/git/objectpool/fetch.go
+++ b/internal/git/objectpool/fetch.go
@@ -18,6 +18,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/housekeeping"
"gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/git/updateref"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -63,6 +64,7 @@ func (o *ObjectPool) FetchFromOrigin(ctx context.Context, origin *gitalypb.Repos
}
refSpec := fmt.Sprintf("+refs/*:%s/*", sourceRefNamespace)
+ var stderr bytes.Buffer
if err := o.poolRepo.ExecAndWait(ctx,
git.SubCmd{
Name: "fetch",
@@ -73,8 +75,10 @@ func (o *ObjectPool) FetchFromOrigin(ctx context.Context, origin *gitalypb.Repos
Args: []string{sourceRemote, refSpec},
},
git.WithRefTxHook(ctx, o.poolRepo, o.cfg),
+ git.WithStderr(&stderr),
); err != nil {
- return err
+ return helper.ErrInternalf("fetch into object pool: %w, stderr: %q", err,
+ stderr.String())
}
if err := o.rescueDanglingObjects(ctx); err != nil {
@@ -114,7 +118,7 @@ func (o *ObjectPool) rescueDanglingObjects(ctx context.Context) error {
return err
}
- updater, err := updateref.New(ctx, o.cfg, o.gitCmdFactory, o, updateref.WithDisabledTransactions())
+ updater, err := updateref.New(ctx, o.cfg, o.poolRepo, updateref.WithDisabledTransactions())
if err != nil {
return err
}
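Besides the updateref change, the pool fetch now captures stderr of git-fetch and folds it into the returned error instead of discarding it. The same pattern in isolation, assuming only git.WithStderr, git.SubCmd and helper.ErrInternalf as used above:

	var stderr bytes.Buffer
	if err := repo.ExecAndWait(ctx,
		git.SubCmd{Name: "fetch", Args: []string{"origin"}},
		git.WithStderr(&stderr),
	); err != nil {
		return helper.ErrInternalf("fetch: %w, stderr: %q", err, stderr.String())
	}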
diff --git a/internal/git/objectpool/fetch_test.go b/internal/git/objectpool/fetch_test.go
index 229b84de7..d56bb65f4 100644
--- a/internal/git/objectpool/fetch_test.go
+++ b/internal/git/objectpool/fetch_test.go
@@ -8,7 +8,9 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
)
@@ -32,32 +34,32 @@ func TestFetchFromOriginDangling(t *testing.T) {
nonce, err := text.RandomHex(4)
require.NoError(t, err)
- baseArgs := []string{"-C", pool.FullPath()}
-
// A blob with random contents should be unique.
- newBlobArgs := append(baseArgs, "hash-object", "-t", "blob", "-w", "--stdin")
- newBlob := text.ChompBytes(testhelper.MustRunCommand(t, strings.NewReader(nonce), "git", newBlobArgs...))
+ newBlob := gittest.WriteBlob(t, pool.cfg, pool.FullPath(), []byte(nonce))
// A tree with a randomly named blob entry should be unique.
- newTreeArgs := append(baseArgs, "mktree")
- newTreeStdin := strings.NewReader(fmt.Sprintf("100644 blob %s %s\n", existingBlob, nonce))
- newTree := text.ChompBytes(testhelper.MustRunCommand(t, newTreeStdin, "git", newTreeArgs...))
+ newTree := gittest.WriteTree(t, pool.cfg, pool.FullPath(), []gittest.TreeEntry{
+ {Mode: "100644", OID: git.ObjectID(existingBlob), Path: nonce},
+ })
// A commit with a random message should be unique.
- newCommitArgs := append(baseArgs, "commit-tree", existingTree)
- newCommit := text.ChompBytes(testhelper.MustRunCommand(t, strings.NewReader(nonce), "git", newCommitArgs...))
+ newCommit := gittest.WriteCommit(t, pool.cfg, pool.FullPath(),
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ OID: git.ObjectID(existingTree), Path: nonce, Mode: "040000",
+ }),
+ )
// A tag with random hex characters in its name should be unique.
newTagName := "tag-" + nonce
- newTagArgs := append(baseArgs, "tag", "-m", "msg", "-a", newTagName, existingCommit)
- testhelper.MustRunCommand(t, strings.NewReader(nonce), "git", newTagArgs...)
- newTag := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", append(baseArgs, "rev-parse", newTagName)...))
+ newTag := gittest.CreateTag(t, pool.cfg, pool.FullPath(), newTagName, existingCommit, &gittest.CreateTagOpts{
+ Message: "msg",
+ })
// `git tag` automatically creates a ref, so our new tag is not dangling.
// Deleting the ref should fix that.
- testhelper.MustRunCommand(t, nil, "git", append(baseArgs, "update-ref", "-d", "refs/tags/"+newTagName)...)
+ gittest.Exec(t, pool.cfg, "-C", pool.FullPath(), "update-ref", "-d", "refs/tags/"+newTagName)
- fsckBefore := testhelper.MustRunCommand(t, nil, "git", append(baseArgs, "fsck", "--connectivity-only", "--dangling")...)
+ fsckBefore := gittest.Exec(t, pool.cfg, "-C", pool.FullPath(), "fsck", "--connectivity-only", "--dangling")
fsckBeforeLines := strings.Split(string(fsckBefore), "\n")
for _, l := range []string{
@@ -73,14 +75,37 @@ func TestFetchFromOriginDangling(t *testing.T) {
// non-dangling objects.
require.NoError(t, pool.FetchFromOrigin(ctx, testRepo), "second fetch")
- refsArgs := append(baseArgs, "for-each-ref", "--format=%(refname) %(objectname)")
- refsAfter := testhelper.MustRunCommand(t, nil, "git", refsArgs...)
+ refsAfter := gittest.Exec(t, pool.cfg, "-C", pool.FullPath(), "for-each-ref", "--format=%(refname) %(objectname)")
refsAfterLines := strings.Split(string(refsAfter), "\n")
- for _, id := range []string{newBlob, newTree, newCommit, newTag} {
+ for _, id := range []string{newBlob.String(), newTree.String(), newCommit.String(), newTag} {
require.Contains(t, refsAfterLines, fmt.Sprintf("refs/dangling/%s %s", id, id))
}
}
+func TestFetchFromOriginFsck(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ pool, repo := setupObjectPool(t)
+ repoPath := filepath.Join(pool.cfg.Storages[0].Path, repo.RelativePath)
+
+ require.NoError(t, pool.FetchFromOrigin(ctx, repo), "seed pool")
+
+ // We're creating a new commit which has a root tree with duplicate entries. git-mktree(1)
+ // allows us to create these trees just fine, but git-fsck(1) complains.
+ gittest.WriteCommit(t, pool.cfg, repoPath,
+ gittest.WithTreeEntries(
+ gittest.TreeEntry{OID: "4b825dc642cb6eb9a060e54bf8d69288fbee4904", Path: "dup", Mode: "040000"},
+ gittest.TreeEntry{OID: "4b825dc642cb6eb9a060e54bf8d69288fbee4904", Path: "dup", Mode: "040000"},
+ ),
+ gittest.WithBranch("branch"),
+ )
+
+ err := pool.FetchFromOrigin(ctx, repo)
+ require.Error(t, err)
+ require.Contains(t, err.Error(), "duplicateEntries: contains duplicate file entries")
+}
+
func TestFetchFromOriginDeltaIslands(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
@@ -91,14 +116,14 @@ func TestFetchFromOriginDeltaIslands(t *testing.T) {
require.NoError(t, pool.FetchFromOrigin(ctx, testRepo), "seed pool")
require.NoError(t, pool.Link(ctx, testRepo))
- gittest.TestDeltaIslands(t, testRepoPath, func() error {
+ gittest.TestDeltaIslands(t, pool.cfg, testRepoPath, func() error {
// This should create a new packfile with good delta chains in the pool
if err := pool.FetchFromOrigin(ctx, testRepo); err != nil {
return err
}
// Make sure the old packfile, with bad delta chains, is deleted from the source repo
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "repack", "-ald")
+ gittest.Exec(t, pool.cfg, "-C", testRepoPath, "repack", "-ald")
return nil
})
@@ -146,8 +171,8 @@ func TestFetchFromOriginRefUpdates(t *testing.T) {
}
for ref, oid := range oldRefs {
- require.Equal(t, oid, resolveRef(t, testRepoPath, "refs/"+ref), "look up %q in source", ref)
- require.Equal(t, oid, resolveRef(t, poolPath, "refs/remotes/origin/"+ref), "look up %q in pool", ref)
+ require.Equal(t, oid, resolveRef(t, pool.cfg, testRepoPath, "refs/"+ref), "look up %q in source", ref)
+ require.Equal(t, oid, resolveRef(t, pool.cfg, poolPath, "refs/remotes/origin/"+ref), "look up %q in pool", ref)
}
newRefs := map[string]string{
@@ -160,21 +185,21 @@ func TestFetchFromOriginRefUpdates(t *testing.T) {
}
for ref, oid := range newRefs {
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "update-ref", "refs/"+ref, oid)
- require.Equal(t, oid, resolveRef(t, testRepoPath, "refs/"+ref), "look up %q in source after update", ref)
+ gittest.Exec(t, pool.cfg, "-C", testRepoPath, "update-ref", "refs/"+ref, oid)
+ require.Equal(t, oid, resolveRef(t, pool.cfg, testRepoPath, "refs/"+ref), "look up %q in source after update", ref)
}
require.NoError(t, pool.FetchFromOrigin(ctx, testRepo), "update pool")
for ref, oid := range newRefs {
- require.Equal(t, oid, resolveRef(t, poolPath, "refs/remotes/origin/"+ref), "look up %q in pool after update", ref)
+ require.Equal(t, oid, resolveRef(t, pool.cfg, poolPath, "refs/remotes/origin/"+ref), "look up %q in pool after update", ref)
}
looseRefs := testhelper.MustRunCommand(t, nil, "find", filepath.Join(poolPath, "refs"), "-type", "f")
require.Equal(t, "", string(looseRefs), "there should be no loose refs after the fetch")
}
-func resolveRef(t *testing.T, repo string, ref string) string {
- out := testhelper.MustRunCommand(t, nil, "git", "-C", repo, "rev-parse", ref)
+func resolveRef(t *testing.T, cfg config.Cfg, repo string, ref string) string {
+ out := gittest.Exec(t, cfg, "-C", repo, "rev-parse", ref)
return text.ChompBytes(out)
}
diff --git a/internal/git/objectpool/link_test.go b/internal/git/objectpool/link_test.go
index faa8dd1f7..0413e41e1 100644
--- a/internal/git/objectpool/link_test.go
+++ b/internal/git/objectpool/link_test.go
@@ -2,7 +2,6 @@ package objectpool
import (
"io/ioutil"
- "os"
"path/filepath"
"strings"
"testing"
@@ -23,26 +22,21 @@ func TestLink(t *testing.T) {
altPath, err := pool.locator.InfoAlternatesPath(testRepo)
require.NoError(t, err)
- _, err = os.Stat(altPath)
- require.True(t, os.IsNotExist(err))
+ require.NoFileExists(t, altPath)
require.NoError(t, pool.Link(ctx, testRepo))
require.FileExists(t, altPath, "alternates file must exist after Link")
- content, err := ioutil.ReadFile(altPath)
- require.NoError(t, err)
-
+ content := testhelper.MustReadFile(t, altPath)
require.True(t, strings.HasPrefix(string(content), "../"), "expected %q to be relative path", content)
require.NoError(t, pool.Link(ctx, testRepo))
- newContent, err := ioutil.ReadFile(altPath)
- require.NoError(t, err)
-
+ newContent := testhelper.MustReadFile(t, altPath)
require.Equal(t, content, newContent)
- require.False(t, gittest.RemoteExists(t, pool.FullPath(), testRepo.GetGlRepository()), "pool remotes should not include %v", testRepo)
+ require.False(t, gittest.RemoteExists(t, pool.cfg, pool.FullPath(), testRepo.GetGlRepository()), "pool remotes should not include %v", testRepo)
}
func TestLinkRemoveBitmap(t *testing.T) {
@@ -55,24 +49,24 @@ func TestLinkRemoveBitmap(t *testing.T) {
testRepoPath := filepath.Join(pool.cfg.Storages[0].Path, testRepo.RelativePath)
poolPath := pool.FullPath()
- testhelper.MustRunCommand(t, nil, "git", "-C", poolPath, "fetch", testRepoPath, "+refs/*:refs/*")
+ gittest.Exec(t, pool.cfg, "-C", poolPath, "fetch", testRepoPath, "+refs/*:refs/*")
- testhelper.MustRunCommand(t, nil, "git", "-C", poolPath, "repack", "-adb")
+ gittest.Exec(t, pool.cfg, "-C", poolPath, "repack", "-adb")
require.Len(t, listBitmaps(t, pool.FullPath()), 1, "pool bitmaps before")
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "repack", "-adb")
+ gittest.Exec(t, pool.cfg, "-C", testRepoPath, "repack", "-adb")
require.Len(t, listBitmaps(t, testRepoPath), 1, "member bitmaps before")
- refsBefore := testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "for-each-ref")
+ refsBefore := gittest.Exec(t, pool.cfg, "-C", testRepoPath, "for-each-ref")
require.NoError(t, pool.Link(ctx, testRepo))
require.Len(t, listBitmaps(t, pool.FullPath()), 1, "pool bitmaps after")
require.Len(t, listBitmaps(t, testRepoPath), 0, "member bitmaps after")
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "fsck")
+ gittest.Exec(t, pool.cfg, "-C", testRepoPath, "fsck")
- refsAfter := testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "for-each-ref")
+ refsAfter := gittest.Exec(t, pool.cfg, "-C", testRepoPath, "for-each-ref")
require.Equal(t, refsBefore, refsAfter, "compare member refs before/after link")
}
@@ -101,10 +95,10 @@ func TestUnlink(t *testing.T) {
require.NoError(t, pool.Create(ctx, testRepo), "create pool")
require.NoError(t, pool.Link(ctx, testRepo), "link test repo to pool")
- require.False(t, gittest.RemoteExists(t, pool.FullPath(), testRepo.GetGlRepository()), "pool remotes should include %v", testRepo)
+ require.False(t, gittest.RemoteExists(t, pool.cfg, pool.FullPath(), testRepo.GetGlRepository()), "pool remotes should include %v", testRepo)
require.NoError(t, pool.Unlink(ctx, testRepo), "unlink repo")
- require.False(t, gittest.RemoteExists(t, pool.FullPath(), testRepo.GetGlRepository()), "pool remotes should no longer include %v", testRepo)
+ require.False(t, gittest.RemoteExists(t, pool.cfg, pool.FullPath(), testRepo.GetGlRepository()), "pool remotes should no longer include %v", testRepo)
}
func TestLinkAbsoluteLinkExists(t *testing.T) {
@@ -129,13 +123,11 @@ func TestLinkAbsoluteLinkExists(t *testing.T) {
require.FileExists(t, altPath, "alternates file must exist after Link")
- content, err := ioutil.ReadFile(altPath)
- require.NoError(t, err)
-
+ content := testhelper.MustReadFile(t, altPath)
require.False(t, filepath.IsAbs(string(content)), "expected %q to be relative path", content)
testRepoObjectsPath := filepath.Join(testRepoPath, "objects")
require.Equal(t, fullPath, filepath.Join(testRepoObjectsPath, string(content)), "the content of the alternates file should be the relative version of the absolute path")
- require.True(t, gittest.RemoteExists(t, pool.FullPath(), "origin"), "pool remotes should include %v", testRepo)
+ require.True(t, gittest.RemoteExists(t, pool.cfg, pool.FullPath(), "origin"), "pool remotes should include %v", testRepo)
}
diff --git a/internal/git/objectpool/pool.go b/internal/git/objectpool/pool.go
index 68d15ed52..f23e5a854 100644
--- a/internal/git/objectpool/pool.go
+++ b/internal/git/objectpool/pool.go
@@ -13,6 +13,7 @@ import (
"strings"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/storage"
@@ -46,7 +47,14 @@ type ObjectPool struct {
// NewObjectPool will initialize the object with the required data on the storage
// shard. Relative path is validated to match the expected naming and directory
// structure. If the shard cannot be found, this function returns an error.
-func NewObjectPool(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, storageName, relativePath string) (*ObjectPool, error) {
+func NewObjectPool(
+ cfg config.Cfg,
+ locator storage.Locator,
+ gitCmdFactory git.CommandFactory,
+ catfileCache catfile.Cache,
+ storageName,
+ relativePath string,
+) (*ObjectPool, error) {
storagePath, err := locator.GetStorageByName(storageName)
if err != nil {
return nil, err
@@ -65,7 +73,7 @@ func NewObjectPool(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.Co
storagePath: storagePath,
relativePath: relativePath,
}
- pool.poolRepo = localrepo.New(gitCmdFactory, pool, cfg)
+ pool.poolRepo = localrepo.New(gitCmdFactory, catfileCache, pool, cfg)
return pool, nil
}
@@ -150,7 +158,13 @@ func (o *ObjectPool) Init(ctx context.Context) (err error) {
}
// FromRepo returns an instance of ObjectPool that the repository points to
-func FromRepo(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository) (*ObjectPool, error) {
+func FromRepo(
+ cfg config.Cfg,
+ locator storage.Locator,
+ gitCmdFactory git.CommandFactory,
+ catfileCache catfile.Cache,
+ repo *gitalypb.Repository,
+) (*ObjectPool, error) {
dir, err := getAlternateObjectDir(locator, repo)
if err != nil {
return nil, err
@@ -170,7 +184,7 @@ func FromRepo(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.Command
return nil, err
}
- return NewObjectPool(cfg, locator, gitCmdFactory, repo.GetStorageName(), filepath.Dir(altPathRelativeToStorage))
+ return NewObjectPool(cfg, locator, gitCmdFactory, catfileCache, repo.GetStorageName(), filepath.Dir(altPathRelativeToStorage))
}
var (
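NewObjectPool, FromRepo and FromProto now all thread a catfile.Cache through to the embedded localrepo.Repo. A construction sketch mirroring the setupObjectPool change earlier in this diff (relativePath is a placeholder):

	pool, err := objectpool.NewObjectPool(
		cfg,
		config.NewLocator(cfg),
		git.NewExecCommandFactory(cfg),
		catfile.NewCache(cfg),
		repo.GetStorageName(),
		relativePath,
	)
	if err != nil {
		return err
	}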
diff --git a/internal/git/objectpool/pool_test.go b/internal/git/objectpool/pool_test.go
index f1105805f..4dc51a9b6 100644
--- a/internal/git/objectpool/pool_test.go
+++ b/internal/git/objectpool/pool_test.go
@@ -20,10 +20,10 @@ func TestNewObjectPool(t *testing.T) {
locator := config.NewLocator(cfg)
- _, err := NewObjectPool(cfg, locator, nil, cfg.Storages[0].Name, gittest.NewObjectPoolName(t))
+ _, err := NewObjectPool(cfg, locator, nil, nil, cfg.Storages[0].Name, gittest.NewObjectPoolName(t))
require.NoError(t, err)
- _, err = NewObjectPool(cfg, locator, nil, "mepmep", gittest.NewObjectPoolName(t))
+ _, err = NewObjectPool(cfg, locator, nil, nil, "mepmep", gittest.NewObjectPoolName(t))
require.Error(t, err, "creating pool in storage that does not exist should fail")
}
@@ -36,7 +36,7 @@ func TestNewFromRepoSuccess(t *testing.T) {
require.NoError(t, pool.Create(ctx, testRepo))
require.NoError(t, pool.Link(ctx, testRepo))
- poolFromRepo, err := FromRepo(pool.cfg, pool.locator, pool.gitCmdFactory, testRepo)
+ poolFromRepo, err := FromRepo(pool.cfg, pool.locator, pool.gitCmdFactory, nil, testRepo)
require.NoError(t, err)
require.Equal(t, pool.relativePath, poolFromRepo.relativePath)
require.Equal(t, pool.storageName, poolFromRepo.storageName)
@@ -48,7 +48,7 @@ func TestNewFromRepoNoObjectPool(t *testing.T) {
testRepoPath := filepath.Join(pool.cfg.Storages[0].Path, testRepo.RelativePath)
// no alternates file
- poolFromRepo, err := FromRepo(pool.cfg, pool.locator, pool.gitCmdFactory, testRepo)
+ poolFromRepo, err := FromRepo(pool.cfg, pool.locator, pool.gitCmdFactory, nil, testRepo)
require.Equal(t, ErrAlternateObjectDirNotExist, err)
require.Nil(t, poolFromRepo)
@@ -81,7 +81,7 @@ func TestNewFromRepoNoObjectPool(t *testing.T) {
t.Run(tc.desc, func(t *testing.T) {
alternateFilePath := filepath.Join(testRepoPath, "objects", "info", "alternates")
require.NoError(t, ioutil.WriteFile(alternateFilePath, tc.fileContent, 0644))
- poolFromRepo, err := FromRepo(pool.cfg, pool.locator, pool.gitCmdFactory, testRepo)
+ poolFromRepo, err := FromRepo(pool.cfg, pool.locator, pool.gitCmdFactory, nil, testRepo)
require.Equal(t, tc.expectedErr, err)
require.Nil(t, poolFromRepo)
@@ -98,7 +98,7 @@ func TestCreate(t *testing.T) {
testRepoPath := filepath.Join(pool.cfg.Storages[0].Path, testRepo.RelativePath)
- masterSha := testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "show-ref", "master")
+ masterSha := gittest.Exec(t, pool.cfg, "-C", testRepoPath, "show-ref", "master")
err := pool.Create(ctx, testRepo)
require.NoError(t, err)
@@ -109,19 +109,18 @@ func TestCreate(t *testing.T) {
require.True(t, pool.IsValid())
// No hooks
- _, err = os.Stat(filepath.Join(pool.FullPath(), "hooks"))
- assert.True(t, os.IsNotExist(err))
+ assert.NoDirExists(t, filepath.Join(pool.FullPath(), "hooks"))
// origin is set
- out := testhelper.MustRunCommand(t, nil, "git", "-C", pool.FullPath(), "remote", "get-url", "origin")
+ out := gittest.Exec(t, pool.cfg, "-C", pool.FullPath(), "remote", "get-url", "origin")
assert.Equal(t, testRepoPath, strings.TrimRight(string(out), "\n"))
// refs exist
- out = testhelper.MustRunCommand(t, nil, "git", "-C", pool.FullPath(), "show-ref", "refs/heads/master")
+ out = gittest.Exec(t, pool.cfg, "-C", pool.FullPath(), "show-ref", "refs/heads/master")
assert.Equal(t, masterSha, out)
// No problems
- out = testhelper.MustRunCommand(t, nil, "git", "-C", pool.FullPath(), "cat-file", "-s", "55bc176024cfa3baaceb71db584c7e5df900ea65")
+ out = gittest.Exec(t, pool.cfg, "-C", pool.FullPath(), "cat-file", "-s", "55bc176024cfa3baaceb71db584c7e5df900ea65")
assert.Equal(t, "282\n", string(out))
}
diff --git a/internal/git/objectpool/proto.go b/internal/git/objectpool/proto.go
index e4065f708..6feebdfb5 100644
--- a/internal/git/objectpool/proto.go
+++ b/internal/git/objectpool/proto.go
@@ -2,14 +2,21 @@ package objectpool
import (
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
// FromProto returns an object pool object from a git repository object
-func FromProto(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, o *gitalypb.ObjectPool) (*ObjectPool, error) {
- return NewObjectPool(cfg, locator, gitCmdFactory, o.GetRepository().GetStorageName(), o.GetRepository().GetRelativePath())
+func FromProto(
+ cfg config.Cfg,
+ locator storage.Locator,
+ gitCmdFactory git.CommandFactory,
+ catfileCache catfile.Cache,
+ o *gitalypb.ObjectPool,
+) (*ObjectPool, error) {
+ return NewObjectPool(cfg, locator, gitCmdFactory, catfileCache, o.GetRepository().GetStorageName(), o.GetRepository().GetRelativePath())
}
// ToProto returns a new struct that is the protobuf definition of the ObjectPool
diff --git a/internal/git/packfile/packfile_test.go b/internal/git/packfile/packfile_test.go
index 2074f903a..9bad0bfd7 100644
--- a/internal/git/packfile/packfile_test.go
+++ b/internal/git/packfile/packfile_test.go
@@ -1,4 +1,4 @@
-package packfile
+package packfile_test
import (
"os"
@@ -6,7 +6,10 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/internal/git/packfile"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
)
func TestMain(m *testing.M) {
@@ -21,18 +24,19 @@ func testMain(m *testing.M) int {
}
func TestList(t *testing.T) {
+ cfg := testcfg.Build(t)
tempDir := testhelper.TempDir(t)
emptyRepo := filepath.Join(tempDir, "empty.git")
- testhelper.MustRunCommand(t, nil, "git", "init", "--bare", emptyRepo)
+ gittest.Exec(t, cfg, "init", "--bare", emptyRepo)
populatedRepo := filepath.Join(tempDir, "populated")
- testhelper.MustRunCommand(t, nil, "git", "init", populatedRepo)
+ gittest.Exec(t, cfg, "init", populatedRepo)
for i := 0; i < 10; i++ {
- testhelper.MustRunCommand(t, nil, "git", "-C", populatedRepo, "commit",
+ gittest.Exec(t, cfg, "-C", populatedRepo, "commit",
"--allow-empty", "--message", "commit message")
}
- testhelper.MustRunCommand(t, nil, "git", "-C", populatedRepo, "repack", "-ad")
+ gittest.Exec(t, cfg, "-C", populatedRepo, "repack", "-ad")
testCases := []struct {
desc string
@@ -45,7 +49,7 @@ func TestList(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- out, err := List(filepath.Join(tc.path, "objects"))
+ out, err := packfile.List(filepath.Join(tc.path, "objects"))
require.NoError(t, err)
require.Len(t, out, tc.numPacks)
})
diff --git a/internal/git/reference.go b/internal/git/reference.go
index 494d20c77..96a32190d 100644
--- a/internal/git/reference.go
+++ b/internal/git/reference.go
@@ -6,6 +6,15 @@ import (
"strings"
)
+// InternalRefPrefixes is an array of all reference prefixes which are used internally by GitLab.
+// These need special treatment in some cases, e.g. to restrict writing to them.
+var InternalRefPrefixes = [...]string{
+ "refs/environments/",
+ "refs/keep-around/",
+ "refs/merge-requests/",
+ "refs/pipelines/",
+}
+
// Revision represents anything that resolves to either a commit, multiple
// commits or to an object different than a commit. This could be e.g.
// "master", "master^{commit}", an object hash or similar. See gitrevisions(1)
diff --git a/internal/git/remoterepo/repository_test.go b/internal/git/remoterepo/repository_test.go
index 4bf533544..2124f24cc 100644
--- a/internal/git/remoterepo/repository_test.go
+++ b/internal/git/remoterepo/repository_test.go
@@ -23,8 +23,21 @@ func TestRepository(t *testing.T) {
cfg := testcfg.Build(t)
serverSocketPath := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetTxManager(), deps.GetGitCmdFactory()))
- gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetLinguist()))
+ gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetLinguist(),
+ deps.GetCatfileCache(),
+ ))
})
ctx, cancel := testhelper.Context()
diff --git a/internal/git/repository.go b/internal/git/repository.go
index 6eb2655b8..deb0e4f9b 100644
--- a/internal/git/repository.go
+++ b/internal/git/repository.go
@@ -3,6 +3,9 @@ package git
import (
"context"
"errors"
+
+ "gitlab.com/gitlab-org/gitaly/internal/command"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
)
// DefaultBranch now defaults to master, as that's the Git default
@@ -37,3 +40,11 @@ type Repository interface {
// HasBranches returns whether the repository has branches.
HasBranches(ctx context.Context) (bool, error)
}
+
+// RepositoryExecutor is an interface which allows execution of Git commands in a specific
+// repository.
+type RepositoryExecutor interface {
+ repository.GitRepo
+ Exec(ctx context.Context, cmd Cmd, opts ...CmdOpt) (*command.Command, error)
+ ExecAndWait(ctx context.Context, cmd Cmd, opts ...CmdOpt) error
+}
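RepositoryExecutor is what allows updateref.New (changed further down in this diff) to drop its explicit git.CommandFactory parameter: anything that can execute commands against itself, such as *localrepo.Repo, satisfies the interface. A sketch of the simplified caller, using only signatures shown in this diff:

	repo := localrepo.New(gitCmdFactory, catfileCache, repoProto, cfg)

	// repo implements git.RepositoryExecutor, so no separate command
	// factory needs to be passed to the updater anymore.
	updater, err := updateref.New(ctx, cfg, repo)
	if err != nil {
		return err
	}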
diff --git a/internal/git/ssh_test.go b/internal/git/ssh_test.go
index 201b9fc3a..cdf799b9c 100644
--- a/internal/git/ssh_test.go
+++ b/internal/git/ssh_test.go
@@ -1,7 +1,6 @@
package git
import (
- "io/ioutil"
"path/filepath"
"regexp"
"testing"
@@ -59,8 +58,7 @@ func TestBuildSSHInvocation(t *testing.T) {
expectedCommand := "ssh"
if tc.sshKey != "" {
- content, err := ioutil.ReadFile(sshKeyPath)
- require.NoError(t, err)
+ content := testhelper.MustReadFile(t, sshKeyPath)
require.Equal(t, tc.sshKey, string(content))
expectedCommand += " -oIdentitiesOnly=yes -oIdentityFile=" + sshKeyPath
} else {
@@ -68,8 +66,7 @@ func TestBuildSSHInvocation(t *testing.T) {
}
if tc.knownHosts != "" {
- content, err := ioutil.ReadFile(knownHostsPath)
- require.NoError(t, err)
+ content := testhelper.MustReadFile(t, knownHostsPath)
require.Equal(t, tc.knownHosts, string(content))
expectedCommand += " -oStrictHostKeyChecking=yes -oCheckHostIP=no -oUserKnownHostsFile=" + knownHostsPath
} else {
diff --git a/internal/git/stats/git_test.go b/internal/git/stats/git_test.go
index bd9c76aa4..5056e84ba 100644
--- a/internal/git/stats/git_test.go
+++ b/internal/git/stats/git_test.go
@@ -23,10 +23,10 @@ import (
func TestLogObjectInfo(t *testing.T) {
cfg := testcfg.Build(t)
- repo1, repoPath1, cleanup1 := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name()+"-1")
+ repo1, repoPath1, cleanup1 := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name()+"-1")
defer cleanup1()
- repo2, repoPath2, cleanup2 := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name()+"-2")
+ repo2, repoPath2, cleanup2 := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name()+"-2")
defer cleanup2()
ctx, cancel := testhelper.Context()
@@ -63,7 +63,7 @@ func TestLogObjectInfo(t *testing.T) {
defer func() { require.NoError(t, os.RemoveAll(tmpDir)) }()
// clone existing local repo with two alternates
- testhelper.MustRunCommand(t, nil, "git", "clone", "--shared", repoPath1, "--reference", repoPath1, "--reference", repoPath2, tmpDir)
+ gittest.Exec(t, cfg, "clone", "--shared", repoPath1, "--reference", repoPath1, "--reference", repoPath2, tmpDir)
logBuffer.Reset()
LogObjectsInfo(testCtx, gitCmdFactory, &gitalypb.Repository{
diff --git a/internal/git/stats/profile_test.go b/internal/git/stats/profile_test.go
index 4d8080f3f..87d6ced60 100644
--- a/internal/git/stats/profile_test.go
+++ b/internal/git/stats/profile_test.go
@@ -16,7 +16,7 @@ import (
func TestRepositoryProfile(t *testing.T) {
cfg := testcfg.Build(t)
- testRepo, testRepoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ testRepo, testRepoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer cleanup()
ctx, cancel := testhelper.Context()
@@ -36,7 +36,7 @@ func TestRepositoryProfile(t *testing.T) {
require.Zero(t, packfilesCount)
blobs := 10
- blobIDs := gittest.WriteBlobs(t, testRepoPath, blobs)
+ blobIDs := gittest.WriteBlobs(t, cfg, testRepoPath, blobs)
unpackedObjects, err = UnpackedObjects(testRepoPath)
require.NoError(t, err)
@@ -48,14 +48,19 @@ func TestRepositoryProfile(t *testing.T) {
require.Equal(t, int64(blobs), looseObjects)
for _, blobID := range blobIDs {
- commitID := gittest.CommitBlobWithName(t, cfg, testRepoPath, blobID, blobID, "adding another blob....")
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "update-ref", "refs/heads/"+blobID, commitID)
+ commitID := gittest.WriteCommit(t, cfg, testRepoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ Mode: "100644", Path: "blob", OID: git.ObjectID(blobID),
+ }),
+ gittest.WithParents(),
+ )
+ gittest.Exec(t, cfg, "-C", testRepoPath, "update-ref", "refs/heads/"+blobID, commitID.String())
}
// write a loose object
- gittest.WriteBlobs(t, testRepoPath, 1)
+ gittest.WriteBlobs(t, cfg, testRepoPath, 1)
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "repack", "-A", "-b", "-d")
+ gittest.Exec(t, cfg, "-C", testRepoPath, "repack", "-A", "-b", "-d")
unpackedObjects, err = UnpackedObjects(testRepoPath)
require.NoError(t, err)
@@ -68,7 +73,7 @@ func TestRepositoryProfile(t *testing.T) {
time.Sleep(1 * time.Millisecond)
// write another loose object
- blobID := gittest.WriteBlobs(t, testRepoPath, 1)[0]
+ blobID := gittest.WriteBlobs(t, cfg, testRepoPath, 1)[0]
// due to OS semantics, ensure that the blob has a timestamp that is after the packfile
theFuture := time.Now().Add(10 * time.Minute)
diff --git a/internal/git/updateref/updateref.go b/internal/git/updateref/updateref.go
index c80f1764e..0f3e53ef6 100644
--- a/internal/git/updateref/updateref.go
+++ b/internal/git/updateref/updateref.go
@@ -7,7 +7,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
)
@@ -15,7 +14,7 @@ import (
// that allows references to be easily updated in bulk. It is not suitable for
// concurrent use.
type Updater struct {
- repo repository.GitRepo
+ repo git.RepositoryExecutor
cmd *command.Command
stderr *bytes.Buffer
}
@@ -41,7 +40,7 @@ func WithDisabledTransactions() UpdaterOpt {
//
// It is important that ctx gets canceled somewhere. If it doesn't, the process
// spawned by New() may never terminate.
-func New(ctx context.Context, conf config.Cfg, gitCmdFactory git.CommandFactory, repo repository.GitRepo, opts ...UpdaterOpt) (*Updater, error) {
+func New(ctx context.Context, conf config.Cfg, repo git.RepositoryExecutor, opts ...UpdaterOpt) (*Updater, error) {
var cfg updaterConfig
for _, opt := range opts {
opt(&cfg)
@@ -53,7 +52,7 @@ func New(ctx context.Context, conf config.Cfg, gitCmdFactory git.CommandFactory,
}
var stderr bytes.Buffer
- cmd, err := gitCmdFactory.New(ctx, repo,
+ cmd, err := repo.Exec(ctx,
git.SubCmd{
Name: "update-ref",
Flags: []git.Option{git.Flag{Name: "-z"}, git.Flag{Name: "--stdin"}},
diff --git a/internal/git/updateref/updateref_test.go b/internal/git/updateref/updateref_test.go
index d439e444a..375af21b3 100644
--- a/internal/git/updateref/updateref_test.go
+++ b/internal/git/updateref/updateref_test.go
@@ -33,10 +33,9 @@ func setupUpdater(t *testing.T, ctx context.Context) (config.Cfg, *localrepo.Rep
cfg, protoRepo, _ := testcfg.BuildWithRepo(t)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
- repo := localrepo.New(gitCmdFactory, protoRepo, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, protoRepo)
- updater, err := New(ctx, cfg, gitCmdFactory, repo)
+ updater, err := New(ctx, cfg, repo)
require.NoError(t, err)
return cfg, repo, updater
@@ -165,7 +164,8 @@ func TestContextCancelAbortsRefChanges(t *testing.T) {
require.NoError(t, err)
childCtx, childCancel := context.WithCancel(ctx)
- updater, err := New(childCtx, cfg, git.NewExecCommandFactory(cfg), repo)
+ localRepo := localrepo.NewTestRepo(t, cfg, repo)
+ updater, err := New(childCtx, cfg, localRepo)
require.NoError(t, err)
ref := git.ReferenceName("refs/heads/_shouldnotexist")
diff --git a/internal/git2go/apply_test.go b/internal/git2go/apply_test.go
index 17aadd76d..c936e6bd1 100644
--- a/internal/git2go/apply_test.go
+++ b/internal/git2go/apply_test.go
@@ -19,10 +19,10 @@ func TestExecutor_Apply(t *testing.T) {
cfg := testcfg.Build(t)
testhelper.ConfigureGitalyGit2GoBin(t, cfg)
- repoProto, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repoProto, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
executor := New(filepath.Join(cfg.BinDir, "gitaly-git2go"), cfg.Git.BinPath)
ctx, cancel := testhelper.Context()
@@ -99,8 +99,7 @@ func TestExecutor_Apply(t *testing.T) {
diffBetween := func(t testing.TB, fromCommit, toCommit git.ObjectID) []byte {
t.Helper()
- return testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath, "format-patch", "--stdout", fromCommit.String()+".."+toCommit.String())
+ return gittest.Exec(t, cfg, "-C", repoPath, "format-patch", "--stdout", fromCommit.String()+".."+toCommit.String())
}
for _, tc := range []struct {
@@ -215,7 +214,7 @@ func TestExecutor_Apply(t *testing.T) {
Committer: committer,
Message: tc.patches[len(tc.patches)-1].Message,
}, getCommit(t, ctx, repo, commitID))
- gittest.RequireTree(t, repoPath, commitID.String(), tc.tree)
+ gittest.RequireTree(t, cfg, repoPath, commitID.String(), tc.tree)
})
}
}
diff --git a/internal/git2go/commit_test.go b/internal/git2go/commit_test.go
index 68916b759..ef386caaa 100644
--- a/internal/git2go/commit_test.go
+++ b/internal/git2go/commit_test.go
@@ -56,10 +56,10 @@ func TestExecutor_Commit(t *testing.T) {
cfg := testcfg.Build(t)
testhelper.ConfigureGitalyGit2GoBin(t, cfg)
- repoProto, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repoProto, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
originalFile, err := repo.WriteBlob(ctx, "file", bytes.NewBufferString("original"))
require.NoError(t, err)
@@ -492,7 +492,7 @@ func TestExecutor_Commit(t *testing.T) {
Message: message,
}, getCommit(t, ctx, repo, commitID))
- gittest.RequireTree(t, repoPath, commitID.String(), step.treeEntries)
+ gittest.RequireTree(t, cfg, repoPath, commitID.String(), step.treeEntries)
parentCommit = commitID
}
})
diff --git a/internal/git2go/rebase.go b/internal/git2go/rebase.go
new file mode 100644
index 000000000..e35fc6e55
--- /dev/null
+++ b/internal/git2go/rebase.go
@@ -0,0 +1,25 @@
+package git2go
+
+import (
+ "context"
+
+ "gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+)
+
+// RebaseCommand contains parameters to rebase a branch.
+type RebaseCommand struct {
+ // Repository is the path to execute rebase in.
+ Repository string
+	// Committer contains the committer signature.
+ Committer Signature
+ // BranchName is the branch that is rebased.
+ BranchName string
+ // UpstreamRevision is the revision where the branch is rebased onto.
+ UpstreamRevision string
+}
+
+// Run performs the rebase via gitaly-git2go
+func (r RebaseCommand) Run(ctx context.Context, cfg config.Cfg) (git.ObjectID, error) {
+ return runWithGob(ctx, cfg, "rebase", r)
+}
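A hedged usage sketch for the new RebaseCommand, assuming only the fields and the Run signature defined in this file (the literal values and the committer variable are placeholders):

	rebasedOID, err := git2go.RebaseCommand{
		Repository:       repoPath,
		Committer:        committer, // a git2go.Signature prepared elsewhere
		BranchName:       "feature",
		UpstreamRevision: "master",
	}.Run(ctx, cfg)
	if err != nil {
		return err
	}
	// rebasedOID is the object ID of the rebased branch tip.
	_ = rebasedOID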
diff --git a/internal/gitaly/config/config.go b/internal/gitaly/config/config.go
index 9446b0c81..deb4fb7c5 100644
--- a/internal/gitaly/config/config.go
+++ b/internal/gitaly/config/config.go
@@ -33,15 +33,6 @@ const (
GitalyDataPrefix = "+gitaly"
)
-var (
- // Config stores the global configuration
- // Deprecated: please do not use global variable and pass preconfigured Cfg as a parameter
- // where it is needed.
- Config Cfg
-
- hooks []func(*Cfg) error
-)
-
// DailyJob enables a daily task to be scheduled for specific storages
type DailyJob struct {
Hour uint `toml:"start_hour"`
@@ -180,16 +171,7 @@ func Load(file io.Reader) (Cfg, error) {
return cfg, nil
}
-// RegisterHook adds a post-validation callback. Your hook should only
-// access config via the Cfg instance it gets passed. This avoids race
-// conditions during testing, when the global config.Config instance gets
-// updated after these hooks have run.
-func RegisterHook(f func(c *Cfg) error) {
- hooks = append(hooks, f)
-}
-
-// Validate checks the current Config for sanity. It also runs all hooks
-// registered with RegisterHook.
+// Validate checks the current Config for sanity.
func (cfg *Cfg) Validate() error {
for _, run := range []func() error{
cfg.validateListeners,
@@ -210,12 +192,6 @@ func (cfg *Cfg) Validate() error {
}
}
- for _, f := range hooks {
- if err := f(cfg); err != nil {
- return err
- }
- }
-
return nil
}
@@ -426,7 +402,7 @@ func (cfg *Cfg) Storage(storageName string) (Storage, bool) {
// GitalyInternalSocketPath is the path to the internal gitaly socket
func (cfg *Cfg) GitalyInternalSocketPath() string {
- return filepath.Join(cfg.InternalSocketDir, "internal.sock")
+ return filepath.Join(cfg.InternalSocketDir, fmt.Sprintf("internal_%d.sock", os.Getpid()))
}
func (cfg *Cfg) validateBinDir() error {
diff --git a/internal/gitaly/config/config_test.go b/internal/gitaly/config/config_test.go
index 7fbf45129..65829556e 100644
--- a/internal/gitaly/config/config_test.go
+++ b/internal/gitaly/config/config_test.go
@@ -18,17 +18,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config/sentry"
)
-func TestLoad_doesntClearPreviousGlobalConfig(t *testing.T) {
- defer func(old Cfg) { Config = old }(Config)
-
- Config = Cfg{SocketPath: "/tmp"}
- cfg, err := Load(&bytes.Buffer{})
- require.NoError(t, err)
-
- require.Equal(t, "", cfg.SocketPath)
- require.Equal(t, "/tmp", Config.SocketPath)
-}
-
func TestLoadBrokenConfig(t *testing.T) {
tmpFile := strings.NewReader(`path = "/tmp"\nname="foo"`)
_, err := Load(tmpFile)
@@ -424,11 +413,6 @@ func TestValidateHooks(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- originalConfig := Config
- defer func() {
- Config = originalConfig
- }()
-
tempHookDir, cleanup := setupTempHookDirs(t, tc.hookFiles)
defer cleanup()
diff --git a/internal/gitaly/hook/check.go b/internal/gitaly/hook/check.go
index ce793277b..1a2e24834 100644
--- a/internal/gitaly/hook/check.go
+++ b/internal/gitaly/hook/check.go
@@ -2,53 +2,10 @@ package hook
import (
"context"
- "encoding/json"
- "fmt"
- "io"
- "io/ioutil"
- "net/http"
-)
-
-// CheckInfo represents the response of GitLabs `check` API endpoint
-type CheckInfo struct {
- // Version of the GitLab Rails component
- Version string `json:"gitlab_version"`
- // Revision of the Git object of the running GitLab
- Revision string `json:"gitlab_revision"`
- // APIVersion of GitLab, expected to be v4
- APIVersion string `json:"api_version"`
- // RedisReachable shows if GitLab can reach Redis. This can be false
- // while the check itself succeeds. Normal hook API calls will likely
- // fail.
- RedisReachable bool `json:"redis"`
-}
-
-// Check performs an HTTP request to the internal/check API endpoint to verify
-// the connection and tokens. It returns basic information of the installed
-// GitLab
-func (a *gitlabAPI) Check(ctx context.Context) (*CheckInfo, error) {
- resp, err := a.client.Get(ctx, "/check")
- if err != nil {
- return nil, fmt.Errorf("HTTP GET to GitLab endpoint /check failed: %w", err)
- }
-
- defer func() {
- io.Copy(ioutil.Discard, resp.Body)
- resp.Body.Close()
- }()
- if resp.StatusCode != http.StatusOK {
- return nil, fmt.Errorf("Check HTTP request failed with status: %d", resp.StatusCode)
- }
-
- var info CheckInfo
- if err := json.NewDecoder(resp.Body).Decode(&info); err != nil {
- return nil, fmt.Errorf("failed to decode response from /check endpoint: %w", err)
- }
-
- return &info, nil
-}
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
+)
-func (m *GitLabHookManager) Check(ctx context.Context) (*CheckInfo, error) {
- return m.gitlabAPI.Check(ctx)
+func (m *GitLabHookManager) Check(ctx context.Context) (*gitlab.CheckInfo, error) {
+ return m.gitlabClient.Check(ctx)
}
diff --git a/internal/gitaly/hook/manager.go b/internal/gitaly/hook/manager.go
index 69c90534d..5fc208549 100644
--- a/internal/gitaly/hook/manager.go
+++ b/internal/gitaly/hook/manager.go
@@ -6,6 +6,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -48,18 +49,18 @@ type Manager interface {
// GitLabHookManager is a hook manager containing Git hook business logic. It
// uses the GitLab API to authenticate and track ongoing hook calls.
type GitLabHookManager struct {
- locator storage.Locator
- gitlabAPI GitlabAPI
- hooksConfig config.Hooks
- txManager transaction.Manager
+ locator storage.Locator
+ gitlabClient gitlab.Client
+ hooksConfig config.Hooks
+ txManager transaction.Manager
}
// NewManager returns a new hook manager
-func NewManager(locator storage.Locator, txManager transaction.Manager, gitlabAPI GitlabAPI, cfg config.Cfg) *GitLabHookManager {
+func NewManager(locator storage.Locator, txManager transaction.Manager, gitlabClient gitlab.Client, cfg config.Cfg) *GitLabHookManager {
return &GitLabHookManager{
- locator: locator,
- gitlabAPI: gitlabAPI,
- hooksConfig: cfg.Hooks,
- txManager: txManager,
+ locator: locator,
+ gitlabClient: gitlabClient,
+ hooksConfig: cfg.Hooks,
+ txManager: txManager,
}
}
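For reference, a minimal construction sketch for the new NewManager signature, following the wiring used in the tests below; gitlab.NewMockClient() stands in for a real gitlab.Client:

func newHookManagerExample(cfg config.Cfg) *hook.GitLabHookManager {
	// The GitLab client is now injected; tests use gitlab.NewMockClient(),
	// production code passes a real gitlab.Client.
	return hook.NewManager(
		config.NewLocator(cfg),
		transaction.NewManager(cfg, backchannel.NewRegistry()),
		gitlab.NewMockClient(),
		cfg,
	)
}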
diff --git a/internal/gitaly/hook/postreceive.go b/internal/gitaly/hook/postreceive.go
index 6ab89ecdd..1e60e1e31 100644
--- a/internal/gitaly/hook/postreceive.go
+++ b/internal/gitaly/hook/postreceive.go
@@ -12,6 +12,7 @@ import (
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -42,7 +43,7 @@ func getEnvVar(key string, vars []string) string {
return ""
}
-func printMessages(messages []PostReceiveMessage, w io.Writer) error {
+func printMessages(messages []gitlab.PostReceiveMessage, w io.Writer) error {
for _, message := range messages {
if _, err := w.Write([]byte("\n")); err != nil {
return err
@@ -75,7 +76,7 @@ func centerLine(b []byte) []byte {
return append(bytes.Repeat([]byte(" "), linePadding), b...)
}
-func printAlert(m PostReceiveMessage, w io.Writer) error {
+func printAlert(m gitlab.PostReceiveMessage, w io.Writer) error {
if _, err := w.Write(bytes.Repeat([]byte("="), maxMessageWidth)); err != nil {
return err
}
@@ -159,7 +160,7 @@ func (m *GitLabHookManager) postReceiveHook(ctx context.Context, payload git.Hoo
return helper.ErrInternalf("repository not set")
}
- ok, messages, err := m.gitlabAPI.PostReceive(
+ ok, messages, err := m.gitlabClient.PostReceive(
ctx, repo.GetGlRepository(),
payload.ReceiveHooksPayload.UserID,
string(stdin),
diff --git a/internal/gitaly/hook/postreceive_test.go b/internal/gitaly/hook/postreceive_test.go
index d4ae1d00e..60392bd29 100644
--- a/internal/gitaly/hook/postreceive_test.go
+++ b/internal/gitaly/hook/postreceive_test.go
@@ -15,10 +15,11 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
)
func TestPrintAlert(t *testing.T) {
@@ -57,7 +58,7 @@ func TestPrintAlert(t *testing.T) {
for _, tc := range testCases {
var result bytes.Buffer
- require.NoError(t, printAlert(PostReceiveMessage{Message: tc.message}, &result))
+ require.NoError(t, printAlert(gitlab.PostReceiveMessage{Message: tc.message}, &result))
assert.Equal(t, tc.expected, result.String())
}
}
@@ -65,7 +66,7 @@ func TestPrintAlert(t *testing.T) {
func TestPostReceive_customHook(t *testing.T) {
cfg, repo, repoPath := testcfg.BuildWithRepo(t)
- hookManager := NewManager(config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), GitlabAPIStub, cfg)
+ hookManager := NewManager(config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), gitlab.NewMockClient(), cfg)
receiveHooksPayload := &git.ReceiveHooksPayload{
UserID: "1234",
@@ -82,10 +83,10 @@ func TestPostReceive_customHook(t *testing.T) {
primaryPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234, Node: "primary", Primary: true,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
@@ -98,10 +99,10 @@ func TestPostReceive_customHook(t *testing.T) {
secondaryPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234, Node: "secondary", Primary: false,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
@@ -225,10 +226,10 @@ func TestPostReceive_customHook(t *testing.T) {
}
type postreceiveAPIMock struct {
- postreceive func(context.Context, string, string, string, ...string) (bool, []PostReceiveMessage, error)
+ postreceive func(context.Context, string, string, string, ...string) (bool, []gitlab.PostReceiveMessage, error)
}
-func (m *postreceiveAPIMock) Allowed(ctx context.Context, params AllowedParams) (bool, string, error) {
+func (m *postreceiveAPIMock) Allowed(ctx context.Context, params gitlab.AllowedParams) (bool, string, error) {
return true, "", nil
}
@@ -236,11 +237,11 @@ func (m *postreceiveAPIMock) PreReceive(ctx context.Context, glRepository string
return true, nil
}
-func (m *postreceiveAPIMock) Check(ctx context.Context) (*CheckInfo, error) {
+func (m *postreceiveAPIMock) Check(ctx context.Context) (*gitlab.CheckInfo, error) {
return nil, errors.New("unexpected call")
}
-func (m *postreceiveAPIMock) PostReceive(ctx context.Context, glRepository, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
+func (m *postreceiveAPIMock) PostReceive(ctx context.Context, glRepository, glID, changes string, pushOptions ...string) (bool, []gitlab.PostReceiveMessage, error) {
return m.postreceive(ctx, glRepository, glID, changes, pushOptions...)
}
@@ -261,7 +262,7 @@ func TestPostReceive_gitlab(t *testing.T) {
env []string
pushOptions []string
changes string
- postreceive func(*testing.T, context.Context, string, string, string, ...string) (bool, []PostReceiveMessage, error)
+ postreceive func(*testing.T, context.Context, string, string, string, ...string) (bool, []gitlab.PostReceiveMessage, error)
expectHookCall bool
expectedErr error
expectedStdout string
@@ -271,7 +272,7 @@ func TestPostReceive_gitlab(t *testing.T) {
desc: "allowed change",
env: standardEnv,
changes: "changes\n",
- postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
+ postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []gitlab.PostReceiveMessage, error) {
require.Equal(t, repo.GlRepository, glRepo)
require.Equal(t, "1234", glID)
require.Equal(t, "changes\n", changes)
@@ -288,7 +289,7 @@ func TestPostReceive_gitlab(t *testing.T) {
"mr.create",
},
changes: "changes\n",
- postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
+ postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []gitlab.PostReceiveMessage, error) {
require.Equal(t, []string{
"mr.merge_when_pipeline_succeeds",
"mr.create",
@@ -301,7 +302,7 @@ func TestPostReceive_gitlab(t *testing.T) {
desc: "access denied without message",
env: standardEnv,
changes: "changes\n",
- postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
+ postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []gitlab.PostReceiveMessage, error) {
return false, nil, nil
},
expectedErr: errors.New(""),
@@ -310,8 +311,8 @@ func TestPostReceive_gitlab(t *testing.T) {
desc: "access denied with message",
env: standardEnv,
changes: "changes\n",
- postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
- return false, []PostReceiveMessage{
+ postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []gitlab.PostReceiveMessage, error) {
+ return false, []gitlab.PostReceiveMessage{
{
Message: "access denied",
Type: "alert",
@@ -325,7 +326,7 @@ func TestPostReceive_gitlab(t *testing.T) {
desc: "access check returns error",
env: standardEnv,
changes: "changes\n",
- postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
+ postreceive: func(t *testing.T, ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []gitlab.PostReceiveMessage, error) {
return false, nil, errors.New("failure")
},
expectedErr: errors.New("GitLab: failure"),
@@ -338,7 +339,7 @@ func TestPostReceive_gitlab(t *testing.T) {
defer cleanup()
gitlabAPI := postreceiveAPIMock{
- postreceive: func(ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
+ postreceive: func(ctx context.Context, glRepo, glID, changes string, pushOptions ...string) (bool, []gitlab.PostReceiveMessage, error) {
return tc.postreceive(t, ctx, glRepo, glID, changes, pushOptions...)
},
}
diff --git a/internal/gitaly/hook/prereceive.go b/internal/gitaly/hook/prereceive.go
index 41dc8aca7..b3d10748f 100644
--- a/internal/gitaly/hook/prereceive.go
+++ b/internal/gitaly/hook/prereceive.go
@@ -12,6 +12,7 @@ import (
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -116,7 +117,7 @@ func (m *GitLabHookManager) preReceiveHook(ctx context.Context, payload git.Hook
return helper.ErrInternalf("protocol not set")
}
- params := AllowedParams{
+ params := gitlab.AllowedParams{
RepoPath: repoPath,
GitObjectDirectory: repo.GitObjectDirectory,
GitAlternateObjectDirectories: repo.GitAlternateObjectDirectories,
@@ -126,7 +127,7 @@ func (m *GitLabHookManager) preReceiveHook(ctx context.Context, payload git.Hook
Changes: string(changes),
}
- allowed, message, err := m.gitlabAPI.Allowed(ctx, params)
+ allowed, message, err := m.gitlabClient.Allowed(ctx, params)
if err != nil {
return NotAllowedError{Message: fmt.Sprintf("GitLab: %v", err)}
}
@@ -156,7 +157,7 @@ func (m *GitLabHookManager) preReceiveHook(ctx context.Context, payload git.Hook
}
// reference counter
- ok, err := m.gitlabAPI.PreReceive(ctx, repo.GetGlRepository())
+ ok, err := m.gitlabClient.PreReceive(ctx, repo.GetGlRepository())
if err != nil {
return helper.ErrInternalf("calling pre_receive endpoint: %v", err)
}
diff --git a/internal/gitaly/hook/prereceive_test.go b/internal/gitaly/hook/prereceive_test.go
index ee0054093..2995a5e67 100644
--- a/internal/gitaly/hook/prereceive_test.go
+++ b/internal/gitaly/hook/prereceive_test.go
@@ -14,17 +14,18 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
)
func TestPrereceive_customHooks(t *testing.T) {
cfg, repo, repoPath := testcfg.BuildWithRepo(t)
- hookManager := NewManager(config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), GitlabAPIStub, cfg)
+ hookManager := NewManager(config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), gitlab.NewMockClient(), cfg)
receiveHooksPayload := &git.ReceiveHooksPayload{
UserID: "1234",
@@ -41,10 +42,10 @@ func TestPrereceive_customHooks(t *testing.T) {
primaryPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234, Node: "primary", Primary: true,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
@@ -57,10 +58,10 @@ func TestPrereceive_customHooks(t *testing.T) {
secondaryPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234, Node: "secondary", Primary: false,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
@@ -186,11 +187,11 @@ func TestPrereceive_customHooks(t *testing.T) {
}
type prereceiveAPIMock struct {
- allowed func(context.Context, AllowedParams) (bool, string, error)
+ allowed func(context.Context, gitlab.AllowedParams) (bool, string, error)
prereceive func(context.Context, string) (bool, error)
}
-func (m *prereceiveAPIMock) Allowed(ctx context.Context, params AllowedParams) (bool, string, error) {
+func (m *prereceiveAPIMock) Allowed(ctx context.Context, params gitlab.AllowedParams) (bool, string, error) {
return m.allowed(ctx, params)
}
@@ -198,11 +199,11 @@ func (m *prereceiveAPIMock) PreReceive(ctx context.Context, glRepository string)
return m.prereceive(ctx, glRepository)
}
-func (m *prereceiveAPIMock) Check(ctx context.Context) (*CheckInfo, error) {
+func (m *prereceiveAPIMock) Check(ctx context.Context) (*gitlab.CheckInfo, error) {
return nil, errors.New("unexpected call")
}
-func (m *prereceiveAPIMock) PostReceive(context.Context, string, string, string, ...string) (bool, []PostReceiveMessage, error) {
+func (m *prereceiveAPIMock) PostReceive(context.Context, string, string, string, ...string) (bool, []gitlab.PostReceiveMessage, error) {
return true, nil, errors.New("unexpected call")
}
@@ -222,7 +223,7 @@ func TestPrereceive_gitlab(t *testing.T) {
desc string
env []string
changes string
- allowed func(*testing.T, context.Context, AllowedParams) (bool, string, error)
+ allowed func(*testing.T, context.Context, gitlab.AllowedParams) (bool, string, error)
prereceive func(*testing.T, context.Context, string) (bool, error)
expectHookCall bool
expectedErr error
@@ -231,7 +232,7 @@ func TestPrereceive_gitlab(t *testing.T) {
desc: "allowed change",
env: standardEnv,
changes: "changes\n",
- allowed: func(t *testing.T, ctx context.Context, params AllowedParams) (bool, string, error) {
+ allowed: func(t *testing.T, ctx context.Context, params gitlab.AllowedParams) (bool, string, error) {
require.Equal(t, repoPath, params.RepoPath)
require.Equal(t, repo.GlRepository, params.GLRepository)
require.Equal(t, "1234", params.GLID)
@@ -249,7 +250,7 @@ func TestPrereceive_gitlab(t *testing.T) {
desc: "disallowed change",
env: standardEnv,
changes: "changes\n",
- allowed: func(t *testing.T, ctx context.Context, params AllowedParams) (bool, string, error) {
+ allowed: func(t *testing.T, ctx context.Context, params gitlab.AllowedParams) (bool, string, error) {
return false, "you shall not pass", nil
},
expectHookCall: false,
@@ -259,7 +260,7 @@ func TestPrereceive_gitlab(t *testing.T) {
desc: "allowed returns error",
env: standardEnv,
changes: "changes\n",
- allowed: func(t *testing.T, ctx context.Context, params AllowedParams) (bool, string, error) {
+ allowed: func(t *testing.T, ctx context.Context, params gitlab.AllowedParams) (bool, string, error) {
return false, "", errors.New("oops")
},
expectHookCall: false,
@@ -269,7 +270,7 @@ func TestPrereceive_gitlab(t *testing.T) {
desc: "prereceive rejects",
env: standardEnv,
changes: "changes\n",
- allowed: func(t *testing.T, ctx context.Context, params AllowedParams) (bool, string, error) {
+ allowed: func(t *testing.T, ctx context.Context, params gitlab.AllowedParams) (bool, string, error) {
return true, "", nil
},
prereceive: func(t *testing.T, ctx context.Context, glRepo string) (bool, error) {
@@ -282,7 +283,7 @@ func TestPrereceive_gitlab(t *testing.T) {
desc: "prereceive errors",
env: standardEnv,
changes: "changes\n",
- allowed: func(t *testing.T, ctx context.Context, params AllowedParams) (bool, string, error) {
+ allowed: func(t *testing.T, ctx context.Context, params gitlab.AllowedParams) (bool, string, error) {
return true, "", nil
},
prereceive: func(t *testing.T, ctx context.Context, glRepo string) (bool, error) {
@@ -299,7 +300,7 @@ func TestPrereceive_gitlab(t *testing.T) {
defer cleanup()
gitlabAPI := prereceiveAPIMock{
- allowed: func(ctx context.Context, params AllowedParams) (bool, string, error) {
+ allowed: func(ctx context.Context, params gitlab.AllowedParams) (bool, string, error) {
return tc.allowed(t, ctx, params)
},
prereceive: func(ctx context.Context, glRepo string) (bool, error) {
diff --git a/internal/gitaly/hook/referencetransaction.go b/internal/gitaly/hook/referencetransaction.go
index 1028ab367..e2e23834b 100644
--- a/internal/gitaly/hook/referencetransaction.go
+++ b/internal/gitaly/hook/referencetransaction.go
@@ -29,10 +29,13 @@ func (m *GitLabHookManager) ReferenceTransactionHook(ctx context.Context, state
return fmt.Errorf("reading stdin from request: %w", err)
}
- // We're only voting in prepared state as this is the only stage in
- // Git's reference transaction which allows us to abort the
- // transaction.
- if state != ReferenceTransactionPrepared {
+ // We're voting in prepared state as this is the only stage in Git's reference transaction
+ // which allows us to abort the transaction. We're also voting in committed state to tell
+ // Praefect we've actually persisted the changes. This is necessary as some RPCs return
+ // errors in the response body rather than as an error code, so Praefect can't tell whether
+ // these RPCs have failed. Voting on the committed state ensures Praefect sees either a
+ // missing vote or that the RPC did commit the changes.
+ if state != ReferenceTransactionPrepared && state != ReferenceTransactionCommitted {
return nil
}
diff --git a/internal/gitaly/hook/transactions.go b/internal/gitaly/hook/transactions.go
index 8144cd9c9..4a487d002 100644
--- a/internal/gitaly/hook/transactions.go
+++ b/internal/gitaly/hook/transactions.go
@@ -5,8 +5,8 @@ import (
"errors"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)
func isPrimary(payload git.HooksPayload) bool {
@@ -17,7 +17,7 @@ func isPrimary(payload git.HooksPayload) bool {
}
// transactionHandler is a callback invoked on a transaction if it exists.
-type transactionHandler func(ctx context.Context, tx metadata.Transaction, praefect metadata.PraefectServer) error
+type transactionHandler func(ctx context.Context, tx txinfo.Transaction, praefect txinfo.PraefectServer) error
// runWithTransaction runs the given function if the payload identifies a transaction. No error
// is returned if no transaction exists. If a transaction exists and the function is executed on it,
@@ -36,14 +36,15 @@ func (m *GitLabHookManager) runWithTransaction(ctx context.Context, payload git.
return nil
}
-func (m *GitLabHookManager) voteOnTransaction(ctx context.Context, hash transaction.Vote, payload git.HooksPayload) error {
- return m.runWithTransaction(ctx, payload, func(ctx context.Context, tx metadata.Transaction, praefect metadata.PraefectServer) error {
- return m.txManager.Vote(ctx, tx, praefect, hash)
+func (m *GitLabHookManager) voteOnTransaction(ctx context.Context, vote voting.Vote, payload git.HooksPayload) error {
+ return m.runWithTransaction(ctx, payload, func(ctx context.Context, tx txinfo.Transaction,
+ praefect txinfo.PraefectServer) error {
+ return m.txManager.Vote(ctx, tx, praefect, vote)
})
}
func (m *GitLabHookManager) stopTransaction(ctx context.Context, payload git.HooksPayload) error {
- return m.runWithTransaction(ctx, payload, func(ctx context.Context, tx metadata.Transaction, praefect metadata.PraefectServer) error {
+ return m.runWithTransaction(ctx, payload, func(ctx context.Context, tx txinfo.Transaction, praefect txinfo.PraefectServer) error {
return m.txManager.Stop(ctx, tx, praefect)
})
}
diff --git a/internal/gitaly/hook/transactions_test.go b/internal/gitaly/hook/transactions_test.go
index 837c1e6f7..80083441f 100644
--- a/internal/gitaly/hook/transactions_test.go
+++ b/internal/gitaly/hook/transactions_test.go
@@ -14,26 +14,28 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)
func TestHookManager_stopCalled(t *testing.T) {
cfg, repo, repoPath := testcfg.BuildWithRepo(t)
- expectedTx := metadata.Transaction{
+ expectedTx := txinfo.Transaction{
ID: 1234, Node: "primary", Primary: true,
}
- expectedPraefect := metadata.PraefectServer{
+ expectedPraefect := txinfo.PraefectServer{
SocketPath: "socket",
Token: "foo",
}
var mockTxMgr transaction.MockManager
- hookManager := NewManager(config.NewLocator(cfg), &mockTxMgr, GitlabAPIStub, cfg)
+ hookManager := NewManager(config.NewLocator(cfg), &mockTxMgr, gitlab.NewMockClient(), cfg)
ctx, cleanup := testhelper.Context()
defer cleanup()
@@ -102,7 +104,7 @@ func TestHookManager_stopCalled(t *testing.T) {
} {
t.Run(tc.desc, func(t *testing.T) {
wasInvoked := false
- mockTxMgr.StopFn = func(ctx context.Context, tx metadata.Transaction, praefect metadata.PraefectServer) error {
+ mockTxMgr.StopFn = func(ctx context.Context, tx txinfo.Transaction, praefect txinfo.PraefectServer) error {
require.Equal(t, expectedTx, tx)
require.Equal(t, expectedPraefect, praefect)
wasInvoked = true
@@ -120,21 +122,21 @@ func TestHookManager_contextCancellationCancelsVote(t *testing.T) {
cfg, repo, _ := testcfg.BuildWithRepo(t)
mockTxMgr := transaction.MockManager{
- VoteFn: func(ctx context.Context, tx metadata.Transaction, praefect metadata.PraefectServer, vote transaction.Vote) error {
+ VoteFn: func(ctx context.Context, tx txinfo.Transaction, praefect txinfo.PraefectServer, vote voting.Vote) error {
<-ctx.Done()
return fmt.Errorf("mock error: %s", ctx.Err())
},
}
- hookManager := NewManager(config.NewLocator(cfg), &mockTxMgr, GitlabAPIStub, cfg)
+ hookManager := NewManager(config.NewLocator(cfg), &mockTxMgr, gitlab.NewMockClient(), cfg)
hooksPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234, Node: "primary", Primary: true,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "does_not",
Token: "matter",
},
diff --git a/internal/gitaly/hook/update_test.go b/internal/gitaly/hook/update_test.go
index 63b212080..5ed7b8b59 100644
--- a/internal/gitaly/hook/update_test.go
+++ b/internal/gitaly/hook/update_test.go
@@ -12,16 +12,17 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
)
func TestUpdate_customHooks(t *testing.T) {
cfg, repo, repoPath := testcfg.BuildWithRepo(t)
- hookManager := NewManager(config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), GitlabAPIStub, cfg)
+ hookManager := NewManager(config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), gitlab.NewMockClient(), cfg)
receiveHooksPayload := &git.ReceiveHooksPayload{
UserID: "1234",
@@ -38,10 +39,10 @@ func TestUpdate_customHooks(t *testing.T) {
primaryPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234, Node: "primary", Primary: true,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
@@ -54,10 +55,10 @@ func TestUpdate_customHooks(t *testing.T) {
secondaryPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234, Node: "secondary", Primary: false,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
diff --git a/internal/gitaly/maintenance/optimize_test.go b/internal/gitaly/maintenance/optimize_test.go
index c205bbd2e..e3852df24 100644
--- a/internal/gitaly/maintenance/optimize_test.go
+++ b/internal/gitaly/maintenance/optimize_test.go
@@ -10,6 +10,8 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
@@ -30,7 +32,8 @@ func (mo *mockOptimizer) OptimizeRepository(ctx context.Context, req *gitalypb.O
mo.actual = append(mo.actual, req.Repository)
l := config.NewLocator(mo.cfg)
gitCmdFactory := git.NewExecCommandFactory(mo.cfg)
- resp, err := repository.NewServer(mo.cfg, nil, l, transaction.NewManager(mo.cfg, backchannel.NewRegistry()), gitCmdFactory).OptimizeRepository(ctx, req)
+ catfileCache := catfile.NewCache(mo.cfg)
+ resp, err := repository.NewServer(mo.cfg, nil, l, transaction.NewManager(mo.cfg, backchannel.NewRegistry()), gitCmdFactory, catfileCache).OptimizeRepository(ctx, req)
assert.NoError(mo.t, err)
return resp, err
}
@@ -40,8 +43,8 @@ func TestOptimizeReposRandomly(t *testing.T) {
cfg := cfgBuilder.Build(t)
for _, storage := range cfg.Storages {
- testhelper.MustRunCommand(t, nil, "git", "init", "--bare", filepath.Join(storage.Path, "a"))
- testhelper.MustRunCommand(t, nil, "git", "init", "--bare", filepath.Join(storage.Path, "b"))
+ gittest.Exec(t, cfg, "init", "--bare", filepath.Join(storage.Path, "a"))
+ gittest.Exec(t, cfg, "init", "--bare", filepath.Join(storage.Path, "b"))
}
cfg.Storages = append(cfg.Storages, config.Storage{
diff --git a/internal/gitaly/rubyserver/proxy.go b/internal/gitaly/rubyserver/proxy.go
index 69c334b9c..b10cabc37 100644
--- a/internal/gitaly/rubyserver/proxy.go
+++ b/internal/gitaly/rubyserver/proxy.go
@@ -6,8 +6,8 @@ import (
"os"
"strings"
- praefect_metadata "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/storage"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/metadata"
)
@@ -58,20 +58,20 @@ func setHeaders(ctx context.Context, locator storage.Locator, repo *gitalypb.Rep
// Praefect server info into the context, `PraefectFromContext()` will
// also resolve connection information from the context's peer info.
// Thus the re-injected connection info will contain resolved addresses.
- if praefectServer, err := praefect_metadata.PraefectFromContext(ctx); err == nil {
+ if praefectServer, err := txinfo.PraefectFromContext(ctx); err == nil {
ctx, err = praefectServer.Inject(ctx)
if err != nil {
return nil, err
}
- } else if err != praefect_metadata.ErrPraefectServerNotFound {
+ } else if err != txinfo.ErrPraefectServerNotFound {
return nil, err
}
// list of http/2 headers that will be forwarded as-is to gitaly-ruby
proxyHeaderAllowlist := []string{
"gitaly-servers",
- praefect_metadata.TransactionMetadataKey,
- praefect_metadata.PraefectMetadataKey,
+ txinfo.TransactionMetadataKey,
+ txinfo.PraefectMetadataKey,
}
if inMD, ok := metadata.FromIncomingContext(ctx); ok {
diff --git a/internal/gitaly/rubyserver/rubyserver_test.go b/internal/gitaly/rubyserver/rubyserver_test.go
index d16c33aae..756607b7c 100644
--- a/internal/gitaly/rubyserver/rubyserver_test.go
+++ b/internal/gitaly/rubyserver/rubyserver_test.go
@@ -106,7 +106,7 @@ func TestSetupEnv(t *testing.T) {
require.Contains(t, env, fmt.Sprintf("GITALY_RUBY_MAX_COMMIT_OR_TAG_MESSAGE_SIZE=%d", helper.MaxCommitOrTagMessageSize))
require.Contains(t, env, "GITALY_RUBY_GITALY_BIN_DIR=/bin/dit")
require.Contains(t, env, "GITALY_VERSION="+version.GetVersion())
- require.Contains(t, env, "GITALY_SOCKET=/gitaly/internal.sock")
+ require.Contains(t, env, fmt.Sprintf("GITALY_SOCKET=%s", cfg.GitalyInternalSocketPath()))
require.Contains(t, env, "GITALY_TOKEN=paswd")
require.Contains(t, env, "GITALY_RUGGED_GIT_CONFIG_SEARCH_PATH=/bin/rugged")
require.Contains(t, env, "SENTRY_DSN=testDSN")
diff --git a/internal/gitaly/server/auth_test.go b/internal/gitaly/server/auth_test.go
index f5fd1b1f7..9de2049b7 100644
--- a/internal/gitaly/server/auth_test.go
+++ b/internal/gitaly/server/auth_test.go
@@ -16,7 +16,9 @@ import (
gitalyauth "gitlab.com/gitlab-org/gitaly/auth"
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
+ "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config/auth"
@@ -24,6 +26,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/setup"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -63,9 +66,7 @@ func TestTLSSanity(t *testing.T) {
certPool, err := x509.SystemCertPool()
require.NoError(t, err)
- cert, err := ioutil.ReadFile("testdata/gitalycert.pem")
- require.NoError(t, err)
-
+ cert := testhelper.MustReadFile(t, "testdata/gitalycert.pem")
ok := certPool.AppendCertsFromPEM(cert)
require.True(t, ok)
@@ -196,10 +197,12 @@ func runServer(t *testing.T, cfg config.Cfg) string {
t.Cleanup(func() { conns.Close() })
locator := config.NewLocator(cfg)
txManager := transaction.NewManager(cfg, registry)
- hookManager := hook.NewManager(locator, txManager, hook.GitlabAPIStub, cfg)
+ hookManager := hook.NewManager(locator, txManager, gitlab.NewMockClient(), cfg)
gitCmdFactory := git.NewExecCommandFactory(cfg)
+ catfileCache := catfile.NewCache(cfg)
+ diskCache := cache.New(cfg, locator)
- srv, err := New(false, cfg, testhelper.DiscardTestEntry(t), registry)
+ srv, err := New(false, cfg, testhelper.DiscardTestEntry(t), registry, diskCache)
require.NoError(t, err)
setup.RegisterAll(srv, &service.Dependencies{
@@ -209,6 +212,7 @@ func runServer(t *testing.T, cfg config.Cfg) string {
StorageLocator: locator,
ClientPool: conns,
GitCmdFactory: gitCmdFactory,
+ CatfileCache: catfileCache,
})
serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
@@ -232,7 +236,7 @@ func runSecureServer(t *testing.T, cfg config.Cfg) string {
conns := client.NewPool()
t.Cleanup(func() { conns.Close() })
- srv, err := New(true, cfg, testhelper.DiscardTestEntry(t), backchannel.NewRegistry())
+ srv, err := New(true, cfg, testhelper.DiscardTestEntry(t), backchannel.NewRegistry(), cache.New(cfg, config.NewLocator(cfg)))
require.NoError(t, err)
healthpb.RegisterHealthServer(srv, health.NewServer())
@@ -304,7 +308,7 @@ func TestAuthBeforeLimit(t *testing.T) {
gitlabURL, cleanup := testhelper.SetupAndStartGitlabServer(t, cfg.GitlabShell.Dir, &testhelper.GitlabTestServerOptions{
SecretToken: "secretToken",
- GLID: testhelper.GlID,
+ GLID: gittest.GlID,
GLRepository: repo.GetGlRepository(),
PostReceiveCounterDecreased: true,
Protocol: "web",
@@ -326,7 +330,7 @@ func TestAuthBeforeLimit(t *testing.T) {
Repository: repo,
TagName: []byte(inputTagName),
TargetRevision: []byte(targetRevision),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Message: []byte("a new tag!"),
}
diff --git a/internal/gitaly/server/server.go b/internal/gitaly/server/server.go
index 3daed7d8c..bb9b3ddee 100644
--- a/internal/gitaly/server/server.go
+++ b/internal/gitaly/server/server.go
@@ -12,7 +12,6 @@ import (
grpc_prometheus "github.com/grpc-ecosystem/go-grpc-prometheus"
log "github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
- diskcache "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/server/auth"
@@ -66,15 +65,19 @@ func init() {
// New returns a GRPC server instance with a set of interceptors configured.
// If logrusEntry is nil the default logger will be used.
-func New(secure bool, cfg config.Cfg, logrusEntry *log.Entry, registry *backchannel.Registry) (*grpc.Server, error) {
+func New(
+ secure bool,
+ cfg config.Cfg,
+ logrusEntry *log.Entry,
+ registry *backchannel.Registry,
+ cacheInvalidator cache.Invalidator,
+) (*grpc.Server, error) {
ctxTagOpts := []grpc_ctxtags.Option{
grpc_ctxtags.WithFieldExtractorForInitialReq(fieldextractors.FieldExtractor),
}
lh := limithandler.New(concurrencyKeyFn)
- storageLocator := config.NewLocator(cfg)
-
transportCredentials := backchannel.Insecure()
// If tls config is specified attempt to extract tls options and use it
// as a grpc.ServerOption
@@ -106,7 +109,7 @@ func New(secure bool, cfg config.Cfg, logrusEntry *log.Entry, registry *backchan
auth.StreamServerInterceptor(cfg.Auth),
lh.StreamInterceptor(), // Should be below auth handler to prevent v2 hmac tokens from timing out while queued
grpctracing.StreamServerTracingInterceptor(),
- cache.StreamInvalidator(diskcache.NewLeaseKeyer(storageLocator), protoregistry.GitalyProtoPreregistered),
+ cache.StreamInvalidator(cacheInvalidator, protoregistry.GitalyProtoPreregistered),
// Panic handler should remain last so that application panics will be
// converted to errors and logged
panichandler.StreamPanicHandler,
@@ -125,7 +128,7 @@ func New(secure bool, cfg config.Cfg, logrusEntry *log.Entry, registry *backchan
auth.UnaryServerInterceptor(cfg.Auth),
lh.UnaryInterceptor(), // Should be below auth handler to prevent v2 hmac tokens from timing out while queued
grpctracing.UnaryServerTracingInterceptor(),
- cache.UnaryInvalidator(diskcache.NewLeaseKeyer(storageLocator), protoregistry.GitalyProtoPreregistered),
+ cache.UnaryInvalidator(cacheInvalidator, protoregistry.GitalyProtoPreregistered),
// Panic handler should remain last so that application panics will be
// converted to errors and logged
panichandler.UnaryPanicHandler,
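A usage sketch of the new constructor signature with an injected cache invalidator, mirroring the test setup elsewhere in this change; the logger is assumed to be a *logrus.Entry prepared by the caller:

func newServerExample(cfg config.Cfg, logger *logrus.Entry) (*grpc.Server, error) {
	// The disk-cache invalidator is now injected by the caller instead of
	// being constructed inside New from a locally created locator.
	diskCache := cache.New(cfg, config.NewLocator(cfg))
	return server.New(false, cfg, logger, backchannel.NewRegistry(), diskCache)
}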
diff --git a/internal/gitaly/server/server_factory.go b/internal/gitaly/server/server_factory.go
index 26d0e8666..9fe311bc2 100644
--- a/internal/gitaly/server/server_factory.go
+++ b/internal/gitaly/server/server_factory.go
@@ -14,7 +14,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/maintenance"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- gitalylog "gitlab.com/gitlab-org/gitaly/internal/log"
+ "gitlab.com/gitlab-org/gitaly/internal/middleware/cache"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
)
@@ -22,15 +22,27 @@ import (
// GitalyServerFactory is a factory of gitaly grpc servers
type GitalyServerFactory struct {
registry *backchannel.Registry
- mtx sync.Mutex
+ cacheInvalidator cache.Invalidator
cfg config.Cfg
- secure, insecure []*grpc.Server
+ logger *logrus.Entry
+ externalServers []*grpc.Server
+ internalServers []*grpc.Server
}
// NewGitalyServerFactory allows to create and start secure/insecure 'grpc.Server'-s with gitaly-ruby
// server shared in between.
-func NewGitalyServerFactory(cfg config.Cfg, registry *backchannel.Registry) *GitalyServerFactory {
- return &GitalyServerFactory{cfg: cfg, registry: registry}
+func NewGitalyServerFactory(
+ cfg config.Cfg,
+ logger *logrus.Entry,
+ registry *backchannel.Registry,
+ cacheInvalidator cache.Invalidator,
+) *GitalyServerFactory {
+ return &GitalyServerFactory{
+ cfg: cfg,
+ logger: logger,
+ registry: registry,
+ cacheInvalidator: cacheInvalidator,
+ }
}
// StartWorkers will start any auxiliary background workers that are allowed
@@ -86,51 +98,61 @@ func (s *GitalyServerFactory) StartWorkers(ctx context.Context, l logrus.FieldLo
return shutdown, nil
}
-// Stop stops all servers started by calling Serve and the gitaly-ruby server.
+// Stop immediately stops all servers created by the GitalyServerFactory.
func (s *GitalyServerFactory) Stop() {
- for _, srv := range s.all() {
- srv.Stop()
+ for _, servers := range [][]*grpc.Server{
+ s.externalServers,
+ s.internalServers,
+ } {
+ for _, server := range servers {
+ server.Stop()
+ }
}
}
-// GracefulStop stops both the secure and insecure servers gracefully
+// GracefulStop gracefully stops all servers created by the GitalyServerFactory. External servers
+// are stopped before the internal servers to ensure any RPCs accepted by the external servers
+// can still complete their requests to the internal servers. This is important for hooks calling
+// back to Gitaly.
func (s *GitalyServerFactory) GracefulStop() {
- wg := sync.WaitGroup{}
-
- for _, srv := range s.all() {
- wg.Add(1)
+ for _, servers := range [][]*grpc.Server{
+ s.externalServers,
+ s.internalServers,
+ } {
+ var wg sync.WaitGroup
+
+ for _, server := range servers {
+ wg.Add(1)
+ go func(server *grpc.Server) {
+ defer wg.Done()
+ server.GracefulStop()
+ }(server)
+ }
- go func(s *grpc.Server) {
- s.GracefulStop()
- wg.Done()
- }(srv)
+ wg.Wait()
}
-
- wg.Wait()
}
-// Create returns newly instantiated and initialized with interceptors instance of the gRPC server.
-func (s *GitalyServerFactory) Create(secure bool) (*grpc.Server, error) {
- s.mtx.Lock()
- defer s.mtx.Unlock()
-
- server, err := New(secure, s.cfg, gitalylog.Default(), s.registry)
+// CreateExternal creates a new external gRPC server. The external servers are closed
+// before the internal servers when gracefully shutting down.
+func (s *GitalyServerFactory) CreateExternal(secure bool) (*grpc.Server, error) {
+ server, err := New(secure, s.cfg, s.logger, s.registry, s.cacheInvalidator)
if err != nil {
return nil, err
}
- if secure {
- s.secure = append(s.secure, server)
- return s.secure[len(s.secure)-1], nil
- }
-
- s.insecure = append(s.insecure, server)
- return s.insecure[len(s.insecure)-1], nil
+ s.externalServers = append(s.externalServers, server)
+ return server, nil
}
-func (s *GitalyServerFactory) all() []*grpc.Server {
- s.mtx.Lock()
- defer s.mtx.Unlock()
+// CreateInternal creates a new internal gRPC server. Internal servers are closed
+// after the external ones when gracefully shutting down.
+func (s *GitalyServerFactory) CreateInternal() (*grpc.Server, error) {
+ server, err := New(false, s.cfg, s.logger, s.registry, s.cacheInvalidator)
+ if err != nil {
+ return nil, err
+ }
- return append(s.secure[:], s.insecure...)
+ s.internalServers = append(s.internalServers, server)
+ return server, nil
}
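For illustration, a sketch of how the split factory might be wired up; listener setup is omitted and the cache/locator construction follows the tests below:

func serverFactoryExample(cfg config.Cfg, logger *logrus.Entry) (*server.GitalyServerFactory, error) {
	sf := server.NewGitalyServerFactory(cfg, logger, backchannel.NewRegistry(), cache.New(cfg, config.NewLocator(cfg)))

	// External servers handle the client-facing listeners ...
	if _, err := sf.CreateExternal(false); err != nil {
		return nil, err
	}
	// ... while internal servers back the per-process internal socket used by hooks.
	if _, err := sf.CreateInternal(); err != nil {
		return nil, err
	}

	// On shutdown, GracefulStop drains the external servers before the internal
	// ones so that hook callbacks accepted externally can still complete.
	return sf, nil
}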
diff --git a/internal/gitaly/server/server_factory_test.go b/internal/gitaly/server/server_factory_test.go
index 78ef98747..7f625946a 100644
--- a/internal/gitaly/server/server_factory_test.go
+++ b/internal/gitaly/server/server_factory_test.go
@@ -1,17 +1,20 @@
package server
import (
+ "context"
"crypto/tls"
"crypto/x509"
- "io/ioutil"
+ "errors"
"net"
"os"
"testing"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/bootstrap/starter"
+ "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
@@ -36,7 +39,7 @@ func TestGitalyServerFactory(t *testing.T) {
require.NoError(t, err)
t.Cleanup(func() { listener.Close() })
- srv, err := sf.Create(true)
+ srv, err := sf.CreateExternal(true)
require.NoError(t, err)
healthpb.RegisterHealthServer(srv, health.NewServer())
go srv.Serve(listener)
@@ -44,9 +47,7 @@ func TestGitalyServerFactory(t *testing.T) {
certPool, err := x509.SystemCertPool()
require.NoError(t, err)
- pem, err := ioutil.ReadFile(sf.cfg.TLS.CertPath)
- require.NoError(t, err)
-
+ pem := testhelper.MustReadFile(t, sf.cfg.TLS.CertPath)
require.True(t, certPool.AppendCertsFromPEM(pem))
creds := credentials.NewTLS(&tls.Config{
@@ -61,7 +62,7 @@ func TestGitalyServerFactory(t *testing.T) {
require.NoError(t, err)
t.Cleanup(func() { listener.Close() })
- srv, err := sf.Create(false)
+ srv, err := sf.CreateExternal(false)
require.NoError(t, err)
healthpb.RegisterHealthServer(srv, health.NewServer())
go srv.Serve(listener)
@@ -85,7 +86,7 @@ func TestGitalyServerFactory(t *testing.T) {
t.Run("insecure", func(t *testing.T) {
cfg := testcfg.Build(t)
- sf := NewGitalyServerFactory(cfg, backchannel.NewRegistry())
+ sf := NewGitalyServerFactory(cfg, testhelper.DiscardTestEntry(t), backchannel.NewRegistry(), cache.New(cfg, config.NewLocator(cfg)))
checkHealth(t, sf, starter.TCP, "localhost:0")
})
@@ -98,7 +99,7 @@ func TestGitalyServerFactory(t *testing.T) {
KeyPath: keyFile,
}}))
- sf := NewGitalyServerFactory(cfg, backchannel.NewRegistry())
+ sf := NewGitalyServerFactory(cfg, testhelper.DiscardTestEntry(t), backchannel.NewRegistry(), cache.New(cfg, config.NewLocator(cfg)))
t.Cleanup(sf.Stop)
checkHealth(t, sf, starter.TLS, "localhost:0")
@@ -106,7 +107,7 @@ func TestGitalyServerFactory(t *testing.T) {
t.Run("all services must be stopped", func(t *testing.T) {
cfg := testcfg.Build(t)
- sf := NewGitalyServerFactory(cfg, backchannel.NewRegistry())
+ sf := NewGitalyServerFactory(cfg, testhelper.DiscardTestEntry(t), backchannel.NewRegistry(), cache.New(cfg, config.NewLocator(cfg)))
t.Cleanup(sf.Stop)
tcpHealthClient := checkHealth(t, sf, starter.TCP, "localhost:0")
@@ -125,3 +126,164 @@ func TestGitalyServerFactory(t *testing.T) {
require.Equal(t, codes.Unavailable, status.Code(socketErr))
})
}
+
+func TestGitalyServerFactory_closeOrder(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ cfg := testcfg.Build(t)
+ sf := NewGitalyServerFactory(cfg, testhelper.DiscardTestEntry(t), backchannel.NewRegistry(), cache.New(cfg, config.NewLocator(cfg)))
+ defer sf.Stop()
+
+ errQuickRPC := status.Error(codes.Internal, "quick RPC")
+ errBlockingRPC := status.Error(codes.Internal, "blocking RPC")
+
+ invokeQuick := func(conn *grpc.ClientConn, shouldSucceed bool) {
+ err := conn.Invoke(ctx, "/Service/Quick", &healthpb.HealthCheckRequest{}, &healthpb.HealthCheckRequest{})
+ if !shouldSucceed {
+ testhelper.RequireGrpcError(t, err, codes.Unavailable)
+ return
+ }
+
+ require.Equal(t, errQuickRPC, err)
+ }
+
+ invokeBlocking := func(conn *grpc.ClientConn) chan struct{} {
+ rpcFinished := make(chan struct{})
+
+ go func() {
+ defer close(rpcFinished)
+ assert.Equal(t,
+ errBlockingRPC,
+ conn.Invoke(ctx, "/Service/Blocking", &healthpb.HealthCheckRequest{}, &healthpb.HealthCheckRequest{}),
+ )
+ }()
+
+ return rpcFinished
+ }
+
+ waitUntilFailure := func(conn *grpc.ClientConn) {
+ for {
+ err := conn.Invoke(ctx, "/Service/Quick", &healthpb.HealthCheckRequest{}, &healthpb.HealthCheckRequest{})
+ if errors.Is(err, errQuickRPC) {
+ continue
+ }
+
+ testhelper.RequireGrpcError(t, err, codes.Unavailable)
+ break
+ }
+ }
+
+ var internalConn, externalConn *grpc.ClientConn
+ var internalIsBlocking, externalIsBlocking chan struct{}
+ var releaseInternalBlock, releaseExternalBlock chan struct{}
+ for _, builder := range []struct {
+ createServer func() *grpc.Server
+ conn **grpc.ClientConn
+ isBlocking *chan struct{}
+ releaseBlock *chan struct{}
+ }{
+ {
+ createServer: func() *grpc.Server {
+ server, err := sf.CreateInternal()
+ require.NoError(t, err)
+ return server
+ },
+ conn: &internalConn,
+ isBlocking: &internalIsBlocking,
+ releaseBlock: &releaseInternalBlock,
+ },
+ {
+ createServer: func() *grpc.Server {
+ server, err := sf.CreateExternal(false)
+ require.NoError(t, err)
+ return server
+ },
+ conn: &externalConn,
+ isBlocking: &externalIsBlocking,
+ releaseBlock: &releaseExternalBlock,
+ },
+ } {
+ server := builder.createServer()
+
+ releaseBlock := make(chan struct{})
+ *builder.releaseBlock = releaseBlock
+
+ isBlocking := make(chan struct{})
+ *builder.isBlocking = isBlocking
+
+ server.RegisterService(&grpc.ServiceDesc{
+ ServiceName: "Service",
+ Methods: []grpc.MethodDesc{
+ {
+ MethodName: "Quick",
+ Handler: func(interface{}, context.Context, func(interface{}) error, grpc.UnaryServerInterceptor) (interface{}, error) {
+ return nil, errQuickRPC
+ },
+ },
+ {
+ MethodName: "Blocking",
+ Handler: func(interface{}, context.Context, func(interface{}) error, grpc.UnaryServerInterceptor) (interface{}, error) {
+ close(isBlocking)
+ <-releaseBlock
+ return nil, errBlockingRPC
+ },
+ },
+ },
+ HandlerType: (*interface{})(nil),
+ }, server)
+
+ ln, err := net.Listen("tcp", "localhost:0")
+ require.NoError(t, err)
+ defer ln.Close()
+
+ go server.Serve(ln)
+
+ *builder.conn, err = grpc.DialContext(ctx, ln.Addr().String(), grpc.WithInsecure())
+ require.NoError(t, err)
+ }
+
+ // both servers should be up and accepting RPCs
+ invokeQuick(externalConn, true)
+ invokeQuick(internalConn, true)
+
+ // invoke a blocking RPC on the external server to block the graceful shutdown
+ invokeBlocking(externalConn)
+ <-externalIsBlocking
+
+ shutdownCompleted := make(chan struct{})
+ go func() {
+ defer close(shutdownCompleted)
+ sf.GracefulStop()
+ }()
+
+ // wait until the graceful shutdown is in progress and new RPCs are no longer accepted on the
+ // external servers
+ waitUntilFailure(externalConn)
+
+ // internal sockets should still accept RPCs even if external sockets are gracefully closing.
+ invokeQuick(internalConn, true)
+
+ // block on the internal server
+ internalBlockingRPCFinished := invokeBlocking(internalConn)
+ <-internalIsBlocking
+
+ // release the external server's blocking RPC so the graceful shutdown can complete and proceed to
+ // shutting down the internal servers.
+ close(releaseExternalBlock)
+
+ // wait until the graceful shutdown is in progress and new RPCs are no longer accepted on the internal
+ // servers
+ waitUntilFailure(internalConn)
+
+ // neither internal nor external servers should be accepting new RPCs anymore
+ invokeQuick(externalConn, false)
+ invokeQuick(internalConn, false)
+
+ // wait until the blocking rpc has successfully completed
+ close(releaseInternalBlock)
+ <-internalBlockingRPCFinished
+
+ // wait until the graceful shutdown completes
+ <-shutdownCompleted
+}
diff --git a/internal/gitaly/service/blob/blob_filter.go b/internal/gitaly/service/blob/blob_filter.go
new file mode 100644
index 000000000..dd81cd068
--- /dev/null
+++ b/internal/gitaly/service/blob/blob_filter.go
@@ -0,0 +1,65 @@
+package blob
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "strconv"
+
+ "golang.org/x/text/transform"
+)
+
+// blobFilter transforms and filters the output of `git cat-file --batch-check='%(objecttype)
+// %(objectsize) %(objectname)' into a list of blobs matching the given criteria. It strips all
+// objects which are not blobs or whose size exceeds maxSize.
+type blobFilter struct {
+ maxSize uint64
+}
+
+func (f blobFilter) Transform(dst, src []byte, atEOF bool) (int, int, error) {
+ origDst, origSrc := dst, src
+
+ for {
+ if len(src) == 0 && atEOF {
+ return 0, 0, nil
+ }
+
+ index := bytes.Index(src, []byte{'\n'})
+ if index < 0 {
+ if atEOF {
+ return 0, 0, errors.New("invalid trailing line")
+ }
+ return len(origDst) - len(dst), len(origSrc) - len(src), transform.ErrShortSrc
+ }
+
+ objectInfo := bytes.SplitN(src[:index], []byte{' '}, 3)
+ if len(objectInfo) != 3 {
+ return 0, 0, fmt.Errorf("invalid line %q", string(src[:index]))
+ }
+
+ if f.maxSize > uint64(0) {
+ objectSize, err := strconv.ParseUint(string(objectInfo[1]), 10, 64)
+ if err != nil {
+ return 0, 0, fmt.Errorf("invalid blob size %q", string(objectInfo[1]))
+ }
+
+ if objectSize > f.maxSize || !bytes.Equal(objectInfo[0], []byte("blob")) {
+ src = src[index+1:]
+ continue
+ }
+ }
+
+ oid := objectInfo[2]
+ if len(dst) < len(oid)+1 {
+ return len(origDst) - len(dst), len(origSrc) - len(src), transform.ErrShortDst
+ }
+
+ copy(dst, oid)
+ dst[len(oid)] = '\n'
+
+ src = src[index+1:]
+ dst = dst[len(oid)+1:]
+ }
+}
+
+func (f blobFilter) Reset() {}
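A minimal usage sketch of blobFilter, following the pattern used in the renamed test below; the batch-check input is hypothetical:

func blobFilterExample() (string, error) {
	// Hypothetical `git cat-file --batch-check` output: a small blob, a tree and a too-large blob.
	input := "blob 140 1234\ntree 150 4321\nblob 201 4321\n"

	reader := transform.NewReader(strings.NewReader(input), blobFilter{maxSize: 200})

	// Only OIDs of blobs whose size does not exceed maxSize survive; here that is "1234\n".
	oids, err := ioutil.ReadAll(reader)
	return string(oids), err
}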
diff --git a/internal/gitaly/service/blob/lfs_pointer_filter_test.go b/internal/gitaly/service/blob/blob_filter_test.go
index 9114a4601..b716cd7c7 100644
--- a/internal/gitaly/service/blob/lfs_pointer_filter_test.go
+++ b/internal/gitaly/service/blob/blob_filter_test.go
@@ -15,10 +15,9 @@ import (
"golang.org/x/text/transform"
)
-func TestLFSPointerFilter(t *testing.T) {
+func TestBlobFilter(t *testing.T) {
cfg, repo, _, _ := setup(t)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
- localRepo := localrepo.New(gitCmdFactory, repo, cfg)
+ localRepo := localrepo.NewTestRepo(t, cfg, repo)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -56,6 +55,7 @@ func TestLFSPointerFilter(t *testing.T) {
for _, tc := range []struct {
desc string
input string
+ maxSize uint64
expectedOutput string
expectedErr error
}{
@@ -67,12 +67,12 @@ func TestLFSPointerFilter(t *testing.T) {
{
desc: "newline only",
input: "\n",
- expectedErr: errors.New("invalid LFS pointer candidate line \"\""),
+ expectedErr: errors.New("invalid line \"\""),
},
{
- desc: "invalid LFS pointer",
+ desc: "invalid blob",
input: "x",
- expectedErr: errors.New("invalid trailing LFS pointer line"),
+ expectedErr: errors.New("invalid trailing line"),
},
{
desc: "single blob",
@@ -87,37 +87,42 @@ func TestLFSPointerFilter(t *testing.T) {
{
desc: "mixed blobs and other objects",
input: "blob 140 1234\ntree 150 4321\ncommit 50123 123123\n",
+ maxSize: 160,
expectedOutput: "1234\n",
},
{
desc: "big blob gets filtered",
input: "blob 140 1234\nblob 201 4321\n",
+ maxSize: 200,
expectedOutput: "1234\n",
},
{
desc: "missing trailing newline",
input: "blob 140 1234",
- expectedErr: errors.New("invalid trailing LFS pointer line"),
+ expectedErr: errors.New("invalid trailing line"),
},
{
desc: "invalid object size",
input: "blob 140 1234\nblob x 4321\n",
- expectedErr: errors.New("invalid LFS pointer size \"x\""),
+ maxSize: 1,
+ expectedErr: errors.New("invalid blob size \"x\""),
},
{
desc: "missing field",
input: "blob 1234\n",
- expectedErr: errors.New("invalid LFS pointer candidate line \"blob 1234\""),
+ expectedErr: errors.New("invalid line \"blob 1234\""),
},
{
desc: "real-repo output",
input: batchCheckOutput.String(),
+ maxSize: lfsPointerMaxSize,
expectedOutput: strings.Join(expectedOIDs, "\n") + "\n",
},
} {
t.Run(tc.desc, func(t *testing.T) {
- reader := transform.NewReader(strings.NewReader(tc.input),
- lfsPointerFilter{})
+ reader := transform.NewReader(strings.NewReader(tc.input), blobFilter{
+ maxSize: tc.maxSize,
+ })
output, err := ioutil.ReadAll(reader)
require.Equal(t, tc.expectedErr, err)
require.Equal(t, tc.expectedOutput, string(output))
diff --git a/internal/gitaly/service/blob/get_blob.go b/internal/gitaly/service/blob/get_blob.go
index 213552416..96a266e65 100644
--- a/internal/gitaly/service/blob/get_blob.go
+++ b/internal/gitaly/service/blob/get_blob.go
@@ -16,11 +16,13 @@ import (
func (s *server) GetBlob(in *gitalypb.GetBlobRequest, stream gitalypb.BlobService_GetBlobServer) error {
ctx := stream.Context()
+ repo := s.localrepo(in.GetRepository())
+
if err := validateRequest(in); err != nil {
return status.Errorf(codes.InvalidArgument, "GetBlob: %v", err)
}
- c, err := catfile.New(stream.Context(), s.gitCmdFactory, in.Repository)
+ c, err := s.catfileCache.BatchProcess(stream.Context(), repo)
if err != nil {
return status.Errorf(codes.Internal, "GetBlob: %v", err)
}
diff --git a/internal/gitaly/service/blob/get_blobs.go b/internal/gitaly/service/blob/get_blobs.go
index 2dcf4122f..4f3249c1f 100644
--- a/internal/gitaly/service/blob/get_blobs.go
+++ b/internal/gitaly/service/blob/get_blobs.go
@@ -144,7 +144,9 @@ func (s *server) GetBlobs(req *gitalypb.GetBlobsRequest, stream gitalypb.BlobSer
return err
}
- c, err := catfile.New(stream.Context(), s.gitCmdFactory, req.Repository)
+ repo := s.localrepo(req.GetRepository())
+
+ c, err := s.catfileCache.BatchProcess(stream.Context(), repo)
if err != nil {
return err
}
diff --git a/internal/gitaly/service/blob/get_blobs_test.go b/internal/gitaly/service/blob/get_blobs_test.go
index a6543e877..b53b4b60f 100644
--- a/internal/gitaly/service/blob/get_blobs_test.go
+++ b/internal/gitaly/service/blob/get_blobs_test.go
@@ -7,13 +7,14 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
func TestSuccessfulGetBlobsRequest(t *testing.T) {
- _, repo, repoPath, client := setup(t)
+ cfg, repo, repoPath, client := setup(t)
expectedBlobs := []*gitalypb.GetBlobsResponse{
{
@@ -57,7 +58,7 @@ func TestSuccessfulGetBlobsRequest(t *testing.T) {
revision := "ef16b8d2b204706bd8dc211d4011a5bffb6fc0c2"
limits := []int{-1, 0, 10 * 1024 * 1024}
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "worktree", "add", "blobs-sandbox", revision)
+ gittest.Exec(t, cfg, "-C", repoPath, "worktree", "add", "blobs-sandbox", revision)
var revisionPaths []*gitalypb.GetBlobsRequest_RevisionPath
for _, blob := range expectedBlobs {
diff --git a/internal/gitaly/service/blob/lfs_pointer_filter.go b/internal/gitaly/service/blob/lfs_pointer_filter.go
deleted file mode 100644
index 3f211ab26..000000000
--- a/internal/gitaly/service/blob/lfs_pointer_filter.go
+++ /dev/null
@@ -1,60 +0,0 @@
-package blob
-
-import (
- "bytes"
- "errors"
- "fmt"
- "strconv"
-
- "golang.org/x/text/transform"
-)
-
-// lfsPointerFilter transforms and filters the output of `git cat-file --batch-check='%(objecttype)
-// %(objectsize) %(objectname)' into a list of LFS pointer candidates. It strips all objects which
-// are not blobs or whose size is too big for an LFS pointer.
-type lfsPointerFilter struct{}
-
-func (t lfsPointerFilter) Transform(dst, src []byte, atEOF bool) (int, int, error) {
- origDst, origSrc := dst, src
-
- for {
- if len(src) == 0 && atEOF {
- return 0, 0, nil
- }
-
- index := bytes.Index(src, []byte{'\n'})
- if index < 0 {
- if atEOF {
- return 0, 0, errors.New("invalid trailing LFS pointer line")
- }
- return len(origDst) - len(dst), len(origSrc) - len(src), transform.ErrShortSrc
- }
-
- objectInfo := bytes.SplitN(src[:index], []byte{' '}, 3)
- if len(objectInfo) != 3 {
- return 0, 0, fmt.Errorf("invalid LFS pointer candidate line %q", string(src[:index]))
- }
- objectSize, err := strconv.Atoi(string(objectInfo[1]))
- if err != nil {
- return 0, 0, fmt.Errorf("invalid LFS pointer size %q", string(objectInfo[1]))
- }
-
- if objectSize > lfsPointerMaxSize || !bytes.Equal(objectInfo[0], []byte("blob")) {
- src = src[index+1:]
- continue
- }
-
- oid := objectInfo[2]
- if len(dst) < len(oid)+1 {
- return len(origDst) - len(dst), len(origSrc) - len(src), transform.ErrShortDst
- }
-
- copy(dst, oid)
- dst[len(oid)] = '\n'
-
- src = src[index+1:]
- dst = dst[len(oid)+1:]
- }
-}
-
-func (t lfsPointerFilter) Reset() {}
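The removed lfsPointerFilter above is superseded by a size-parameterized blobFilter, which the test hunk earlier in this diff constructs as blobFilter{maxSize: ...}. The new type itself is not shown in this diff, so the following is only a minimal standalone sketch of what such a transformer could look like, reusing the logic of the removed code; anything beyond the maxSize field and the error strings asserted in the test is an assumption, not the actual implementation.

// Sketch of a size-parameterized filter for `git cat-file --batch-check`
// output ("%(objecttype) %(objectsize) %(objectname)" per line).
package main

import (
	"bytes"
	"errors"
	"fmt"
	"io/ioutil"
	"strconv"
	"strings"

	"golang.org/x/text/transform"
)

// blobFilter keeps only blob lines whose size does not exceed maxSize and
// emits their object names, one per line.
type blobFilter struct {
	maxSize int
}

func (f blobFilter) Transform(dst, src []byte, atEOF bool) (int, int, error) {
	origDst, origSrc := dst, src

	for {
		// All input consumed: report how much we wrote and consumed.
		if len(src) == 0 && atEOF {
			return len(origDst) - len(dst), len(origSrc) - len(src), nil
		}

		index := bytes.IndexByte(src, '\n')
		if index < 0 {
			if atEOF {
				return 0, 0, errors.New("invalid trailing line")
			}
			// Ask the caller for more input.
			return len(origDst) - len(dst), len(origSrc) - len(src), transform.ErrShortSrc
		}

		info := bytes.SplitN(src[:index], []byte{' '}, 3)
		if len(info) != 3 {
			return 0, 0, fmt.Errorf("invalid line %q", string(src[:index]))
		}

		size, err := strconv.Atoi(string(info[1]))
		if err != nil {
			return 0, 0, fmt.Errorf("invalid blob size %q", string(info[1]))
		}

		// Skip anything that is not a blob or that is too large.
		if !bytes.Equal(info[0], []byte("blob")) || size > f.maxSize {
			src = src[index+1:]
			continue
		}

		oid := info[2]
		if len(dst) < len(oid)+1 {
			// Ask the caller for more output space.
			return len(origDst) - len(dst), len(origSrc) - len(src), transform.ErrShortDst
		}

		copy(dst, oid)
		dst[len(oid)] = '\n'
		dst = dst[len(oid)+1:]
		src = src[index+1:]
	}
}

func (f blobFilter) Reset() {}

func main() {
	reader := transform.NewReader(
		strings.NewReader("blob 140 1234\nblob 201 4321\n"),
		blobFilter{maxSize: 200},
	)
	out, err := ioutil.ReadAll(reader)
	fmt.Printf("%q %v\n", out, err) // "1234\n" <nil>
}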
diff --git a/internal/gitaly/service/blob/lfs_pointers.go b/internal/gitaly/service/blob/lfs_pointers.go
index b8e30188e..ec1c288ab 100644
--- a/internal/gitaly/service/blob/lfs_pointers.go
+++ b/internal/gitaly/service/blob/lfs_pointers.go
@@ -9,10 +9,12 @@ import (
"io"
"strings"
+ "github.com/golang/protobuf/proto"
gitaly_errors "gitlab.com/gitlab-org/gitaly/internal/errors"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"golang.org/x/text/transform"
"google.golang.org/grpc/codes"
@@ -24,20 +26,13 @@ const (
// as a heuristic to filter blobs which can't be LFS pointers. The format of these pointers
// is described in https://github.com/git-lfs/git-lfs/blob/master/docs/spec.md#the-pointer.
lfsPointerMaxSize = 200
-
- // lfsPointerSliceSize is the maximum number of LFSPointers to send at once.
- lfsPointerSliceSize = 100
)
var (
errInvalidRevision = errors.New("invalid revision")
+ errLimitReached = errors.New("limit reached")
)
-type getLFSPointerByRevisionRequest interface {
- GetRepository() *gitalypb.Repository
- GetRevision() []byte
-}
-
// ListLFSPointers finds all LFS pointers which are transitively reachable via a graph walk of the
// given set of revisions.
func (s *server) ListLFSPointers(in *gitalypb.ListLFSPointersRequest, stream gitalypb.BlobService_ListLFSPointersServer) error {
@@ -50,21 +45,22 @@ func (s *server) ListLFSPointers(in *gitalypb.ListLFSPointersRequest, stream git
return status.Error(codes.InvalidArgument, "missing revisions")
}
- repo := localrepo.New(s.gitCmdFactory, in.Repository, s.cfg)
- lfsPointers, err := findLFSPointersByRevisions(ctx, repo, s.gitCmdFactory, int(in.Limit), in.Revisions...)
- if err != nil {
+ chunker := chunk.New(&lfsPointerSender{
+ send: func(pointers []*gitalypb.LFSPointer) error {
+ return stream.Send(&gitalypb.ListLFSPointersResponse{
+ LfsPointers: pointers,
+ })
+ },
+ })
+
+ repo := s.localrepo(in.GetRepository())
+ if err := findLFSPointersByRevisions(ctx, repo, s.gitCmdFactory, chunker, int(in.Limit), in.Revisions...); err != nil {
if errors.Is(err, errInvalidRevision) {
return status.Errorf(codes.InvalidArgument, err.Error())
}
- return err
- }
-
- if err := sliceLFSPointers(lfsPointers, func(slice []*gitalypb.LFSPointer) error {
- return stream.Send(&gitalypb.ListLFSPointersResponse{
- LfsPointers: slice,
- })
- }); err != nil {
- return err
+ if !errors.Is(err, errLimitReached) {
+ return err
+ }
}
return nil
@@ -79,7 +75,7 @@ func (s *server) ListAllLFSPointers(in *gitalypb.ListAllLFSPointersRequest, stre
return status.Error(codes.InvalidArgument, "empty repository")
}
- repo := localrepo.New(s.gitCmdFactory, in.Repository, s.cfg)
+ repo := s.localrepo(in.GetRepository())
cmd, err := repo.Exec(ctx, git.SubCmd{
Name: "cat-file",
Flags: []git.Option{
@@ -93,18 +89,22 @@ func (s *server) ListAllLFSPointers(in *gitalypb.ListAllLFSPointersRequest, stre
return status.Errorf(codes.Internal, "could not run batch-check: %v", err)
}
- filteredReader := transform.NewReader(cmd, lfsPointerFilter{})
- lfsPointers, err := readLFSPointers(ctx, repo, filteredReader, int(in.Limit))
- if err != nil {
- return status.Errorf(codes.Internal, "could not read LFS pointers: %v", err)
- }
+ chunker := chunk.New(&lfsPointerSender{
+ send: func(pointers []*gitalypb.LFSPointer) error {
+ return stream.Send(&gitalypb.ListAllLFSPointersResponse{
+ LfsPointers: pointers,
+ })
+ },
+ })
- if err := sliceLFSPointers(lfsPointers, func(slice []*gitalypb.LFSPointer) error {
- return stream.Send(&gitalypb.ListAllLFSPointersResponse{
- LfsPointers: slice,
- })
- }); err != nil {
- return err
+ filteredReader := transform.NewReader(cmd, blobFilter{
+ maxSize: lfsPointerMaxSize,
+ })
+
+ if err := readLFSPointers(ctx, repo, chunker, filteredReader, int(in.Limit)); err != nil {
+ if !errors.Is(err, errLimitReached) {
+ return status.Errorf(codes.Internal, "could not read LFS pointers: %v", err)
+ }
}
return nil
@@ -120,21 +120,21 @@ func (s *server) GetLFSPointers(req *gitalypb.GetLFSPointersRequest, stream gita
return status.Errorf(codes.InvalidArgument, "GetLFSPointers: %v", err)
}
- repo := localrepo.New(s.gitCmdFactory, req.Repository, s.cfg)
+ repo := s.localrepo(req.GetRepository())
objectIDs := strings.Join(req.BlobIds, "\n")
- lfsPointers, err := readLFSPointers(ctx, repo, strings.NewReader(objectIDs), 0)
- if err != nil {
- return err
- }
-
- err = sliceLFSPointers(lfsPointers, func(slice []*gitalypb.LFSPointer) error {
- return stream.Send(&gitalypb.GetLFSPointersResponse{
- LfsPointers: slice,
- })
+ chunker := chunk.New(&lfsPointerSender{
+ send: func(pointers []*gitalypb.LFSPointer) error {
+ return stream.Send(&gitalypb.GetLFSPointersResponse{
+ LfsPointers: pointers,
+ })
+ },
})
- if err != nil {
- return err
+
+ if err := readLFSPointers(ctx, repo, chunker, strings.NewReader(objectIDs), 0); err != nil {
+ if !errors.Is(err, errLimitReached) {
+ return err
+ }
}
return nil
@@ -152,106 +152,19 @@ func validateGetLFSPointersRequest(req *gitalypb.GetLFSPointersRequest) error {
return nil
}
-// GetNewLFSPointers returns all LFS pointers which were newly introduced in a given revision,
-// excluding either all other existing refs or a set of provided refs. If NotInAll is set, then it
-// has precedence over NotInRefs.
-func (s *server) GetNewLFSPointers(in *gitalypb.GetNewLFSPointersRequest, stream gitalypb.BlobService_GetNewLFSPointersServer) error {
- ctx := stream.Context()
-
- if err := validateGetLfsPointersByRevisionRequest(in); err != nil {
- return status.Errorf(codes.InvalidArgument, "GetNewLFSPointers: %v", err)
- }
-
- repo := localrepo.New(s.gitCmdFactory, in.Repository, s.cfg)
-
- var refs []string
- if in.NotInAll {
- refs = []string{string(in.Revision), "--not", "--all"}
- } else {
- refs = []string{string(in.Revision), "--not"}
- for _, notInRef := range in.NotInRefs {
- refs = append(refs, string(notInRef))
- }
- }
-
- lfsPointers, err := findLFSPointersByRevisions(ctx, repo, s.gitCmdFactory, int(in.Limit), refs...)
- if err != nil {
- if errors.Is(err, errInvalidRevision) {
- return status.Errorf(codes.InvalidArgument, err.Error())
- }
- return err
- }
-
- err = sliceLFSPointers(lfsPointers, func(slice []*gitalypb.LFSPointer) error {
- return stream.Send(&gitalypb.GetNewLFSPointersResponse{
- LfsPointers: slice,
- })
- })
- if err != nil {
- return err
- }
-
- return nil
-}
-
-func validateGetLfsPointersByRevisionRequest(in getLFSPointerByRevisionRequest) error {
- if in.GetRepository() == nil {
- return fmt.Errorf("empty Repository")
- }
-
- return git.ValidateRevision(in.GetRevision())
-}
-
-// GetAllLFSPointers returns all LFS pointers of the git repository which are reachable by any git
-// reference. LFS pointers are streamed back in batches of lfsPointerSliceSize.
-func (s *server) GetAllLFSPointers(in *gitalypb.GetAllLFSPointersRequest, stream gitalypb.BlobService_GetAllLFSPointersServer) error {
- ctx := stream.Context()
-
- if err := validateGetAllLFSPointersRequest(in); err != nil {
- return status.Errorf(codes.InvalidArgument, "GetAllLFSPointers: %v", err)
- }
-
- repo := localrepo.New(s.gitCmdFactory, in.Repository, s.cfg)
-
- lfsPointers, err := findLFSPointersByRevisions(ctx, repo, s.gitCmdFactory, 0, "--all")
- if err != nil {
- if errors.Is(err, errInvalidRevision) {
- return status.Errorf(codes.InvalidArgument, err.Error())
- }
- return err
- }
-
- err = sliceLFSPointers(lfsPointers, func(slice []*gitalypb.LFSPointer) error {
- return stream.Send(&gitalypb.GetAllLFSPointersResponse{
- LfsPointers: slice,
- })
- })
- if err != nil {
- return err
- }
-
- return nil
-}
-
-func validateGetAllLFSPointersRequest(in *gitalypb.GetAllLFSPointersRequest) error {
- if in.GetRepository() == nil {
- return fmt.Errorf("empty Repository")
- }
- return nil
-}
-
// findLFSPointersByRevisions will return all LFS objects reachable via the given set of revisions.
// Revisions accept all syntax supported by git-rev-list(1).
func findLFSPointersByRevisions(
ctx context.Context,
repo *localrepo.Repo,
gitCmdFactory git.CommandFactory,
+ chunker *chunk.Chunker,
limit int,
revisions ...string,
-) (lfsPointers []*gitalypb.LFSPointer, returnErr error) {
+) (returnErr error) {
for _, revision := range revisions {
if strings.HasPrefix(revision, "-") && revision != "--all" && revision != "--not" {
- return nil, fmt.Errorf("%w: %q", errInvalidRevision, revision)
+ return fmt.Errorf("%w: %q", errInvalidRevision, revision)
}
}
@@ -269,8 +182,9 @@ func findLFSPointersByRevisions(
Args: revisions,
}, git.WithStderr(&revListStderr))
if err != nil {
- return nil, fmt.Errorf("could not execute rev-list: %w", err)
+ return fmt.Errorf("could not execute rev-list: %w", err)
}
+
defer func() {
// There is no way to properly determine whether the process has exited because of
// us signalling the context or because of any other means. We can only approximate
@@ -278,12 +192,11 @@ func findLFSPointersByRevisions(
// awful, but given that `Signaled()` status is also not accessible to us,
// it's the best we could do.
//
- // Let's not do any of this, it's awful. Instead, we can simply check whether a
- // limit was set and if the number of returned LFS pointers matches that limit. If
- // so, we found all LFS pointers which the user requested and needn't bother whether
- // git-rev-list(1) may have failed. So let's instead just have the RPCcontext cancel
- // the process.
- if limit > 0 && len(lfsPointers) == limit {
+ // Let's not do any of this, it's awful. Instead, we can simply check whether we
+ // have reached the limit. If so, we found all LFS pointers which the user requested
+ // and needn't bother whether git-rev-list(1) may have failed. So let's instead just
+		// have the RPC context cancel the process.
+ if errors.Is(returnErr, errLimitReached) {
return
}
@@ -293,7 +206,7 @@ func findLFSPointersByRevisions(
}
}()
- return readLFSPointers(ctx, repo, revlist, limit)
+ return readLFSPointers(ctx, repo, chunker, revlist, limit)
}
// readLFSPointers reads object IDs of potential LFS pointers from the given reader and for each of
@@ -302,9 +215,16 @@ func findLFSPointersByRevisions(
func readLFSPointers(
ctx context.Context,
repo *localrepo.Repo,
+ chunker *chunk.Chunker,
objectIDReader io.Reader,
limit int,
-) ([]*gitalypb.LFSPointer, error) {
+) (returnErr error) {
+ defer func() {
+ if err := chunker.Flush(); err != nil && returnErr == nil {
+ returnErr = err
+ }
+ }()
+
catfileBatch, err := repo.Exec(ctx, git.SubCmd{
Name: "cat-file",
Flags: []git.Option{
@@ -313,10 +233,10 @@ func readLFSPointers(
},
}, git.WithStdin(objectIDReader))
if err != nil {
- return nil, fmt.Errorf("could not execute cat-file: %w", err)
+ return fmt.Errorf("could not execute cat-file: %w", err)
}
- var lfsPointers []*gitalypb.LFSPointer
+ var pointersFound int
reader := bufio.NewReader(catfileBatch)
buf := &bytes.Buffer{}
for {
@@ -325,18 +245,18 @@ func readLFSPointers(
if errors.Is(err, io.EOF) {
break
}
- return nil, fmt.Errorf("could not get LFS pointer info: %w", err)
+ return fmt.Errorf("could not get LFS pointer info: %w", err)
}
// Avoid allocating bytes for an LFS pointer until we know the current
// blob really is an LFS pointer.
buf.Reset()
if _, err := io.CopyN(buf, reader, objectInfo.Size+1); err != nil {
- return nil, fmt.Errorf("could not read LFS pointer candidate: %w", err)
+ return fmt.Errorf("could not read LFS pointer candidate: %w", err)
}
tempData := buf.Bytes()[:buf.Len()-1]
- if objectInfo.Type != "blob" || !isLFSPointer(tempData) {
+ if objectInfo.Type != "blob" || !git.IsLFSPointer(tempData) {
continue
}
@@ -345,11 +265,15 @@ func readLFSPointers(
data := make([]byte, len(tempData))
copy(data, tempData)
- lfsPointers = append(lfsPointers, &gitalypb.LFSPointer{
+ if err := chunker.Send(&gitalypb.LFSPointer{
Data: data,
Size: int64(len(data)),
Oid: objectInfo.Oid.String(),
- })
+ }); err != nil {
+ return fmt.Errorf("sending LFS pointer chunk: %w", err)
+ }
+
+ pointersFound++
// Exit early in case we've got all LFS pointers. We want to do this here instead of
// just terminating the loop because we need to check git-cat-file(1)'s exit code in
@@ -357,45 +281,31 @@ func readLFSPointers(
// though: we don't care for successful termination of the command, we only care
// that we've got all pointers. The command is then getting cancelled via the
// parent's context.
- if limit > 0 && len(lfsPointers) >= limit {
- return lfsPointers, nil
+ if limit > 0 && pointersFound >= limit {
+ return errLimitReached
}
}
if err := catfileBatch.Wait(); err != nil {
- return nil, err
+		return err
}
- return lfsPointers, nil
+ return nil
}
-// isLFSPointer determines whether the given blob contents are an LFS pointer or not.
-func isLFSPointer(data []byte) bool {
- // TODO: this is incomplete as it does not recognize pre-release version of LFS blobs with
- // the "https://hawser.github.com/spec/v1" version. For compatibility with the Ruby RPC, we
- // leave this as-is for now though.
- return bytes.HasPrefix(data, []byte("version https://git-lfs.github.com/spec"))
+type lfsPointerSender struct {
+ pointers []*gitalypb.LFSPointer
+ send func([]*gitalypb.LFSPointer) error
}
-// sliceLFSPointers slices the given pointers into subsets of slices with at most
-// lfsPointerSliceSize many pointers and executes the given fallback function. If the callback
-// returns an error, slicing is aborted and the error is returned verbosely.
-func sliceLFSPointers(pointers []*gitalypb.LFSPointer, fn func([]*gitalypb.LFSPointer) error) error {
- chunkSize := lfsPointerSliceSize
-
- for {
- if len(pointers) == 0 {
- return nil
- }
-
- if len(pointers) < chunkSize {
- chunkSize = len(pointers)
- }
+func (t *lfsPointerSender) Reset() {
+ t.pointers = t.pointers[:0]
+}
- if err := fn(pointers[:chunkSize]); err != nil {
- return err
- }
+func (t *lfsPointerSender) Append(m proto.Message) {
+ t.pointers = append(t.pointers, m.(*gitalypb.LFSPointer))
+}
- pointers = pointers[chunkSize:]
- }
+func (t *lfsPointerSender) Send() error {
+ return t.send(t.pointers)
}
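The rewritten RPCs above all follow the same streaming pattern: a chunk.Chunker wraps a chunk.Sender whose Send callback forwards one batch per gRPC response, the producer hands individual LFSPointer messages to Chunker.Send, and Chunker.Flush sends whatever remains at the end. A minimal sketch of that pattern follows, with a batch-recording sender standing in for the real gRPC stream; recordingSender and sendPointersChunked are illustrative names that are not part of this change, and the sketch assumes it sits next to the code above in the blob package.

package blob

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

// recordingSender implements chunk.Sender: Reset starts a new batch, Append
// accumulates messages, and Send flushes the batch (here into a slice, in the
// RPCs above into stream.Send).
type recordingSender struct {
	current []*gitalypb.LFSPointer
	batches [][]*gitalypb.LFSPointer
}

func (s *recordingSender) Reset() { s.current = nil }

func (s *recordingSender) Append(m proto.Message) {
	s.current = append(s.current, m.(*gitalypb.LFSPointer))
}

func (s *recordingSender) Send() error {
	s.batches = append(s.batches, s.current)
	return nil
}

// sendPointersChunked shows the producer side: each message goes through the
// chunker, which calls the sender once a chunk is full, and Flush sends the
// leftover partial chunk.
func sendPointersChunked(pointers []*gitalypb.LFSPointer) ([][]*gitalypb.LFSPointer, error) {
	sender := &recordingSender{}
	chunker := chunk.New(sender)

	for _, p := range pointers {
		if err := chunker.Send(p); err != nil {
			return nil, fmt.Errorf("sending LFS pointer chunk: %w", err)
		}
	}
	if err := chunker.Flush(); err != nil {
		return nil, err
	}

	return sender.batches, nil
}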
diff --git a/internal/gitaly/service/blob/lfs_pointers_test.go b/internal/gitaly/service/blob/lfs_pointers_test.go
index 09404f330..921a9a055 100644
--- a/internal/gitaly/service/blob/lfs_pointers_test.go
+++ b/internal/gitaly/service/blob/lfs_pointers_test.go
@@ -6,15 +6,17 @@ import (
"fmt"
"io"
"os"
- "os/exec"
"path/filepath"
+ "sort"
"strings"
"testing"
+ "github.com/golang/protobuf/proto"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
@@ -171,7 +173,7 @@ func TestListLFSPointers(t *testing.T) {
actualLFSPointers = append(actualLFSPointers, resp.GetLfsPointers()...)
}
- require.ElementsMatch(t, tc.expectedPointers, actualLFSPointers)
+ lfsPointersEqual(t, tc.expectedPointers, actualLFSPointers)
})
}
}
@@ -205,7 +207,7 @@ size 12345`
Repository: repo,
})
require.NoError(t, err)
- require.ElementsMatch(t, []*gitalypb.LFSPointer{
+ lfsPointersEqual(t, []*gitalypb.LFSPointer{
lfsPointers[lfsPointer1],
lfsPointers[lfsPointer2],
lfsPointers[lfsPointer3],
@@ -216,16 +218,16 @@ size 12345`
})
t.Run("dangling LFS pointer", func(t *testing.T) {
- _, repo, repoPath, client := setup(t)
+ cfg, repo, repoPath, client := setup(t)
- lfsPointerOID := text.ChompBytes(testhelper.MustRunCommand(t, strings.NewReader(lfsPointerContents),
- "git", "-C", repoPath, "hash-object", "-w", "--stdin"))
+ hash := gittest.ExecStream(t, cfg, strings.NewReader(lfsPointerContents), "-C", repoPath, "hash-object", "-w", "--stdin")
+ lfsPointerOID := text.ChompBytes(hash)
stream, err := client.ListAllLFSPointers(ctx, &gitalypb.ListAllLFSPointersRequest{
Repository: repo,
})
require.NoError(t, err)
- require.ElementsMatch(t, []*gitalypb.LFSPointer{
+ lfsPointersEqual(t, []*gitalypb.LFSPointer{
&gitalypb.LFSPointer{
Oid: lfsPointerOID,
Data: []byte(lfsPointerContents),
@@ -262,7 +264,7 @@ size 12345`
// Write a new object into the repository. Because we set GIT_OBJECT_DIRECTORY to
// the quarantine directory, objects will be written in there instead of into the
// repository's normal object directory.
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
var buffer, stderr bytes.Buffer
err = repo.ExecAndWait(ctx, git.SubCmd{
Name: "hash-object",
@@ -280,7 +282,7 @@ size 12345`
// We only expect to find a single LFS pointer, which is the one we've just written
// into the quarantine directory.
- require.ElementsMatch(t, []*gitalypb.LFSPointer{
+ lfsPointersEqual(t, []*gitalypb.LFSPointer{
&gitalypb.LFSPointer{
Oid: text.ChompBytes(buffer.Bytes()),
Data: []byte(lfsPointerContents),
@@ -332,7 +334,7 @@ func TestSuccessfulGetLFSPointersRequest(t *testing.T) {
receivedLFSPointers = append(receivedLFSPointers, resp.GetLfsPointers()...)
}
- require.ElementsMatch(t, receivedLFSPointers, expectedLFSPointers)
+ lfsPointersEqual(t, receivedLFSPointers, expectedLFSPointers)
}
func TestFailedGetLFSPointersRequestDueToValidations(t *testing.T) {
@@ -376,321 +378,14 @@ func TestFailedGetLFSPointersRequestDueToValidations(t *testing.T) {
}
}
-func TestSuccessfulGetNewLFSPointersRequest(t *testing.T) {
- cfg, _, _, client := setup(t)
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- repo, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
- t.Cleanup(cleanup)
-
- revision := []byte("46abbb087fcc0fd02c340f0f2f052bd2c7708da3")
- commiterArgs := []string{"-c", "user.name=Scrooge McDuck", "-c", "user.email=scrooge@mcduck.com"}
- cmdArgs := append(commiterArgs, "-C", repoPath, "cherry-pick", string(revision))
- cmd := exec.Command(cfg.Git.BinPath, cmdArgs...)
- // Skip smudge since it doesn't work with file:// remotes and we don't need it
- cmd.Env = append(cmd.Env, "GIT_LFS_SKIP_SMUDGE=1")
- altDirs := "./alt-objects"
- altDirsCommit := gittest.CreateCommitInAlternateObjectDirectory(t, cfg.Git.BinPath, repoPath, altDirs, cmd)
-
- // Create a commit not pointed at by any ref to emulate being in the
- // pre-receive hook so that `--not --all` returns some objects
- newRevision := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit-tree", "8856a329dd38ca86dfb9ce5aa58a16d88cc119bd", "-m", "Add LFS objects")
- newRevision = newRevision[:len(newRevision)-1] // Strip newline
-
- testCases := []struct {
- desc string
- request *gitalypb.GetNewLFSPointersRequest
- expectedLFSPointers []*gitalypb.LFSPointer
- }{
- {
- desc: "standard request",
- request: &gitalypb.GetNewLFSPointersRequest{
- Repository: repo,
- Revision: revision,
- },
- expectedLFSPointers: []*gitalypb.LFSPointer{
- lfsPointers[lfsPointer1],
- lfsPointers[lfsPointer2],
- lfsPointers[lfsPointer3],
- },
- },
- {
- desc: "request with revision in alternate directory",
- request: &gitalypb.GetNewLFSPointersRequest{
- Repository: repo,
- Revision: altDirsCommit,
- },
- expectedLFSPointers: []*gitalypb.LFSPointer{
- lfsPointers[lfsPointer1],
- lfsPointers[lfsPointer2],
- lfsPointers[lfsPointer3],
- },
- },
- {
- desc: "request with non-exceeding limit",
- request: &gitalypb.GetNewLFSPointersRequest{
- Repository: repo,
- Revision: revision,
- Limit: 9000,
- },
- expectedLFSPointers: []*gitalypb.LFSPointer{
- {
- Size: 133,
- Data: []byte("version https://git-lfs.github.com/spec/v1\noid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\nsize 1575078\n\n"),
- Oid: "0c304a93cb8430108629bbbcaa27db3343299bc0",
- },
- {
- Size: 127,
- Data: []byte("version https://git-lfs.github.com/spec/v1\noid sha256:bad71f905b60729f502ca339f7c9f001281a3d12c68a5da7f15de8009f4bd63d\nsize 18\n"),
- Oid: "bab31d249f78fba464d1b75799aad496cc07fa3b",
- },
- {
- Size: 127,
- Data: []byte("version https://git-lfs.github.com/spec/v1\noid sha256:f2b0a1e7550e9b718dafc9b525a04879a766de62e4fbdfc46593d47f7ab74636\nsize 20\n"),
- Oid: "f78df813119a79bfbe0442ab92540a61d3ab7ff3",
- },
- },
- },
- {
- desc: "request with smaller limit",
- request: &gitalypb.GetNewLFSPointersRequest{
- Repository: repo,
- Revision: revision,
- Limit: 2,
- },
- expectedLFSPointers: []*gitalypb.LFSPointer{
- lfsPointers[lfsPointer3],
- lfsPointers[lfsPointer2],
- },
- },
- {
- desc: "with NotInAll true",
- request: &gitalypb.GetNewLFSPointersRequest{
- Repository: repo,
- Revision: newRevision,
- NotInAll: true,
- },
- expectedLFSPointers: []*gitalypb.LFSPointer{
- lfsPointers[lfsPointer1],
- },
- },
- {
- desc: "with some NotInRefs elements",
- request: &gitalypb.GetNewLFSPointersRequest{
- Repository: repo,
- Revision: revision,
- NotInRefs: [][]byte{[]byte("048721d90c449b244b7b4c53a9186b04330174ec")},
- },
- expectedLFSPointers: []*gitalypb.LFSPointer{
- lfsPointers[lfsPointer3],
- lfsPointers[lfsPointer2],
- },
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.desc, func(t *testing.T) {
- tc.request.Repository.GitAlternateObjectDirectories = []string{altDirs}
- stream, err := client.GetNewLFSPointers(ctx, tc.request)
- require.NoError(t, err)
-
- var receivedLFSPointers []*gitalypb.LFSPointer
- for {
- resp, err := stream.Recv()
- if err == io.EOF {
- break
- } else if err != nil {
- t.Fatal(err)
- }
-
- receivedLFSPointers = append(receivedLFSPointers, resp.GetLfsPointers()...)
- }
-
- require.ElementsMatch(t, receivedLFSPointers, tc.expectedLFSPointers)
- })
- }
-}
-
-func TestFailedGetNewLFSPointersRequestDueToValidations(t *testing.T) {
- _, repo, _, client := setup(t)
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- testCases := []struct {
- desc string
- repository *gitalypb.Repository
- revision []byte
- }{
- {
- desc: "empty Repository",
- repository: nil,
- revision: []byte("master"),
- },
- {
- desc: "empty revision",
- repository: repo,
- revision: nil,
- },
- {
- desc: "revision can't start with '-'",
- repository: repo,
- revision: []byte("-suspicious-revision"),
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.desc, func(t *testing.T) {
- request := &gitalypb.GetNewLFSPointersRequest{
- Repository: tc.repository,
- Revision: tc.revision,
- }
-
- c, err := client.GetNewLFSPointers(ctx, request)
- require.NoError(t, err)
-
- err = drainNewPointers(c)
- testhelper.RequireGrpcError(t, err, codes.InvalidArgument)
- require.Contains(t, err.Error(), tc.desc)
- })
- }
-}
-
-func drainNewPointers(c gitalypb.BlobService_GetNewLFSPointersClient) error {
- for {
- _, err := c.Recv()
- if err != nil {
- return err
- }
- }
-}
-
-func TestSuccessfulGetAllLFSPointersRequest(t *testing.T) {
- _, repo, _, client := setup(t)
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- request := &gitalypb.GetAllLFSPointersRequest{
- Repository: repo,
- }
-
- expectedLFSPointers := []*gitalypb.LFSPointer{
- lfsPointers[lfsPointer1],
- lfsPointers[lfsPointer2],
- lfsPointers[lfsPointer3],
- lfsPointers[lfsPointer4],
- lfsPointers[lfsPointer5],
- lfsPointers[lfsPointer6],
- }
-
- c, err := client.GetAllLFSPointers(ctx, request)
- require.NoError(t, err)
-
- require.ElementsMatch(t, expectedLFSPointers, getAllPointers(t, c))
-}
-
-func getAllPointers(t *testing.T, c gitalypb.BlobService_GetAllLFSPointersClient) []*gitalypb.LFSPointer {
- var receivedLFSPointers []*gitalypb.LFSPointer
- for {
- resp, err := c.Recv()
- if err == io.EOF {
- break
- }
- require.NoError(t, err)
-
- receivedLFSPointers = append(receivedLFSPointers, resp.GetLfsPointers()...)
- }
-
- return receivedLFSPointers
-}
-
-func TestFailedGetAllLFSPointersRequestDueToValidations(t *testing.T) {
- _, _, _, client := setup(t)
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- testCases := []struct {
- desc string
- repository *gitalypb.Repository
- }{
- {
- desc: "empty Repository",
- repository: nil,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.desc, func(t *testing.T) {
- request := &gitalypb.GetAllLFSPointersRequest{
- Repository: tc.repository,
- }
-
- c, err := client.GetAllLFSPointers(ctx, request)
- require.NoError(t, err)
-
- err = drainAllPointers(c)
- testhelper.RequireGrpcError(t, err, codes.InvalidArgument)
- require.Contains(t, err.Error(), tc.desc)
- })
- }
-}
-
-func drainAllPointers(c gitalypb.BlobService_GetAllLFSPointersClient) error {
- for {
- _, err := c.Recv()
- if err != nil {
- return err
- }
- }
-}
-
-// TestGetAllLFSPointersVerifyScope verifies that this RPC returns all LFS
-// pointers in a repository, not only ones reachable from the default branch
-func TestGetAllLFSPointersVerifyScope(t *testing.T) {
- _, repo, repoPath, client := setup(t)
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- request := &gitalypb.GetAllLFSPointersRequest{
- Repository: repo,
- }
-
- c, err := client.GetAllLFSPointers(ctx, request)
- require.NoError(t, err)
-
- lfsPtr := lfsPointers[lfsPointer2]
-
- // the LFS pointer is reachable from a non-default branch:
- require.True(t, refHasPtr(t, repoPath, "moar-lfs-ptrs", lfsPtr))
-
- // the same pointer is not reachable from a default branch
- require.False(t, refHasPtr(t, repoPath, "master", lfsPtr))
-
- require.Contains(t, getAllPointers(t, c), lfsPtr,
- "RPC should return all LFS pointers, not just ones in the default branch")
-}
-
-// refHasPtr verifies the provided ref has connectivity to the LFS pointer
-func refHasPtr(t *testing.T, repoPath, ref string, lfsPtr *gitalypb.LFSPointer) bool {
- objects := string(testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath, "rev-list", "--objects", ref))
-
- return strings.Contains(objects, lfsPtr.Oid)
-}
-
func TestFindLFSPointersByRevisions(t *testing.T) {
cfg := testcfg.Build(t)
gitCmdFactory := git.NewExecCommandFactory(cfg)
- repoProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repoProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
- repo := localrepo.New(gitCmdFactory, repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -736,6 +431,7 @@ func TestFindLFSPointersByRevisions(t *testing.T) {
lfsPointers[lfsPointer5],
lfsPointers[lfsPointer6],
},
+ expectedErr: errLimitReached,
},
{
desc: "--not --all",
@@ -773,14 +469,16 @@ func TestFindLFSPointersByRevisions(t *testing.T) {
},
} {
t.Run(tc.desc, func(t *testing.T) {
- actualLFSPointers, err := findLFSPointersByRevisions(
- ctx, repo, gitCmdFactory, tc.limit, tc.revs...)
+ var collector lfsPointerCollector
+
+ err := findLFSPointersByRevisions(ctx, repo, gitCmdFactory,
+ collector.chunker(), tc.limit, tc.revs...)
if tc.expectedErr == nil {
require.NoError(t, err)
} else {
require.Contains(t, err.Error(), tc.expectedErr.Error())
}
- require.ElementsMatch(t, tc.expectedLFSPointers, actualLFSPointers)
+ lfsPointersEqual(t, tc.expectedLFSPointers, collector.pointers)
})
}
}
@@ -790,57 +488,57 @@ func BenchmarkFindLFSPointers(b *testing.B) {
gitCmdFactory := git.NewExecCommandFactory(cfg)
- repoProto, _, cleanup := gittest.CloneBenchRepo(b)
+ repoProto, _, cleanup := gittest.CloneBenchRepo(b, cfg)
b.Cleanup(cleanup)
- repo := localrepo.New(gitCmdFactory, repoProto, cfg)
+ repo := localrepo.NewTestRepo(b, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
b.Run("limitless", func(b *testing.B) {
- _, err := findLFSPointersByRevisions(ctx, repo, gitCmdFactory, 0, "--all")
+ var collector lfsPointerCollector
+ err := findLFSPointersByRevisions(ctx, repo, gitCmdFactory, collector.chunker(), 0, "--all")
require.NoError(b, err)
})
b.Run("limit", func(b *testing.B) {
- lfsPointer, err := findLFSPointersByRevisions(ctx, repo, gitCmdFactory, 1, "--all")
+ var collector lfsPointerCollector
+ err := findLFSPointersByRevisions(ctx, repo, gitCmdFactory, collector.chunker(), 1, "--all")
require.NoError(b, err)
- require.Len(b, lfsPointer, 1)
+ require.Len(b, collector.pointers, 1)
})
}
func BenchmarkReadLFSPointers(b *testing.B) {
cfg := testcfg.Build(b)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
-
- repoProto, path, cleanup := gittest.CloneBenchRepo(b)
+ repoProto, path, cleanup := gittest.CloneBenchRepo(b, cfg)
b.Cleanup(cleanup)
- repo := localrepo.New(gitCmdFactory, repoProto, cfg)
+ repo := localrepo.NewTestRepo(b, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
- candidates := testhelper.MustRunCommand(b, nil, "git", "-C", path, "rev-list", "--in-commit-order", "--objects", "--no-object-names", "--filter=blob:limit=200", "--all")
+ candidates := gittest.Exec(b, cfg, "-C", path, "rev-list", "--in-commit-order", "--objects", "--no-object-names", "--filter=blob:limit=200", "--all")
b.Run("limitless", func(b *testing.B) {
- _, err := readLFSPointers(ctx, repo, bytes.NewReader(candidates), 0)
+ var collector lfsPointerCollector
+ err := readLFSPointers(ctx, repo, collector.chunker(), bytes.NewReader(candidates), 0)
require.NoError(b, err)
})
b.Run("limit", func(b *testing.B) {
- lfsPointer, err := readLFSPointers(ctx, repo, bytes.NewReader(candidates), 1)
- require.NoError(b, err)
- require.Len(b, lfsPointer, 1)
+ var collector lfsPointerCollector
+ err := readLFSPointers(ctx, repo, collector.chunker(), bytes.NewReader(candidates), 1)
+ require.Equal(b, errLimitReached, err)
+ require.Equal(b, 1, len(collector.pointers))
})
}
func TestReadLFSPointers(t *testing.T) {
cfg, repo, _, _ := setup(t)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
-
- localRepo := localrepo.New(gitCmdFactory, repo, cfg)
+ localRepo := localrepo.NewTestRepo(t, cfg, repo)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -914,6 +612,7 @@ func TestReadLFSPointers(t *testing.T) {
lfsPointers[lfsPointer2],
lfsPointers[lfsPointer3],
},
+ expectedErr: errLimitReached,
},
{
desc: "multiple object IDs with name filter",
@@ -925,6 +624,10 @@ func TestReadLFSPointers(t *testing.T) {
lfsPointer5 + " z",
lfsPointer6 + " a",
}, "\n"),
+ expectedLFSPointers: []*gitalypb.LFSPointer{
+ lfsPointers[lfsPointer1],
+ lfsPointers[lfsPointer2],
+ },
expectedErr: errors.New("object not found"),
},
{
@@ -954,84 +657,52 @@ func TestReadLFSPointers(t *testing.T) {
t.Run(tc.desc, func(t *testing.T) {
reader := strings.NewReader(tc.input)
- actualLFSPointers, err := readLFSPointers(
- ctx, localRepo, reader, tc.limit)
+ var collector lfsPointerCollector
+
+ err := readLFSPointers(ctx, localRepo, collector.chunker(), reader, tc.limit)
if tc.expectedErr == nil {
require.NoError(t, err)
} else {
require.Contains(t, err.Error(), tc.expectedErr.Error())
}
- require.ElementsMatch(t, tc.expectedLFSPointers, actualLFSPointers)
+
+ lfsPointersEqual(t, tc.expectedLFSPointers, collector.pointers)
})
}
}
-func TestSliceLFSPointers(t *testing.T) {
- generateSlice := func(n, offset int) []*gitalypb.LFSPointer {
- slice := make([]*gitalypb.LFSPointer, n)
- for i := 0; i < n; i++ {
- slice[i] = &gitalypb.LFSPointer{
- Size: int64(i + offset),
- }
- }
- return slice
- }
+func lfsPointersEqual(tb testing.TB, expected, actual []*gitalypb.LFSPointer) {
+ tb.Helper()
- for _, tc := range []struct {
- desc string
- err error
- lfsPointers []*gitalypb.LFSPointer
- expectedSlices [][]*gitalypb.LFSPointer
- }{
- {
- desc: "empty",
- },
- {
- desc: "single slice",
- lfsPointers: generateSlice(10, 0),
- expectedSlices: [][]*gitalypb.LFSPointer{
- generateSlice(10, 0),
- },
- },
- {
- desc: "two slices",
- lfsPointers: generateSlice(101, 0),
- expectedSlices: [][]*gitalypb.LFSPointer{
- generateSlice(100, 0),
- generateSlice(1, 100),
- },
- },
- {
- desc: "many slices",
- lfsPointers: generateSlice(635, 0),
- expectedSlices: [][]*gitalypb.LFSPointer{
- generateSlice(100, 0),
- generateSlice(100, 100),
- generateSlice(100, 200),
- generateSlice(100, 300),
- generateSlice(100, 400),
- generateSlice(100, 500),
- generateSlice(35, 600),
- },
- },
- {
- desc: "error",
- lfsPointers: generateSlice(500, 0),
- err: errors.New("foo"),
- expectedSlices: [][]*gitalypb.LFSPointer{
- generateSlice(100, 0),
- },
- },
- } {
- t.Run(tc.desc, func(t *testing.T) {
- var slices [][]*gitalypb.LFSPointer
-
- err := sliceLFSPointers(tc.lfsPointers, func(slice []*gitalypb.LFSPointer) error {
- slices = append(slices, slice)
- return tc.err
- })
- require.Equal(t, tc.err, err)
- require.Equal(t, tc.expectedSlices, slices)
+ for _, slice := range [][]*gitalypb.LFSPointer{expected, actual} {
+ sort.Slice(slice, func(i, j int) bool {
+ return strings.Compare(slice[i].Oid, slice[j].Oid) < 0
})
}
+
+ require.Equal(tb, len(expected), len(actual))
+ for i := range expected {
+ testhelper.ProtoEqual(tb, expected[i], actual[i])
+ }
+}
+
+type lfsPointerCollector struct {
+ pointers []*gitalypb.LFSPointer
+}
+
+func (c *lfsPointerCollector) Append(m proto.Message) {
+ c.pointers = append(c.pointers, m.(*gitalypb.LFSPointer))
+}
+
+func (c *lfsPointerCollector) Reset() {
+	// We don't reset anything given that we want to collect all pointers.
+}
+
+func (c *lfsPointerCollector) Send() error {
+	// And neither do we send anything here.
+ return nil
+}
+
+func (c *lfsPointerCollector) chunker() *chunk.Chunker {
+ return chunk.New(c)
}
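Both the production code and these tests rely on errLimitReached as a sentinel: the reader returns it once the requested number of pointers has been streamed, and callers treat it as success rather than waiting for git-rev-list(1) or git-cat-file(1) to exit cleanly. The following self-contained sketch only illustrates that convention; produce and the example data are not part of this change.

package main

import (
	"errors"
	"fmt"
)

var errLimitReached = errors.New("limit reached")

// produce streams up to limit items (limit == 0 means unlimited) and reports
// errLimitReached when it stops early.
func produce(items []string, limit int, send func(string) error) error {
	var sent int
	for _, item := range items {
		if err := send(item); err != nil {
			return err
		}
		sent++
		if limit > 0 && sent >= limit {
			return errLimitReached
		}
	}
	return nil
}

func main() {
	err := produce([]string{"a", "b", "c"}, 2, func(s string) error {
		fmt.Println(s)
		return nil
	})

	// Callers in the RPCs above do exactly this: the sentinel is not a failure.
	if err != nil && !errors.Is(err, errLimitReached) {
		fmt.Println("real error:", err)
		return
	}
	fmt.Println("done")
}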
diff --git a/internal/gitaly/service/blob/server.go b/internal/gitaly/service/blob/server.go
index 4567d2ff2..4b89b5766 100644
--- a/internal/gitaly/service/blob/server.go
+++ b/internal/gitaly/service/blob/server.go
@@ -2,6 +2,9 @@ package blob
import (
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -11,13 +14,19 @@ type server struct {
cfg config.Cfg
locator storage.Locator
gitCmdFactory git.CommandFactory
+ catfileCache catfile.Cache
}
// NewServer creates a new instance of a grpc BlobServer
-func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory) gitalypb.BlobServiceServer {
+func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, catfileCache catfile.Cache) gitalypb.BlobServiceServer {
return &server{
cfg: cfg,
locator: locator,
gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
}
}
+
+func (s *server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
+}
diff --git a/internal/gitaly/service/blob/testhelper_test.go b/internal/gitaly/service/blob/testhelper_test.go
index 07df48c2d..b9de56f3a 100644
--- a/internal/gitaly/service/blob/testhelper_test.go
+++ b/internal/gitaly/service/blob/testhelper_test.go
@@ -31,11 +31,16 @@ func testMain(m *testing.M) int {
func setup(t *testing.T) (config.Cfg, *gitalypb.Repository, string, gitalypb.BlobServiceClient) {
cfg := testcfg.Build(t)
- repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterBlobServiceServer(srv, NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterBlobServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
})
conn, err := grpc.Dial(addr, grpc.WithInsecure())
diff --git a/internal/gitaly/service/cleanup/apply_bfg_object_map_stream.go b/internal/gitaly/service/cleanup/apply_bfg_object_map_stream.go
index 7d023ecca..735612323 100644
--- a/internal/gitaly/service/cleanup/apply_bfg_object_map_stream.go
+++ b/internal/gitaly/service/cleanup/apply_bfg_object_map_stream.go
@@ -36,18 +36,18 @@ func (s *server) ApplyBfgObjectMapStream(server gitalypb.CleanupService_ApplyBfg
}
ctx := server.Context()
- repo := firstRequest.GetRepository()
+ repo := s.localrepo(firstRequest.GetRepository())
reader := &bfgStreamReader{firstRequest: firstRequest, server: server}
chunker := chunk.New(&bfgStreamWriter{server: server})
- notifier, err := notifier.New(ctx, s.gitCmdFactory, repo, chunker)
+ notifier, err := notifier.New(ctx, s.catfileCache, repo, chunker)
if err != nil {
return helper.ErrInternal(err)
}
// It doesn't matter if new internal references are added after this RPC
// starts running - they shouldn't point to the objects removed by the BFG
- cleaner, err := internalrefs.NewCleaner(ctx, s.cfg, s.gitCmdFactory, repo, notifier.Notify)
+ cleaner, err := internalrefs.NewCleaner(ctx, s.cfg, repo, notifier.Notify)
if err != nil {
return helper.ErrInternal(err)
}
diff --git a/internal/gitaly/service/cleanup/apply_bfg_object_map_stream_test.go b/internal/gitaly/service/cleanup/apply_bfg_object_map_stream_test.go
index c123283ec..2a9335850 100644
--- a/internal/gitaly/service/cleanup/apply_bfg_object_map_stream_test.go
+++ b/internal/gitaly/service/cleanup/apply_bfg_object_map_stream_test.go
@@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -21,7 +22,7 @@ func TestApplyBfgObjectMapStreamSuccess(t *testing.T) {
testhelper.ConfigureGitalyHooksBin(t, cfg)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), protoRepo, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, protoRepo)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -40,7 +41,7 @@ func TestApplyBfgObjectMapStreamSuccess(t *testing.T) {
"refs/environments/1", "refs/keep-around/1", "refs/merge-requests/1", "refs/pipelines/1",
"refs/heads/_keep", "refs/tags/_keep", "refs/notes/_keep",
} {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", ref, headCommit.Id)
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", ref, headCommit.Id)
}
// Create some refs pointing to ref/tags/v1.0.0, simulating an unmodified
@@ -48,7 +49,7 @@ func TestApplyBfgObjectMapStreamSuccess(t *testing.T) {
for _, ref := range []string{
"refs/environments/_keep", "refs/keep-around/_keep", "refs/merge-requests/_keep", "refs/pipelines/_keep",
} {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", ref, tagID)
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", ref, tagID)
}
const filterRepoCommitMapHeader = "old new\n"
diff --git a/internal/gitaly/service/cleanup/internalrefs/cleaner.go b/internal/gitaly/service/cleanup/internalrefs/cleaner.go
index 69369299e..624c5cb5a 100644
--- a/internal/gitaly/service/cleanup/internalrefs/cleaner.go
+++ b/internal/gitaly/service/cleanup/internalrefs/cleaner.go
@@ -12,17 +12,8 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/updateref"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
-// Only references in these namespaces are cleaned up
-var internalRefs = []string{
- "refs/environments/",
- "refs/keep-around/",
- "refs/merge-requests/",
- "refs/pipelines/",
-}
-
// A ForEachFunc can be called for every entry in the filter-repo or BFG object
// map file that the cleaner is processing. Returning an error will stop the
// cleaner before it has processed the entry in question
@@ -47,13 +38,13 @@ type ErrInvalidObjectMap error
// NewCleaner builds a new instance of Cleaner, which is used to apply a
// filter-repo or BFG object map to a repository.
-func NewCleaner(ctx context.Context, cfg config.Cfg, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository, forEach ForEachFunc) (*Cleaner, error) {
- table, err := buildLookupTable(ctx, gitCmdFactory, repo)
+func NewCleaner(ctx context.Context, cfg config.Cfg, repo git.RepositoryExecutor, forEach ForEachFunc) (*Cleaner, error) {
+ table, err := buildLookupTable(ctx, repo)
if err != nil {
return nil, err
}
- updater, err := updateref.New(ctx, cfg, gitCmdFactory, repo)
+ updater, err := updateref.New(ctx, cfg, repo)
if err != nil {
return nil, err
}
@@ -133,11 +124,11 @@ func (c *Cleaner) processEntry(ctx context.Context, oldSHA, newSHA string) error
// an object that has been rewritten by the filter-repo or BFG (and so require
// action). It is consulted once per line in the object map. Git is optimized
// for ref -> SHA lookups, but we want the opposite!
-func buildLookupTable(ctx context.Context, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository) (map[string][]git.ReferenceName, error) {
- cmd, err := gitCmdFactory.New(ctx, repo, git.SubCmd{
+func buildLookupTable(ctx context.Context, repo git.RepositoryExecutor) (map[string][]git.ReferenceName, error) {
+ cmd, err := repo.Exec(ctx, git.SubCmd{
Name: "for-each-ref",
Flags: []git.Option{git.ValueFlag{Name: "--format", Value: "%(objectname) %(refname)"}},
- Args: internalRefs,
+ Args: git.InternalRefPrefixes[:],
})
if err != nil {
return nil, err
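buildLookupTable inverts git's usual ref -> SHA direction by parsing `for-each-ref --format='%(objectname) %(refname)'` output into a map keyed by object ID, so that each rewritten object can be traced back to the internal refs pointing at it. The parsing itself sits outside this hunk, so the following standalone sketch only approximates it, using plain strings instead of git.ReferenceName.

package main

import (
	"bufio"
	"fmt"
	"strings"
)

// buildLookupTable parses "<objectname> <refname>" lines into a map from
// object ID to the references pointing at it.
func buildLookupTable(output string) (map[string][]string, error) {
	table := make(map[string][]string)

	scanner := bufio.NewScanner(strings.NewReader(output))
	for scanner.Scan() {
		line := scanner.Text()

		parts := strings.SplitN(line, " ", 2)
		if len(parts) != 2 {
			return nil, fmt.Errorf("unexpected for-each-ref line %q", line)
		}

		oid, ref := parts[0], parts[1]
		table[oid] = append(table[oid], ref)
	}
	if err := scanner.Err(); err != nil {
		return nil, err
	}

	return table, nil
}

func main() {
	table, err := buildLookupTable(
		"1234 refs/keep-around/1234\n1234 refs/pipelines/7\nabcd refs/merge-requests/1/head\n",
	)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(table["1234"]) // [refs/keep-around/1234 refs/pipelines/7]
}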
diff --git a/internal/gitaly/service/cleanup/notifier/notifier.go b/internal/gitaly/service/cleanup/notifier/notifier.go
index 865a16e91..52b16ee55 100644
--- a/internal/gitaly/service/cleanup/notifier/notifier.go
+++ b/internal/gitaly/service/cleanup/notifier/notifier.go
@@ -17,8 +17,8 @@ type Notifier struct {
}
// New instantiates a new Notifier
-func New(ctx context.Context, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository, chunker *chunk.Chunker) (*Notifier, error) {
- catfile, err := catfile.New(ctx, gitCmdFactory, repo)
+func New(ctx context.Context, catfileCache catfile.Cache, repo git.RepositoryExecutor, chunker *chunk.Chunker) (*Notifier, error) {
+ catfile, err := catfileCache.BatchProcess(ctx, repo)
if err != nil {
return nil, err
}
diff --git a/internal/gitaly/service/cleanup/server.go b/internal/gitaly/service/cleanup/server.go
index 7d58ad4b1..dc0278b5d 100644
--- a/internal/gitaly/service/cleanup/server.go
+++ b/internal/gitaly/service/cleanup/server.go
@@ -2,6 +2,9 @@ package cleanup
import (
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -9,9 +12,18 @@ import (
type server struct {
cfg config.Cfg
gitCmdFactory git.CommandFactory
+ catfileCache catfile.Cache
}
// NewServer creates a new instance of a grpc CleanupServer
-func NewServer(cfg config.Cfg, gitCmdFactory git.CommandFactory) gitalypb.CleanupServiceServer {
- return &server{cfg: cfg, gitCmdFactory: gitCmdFactory}
+func NewServer(cfg config.Cfg, gitCmdFactory git.CommandFactory, catfileCache catfile.Cache) gitalypb.CleanupServiceServer {
+ return &server{
+ cfg: cfg,
+ gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
+ }
+}
+
+func (s *server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
}
diff --git a/internal/gitaly/service/cleanup/testhelper_test.go b/internal/gitaly/service/cleanup/testhelper_test.go
index 1989e3873..795fe7950 100644
--- a/internal/gitaly/service/cleanup/testhelper_test.go
+++ b/internal/gitaly/service/cleanup/testhelper_test.go
@@ -38,7 +38,11 @@ func setupCleanupService(t *testing.T) (config.Cfg, *gitalypb.Repository, string
func runCleanupServiceServer(t *testing.T, cfg config.Cfg) string {
return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterCleanupServiceServer(srv, NewServer(deps.GetCfg(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterCleanupServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
})
}
diff --git a/internal/gitaly/service/commit/between.go b/internal/gitaly/service/commit/between.go
index e0d82c6f7..5c89fcfa1 100644
--- a/internal/gitaly/service/commit/between.go
+++ b/internal/gitaly/service/commit/between.go
@@ -29,16 +29,16 @@ func (s *server) CommitsBetween(in *gitalypb.CommitsBetweenRequest, stream gital
return helper.ErrInvalidArgument(err)
}
+ repo := s.localrepo(in.GetRepository())
sender := &commitsBetweenSender{stream: stream}
from, to, limit := normalizedCommitsBetweenParams(in)
revisionRange := fmt.Sprintf("%s..%s", from, to)
- if err := sendCommits(
+ if err := s.sendCommits(
stream.Context(),
sender,
- s.gitCmdFactory,
- in.GetRepository(),
+ repo,
[]string{revisionRange},
nil,
nil,
diff --git a/internal/gitaly/service/commit/commit_messages.go b/internal/gitaly/service/commit/commit_messages.go
index d6229c1c8..dd811e563 100644
--- a/internal/gitaly/service/commit/commit_messages.go
+++ b/internal/gitaly/service/commit/commit_messages.go
@@ -7,7 +7,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
@@ -26,12 +25,14 @@ func (s *server) GetCommitMessages(request *gitalypb.GetCommitMessagesRequest, s
func (s *server) getAndStreamCommitMessages(request *gitalypb.GetCommitMessagesRequest, stream gitalypb.CommitService_GetCommitMessagesServer) error {
ctx := stream.Context()
- c, err := catfile.New(ctx, s.gitCmdFactory, request.GetRepository())
+ repo := s.localrepo(request.GetRepository())
+
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
for _, commitID := range request.GetCommitIds() {
- msg, err := log.GetCommitMessage(ctx, c, request.GetRepository(), git.Revision(commitID))
+ msg, err := catfile.GetCommitMessage(ctx, c, repo, git.Revision(commitID))
if err != nil {
return fmt.Errorf("failed to get commit message: %v", err)
}
diff --git a/internal/gitaly/service/commit/commit_messages_test.go b/internal/gitaly/service/commit/commit_messages_test.go
index fbb4ca916..507d7765c 100644
--- a/internal/gitaly/service/commit/commit_messages_test.go
+++ b/internal/gitaly/service/commit/commit_messages_test.go
@@ -22,12 +22,17 @@ func TestSuccessfulGetCommitMessagesRequest(t *testing.T) {
message1 := strings.Repeat("a\n", helper.MaxCommitOrTagMessageSize*2)
message2 := strings.Repeat("b\n", helper.MaxCommitOrTagMessageSize*2)
- commit1ID := gittest.CreateCommit(t, cfg, repoPath, "local-big-commits", &gittest.CreateCommitOpts{Message: message1})
- commit2ID := gittest.CreateCommit(t, cfg, repoPath, "local-big-commits", &gittest.CreateCommitOpts{Message: message2, ParentID: commit1ID})
+ commit1ID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithBranch("local-big-commits"), gittest.WithMessage(message1),
+ )
+ commit2ID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithBranch("local-big-commits"), gittest.WithMessage(message2),
+ gittest.WithParents(commit1ID),
+ )
request := &gitalypb.GetCommitMessagesRequest{
Repository: repo,
- CommitIds: []string{commit1ID, commit2ID},
+ CommitIds: []string{commit1ID.String(), commit2ID.String()},
}
c, err := client.GetCommitMessages(ctx, request)
@@ -35,11 +40,11 @@ func TestSuccessfulGetCommitMessagesRequest(t *testing.T) {
expectedMessages := []*gitalypb.GetCommitMessagesResponse{
{
- CommitId: commit1ID,
+ CommitId: commit1ID.String(),
Message: []byte(message1),
},
{
- CommitId: commit2ID,
+ CommitId: commit2ID.String(),
Message: []byte(message2),
},
}
diff --git a/internal/gitaly/service/commit/commit_signatures.go b/internal/gitaly/service/commit/commit_signatures.go
index 15ec9ab80..d322631f7 100644
--- a/internal/gitaly/service/commit/commit_signatures.go
+++ b/internal/gitaly/service/commit/commit_signatures.go
@@ -28,8 +28,9 @@ func (s *server) GetCommitSignatures(request *gitalypb.GetCommitSignaturesReques
func (s *server) getCommitSignatures(request *gitalypb.GetCommitSignaturesRequest, stream gitalypb.CommitService_GetCommitSignaturesServer) error {
ctx := stream.Context()
+ repo := s.localrepo(request.GetRepository())
- c, err := catfile.New(ctx, s.gitCmdFactory, request.GetRepository())
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return helper.ErrInternal(err)
}
diff --git a/internal/gitaly/service/commit/commit_signatures_test.go b/internal/gitaly/service/commit/commit_signatures_test.go
index 12690182d..260497ea7 100644
--- a/internal/gitaly/service/commit/commit_signatures_test.go
+++ b/internal/gitaly/service/commit/commit_signatures_test.go
@@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -14,10 +15,10 @@ import (
)
func TestSuccessfulGetCommitSignaturesRequest(t *testing.T) {
- _, repo, repoPath, client := setupCommitServiceWithRepo(t, true)
+ cfg, repo, repoPath, client := setupCommitServiceWithRepo(t, true)
commitData := testhelper.MustReadFile(t, "testdata/dc00eb001f41dfac08192ead79c2377c588b82ee.commit")
- commit := text.ChompBytes(testhelper.MustRunCommand(t, bytes.NewReader(commitData), "git", "-C", repoPath, "hash-object", "-w", "-t", "commit", "--stdin", "--literally"))
+ commit := text.ChompBytes(gittest.ExecStream(t, cfg, bytes.NewReader(commitData), "-C", repoPath, "hash-object", "-w", "-t", "commit", "--stdin", "--literally"))
require.Equal(t, "dc00eb001f41dfac08192ead79c2377c588b82ee", commit)
ctx, cancel := testhelper.Context()
diff --git a/internal/gitaly/service/commit/commits_by_message.go b/internal/gitaly/service/commit/commits_by_message.go
index 47735eeef..5264ca6ff 100644
--- a/internal/gitaly/service/commit/commits_by_message.go
+++ b/internal/gitaly/service/commit/commits_by_message.go
@@ -38,6 +38,7 @@ func (s *server) CommitsByMessage(in *gitalypb.CommitsByMessageRequest, stream g
func (s *server) commitsByMessage(in *gitalypb.CommitsByMessageRequest, stream gitalypb.CommitService_CommitsByMessageServer) error {
ctx := stream.Context()
sender := &commitsByMessageSender{stream: stream}
+ repo := s.localrepo(in.GetRepository())
gitLogExtraOptions := []git.Option{
git.Flag{Name: "--grep=" + in.GetQuery()},
@@ -54,7 +55,7 @@ func (s *server) commitsByMessage(in *gitalypb.CommitsByMessageRequest, stream g
if len(revision) == 0 {
var err error
- revision, err = defaultBranchName(ctx, s.gitCmdFactory, in.Repository)
+ revision, err = defaultBranchName(ctx, repo)
if err != nil {
return err
}
@@ -65,7 +66,7 @@ func (s *server) commitsByMessage(in *gitalypb.CommitsByMessageRequest, stream g
paths = append(paths, string(path))
}
- return sendCommits(stream.Context(), sender, s.gitCmdFactory, in.GetRepository(), []string{string(revision)}, paths, in.GetGlobalOptions(), gitLogExtraOptions...)
+ return s.sendCommits(stream.Context(), sender, repo, []string{string(revision)}, paths, in.GetGlobalOptions(), gitLogExtraOptions...)
}
func validateCommitsByMessageRequest(in *gitalypb.CommitsByMessageRequest) error {
diff --git a/internal/gitaly/service/commit/commits_helper.go b/internal/gitaly/service/commit/commits_helper.go
index 17aa143b7..4a7a87cb1 100644
--- a/internal/gitaly/service/commit/commits_helper.go
+++ b/internal/gitaly/service/commit/commits_helper.go
@@ -10,18 +10,26 @@ import (
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
-func sendCommits(ctx context.Context, sender chunk.Sender, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository, revisionRange []string, paths []string, options *gitalypb.GlobalOptions, extraArgs ...git.Option) error {
+func (s *server) sendCommits(
+ ctx context.Context,
+ sender chunk.Sender,
+ repo git.RepositoryExecutor,
+ revisionRange []string,
+ paths []string,
+ options *gitalypb.GlobalOptions,
+ extraArgs ...git.Option,
+) error {
revisions := make([]git.Revision, len(revisionRange))
for i, revision := range revisionRange {
revisions[i] = git.Revision(revision)
}
- cmd, err := log.GitLogCommand(ctx, gitCmdFactory, repo, revisions, paths, options, extraArgs...)
+ cmd, err := log.GitLogCommand(ctx, s.gitCmdFactory, repo, revisions, paths, options, extraArgs...)
if err != nil {
return err
}
- logParser, err := log.NewLogParser(ctx, gitCmdFactory, repo, cmd)
+ logParser, err := log.NewParser(ctx, s.catfileCache, repo, cmd)
if err != nil {
return err
}
diff --git a/internal/gitaly/service/commit/count_commits_test.go b/internal/gitaly/service/commit/count_commits_test.go
index a69e0700b..c0c3d2f1d 100644
--- a/internal/gitaly/service/commit/count_commits_test.go
+++ b/internal/gitaly/service/commit/count_commits_test.go
@@ -16,23 +16,23 @@ import (
func TestSuccessfulCountCommitsRequest(t *testing.T) {
cfg, repo1, _, client := setupCommitServiceWithRepo(t, true)
- repo2, repo2Path, cleanupFn := gittest.InitRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repo2, repo2Path, cleanupFn := gittest.InitRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanupFn)
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
for i := 0; i < 5; i++ {
- testhelper.MustRunCommand(t, nil, "git", "-C", repo2Path,
+ gittest.Exec(t, cfg, "-C", repo2Path,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--allow-empty", "-m", "Empty commit")
}
- testhelper.MustRunCommand(t, nil, "git", "-C", repo2Path, "checkout", "-b", "another-branch")
+ gittest.Exec(t, cfg, "-C", repo2Path, "checkout", "-b", "another-branch")
for i := 0; i < 3; i++ {
- testhelper.MustRunCommand(t, nil, "git", "-C", repo2Path,
+ gittest.Exec(t, cfg, "-C", repo2Path,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--allow-empty", "-m", "Empty commit")
diff --git a/internal/gitaly/service/commit/count_diverging_commits_test.go b/internal/gitaly/service/commit/count_diverging_commits_test.go
index 96ab6c959..cf3d8b268 100644
--- a/internal/gitaly/service/commit/count_diverging_commits_test.go
+++ b/internal/gitaly/service/commit/count_diverging_commits_test.go
@@ -26,31 +26,31 @@ func createRepoWithDivergentBranches(t *testing.T, cfg config.Cfg, leftCommits,
f h
*/
- repo, worktreePath, cleanupFn := gittest.InitRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repo, worktreePath, cleanupFn := gittest.InitRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
for i := 0; i < 2; i++ {
- testhelper.MustRunCommand(t, nil, "git", "-C", worktreePath,
+ gittest.Exec(t, cfg, "-C", worktreePath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--allow-empty", "-m", fmt.Sprintf("master branch Empty commit %d", i))
}
- testhelper.MustRunCommand(t, nil, "git", "-C", worktreePath, "checkout", "-b", leftBranchName)
+ gittest.Exec(t, cfg, "-C", worktreePath, "checkout", "-b", leftBranchName)
for i := 0; i < leftCommits; i++ {
- testhelper.MustRunCommand(t, nil, "git", "-C", worktreePath,
+ gittest.Exec(t, cfg, "-C", worktreePath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--allow-empty", "-m", fmt.Sprintf("branch-1 Empty commit %d", i))
}
- testhelper.MustRunCommand(t, nil, "git", "-C", worktreePath, "checkout", "master")
- testhelper.MustRunCommand(t, nil, "git", "-C", worktreePath, "checkout", "-b", rightBranchName)
+ gittest.Exec(t, cfg, "-C", worktreePath, "checkout", "master")
+ gittest.Exec(t, cfg, "-C", worktreePath, "checkout", "-b", rightBranchName)
for i := 0; i < rightCommits; i++ {
- testhelper.MustRunCommand(t, nil, "git", "-C", worktreePath,
+ gittest.Exec(t, cfg, "-C", worktreePath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--allow-empty", "-m", fmt.Sprintf("branch-2 Empty commit %d", i))
diff --git a/internal/gitaly/service/commit/filter_shas_with_signatures.go b/internal/gitaly/service/commit/filter_shas_with_signatures.go
index 4c3e64f4a..e01832684 100644
--- a/internal/gitaly/service/commit/filter_shas_with_signatures.go
+++ b/internal/gitaly/service/commit/filter_shas_with_signatures.go
@@ -7,7 +7,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -37,7 +36,9 @@ func validateFirstFilterShasWithSignaturesRequest(in *gitalypb.FilterShasWithSig
func (s *server) filterShasWithSignatures(bidi gitalypb.CommitService_FilterShasWithSignaturesServer, firstRequest *gitalypb.FilterShasWithSignaturesRequest) error {
ctx := bidi.Context()
- c, err := catfile.New(ctx, s.gitCmdFactory, firstRequest.GetRepository())
+ repo := s.localrepo(firstRequest.GetRepository())
+
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
@@ -67,7 +68,7 @@ func (s *server) filterShasWithSignatures(bidi gitalypb.CommitService_FilterShas
func filterCommitShasWithSignatures(ctx context.Context, c catfile.Batch, shas [][]byte) ([][]byte, error) {
var foundShas [][]byte
for _, sha := range shas {
- commit, err := log.GetCommitCatfile(ctx, c, git.Revision(sha))
+ commit, err := catfile.GetCommit(ctx, c, git.Revision(sha))
if catfile.IsNotFound(err) {
continue
}
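
Across these hunks, catfile.New(ctx, s.gitCmdFactory, repo) gives way to s.catfileCache.BatchProcess(ctx, repo), and commit lookups move from log.GetCommitCatfile to catfile.GetCommit: the batch reader is obtained from a per-server cache instead of being spawned for every RPC. A rough, self-contained sketch of such a cache; the types and locking here are assumptions for illustration, not Gitaly's implementation:

// Illustrative sketch; not Gitaly's catfile cache implementation.
package main

import (
	"context"
	"fmt"
	"sync"
)

// batch is a hypothetical stand-in for a long-lived `git cat-file --batch`
// process attached to one repository.
type batch struct{ repo string }

func (b *batch) getCommit(ctx context.Context, rev string) (string, error) {
	return fmt.Sprintf("%s@%s", b.repo, rev), nil
}

// processCache hands out one batch per repository, creating it lazily and
// reusing it for subsequent requests against the same repository.
type processCache struct {
	mu      sync.Mutex
	batches map[string]*batch
}

func (c *processCache) batchProcess(ctx context.Context, repo string) (*batch, error) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.batches == nil {
		c.batches = map[string]*batch{}
	}
	if b, ok := c.batches[repo]; ok {
		return b, nil
	}
	b := &batch{repo: repo}
	c.batches[repo] = b
	return b, nil
}

func main() {
	processes := &processCache{}
	ctx := context.Background()
	b, err := processes.batchProcess(ctx, "gitlab-test.git")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	commit, _ := b.getCommit(ctx, "HEAD")
	fmt.Println(commit)
}
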
diff --git a/internal/gitaly/service/commit/find_all_commits.go b/internal/gitaly/service/commit/find_all_commits.go
index 3d4e3324d..6541bd017 100644
--- a/internal/gitaly/service/commit/find_all_commits.go
+++ b/internal/gitaly/service/commit/find_all_commits.go
@@ -32,9 +32,11 @@ func (s *server) FindAllCommits(in *gitalypb.FindAllCommitsRequest, stream gital
return err
}
+ repo := s.localrepo(in.GetRepository())
+
var revisions []string
if len(in.GetRevision()) == 0 {
- branchNames, err := _findBranchNamesFunc(stream.Context(), s.gitCmdFactory, in.Repository)
+ branchNames, err := _findBranchNamesFunc(stream.Context(), repo)
if err != nil {
return helper.ErrInvalidArgument(err)
}
@@ -46,7 +48,7 @@ func (s *server) FindAllCommits(in *gitalypb.FindAllCommitsRequest, stream gital
revisions = []string{string(in.GetRevision())}
}
- if err := s.findAllCommits(in, stream, revisions); err != nil {
+ if err := s.findAllCommits(repo, in, stream, revisions); err != nil {
return helper.ErrInternal(err)
}
@@ -61,7 +63,7 @@ func validateFindAllCommitsRequest(in *gitalypb.FindAllCommitsRequest) error {
return nil
}
-func (s *server) findAllCommits(in *gitalypb.FindAllCommitsRequest, stream gitalypb.CommitService_FindAllCommitsServer, revisions []string) error {
+func (s *server) findAllCommits(repo git.RepositoryExecutor, in *gitalypb.FindAllCommitsRequest, stream gitalypb.CommitService_FindAllCommitsServer, revisions []string) error {
sender := &findAllCommitsSender{stream: stream}
var gitLogExtraOptions []git.Option
@@ -80,5 +82,5 @@ func (s *server) findAllCommits(in *gitalypb.FindAllCommitsRequest, stream gital
gitLogExtraOptions = append(gitLogExtraOptions, git.Flag{Name: "--topo-order"})
}
- return sendCommits(stream.Context(), sender, s.gitCmdFactory, in.GetRepository(), revisions, nil, nil, gitLogExtraOptions...)
+ return s.sendCommits(stream.Context(), sender, repo, revisions, nil, nil, gitLogExtraOptions...)
}
diff --git a/internal/gitaly/service/commit/find_all_commits_test.go b/internal/gitaly/service/commit/find_all_commits_test.go
index 9b1905ba9..7928a14f7 100644
--- a/internal/gitaly/service/commit/find_all_commits_test.go
+++ b/internal/gitaly/service/commit/find_all_commits_test.go
@@ -18,7 +18,7 @@ func TestSuccessfulFindAllCommitsRequest(t *testing.T) {
_findBranchNamesFunc = ref.FindBranchNames
}()
- _findBranchNamesFunc = func(ctx context.Context, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository) ([][]byte, error) {
+ _findBranchNamesFunc = func(context.Context, git.RepositoryExecutor) ([][]byte, error) {
return [][]byte{
[]byte("few-commits"),
[]byte("two-commits"),
diff --git a/internal/gitaly/service/commit/find_commit.go b/internal/gitaly/service/commit/find_commit.go
index 349725235..adbb2aa54 100644
--- a/internal/gitaly/service/commit/find_commit.go
+++ b/internal/gitaly/service/commit/find_commit.go
@@ -16,7 +16,7 @@ func (s *server) FindCommit(ctx context.Context, in *gitalypb.FindCommitRequest)
return nil, helper.ErrInvalidArgument(err)
}
- repo := localrepo.New(s.gitCmdFactory, in.GetRepository(), s.cfg)
+ repo := s.localrepo(in.GetRepository())
var opts []localrepo.ReadCommitOpt
if in.GetTrailers() {
diff --git a/internal/gitaly/service/commit/find_commit_test.go b/internal/gitaly/service/commit/find_commit_test.go
index 7f5182566..c1a084b86 100644
--- a/internal/gitaly/service/commit/find_commit_test.go
+++ b/internal/gitaly/service/commit/find_commit_test.go
@@ -2,14 +2,12 @@ package commit
import (
"bufio"
- "io/ioutil"
"strings"
"testing"
"github.com/golang/protobuf/ptypes/timestamp"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/helper"
@@ -21,21 +19,20 @@ import (
)
func TestSuccessfulFindCommitRequest(t *testing.T) {
- windows1251Message, err := ioutil.ReadFile("testdata/commit-c809470461118b7bcab850f6e9a7ca97ac42f8ea-message.txt")
- require.NoError(t, err)
+ windows1251Message := testhelper.MustReadFile(t, "testdata/commit-c809470461118b7bcab850f6e9a7ca97ac42f8ea-message.txt")
cfg, repoProto, repoPath, client := setupCommitServiceWithRepo(t, true)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
bigMessage := "An empty commit with REALLY BIG message\n\n" + strings.Repeat("MOAR!\n", 20*1024)
- bigCommitID := gittest.CreateCommit(t, cfg, repoPath, "local-big-commits", &gittest.CreateCommitOpts{
- Message: bigMessage,
- ParentID: "60ecb67744cb56576c30214ff52294f8ce2def98",
- })
+ bigCommitID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithBranch("local-big-commits"), gittest.WithMessage(bigMessage),
+ gittest.WithParents("60ecb67744cb56576c30214ff52294f8ce2def98"),
+ )
bigCommit, err := repo.ReadCommit(ctx, git.Revision(bigCommitID))
require.NoError(t, err)
@@ -200,9 +197,9 @@ func TestSuccessfulFindCommitRequest(t *testing.T) {
},
{
description: "with a very large message",
- revision: bigCommitID,
+ revision: bigCommitID.String(),
commit: &gitalypb.GitCommit{
- Id: bigCommitID,
+ Id: bigCommitID.String(),
Subject: []byte("An empty commit with REALLY BIG message"),
Author: &gitalypb.CommitAuthor{
Name: []byte("Scrooge McDuck"),
@@ -316,8 +313,6 @@ func benchmarkFindCommit(withCache bool, b *testing.B) {
require.NoError(b, logCmd.Wait())
- defer catfile.ExpireAll()
-
for i := 0; i < b.N; i++ {
revision := revisions[b.N%len(revisions)]
if withCache {
@@ -357,8 +352,6 @@ func TestFindCommitWithCache(t *testing.T) {
require.NoError(t, logCmd.Wait())
- defer catfile.ExpireAll()
-
for i := 0; i < 10; i++ {
revision := revisions[i%len(revisions)]
md := metadata.New(map[string]string{
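
The test updates above replace gittest.CreateCommit plus a CreateCommitOpts struct with gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch(...), gittest.WithMessage(...), gittest.WithParents(...)), and the returned commit ID is now a typed object ID, hence the added .String() conversions. A generic sketch of that functional-options shape; the names below are not the gittest API:

// Generic functional-options sketch; not the gittest API.
package main

import "fmt"

type commitConfig struct {
	branch  string
	message string
	parents []string
}

// Option mutates the configuration before the commit is written.
type Option func(*commitConfig)

func WithBranch(b string) Option  { return func(c *commitConfig) { c.branch = b } }
func WithMessage(m string) Option { return func(c *commitConfig) { c.message = m } }
func WithParents(p ...string) Option {
	return func(c *commitConfig) { c.parents = p }
}

// ObjectID is a typed ID, so callers convert explicitly with String() where a
// plain string is required, as the updated tests now do.
type ObjectID string

func (id ObjectID) String() string { return string(id) }

// WriteCommit applies the options over defaults and returns the typed ID.
func WriteCommit(opts ...Option) ObjectID {
	cfg := commitConfig{branch: "main", message: "empty commit"}
	for _, opt := range opts {
		opt(&cfg)
	}
	return ObjectID(fmt.Sprintf("commit(%s,%q,%v)", cfg.branch, cfg.message, cfg.parents))
}

func main() {
	id := WriteCommit(WithBranch("local-big-commits"), WithMessage("big"), WithParents("60ecb677"))
	fmt.Println(id.String())
}
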
diff --git a/internal/gitaly/service/commit/find_commits.go b/internal/gitaly/service/commit/find_commits.go
index 8662ab593..11fbdc491 100644
--- a/internal/gitaly/service/commit/find_commits.go
+++ b/internal/gitaly/service/commit/find_commits.go
@@ -12,7 +12,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/git/trailerparser"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
@@ -26,11 +25,13 @@ func (s *server) FindCommits(req *gitalypb.FindCommitsRequest, stream gitalypb.C
return helper.ErrInvalidArgument(err)
}
+ repo := s.localrepo(req.GetRepository())
+
// Use Gitaly's default branch lookup function because that is already
// migrated.
if revision := req.Revision; len(revision) == 0 && !req.GetAll() {
var err error
- req.Revision, err = defaultBranchName(ctx, s.gitCmdFactory, req.Repository)
+ req.Revision, err = defaultBranchName(ctx, repo)
if err != nil {
return helper.ErrInternal(fmt.Errorf("defaultBranchName: %v", err))
}
@@ -51,12 +52,14 @@ func (s *server) FindCommits(req *gitalypb.FindCommitsRequest, stream gitalypb.C
func (s *server) findCommits(ctx context.Context, req *gitalypb.FindCommitsRequest, stream gitalypb.CommitService_FindCommitsServer) error {
opts := git.ConvertGlobalOptions(req.GetGlobalOptions())
- logCmd, err := s.gitCmdFactory.New(ctx, req.GetRepository(), getLogCommandSubCmd(req), opts...)
+ repo := s.localrepo(req.GetRepository())
+
+ logCmd, err := repo.Exec(ctx, getLogCommandSubCmd(req), opts...)
if err != nil {
return fmt.Errorf("error when creating git log command: %v", err)
}
- batch, err := catfile.New(ctx, s.gitCmdFactory, req.GetRepository())
+ batch, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return fmt.Errorf("creating catfile: %v", err)
}
@@ -137,7 +140,7 @@ func (g *GetCommits) Commit(ctx context.Context, trailers bool) (*gitalypb.GitCo
} else {
revision = logOutput
}
- commit, err := log.GetCommitCatfile(ctx, g.batch, git.Revision(revision))
+ commit, err := catfile.GetCommit(ctx, g.batch, git.Revision(revision))
if err != nil {
return nil, fmt.Errorf("cat-file get commit %q: %v", revision, err)
}
diff --git a/internal/gitaly/service/commit/find_commits_test.go b/internal/gitaly/service/commit/find_commits_test.go
index 1ab4f293a..2805aa16b 100644
--- a/internal/gitaly/service/commit/find_commits_test.go
+++ b/internal/gitaly/service/commit/find_commits_test.go
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"io"
- "io/ioutil"
"os/exec"
"testing"
@@ -18,8 +17,7 @@ import (
)
func TestFindCommitsFields(t *testing.T) {
- windows1251Message, err := ioutil.ReadFile("testdata/commit-c809470461118b7bcab850f6e9a7ca97ac42f8ea-message.txt")
- require.NoError(t, err)
+ windows1251Message := testhelper.MustReadFile(t, "testdata/commit-c809470461118b7bcab850f6e9a7ca97ac42f8ea-message.txt")
_, repo, _, client := setupCommitServiceWithRepo(t, true)
@@ -486,7 +484,7 @@ func TestSuccessfulFindCommitsRequestWithAltGitObjectDirs(t *testing.T) {
}
func TestSuccessfulFindCommitsRequestWithAmbiguousRef(t *testing.T) {
- _, repo, repoPath, client := setupCommitServiceWithRepo(t, false)
+ cfg, repo, repoPath, client := setupCommitServiceWithRepo(t, false)
// These are arbitrary SHAs in the repository. The important part is
// that we create a branch using one of them with a different SHA so
@@ -494,7 +492,7 @@ func TestSuccessfulFindCommitsRequestWithAmbiguousRef(t *testing.T) {
branchName := "1e292f8fedd741b75372e19097c76d327140c312"
commitSha := "6907208d755b60ebeacb2e9dfea74c92c3449a1f"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "checkout", "-b", branchName, commitSha)
+ gittest.Exec(t, cfg, "-C", repoPath, "checkout", "-b", branchName, commitSha)
request := &gitalypb.FindCommitsRequest{
Repository: repo,
diff --git a/internal/gitaly/service/commit/isancestor_test.go b/internal/gitaly/service/commit/isancestor_test.go
index d1c4f63f9..27bcc08c9 100644
--- a/internal/gitaly/service/commit/isancestor_test.go
+++ b/internal/gitaly/service/commit/isancestor_test.go
@@ -164,7 +164,7 @@ func TestSuccessfulIsAncestorRequestWithAltGitObjectDirs(t *testing.T) {
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
- previousHead := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
+ previousHead := gittest.Exec(t, cfg, "-C", repoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
cmd := exec.Command(cfg.Git.BinPath, "-C", repoPath,
"-c", fmt.Sprintf("user.name=%s", committerName),
diff --git a/internal/gitaly/service/commit/languages.go b/internal/gitaly/service/commit/languages.go
index 4fd4cb164..4c75f5c83 100644
--- a/internal/gitaly/service/commit/languages.go
+++ b/internal/gitaly/service/commit/languages.go
@@ -21,15 +21,15 @@ import (
var errAmbigRef = errors.New("ambiguous reference")
func (s *server) CommitLanguages(ctx context.Context, req *gitalypb.CommitLanguagesRequest) (*gitalypb.CommitLanguagesResponse, error) {
- repo := req.Repository
-
if err := git.ValidateRevisionAllowEmpty(req.Revision); err != nil {
return nil, helper.ErrInvalidArgument(err)
}
+ repo := s.localrepo(req.GetRepository())
+
revision := string(req.Revision)
if revision == "" {
- defaultBranch, err := ref.DefaultBranchName(ctx, s.gitCmdFactory, req.Repository)
+ defaultBranch, err := ref.DefaultBranchName(ctx, repo)
if err != nil {
return nil, err
}
@@ -41,7 +41,7 @@ func (s *server) CommitLanguages(ctx context.Context, req *gitalypb.CommitLangua
return nil, err
}
- repoPath, err := s.locator.GetRepoPath(repo)
+ repoPath, err := repo.Path()
if err != nil {
return nil, err
}
@@ -85,7 +85,7 @@ func (ls languageSorter) Len() int { return len(ls) }
func (ls languageSorter) Swap(i, j int) { ls[i], ls[j] = ls[j], ls[i] }
func (ls languageSorter) Less(i, j int) bool { return ls[i].Share > ls[j].Share }
-func (s *server) lookupRevision(ctx context.Context, repo *gitalypb.Repository, revision string) (string, error) {
+func (s *server) lookupRevision(ctx context.Context, repo git.RepositoryExecutor, revision string) (string, error) {
rev, err := s.checkRevision(ctx, repo, revision)
if err != nil {
switch err {
@@ -107,10 +107,10 @@ func (s *server) lookupRevision(ctx context.Context, repo *gitalypb.Repository,
return rev, nil
}
-func (s *server) checkRevision(ctx context.Context, repo *gitalypb.Repository, revision string) (string, error) {
+func (s *server) checkRevision(ctx context.Context, repo git.RepositoryExecutor, revision string) (string, error) {
var stdout, stderr bytes.Buffer
- revParse, err := s.gitCmdFactory.New(ctx, repo,
+ revParse, err := repo.Exec(ctx,
git.SubCmd{Name: "rev-parse", Args: []string{revision}},
git.WithStdout(&stdout),
git.WithStderr(&stderr),
@@ -132,8 +132,8 @@ func (s *server) checkRevision(ctx context.Context, repo *gitalypb.Repository, r
return text.ChompBytes(stdout.Bytes()), nil
}
-func (s *server) disambiguateRevision(ctx context.Context, repo *gitalypb.Repository, revision string) (string, error) {
- cmd, err := s.gitCmdFactory.New(ctx, repo, git.SubCmd{
+func (s *server) disambiguateRevision(ctx context.Context, repo git.RepositoryExecutor, revision string) (string, error) {
+ cmd, err := repo.Exec(ctx, git.SubCmd{
Name: "for-each-ref",
Flags: []git.Option{git.Flag{Name: "--format=%(refname)"}},
Args: []string{"**/" + revision},
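
In languages.go the helpers now operate on a git.RepositoryExecutor and call repo.Exec(ctx, git.SubCmd{...}) rather than s.gitCmdFactory.New(ctx, repo, ...), so the repository handle itself runs Git. A small stand-alone sketch of the structured-subcommand idea, with illustrative types only:

// Illustrative sketch of a structured Git subcommand; not Gitaly's git package.
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// SubCmd describes a Git invocation declaratively instead of as a raw argv.
type SubCmd struct {
	Name  string
	Flags []string
	Args  []string
}

// repoExecutor knows its own path, so callers no longer pass a repository
// message plus a command factory around.
type repoExecutor struct{ path string }

func (r repoExecutor) Exec(cmd SubCmd) (string, error) {
	argv := append([]string{"-C", r.path, cmd.Name}, append(cmd.Flags, cmd.Args...)...)
	out, err := exec.Command("git", argv...).Output()
	return strings.TrimSpace(string(out)), err
}

func main() {
	repo := repoExecutor{path: "."}
	rev, err := repo.Exec(SubCmd{Name: "rev-parse", Args: []string{"HEAD"}})
	if err != nil {
		fmt.Println("rev-parse failed:", err)
		return
	}
	fmt.Println(rev)
}
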
diff --git a/internal/gitaly/service/commit/last_commit_for_path.go b/internal/gitaly/service/commit/last_commit_for_path.go
index 5c6a1cbce..6d3e088a1 100644
--- a/internal/gitaly/service/commit/last_commit_for_path.go
+++ b/internal/gitaly/service/commit/last_commit_for_path.go
@@ -4,7 +4,6 @@ import (
"context"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -29,8 +28,8 @@ func (s *server) lastCommitForPath(ctx context.Context, in *gitalypb.LastCommitF
path = "."
}
- repo := in.GetRepository()
- c, err := catfile.New(ctx, s.gitCmdFactory, repo)
+ repo := s.localrepo(in.GetRepository())
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return nil, err
}
diff --git a/internal/gitaly/service/commit/last_commit_for_path_test.go b/internal/gitaly/service/commit/last_commit_for_path_test.go
index 4c60c7766..bb65ef25e 100644
--- a/internal/gitaly/service/commit/last_commit_for_path_test.go
+++ b/internal/gitaly/service/commit/last_commit_for_path_test.go
@@ -4,6 +4,7 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -121,13 +122,10 @@ func TestSuccessfulLastCommitWithGlobCharacters(t *testing.T) {
const blobID = "c60514b6d3d6bf4bec1030f70026e34dfbd69ad5"
path := ":wq"
- commitID := gittest.CommitBlobWithName(
- t,
- cfg,
- repoPath,
- blobID,
- path,
- "commit for filename with glob characters",
+ commitID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ Mode: "100644", Path: path, OID: git.ObjectID(blobID),
+ }),
)
request := &gitalypb.LastCommitForPathRequest{
@@ -142,7 +140,7 @@ func TestSuccessfulLastCommitWithGlobCharacters(t *testing.T) {
response, err := client.LastCommitForPath(ctx, request)
require.NoError(t, err)
require.NotNil(t, response.GetCommit())
- require.Equal(t, commitID, response.GetCommit().Id)
+ require.Equal(t, commitID.String(), response.GetCommit().Id)
request.LiteralPathspec = false
response, err = client.LastCommitForPath(ctx, request)
diff --git a/internal/gitaly/service/commit/list_commits_by_oid.go b/internal/gitaly/service/commit/list_commits_by_oid.go
index b0a6463d3..259c60f5b 100644
--- a/internal/gitaly/service/commit/list_commits_by_oid.go
+++ b/internal/gitaly/service/commit/list_commits_by_oid.go
@@ -6,7 +6,6 @@ import (
"github.com/prometheus/client_golang/prometheus/promauto"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- gitlog "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -26,8 +25,9 @@ var (
func (s *server) ListCommitsByOid(in *gitalypb.ListCommitsByOidRequest, stream gitalypb.CommitService_ListCommitsByOidServer) error {
ctx := stream.Context()
+ repo := s.localrepo(in.GetRepository())
- c, err := catfile.New(ctx, s.gitCmdFactory, in.Repository)
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
@@ -36,7 +36,7 @@ func (s *server) ListCommitsByOid(in *gitalypb.ListCommitsByOidRequest, stream g
listCommitsbyOidHistogram.Observe(float64(len(in.Oid)))
for _, oid := range in.Oid {
- commit, err := gitlog.GetCommitCatfile(ctx, c, git.Revision(oid))
+ commit, err := catfile.GetCommit(ctx, c, git.Revision(oid))
if catfile.IsNotFound(err) {
continue
}
diff --git a/internal/gitaly/service/commit/list_commits_by_ref_name.go b/internal/gitaly/service/commit/list_commits_by_ref_name.go
index 6db277149..518408153 100644
--- a/internal/gitaly/service/commit/list_commits_by_ref_name.go
+++ b/internal/gitaly/service/commit/list_commits_by_ref_name.go
@@ -4,7 +4,6 @@ import (
"github.com/golang/protobuf/proto"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- gitlog "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -12,8 +11,9 @@ import (
func (s *server) ListCommitsByRefName(in *gitalypb.ListCommitsByRefNameRequest, stream gitalypb.CommitService_ListCommitsByRefNameServer) error {
ctx := stream.Context()
+ repo := s.localrepo(in.GetRepository())
- c, err := catfile.New(ctx, s.gitCmdFactory, in.Repository)
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return helper.ErrInternal(err)
}
@@ -21,7 +21,7 @@ func (s *server) ListCommitsByRefName(in *gitalypb.ListCommitsByRefNameRequest,
sender := chunk.New(&commitsByRefNameSender{stream: stream})
for _, refName := range in.RefNames {
- commit, err := gitlog.GetCommitCatfile(ctx, c, git.Revision(refName))
+ commit, err := catfile.GetCommit(ctx, c, git.Revision(refName))
if catfile.IsNotFound(err) {
continue
}
diff --git a/internal/gitaly/service/commit/list_files.go b/internal/gitaly/service/commit/list_files.go
index a39d06e30..5e4a7a7a4 100644
--- a/internal/gitaly/service/commit/list_files.go
+++ b/internal/gitaly/service/commit/list_files.go
@@ -8,7 +8,6 @@ import (
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
log "github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/lstree"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
@@ -26,14 +25,14 @@ func (s *server) ListFiles(in *gitalypb.ListFilesRequest, stream gitalypb.Commit
return err
}
- repo := in.Repository
- if _, err := s.locator.GetRepoPath(repo); err != nil {
+ repo := s.localrepo(in.GetRepository())
+ if _, err := repo.Path(); err != nil {
return err
}
revision := string(in.GetRevision())
if len(revision) == 0 {
- defaultBranch, err := defaultBranchName(stream.Context(), s.gitCmdFactory, repo)
+ defaultBranch, err := defaultBranchName(stream.Context(), repo)
if err != nil {
return helper.DecorateError(codes.NotFound, fmt.Errorf("revision not found %q", revision))
}
@@ -45,7 +44,7 @@ func (s *server) ListFiles(in *gitalypb.ListFilesRequest, stream gitalypb.Commit
revision = string(defaultBranch)
}
- contained, err := localrepo.New(s.gitCmdFactory, in.Repository, s.cfg).HasRevision(stream.Context(), git.Revision(revision))
+ contained, err := s.localrepo(repo).HasRevision(stream.Context(), git.Revision(revision))
if err != nil {
return helper.ErrInternal(err)
}
@@ -68,8 +67,8 @@ func validateListFilesRequest(in *gitalypb.ListFilesRequest) error {
return nil
}
-func (s *server) listFiles(repo *gitalypb.Repository, revision string, stream gitalypb.CommitService_ListFilesServer) error {
- cmd, err := s.gitCmdFactory.New(stream.Context(), repo, git.SubCmd{Name: "ls-tree",
+func (s *server) listFiles(repo git.RepositoryExecutor, revision string, stream gitalypb.CommitService_ListFilesServer) error {
+ cmd, err := repo.Exec(stream.Context(), git.SubCmd{Name: "ls-tree",
Flags: []git.Option{git.Flag{Name: "-z"}, git.Flag{Name: "-r"}, git.Flag{Name: "--full-tree"}, git.Flag{Name: "--full-name"}},
PostSepArgs: []string{revision},
})
diff --git a/internal/gitaly/service/commit/list_files_test.go b/internal/gitaly/service/commit/list_files_test.go
index bcfe2fdf2..05582b0ed 100644
--- a/internal/gitaly/service/commit/list_files_test.go
+++ b/internal/gitaly/service/commit/list_files_test.go
@@ -42,7 +42,7 @@ var (
)
func TestListFiles_success(t *testing.T) {
- defaultBranchName = func(context.Context, git.CommandFactory, *gitalypb.Repository) ([]byte, error) {
+ defaultBranchName = func(context.Context, git.RepositoryExecutor) ([]byte, error) {
return []byte("test-do-not-touch"), nil
}
defer func() {
@@ -135,7 +135,7 @@ func TestListFiles_success(t *testing.T) {
func TestListFiles_unbornBranch(t *testing.T) {
cfg, _, _, client := setupCommitServiceWithRepo(t, true)
- repo, _, _ := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repo, _, _ := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
tests := []struct {
desc string
diff --git a/internal/gitaly/service/commit/list_last_commits_for_tree.go b/internal/gitaly/service/commit/list_last_commits_for_tree.go
index 7c4adc059..bc9f8f4ad 100644
--- a/internal/gitaly/service/commit/list_last_commits_for_tree.go
+++ b/internal/gitaly/service/commit/list_last_commits_for_tree.go
@@ -7,7 +7,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/git/lstree"
"gitlab.com/gitlab-org/gitaly/internal/helper"
@@ -44,8 +43,8 @@ func (s *server) listLastCommitsForTree(in *gitalypb.ListLastCommitsForTreeReque
}
ctx := stream.Context()
- repo := in.GetRepository()
- c, err := catfile.New(ctx, s.gitCmdFactory, repo)
+ repo := s.localrepo(in.GetRepository())
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
diff --git a/internal/gitaly/service/commit/list_last_commits_for_tree_test.go b/internal/gitaly/service/commit/list_last_commits_for_tree_test.go
index 88eef4f15..f43b67a5d 100644
--- a/internal/gitaly/service/commit/list_last_commits_for_tree_test.go
+++ b/internal/gitaly/service/commit/list_last_commits_for_tree_test.go
@@ -331,18 +331,15 @@ func TestNonUtf8ListLastCommitsForTreeRequest(t *testing.T) {
nonUTF8Filename := "hello\x80world"
require.False(t, utf8.ValidString(nonUTF8Filename))
- commitID := gittest.CommitBlobWithName(
- t,
- cfg,
- repoPath,
- blobID,
- nonUTF8Filename,
- "commit for non-utf8 path",
+ commitID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ Mode: "100644", Path: nonUTF8Filename, OID: blobID,
+ }),
)
request := &gitalypb.ListLastCommitsForTreeRequest{
Repository: repo,
- Revision: commitID,
+ Revision: commitID.String(),
Limit: 100,
Offset: 0,
}
@@ -354,17 +351,15 @@ func TestNonUtf8ListLastCommitsForTreeRequest(t *testing.T) {
}
func TestSuccessfulListLastCommitsForTreeRequestWithGlobCharacters(t *testing.T) {
- _, repo, repoPath, client := setupCommitServiceWithRepo(t, false)
+ cfg, repo, repoPath, client := setupCommitServiceWithRepo(t, false)
path := ":wq"
err := os.Mkdir(filepath.Join(repoPath, path), 0755)
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "testhelper.TestUser.name", "test@example.com")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "testhelper.TestUser.email", "test@example.com")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "mv", "README.md", path)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-a", "-m", "renamed test file")
- commitID := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "HEAD"))
+ gittest.Exec(t, cfg, "-C", repoPath, "mv", "README.md", path)
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-a", "-m", "renamed test file")
+ commitID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "HEAD"))
request := &gitalypb.ListLastCommitsForTreeRequest{
Repository: repo,
diff --git a/internal/gitaly/service/commit/server.go b/internal/gitaly/service/commit/server.go
index 77d25709d..1e6ea2586 100644
--- a/internal/gitaly/service/commit/server.go
+++ b/internal/gitaly/service/commit/server.go
@@ -2,6 +2,9 @@ package commit
import (
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/linguist"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/ref"
@@ -14,6 +17,7 @@ type server struct {
locator storage.Locator
gitCmdFactory git.CommandFactory
linguist *linguist.Instance
+ catfileCache catfile.Cache
}
var (
@@ -21,6 +25,22 @@ var (
)
// NewServer creates a new instance of a grpc CommitServiceServer
-func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, ling *linguist.Instance) gitalypb.CommitServiceServer {
- return &server{cfg: cfg, locator: locator, gitCmdFactory: gitCmdFactory, linguist: ling}
+func NewServer(
+ cfg config.Cfg,
+ locator storage.Locator,
+ gitCmdFactory git.CommandFactory,
+ ling *linguist.Instance,
+ catfileCache catfile.Cache,
+) gitalypb.CommitServiceServer {
+ return &server{
+ cfg: cfg,
+ locator: locator,
+ gitCmdFactory: gitCmdFactory,
+ linguist: ling,
+ catfileCache: catfileCache,
+ }
+}
+
+func (s *server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
}
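
server.go carries the wiring for all of the above: NewServer gains a catfile.Cache parameter, the server struct stores it, and the new localrepo helper assembles a *localrepo.Repo from the injected command factory, cache, and config. A compact sketch of that constructor-plus-helper pattern, using placeholder types only:

// Placeholder sketch of dependency injection plus a per-request repo helper.
package main

import "fmt"

type Config struct{ Storage string }

type CommandFactory struct{}

type CatfileCache struct{}

// Repo bundles everything a handler needs to talk to one repository.
type Repo struct {
	cfg     Config
	factory *CommandFactory
	cache   *CatfileCache
	relPath string
}

type Server struct {
	cfg     Config
	factory *CommandFactory
	cache   *CatfileCache
}

// NewServer receives every long-lived dependency once, at wiring time.
func NewServer(cfg Config, factory *CommandFactory, cache *CatfileCache) *Server {
	return &Server{cfg: cfg, factory: factory, cache: cache}
}

// localrepo turns a request's repository reference into a full handle using
// only state the server already owns.
func (s *Server) localrepo(relPath string) *Repo {
	return &Repo{cfg: s.cfg, factory: s.factory, cache: s.cache, relPath: relPath}
}

func main() {
	s := NewServer(Config{Storage: "default"}, &CommandFactory{}, &CatfileCache{})
	repo := s.localrepo("gitlab-test.git")
	fmt.Println(repo.relPath, "on", repo.cfg.Storage)
}
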
diff --git a/internal/gitaly/service/commit/stats.go b/internal/gitaly/service/commit/stats.go
index 588415e6e..e52b67ed8 100644
--- a/internal/gitaly/service/commit/stats.go
+++ b/internal/gitaly/service/commit/stats.go
@@ -8,7 +8,6 @@ import (
"strings"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -27,7 +26,7 @@ func (s *server) CommitStats(ctx context.Context, in *gitalypb.CommitStatsReques
}
func (s *server) commitStats(ctx context.Context, in *gitalypb.CommitStatsRequest) (*gitalypb.CommitStatsResponse, error) {
- repo := localrepo.New(s.gitCmdFactory, in.Repository, s.cfg)
+ repo := s.localrepo(in.GetRepository())
commit, err := repo.ReadCommit(ctx, git.Revision(in.Revision))
if err != nil {
diff --git a/internal/gitaly/service/commit/testhelper_test.go b/internal/gitaly/service/commit/testhelper_test.go
index 91cf07f35..fe593096a 100644
--- a/internal/gitaly/service/commit/testhelper_test.go
+++ b/internal/gitaly/service/commit/testhelper_test.go
@@ -2,18 +2,17 @@ package commit
import (
"io"
- "net"
"os"
"testing"
"github.com/golang/protobuf/ptypes/timestamp"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/linguist"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
)
@@ -43,9 +42,9 @@ func setupCommitServiceWithRepo(
) (config.Cfg, *gitalypb.Repository, string, gitalypb.CommitServiceClient) {
return setupCommitServiceCreateRepo(t, func(tb testing.TB, cfg config.Cfg) (*gitalypb.Repository, string, testhelper.Cleanup) {
if bare {
- return gittest.CloneRepoAtStorage(tb, cfg.Storages[0], t.Name())
+ return gittest.CloneRepoAtStorage(tb, cfg, cfg.Storages[0], t.Name())
}
- return gittest.CloneRepoWithWorktreeAtStorage(tb, cfg.Storages[0])
+ return gittest.CloneRepoWithWorktreeAtStorage(tb, cfg, cfg.Storages[0])
})
}
@@ -67,22 +66,15 @@ func setupCommitServiceCreateRepo(
func startTestServices(t testing.TB, cfg config.Cfg) string {
t.Helper()
-
- server := testhelper.NewTestGrpcServer(t, nil, nil)
- t.Cleanup(server.Stop)
-
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
-
- listener, err := net.Listen("unix", serverSocketPath)
- require.NoError(t, err)
-
- ling, err := linguist.New(cfg)
- require.NoError(t, err)
-
- gitalypb.RegisterCommitServiceServer(server, NewServer(cfg, config.NewLocator(cfg), git.NewExecCommandFactory(cfg), ling))
-
- go server.Serve(listener)
- return "unix://" + serverSocketPath
+ return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterCommitServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetLinguist(),
+ deps.GetCatfileCache(),
+ ))
+ })
}
func newCommitServiceClient(t testing.TB, serviceSocketPath string) gitalypb.CommitServiceClient {
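
The rewritten test helper above drops its hand-rolled socket, listener, and linguist setup in favour of testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) { ... }): shared dependencies are built once and each test registers only the service it needs. A stripped-down sketch of that callback style, without real gRPC, just the shape:

// Stripped-down sketch of a "run server with a registration callback" helper;
// it mimics the shape of the change, not the testserver package itself.
package main

import "fmt"

type Dependencies struct {
	Locator string
	Cache   string
}

type Server struct{ services []string }

func (s *Server) Register(name string) { s.services = append(s.services, name) }

// RunTestServer owns dependency construction and teardown; the caller only
// says which services to register against the shared dependencies.
func RunTestServer(register func(srv *Server, deps *Dependencies)) string {
	deps := &Dependencies{Locator: "config-locator", Cache: "catfile-cache"}
	srv := &Server{}
	register(srv, deps)
	return fmt.Sprintf("listening with %v", srv.services)
}

func main() {
	addr := RunTestServer(func(srv *Server, deps *Dependencies) {
		srv.Register("CommitService(" + deps.Cache + ")")
	})
	fmt.Println(addr)
}
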
diff --git a/internal/gitaly/service/commit/tree_entries.go b/internal/gitaly/service/commit/tree_entries.go
index 3bd5a414d..1349cdeb7 100644
--- a/internal/gitaly/service/commit/tree_entries.go
+++ b/internal/gitaly/service/commit/tree_entries.go
@@ -99,7 +99,9 @@ func (s *server) GetTreeEntries(in *gitalypb.GetTreeEntriesRequest, stream gital
return status.Errorf(codes.InvalidArgument, "TreeEntry: %v", err)
}
- c, err := catfile.New(stream.Context(), s.gitCmdFactory, in.Repository)
+ repo := s.localrepo(in.GetRepository())
+
+ c, err := s.catfileCache.BatchProcess(stream.Context(), repo)
if err != nil {
return err
}
diff --git a/internal/gitaly/service/commit/tree_entries_test.go b/internal/gitaly/service/commit/tree_entries_test.go
index 47f4dddee..fb3db1e1f 100644
--- a/internal/gitaly/service/commit/tree_entries_test.go
+++ b/internal/gitaly/service/commit/tree_entries_test.go
@@ -9,6 +9,7 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -16,7 +17,7 @@ import (
)
func TestSuccessfulGetTreeEntriesWithCurlyBraces(t *testing.T) {
- _, repo, repoPath, client := setupCommitServiceWithRepo(t, false)
+ cfg, repo, repoPath, client := setupCommitServiceWithRepo(t, false)
normalFolderName := "issue-46261/folder"
curlyFolderName := "issue-46261/{{curly}}"
@@ -29,8 +30,8 @@ func TestSuccessfulGetTreeEntriesWithCurlyBraces(t *testing.T) {
testhelper.MustRunCommand(t, nil, "touch", filepath.Join(normalFolder, "/test1.txt"))
testhelper.MustRunCommand(t, nil, "touch", filepath.Join(curlyFolder, "/test2.txt"))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "add", "--all")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-m", "Test commit")
+ gittest.Exec(t, cfg, "-C", repoPath, "add", "--all")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "Test commit")
testCases := []struct {
description string
@@ -394,7 +395,7 @@ func getTreeEntriesFromTreeEntryClient(t *testing.T, client gitalypb.CommitServi
}
func TestSuccessfulGetTreeEntries_FlatPathMaxDeep_SingleFoldersStructure(t *testing.T) {
- _, repo, repoPath, client := setupCommitServiceWithRepo(t, false)
+ cfg, repo, repoPath, client := setupCommitServiceWithRepo(t, false)
folderName := "1/2/3/4/5/6/7/8/9/10/11/12"
require.GreaterOrEqual(t, strings.Count(strings.Trim(folderName, "/"), "/"), defaultFlatTreeRecursion, "sanity check: construct folder deeper than default recursion value")
@@ -403,11 +404,11 @@ func TestSuccessfulGetTreeEntries_FlatPathMaxDeep_SingleFoldersStructure(t *test
require.NoError(t, os.MkdirAll(nestedFolder, 0755))
// put single file into the deepest directory
testhelper.MustRunCommand(t, nil, "touch", filepath.Join(nestedFolder, ".gitkeep"))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "add", "--all")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-m", "Deep folder struct")
+ gittest.Exec(t, cfg, "-C", repoPath, "add", "--all")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "Deep folder struct")
- commitID := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "HEAD"))
- rootOid := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "HEAD^{tree}"))
+ commitID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "HEAD"))
+ rootOid := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "HEAD^{tree}"))
// make request to folder that contains nothing except one folder
request := &gitalypb.GetTreeEntriesRequest{
diff --git a/internal/gitaly/service/commit/tree_entry.go b/internal/gitaly/service/commit/tree_entry.go
index 1c28d282a..e28c96b04 100644
--- a/internal/gitaly/service/commit/tree_entry.go
+++ b/internal/gitaly/service/commit/tree_entry.go
@@ -122,6 +122,8 @@ func (s *server) TreeEntry(in *gitalypb.TreeEntryRequest, stream gitalypb.Commit
return status.Errorf(codes.InvalidArgument, "TreeEntry: %v", err)
}
+ repo := s.localrepo(in.GetRepository())
+
requestPath := string(in.GetPath())
// filepath.Dir("api/docs") => "api" Correct!
// filepath.Dir("api/docs/") => "api/docs" WRONG!
@@ -129,8 +131,7 @@ func (s *server) TreeEntry(in *gitalypb.TreeEntryRequest, stream gitalypb.Commit
requestPath = strings.TrimRight(requestPath, "/")
}
- c, err := catfile.New(stream.Context(), s.gitCmdFactory, in.Repository)
-
+ c, err := s.catfileCache.BatchProcess(stream.Context(), repo)
if err != nil {
return err
}
diff --git a/internal/gitaly/service/conflicts/list_conflict_files.go b/internal/gitaly/service/conflicts/list_conflict_files.go
index 51189d554..f76ae528b 100644
--- a/internal/gitaly/service/conflicts/list_conflict_files.go
+++ b/internal/gitaly/service/conflicts/list_conflict_files.go
@@ -8,7 +8,6 @@ import (
"unicode/utf8"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git2go"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -22,7 +21,7 @@ func (s *server) ListConflictFiles(request *gitalypb.ListConflictFilesRequest, s
return helper.ErrInvalidArgument(err)
}
- repo := localrepo.New(s.gitCmdFactory, request.Repository, s.cfg)
+ repo := s.localrepo(request.GetRepository())
ours, err := repo.ResolveRevision(ctx, git.Revision(request.OurCommitOid+"^{commit}"))
if err != nil {
diff --git a/internal/gitaly/service/conflicts/list_conflict_files_test.go b/internal/gitaly/service/conflicts/list_conflict_files_test.go
index c8dde14b1..54c138850 100644
--- a/internal/gitaly/service/conflicts/list_conflict_files_test.go
+++ b/internal/gitaly/service/conflicts/list_conflict_files_test.go
@@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -184,15 +185,15 @@ func buildCommit(t *testing.T, ctx context.Context, cfg config.Cfg, repo *gitaly
for file, contents := range files {
filePath := filepath.Join(repoPath, file)
require.NoError(t, ioutil.WriteFile(filePath, contents, 0666))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "add", filePath)
+ gittest.Exec(t, cfg, "-C", repoPath, "add", filePath)
}
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-m", "message")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "message")
- oid, err := localrepo.New(git.NewExecCommandFactory(cfg), repo, cfg).ResolveRevision(ctx, git.Revision("HEAD"))
+ oid, err := localrepo.NewTestRepo(t, cfg, repo).ResolveRevision(ctx, git.Revision("HEAD"))
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "reset", "--hard", "HEAD~")
+ gittest.Exec(t, cfg, "-C", repoPath, "reset", "--hard", "HEAD~")
return oid.String()
}
diff --git a/internal/gitaly/service/conflicts/resolve_conflicts.go b/internal/gitaly/service/conflicts/resolve_conflicts.go
index 2b788e798..a653f782e 100644
--- a/internal/gitaly/service/conflicts/resolve_conflicts.go
+++ b/internal/gitaly/service/conflicts/resolve_conflicts.go
@@ -19,10 +19,8 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/remoterepo"
"gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/git2go"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/gitalyssh"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -43,49 +41,8 @@ func (s *server) ResolveConflicts(stream gitalypb.ConflictsService_ResolveConfli
return status.Errorf(codes.InvalidArgument, "ResolveConflicts: %v", err)
}
- if featureflag.IsEnabled(stream.Context(), featureflag.GoResolveConflicts) {
- err := s.resolveConflicts(header, stream)
- return handleResolveConflictsErr(err, stream)
- }
-
- ctx := stream.Context()
- client, err := s.ruby.ConflictsServiceClient(ctx)
- if err != nil {
- return err
- }
-
- clientCtx, err := rubyserver.SetHeaders(ctx, s.locator, header.GetRepository())
- if err != nil {
- return err
- }
-
- rubyStream, err := client.ResolveConflicts(clientCtx)
- if err != nil {
- return err
- }
-
- if err := rubyStream.Send(firstRequest); err != nil {
- return err
- }
-
- err = rubyserver.Proxy(func() error {
- request, err := stream.Recv()
- if err != nil {
- return err
- }
- return rubyStream.Send(request)
- })
-
- if err != nil {
- return err
- }
-
- response, err := rubyStream.CloseAndRecv()
- if err != nil {
- return err
- }
-
- return stream.SendAndClose(response)
+ err = s.resolveConflicts(header, stream)
+ return handleResolveConflictsErr(err, stream)
}
func handleResolveConflictsErr(err error, stream gitalypb.ConflictsService_ResolveConflictsServer) error {
@@ -183,7 +140,7 @@ func (s *server) resolveConflicts(header *gitalypb.ResolveConflictsRequestHeader
}
ctx := stream.Context()
- sourceRepo := localrepo.New(s.gitCmdFactory, header.GetRepository(), s.cfg)
+ sourceRepo := s.localrepo(header.GetRepository())
targetRepo, err := remoterepo.New(ctx, header.GetTargetRepository(), s.pool)
if err != nil {
return err
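
resolve_conflicts.go removes the flag-gated proxy to the Ruby sidecar; ResolveConflicts now always runs the Go implementation and routes its result through handleResolveConflictsErr. A generic sketch of the single remaining code path (call the implementation, translate its error); all names here are illustrative:

// Illustrative sketch of a handler after a feature-flag fallback is removed.
package main

import (
	"errors"
	"fmt"
)

type request struct{ oid string }

func resolveInGo(req request) error {
	if req.oid == "" {
		return errors.New("empty OID")
	}
	return nil
}

// handleErr translates internal errors into the response the client expects,
// playing the role of an error-translation helper around the main call.
func handleErr(err error) string {
	if err != nil {
		return "resolution error: " + err.Error()
	}
	return "ok"
}

// With the flag gone there is exactly one code path: run the Go
// implementation and translate its error; the old proxy branch no longer exists.
func resolveConflicts(req request) string {
	return handleErr(resolveInGo(req))
}

func main() {
	fmt.Println(resolveConflicts(request{oid: "1450cd63"}))
	fmt.Println(resolveConflicts(request{}))
}
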
diff --git a/internal/gitaly/service/conflicts/resolve_conflicts_test.go b/internal/gitaly/service/conflicts/resolve_conflicts_test.go
index 3dca33205..6fb7c8114 100644
--- a/internal/gitaly/service/conflicts/resolve_conflicts_test.go
+++ b/internal/gitaly/service/conflicts/resolve_conflicts_test.go
@@ -2,7 +2,6 @@ package conflicts_test
import (
"bytes"
- "context"
"encoding/json"
"io/ioutil"
"os/exec"
@@ -15,12 +14,8 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/conflicts"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/metadata"
@@ -55,35 +50,13 @@ var (
}
)
-func TestWithRubyServer(t *testing.T) {
- cfg := testcfg.Build(t)
-
- rubySrv := rubyserver.New(cfg)
- require.NoError(t, rubySrv.Start())
- t.Cleanup(rubySrv.Stop)
-
- t.Run("testSuccessfulResolveConflictsRequest", func(t *testing.T) { testSuccessfulResolveConflictsRequest(t, cfg, rubySrv) })
- t.Run("testResolveConflictsWithRemoteRepo", func(t *testing.T) { testResolveConflictsWithRemoteRepo(t, cfg, rubySrv) })
- t.Run("testResolveConflictsLineEndings", func(t *testing.T) { testResolveConflictsLineEndings(t, cfg, rubySrv) })
- t.Run("testResolveConflictsNonOIDRequests", func(t *testing.T) { testResolveConflictsNonOIDRequests(t, cfg, rubySrv) })
- t.Run("testResolveConflictsIdenticalContent", func(t *testing.T) { testResolveConflictsIdenticalContent(t, cfg, rubySrv) })
- t.Run("testResolveConflictsStableID", func(t *testing.T) { testResolveConflictsStableID(t, cfg, rubySrv) })
- t.Run("testFailedResolveConflictsRequestDueToResolutionError", func(t *testing.T) { testFailedResolveConflictsRequestDueToResolutionError(t, cfg, rubySrv) })
- t.Run("testFailedResolveConflictsRequestDueToValidation", func(t *testing.T) { testFailedResolveConflictsRequestDueToValidation(t, cfg, rubySrv) })
-}
-
-func testSuccessfulResolveConflictsRequest(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.GoResolveConflicts,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testSuccessfulResolveConflictsRequestFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestSuccessfulResolveConflictsRequest(t *testing.T) {
+ cfg, repoProto, repoPath, client := conflicts.SetupConflictsService(t, true)
-func testSuccessfulResolveConflictsRequestFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- cfg, repoProto, repoPath, client := conflicts.SetupConflictsServiceWithRuby(t, cfg, rubySrv, true)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
mdGS := testhelper.GitalyServersMetadataFromCfg(t, cfg)
mdFF, _ := metadata.FromOutgoingContext(ctx)
@@ -130,24 +103,19 @@ func testSuccessfulResolveConflictsRequestFeatured(t *testing.T, ctx context.Con
commitConflict := func(parentCommitID, branch, blob string) string {
blobID, err := repo.WriteBlob(ctx, "", strings.NewReader(blob))
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "read-tree", branch)
- testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath,
+ gittest.Exec(t, cfg, "-C", repoPath, "read-tree", branch)
+ gittest.Exec(t, cfg, "-C", repoPath,
"update-index", "--add", "--cacheinfo", "100644", blobID.String(), missingAncestorPath,
)
treeID := bytes.TrimSpace(
- testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath, "write-tree",
- ),
+ gittest.Exec(t, cfg, "-C", repoPath, "write-tree"),
)
commitID := bytes.TrimSpace(
- testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath,
+ gittest.Exec(t, cfg, "-C", repoPath,
"commit-tree", string(treeID), "-p", parentCommitID,
),
)
- testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath, "update-ref", "refs/heads/"+branch, string(commitID))
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/"+branch, string(commitID))
return string(commitID)
}
@@ -203,33 +171,35 @@ func testSuccessfulResolveConflictsRequestFeatured(t *testing.T, ctx context.Con
require.Equal(t, string(headCommit.Subject), conflictResolutionCommitMessage)
}
-func testResolveConflictsWithRemoteRepo(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.GoResolveConflicts,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testResolveConflictsWithRemoteRepoFeatured(t, ctx, cfg, rubySrv)
- })
-}
-
-func testResolveConflictsWithRemoteRepoFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- cfg, _, _, client := conflicts.SetupConflictsServiceWithRuby(t, cfg, rubySrv, true)
+func TestResolveConflictsWithRemoteRepo(t *testing.T) {
+ cfg, _, _, client := conflicts.SetupConflictsService(t, true)
testhelper.ConfigureGitalySSHBin(t, cfg)
testhelper.ConfigureGitalyHooksBin(t, cfg)
- sourceRepo, sourceRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "source")
+ sourceRepo, sourceRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "source")
t.Cleanup(cleanup)
- sourceBlobOID := gittest.WriteBlob(t, sourceRepoPath, []byte("contents-1\n"))
- sourceCommitOID := gittest.CommitBlobWithName(t, cfg, sourceRepoPath, sourceBlobOID.String(), "file.txt", "message")
- testhelper.MustRunCommand(t, nil, "git", "-C", sourceRepoPath, "update-ref", "refs/heads/source", sourceCommitOID)
-
- targetRepo, targetRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "target")
+ sourceBlobOID := gittest.WriteBlob(t, cfg, sourceRepoPath, []byte("contents-1\n"))
+ sourceCommitOID := gittest.WriteCommit(t, cfg, sourceRepoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ Path: "file.txt", OID: sourceBlobOID, Mode: "100644",
+ }),
+ )
+ gittest.Exec(t, cfg, "-C", sourceRepoPath, "update-ref", "refs/heads/source", sourceCommitOID.String())
+
+ targetRepo, targetRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "target")
t.Cleanup(cleanup)
- targetBlobOID := gittest.WriteBlob(t, targetRepoPath, []byte("contents-2\n"))
- targetCommitOID := gittest.CommitBlobWithName(t, cfg, targetRepoPath, targetBlobOID.String(), "file.txt", "message")
- testhelper.MustRunCommand(t, nil, "git", "-C", targetRepoPath, "update-ref", "refs/heads/target", targetCommitOID)
-
- ctx = testhelper.MergeOutgoingMetadata(ctx, testhelper.GitalyServersMetadata(t, cfg.SocketPath))
+ targetBlobOID := gittest.WriteBlob(t, cfg, targetRepoPath, []byte("contents-2\n"))
+ targetCommitOID := gittest.WriteCommit(t, cfg, targetRepoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ OID: targetBlobOID, Path: "file.txt", Mode: "100644",
+ }),
+ )
+ gittest.Exec(t, cfg, "-C", targetRepoPath, "update-ref", "refs/heads/target", targetCommitOID.String())
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+ ctx = testhelper.MergeOutgoingMetadata(ctx, testhelper.GitalyServersMetadataFromCfg(t, cfg))
stream, err := client.ResolveConflicts(ctx)
require.NoError(t, err)
@@ -251,8 +221,8 @@ func testResolveConflictsWithRemoteRepoFeatured(t *testing.T, ctx context.Contex
Repository: sourceRepo,
TargetRepository: targetRepo,
CommitMessage: []byte(conflictResolutionCommitMessage),
- OurCommitOid: sourceCommitOID,
- TheirCommitOid: targetCommitOID,
+ OurCommitOid: sourceCommitOID.String(),
+ TheirCommitOid: targetCommitOID.String(),
SourceBranch: []byte("source"),
TargetBranch: []byte("target"),
User: user,
@@ -269,21 +239,15 @@ func testResolveConflictsWithRemoteRepoFeatured(t *testing.T, ctx context.Contex
require.NoError(t, err)
require.Empty(t, response.GetResolutionError())
- require.Equal(t, []byte("contents-2\n"), testhelper.MustRunCommand(t, nil, "git", "-C", sourceRepoPath, "cat-file", "-p", "refs/heads/source:file.txt"))
+ require.Equal(t, []byte("contents-2\n"), gittest.Exec(t, cfg, "-C", sourceRepoPath, "cat-file", "-p", "refs/heads/source:file.txt"))
}
-func testResolveConflictsLineEndings(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.GoResolveConflicts,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testResolveConflictsLineEndingsFeatured(t, ctx, cfg, rubySrv)
- })
-}
-
-func testResolveConflictsLineEndingsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- cfg, repo, repoPath, client := conflicts.SetupConflictsServiceWithRuby(t, cfg, rubySrv, true)
+func TestResolveConflictsLineEndings(t *testing.T) {
+ cfg, repo, repoPath, client := conflicts.SetupConflictsService(t, true)
- ctx = testhelper.MergeOutgoingMetadata(ctx, testhelper.GitalyServersMetadata(t, cfg.SocketPath))
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+ ctx = testhelper.MergeOutgoingMetadata(ctx, testhelper.GitalyServersMetadataFromCfg(t, cfg))
for _, tc := range []struct {
desc string
@@ -346,13 +310,21 @@ func testResolveConflictsLineEndingsFeatured(t *testing.T, ctx context.Context,
},
} {
t.Run(tc.desc, func(t *testing.T) {
- ourOID := gittest.WriteBlob(t, repoPath, []byte(tc.ourContent))
- ourCommit := gittest.CommitBlobWithName(t, cfg, repoPath, ourOID.String(), "file.txt", "message")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/heads/ours", ourCommit)
-
- theirOID := gittest.WriteBlob(t, repoPath, []byte(tc.theirContent))
- theirCommit := gittest.CommitBlobWithName(t, cfg, repoPath, theirOID.String(), "file.txt", "message")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/heads/theirs", theirCommit)
+ ourOID := gittest.WriteBlob(t, cfg, repoPath, []byte(tc.ourContent))
+ ourCommit := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ OID: ourOID, Path: "file.txt", Mode: "100644",
+ }),
+ )
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/ours", ourCommit.String())
+
+ theirOID := gittest.WriteBlob(t, cfg, repoPath, []byte(tc.theirContent))
+ theirCommit := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ OID: theirOID, Path: "file.txt", Mode: "100644",
+ }),
+ )
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/theirs", theirCommit.String())
stream, err := client.ResolveConflicts(ctx)
require.NoError(t, err)
@@ -366,8 +338,8 @@ func testResolveConflictsLineEndingsFeatured(t *testing.T, ctx context.Context,
Repository: repo,
TargetRepository: repo,
CommitMessage: []byte(conflictResolutionCommitMessage),
- OurCommitOid: ourCommit,
- TheirCommitOid: theirCommit,
+ OurCommitOid: ourCommit.String(),
+ TheirCommitOid: theirCommit.String(),
SourceBranch: []byte("ours"),
TargetBranch: []byte("theirs"),
User: user,
@@ -384,23 +356,17 @@ func testResolveConflictsLineEndingsFeatured(t *testing.T, ctx context.Context,
require.NoError(t, err)
require.Empty(t, response.GetResolutionError())
- require.Equal(t, []byte(tc.expectedContents), testhelper.MustRunCommand(t, nil,
- "git", "-C", repoPath, "cat-file", "-p", "refs/heads/ours:file.txt"))
+ oursFile := gittest.Exec(t, cfg, "-C", repoPath, "cat-file", "-p", "refs/heads/ours:file.txt")
+ require.Equal(t, []byte(tc.expectedContents), oursFile)
})
}
}
-func testResolveConflictsNonOIDRequests(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.GoResolveConflicts,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testResolveConflictsNonOIDRequestsFeatured(t, ctx, cfg, rubySrv)
- })
-}
-
-func testResolveConflictsNonOIDRequestsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- cfg, repoProto, _, client := conflicts.SetupConflictsServiceWithRuby(t, cfg, rubySrv, true)
+func TestResolveConflictsNonOIDRequests(t *testing.T) {
+ cfg, repoProto, _, client := conflicts.SetupConflictsService(t, true)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
ctx = testhelper.MergeOutgoingMetadata(ctx, testhelper.GitalyServersMetadataFromCfg(t, cfg))
stream, err := client.ResolveConflicts(ctx)
@@ -433,18 +399,13 @@ func testResolveConflictsNonOIDRequestsFeatured(t *testing.T, ctx context.Contex
require.Equal(t, status.Errorf(codes.Unknown, "Rugged::InvalidError: unable to parse OID - contains invalid characters"), err)
}
-func testResolveConflictsIdenticalContent(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.GoResolveConflicts,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testResolveConflictsIdenticalContentFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestResolveConflictsIdenticalContent(t *testing.T) {
+ cfg, repoProto, repoPath, client := conflicts.SetupConflictsService(t, true)
-func testResolveConflictsIdenticalContentFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- cfg, repoProto, repoPath, client := conflicts.SetupConflictsServiceWithRuby(t, cfg, rubySrv, true)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
sourceBranch := "conflict-resolvable"
sourceOID, err := repo.ResolveRevision(ctx, git.Revision(sourceBranch))
@@ -462,7 +423,7 @@ func testResolveConflictsIdenticalContentFeatured(t *testing.T, ctx context.Cont
"6907208d755b60ebeacb2e9dfea74c92c3449a1f",
targetOID.String(),
} {
- contents := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "cat-file", "-p", rev+":files/ruby/popen.rb")
+ contents := gittest.Exec(t, cfg, "-C", repoPath, "cat-file", "-p", rev+":files/ruby/popen.rb")
path := filepath.Join(tempDir, rev)
require.NoError(t, ioutil.WriteFile(path, contents, 0644))
conflictingPaths = append(conflictingPaths, path)
@@ -535,20 +496,13 @@ func testResolveConflictsIdenticalContentFeatured(t *testing.T, ctx context.Cont
}, response)
}
-func testResolveConflictsStableID(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.ConfigureGitalyHooksBin(t, cfg)
-
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.GoResolveConflicts,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testResolveConflictsStableIDFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestResolveConflictsStableID(t *testing.T) {
+ cfg, repoProto, _, client := conflicts.SetupConflictsService(t, true)
-func testResolveConflictsStableIDFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- cfg, repoProto, _, client := conflicts.SetupConflictsServiceWithRuby(t, cfg, rubySrv, true)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
md := testhelper.GitalyServersMetadataFromCfg(t, cfg)
ctx = testhelper.MergeOutgoingMetadata(ctx, md)
@@ -611,16 +565,11 @@ func testResolveConflictsStableIDFeatured(t *testing.T, ctx context.Context, cfg
}, resolvedCommit)
}
-func testFailedResolveConflictsRequestDueToResolutionError(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.GoResolveConflicts,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testFailedResolveConflictsRequestDueToResolutionErrorFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestFailedResolveConflictsRequestDueToResolutionError(t *testing.T) {
+ cfg, repo, _, client := conflicts.SetupConflictsService(t, true)
-func testFailedResolveConflictsRequestDueToResolutionErrorFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- cfg, repo, _, client := conflicts.SetupConflictsServiceWithRuby(t, cfg, rubySrv, true)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
mdGS := testhelper.GitalyServersMetadataFromCfg(t, cfg)
mdFF, _ := metadata.FromOutgoingContext(ctx)
@@ -673,16 +622,11 @@ func testFailedResolveConflictsRequestDueToResolutionErrorFeatured(t *testing.T,
require.Equal(t, r.GetResolutionError(), "Missing resolution for section ID: 6eb14e00385d2fb284765eb1cd8d420d33d63fc9_21_21")
}
-func testFailedResolveConflictsRequestDueToValidation(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.GoResolveConflicts,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testFailedResolveConflictsRequestDueToValidationFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestFailedResolveConflictsRequestDueToValidation(t *testing.T) {
+ cfg, repo, _, client := conflicts.SetupConflictsService(t, true)
-func testFailedResolveConflictsRequestDueToValidationFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- cfg, repo, _, client := conflicts.SetupConflictsServiceWithRuby(t, cfg, rubySrv, true)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
mdGS := testhelper.GitalyServersMetadataFromCfg(t, cfg)
ourCommitOid := "1450cd639e0bc6721eb02800169e464f212cde06"
diff --git a/internal/gitaly/service/conflicts/server.go b/internal/gitaly/service/conflicts/server.go
index e80e29c62..0e425f990 100644
--- a/internal/gitaly/service/conflicts/server.go
+++ b/internal/gitaly/service/conflicts/server.go
@@ -3,30 +3,36 @@ package conflicts
import (
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
type server struct {
- ruby *rubyserver.Server
cfg config.Cfg
locator storage.Locator
gitCmdFactory git.CommandFactory
+ catfileCache catfile.Cache
pool *client.Pool
}
// NewServer creates a new instance of a grpc ConflictsServer
-func NewServer(rs *rubyserver.Server, cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory) gitalypb.ConflictsServiceServer {
+func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, catfileCache catfile.Cache) gitalypb.ConflictsServiceServer {
return &server{
- ruby: rs,
cfg: cfg,
locator: locator,
gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
pool: client.NewPoolWithOptions(
client.WithDialer(client.HealthCheckDialer(client.DialContext)),
client.WithDialOptions(client.FailOnNonTempDialError()...),
),
}
}
+
+func (s *server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
+}
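The ConflictsService constructor now takes a catfile.Cache and gains a localrepo helper instead of carrying a rubyserver.Server. As a minimal sketch (a hypothetical handler, not part of this diff; it assumes only the localrepo.Repo API already exercised by the tests above, with git.ObjectID assumed as the return type to match the .String() calls), a handler can build a repository handle from the injected dependencies like this:

	// Sketch only: assumes "context" is imported alongside the packages added above.
	func (s *server) resolveDefaultBranchTip(ctx context.Context, repoProto repository.GitRepo) (git.ObjectID, error) {
		// s.localrepo reuses the injected gitCmdFactory, catfileCache and cfg,
		// so handlers no longer construct a command factory per call.
		repo := s.localrepo(repoProto)
		return repo.ResolveRevision(ctx, git.Revision("refs/heads/master"))
	}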
diff --git a/internal/gitaly/service/conflicts/testhelper_test.go b/internal/gitaly/service/conflicts/testhelper_test.go
index 39b4880eb..ee1e1d54c 100644
--- a/internal/gitaly/service/conflicts/testhelper_test.go
+++ b/internal/gitaly/service/conflicts/testhelper_test.go
@@ -10,7 +10,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/hooks"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/commit"
hook_service "gitlab.com/gitlab-org/gitaly/internal/gitaly/service/hook"
@@ -46,21 +45,23 @@ func testMain(m *testing.M) int {
return m.Run()
}
-func SetupConflictsServiceWithRuby(t testing.TB, cfg config.Cfg, rubySrv *rubyserver.Server, bare bool) (config.Cfg, *gitalypb.Repository, string, gitalypb.ConflictsServiceClient) {
+func SetupConflictsService(t testing.TB, bare bool) (config.Cfg, *gitalypb.Repository, string, gitalypb.ConflictsServiceClient) {
+ cfg := testcfg.Build(t)
+
testhelper.ConfigureGitalyGit2GoBin(t, cfg)
var repo *gitalypb.Repository
var repoPath string
var cleanup testhelper.Cleanup
if bare {
- repo, repoPath, cleanup = gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanup = gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
} else {
- repo, repoPath, cleanup = gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repo, repoPath, cleanup = gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
}
- serverSocketPath := runConflictsServer(t, cfg, rubySrv)
+ serverSocketPath := runConflictsServer(t, cfg)
cfg.SocketPath = serverSocketPath
client, conn := NewConflictsClient(t, serverSocketPath)
@@ -69,19 +70,36 @@ func SetupConflictsServiceWithRuby(t testing.TB, cfg config.Cfg, rubySrv *rubyse
return cfg, repo, repoPath, client
}
-func SetupConflictsService(t testing.TB, bare bool) (config.Cfg, *gitalypb.Repository, string, gitalypb.ConflictsServiceClient) {
- cfg := testcfg.Build(t)
-
- return SetupConflictsServiceWithRuby(t, cfg, nil, bare)
-}
-
-func runConflictsServer(t testing.TB, cfg config.Cfg, rubySrv *rubyserver.Server) string {
- return testserver.RunGitalyServer(t, cfg, rubySrv, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterConflictsServiceServer(srv, NewServer(deps.GetRubyServer(), deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
- gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetTxManager(), deps.GetGitCmdFactory()))
- gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
+func runConflictsServer(t testing.TB, cfg config.Cfg) string {
+ return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterConflictsServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hook_service.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
- gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetLinguist()))
+ gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetLinguist(),
+ deps.GetCatfileCache(),
+ ))
})
}
diff --git a/internal/gitaly/service/dependencies.go b/internal/gitaly/service/dependencies.go
index f67d66724..28c728c87 100644
--- a/internal/gitaly/service/dependencies.go
+++ b/internal/gitaly/service/dependencies.go
@@ -3,12 +3,15 @@ package service
import (
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
+ "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
gitalyhook "gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/linguist"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/storage"
)
@@ -23,7 +26,9 @@ type Dependencies struct {
GitCmdFactory git.CommandFactory
Linguist *linguist.Instance
BackchannelRegistry *backchannel.Registry
- GitlabAPI gitalyhook.GitlabAPI
+ GitlabClient gitlab.Client
+ CatfileCache catfile.Cache
+ DiskCache *cache.Cache
}
// GetCfg returns service configuration.
@@ -71,7 +76,17 @@ func (dc *Dependencies) GetBackchannelRegistry() *backchannel.Registry {
return dc.BackchannelRegistry
}
-// GetGitlabAPI returns client to access GitLab API.
-func (dc *Dependencies) GetGitlabAPI() gitalyhook.GitlabAPI {
- return dc.GitlabAPI
+// GetGitlabClient returns client to access GitLab API.
+func (dc *Dependencies) GetGitlabClient() gitlab.Client {
+ return dc.GitlabClient
+}
+
+// GetCatfileCache returns catfile cache.
+func (dc *Dependencies) GetCatfileCache() catfile.Cache {
+ return dc.CatfileCache
+}
+
+// GetDiskCache returns the disk cache.
+func (dc *Dependencies) GetDiskCache() *cache.Cache {
+ return dc.DiskCache
}
diff --git a/internal/gitaly/service/diff/commit_test.go b/internal/gitaly/service/diff/commit_test.go
index 6b9a4467a..484c2fac1 100644
--- a/internal/gitaly/service/diff/commit_test.go
+++ b/internal/gitaly/service/diff/commit_test.go
@@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/diff"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -15,7 +16,7 @@ import (
)
func TestSuccessfulCommitDiffRequest(t *testing.T) {
- _, repo, repoPath, client := setupDiffService(t)
+ cfg, repo, repoPath, client := setupDiffService(t)
rightCommit := "ab2c9622c02288a2bbaaf35d96088cfdff31d9d9"
leftCommit := "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"
@@ -169,7 +170,7 @@ func TestSuccessfulCommitDiffRequest(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "diff.noprefix", testCase.noPrefixConfig)
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "diff.noprefix", testCase.noPrefixConfig)
rpcRequest := &gitalypb.CommitDiffRequest{Repository: repo, RightCommitId: rightCommit, LeftCommitId: leftCommit, IgnoreWhitespaceChange: false}
ctx, cancel := testhelper.Context()
@@ -399,13 +400,13 @@ func TestSuccessfulCommitDiffRequestWithIgnoreWhitespaceChange(t *testing.T) {
}
func TestSuccessfulCommitDiffRequestWithWordDiff(t *testing.T) {
- _, repo, repoPath, client := setupDiffService(t)
+ cfg, repo, repoPath, client := setupDiffService(t)
rightCommit := "ab2c9622c02288a2bbaaf35d96088cfdff31d9d9"
leftCommit := "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"
var diffPatches [][]byte
- output := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "diff", "--word-diff=porcelain", leftCommit, rightCommit)
+ output := gittest.Exec(t, cfg, "-C", repoPath, "diff", "--word-diff=porcelain", leftCommit, rightCommit)
diffPerFile := bytes.Split(output, []byte("diff --git"))
for _, s := range diffPerFile {
@@ -564,7 +565,7 @@ func TestSuccessfulCommitDiffRequestWithWordDiff(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "diff.noprefix", testCase.noPrefixConfig)
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "diff.noprefix", testCase.noPrefixConfig)
rpcRequest := &gitalypb.CommitDiffRequest{
Repository: repo,
RightCommitId: rightCommit,
diff --git a/internal/gitaly/service/diff/find_changed_paths.go b/internal/gitaly/service/diff/find_changed_paths.go
index f87ff7900..5a5b9098d 100644
--- a/internal/gitaly/service/diff/find_changed_paths.go
+++ b/internal/gitaly/service/diff/find_changed_paths.go
@@ -9,7 +9,6 @@ import (
"github.com/golang/protobuf/proto"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
@@ -135,7 +134,7 @@ func (s *server) validateFindChangedPathsRequestParams(ctx context.Context, in *
return err
}
- gitRepo := localrepo.New(s.gitCmdFactory, repo, s.cfg)
+ gitRepo := s.localrepo(in.GetRepository())
for _, commit := range in.GetCommits() {
if commit == "" {
diff --git a/internal/gitaly/service/diff/find_changed_paths_test.go b/internal/gitaly/service/diff/find_changed_paths_test.go
index b1c699fc9..15070e05a 100644
--- a/internal/gitaly/service/diff/find_changed_paths_test.go
+++ b/internal/gitaly/service/diff/find_changed_paths_test.go
@@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -110,7 +111,7 @@ func TestFindChangedPathsRequest_success(t *testing.T) {
}
func TestFindChangedPathsRequest_failing(t *testing.T) {
- cfg, repo, _, client := setupDiffService(t)
+ cfg, repo, _, client := setupDiffService(t, testserver.WithDisablePraefect())
ctx, cancel := testhelper.Context()
defer cancel()
diff --git a/internal/gitaly/service/diff/raw_test.go b/internal/gitaly/service/diff/raw_test.go
index 6c9214f37..ffce40771 100644
--- a/internal/gitaly/service/diff/raw_test.go
+++ b/internal/gitaly/service/diff/raw_test.go
@@ -15,7 +15,7 @@ import (
)
func TestSuccessfulRawDiffRequest(t *testing.T) {
- _, repo, repoPath, client := setupDiffService(t)
+ cfg, repo, repoPath, client := setupDiffService(t)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -27,7 +27,7 @@ func TestSuccessfulRawDiffRequest(t *testing.T) {
c, err := client.RawDiff(ctx, rpcRequest)
require.NoError(t, err)
- _, sandboxRepoPath, cleanupFn := gittest.CloneRepoWithWorktree(t)
+ _, sandboxRepoPath, cleanupFn := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer cleanupFn()
reader := streamio.NewReader(func() ([]byte, error) {
@@ -37,17 +37,17 @@ func TestSuccessfulRawDiffRequest(t *testing.T) {
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
- testhelper.MustRunCommand(t, nil, "git", "-C", sandboxRepoPath, "reset", "--hard", leftCommit)
+ gittest.Exec(t, cfg, "-C", sandboxRepoPath, "reset", "--hard", leftCommit)
- testhelper.MustRunCommand(t, reader, "git", "-C", sandboxRepoPath, "apply")
- testhelper.MustRunCommand(t, reader, "git", "-C", sandboxRepoPath, "add", ".")
- testhelper.MustRunCommand(t, nil, "git", "-C", sandboxRepoPath,
+ gittest.ExecStream(t, cfg, reader, "-C", sandboxRepoPath, "apply")
+ gittest.ExecStream(t, cfg, reader, "-C", sandboxRepoPath, "add", ".")
+ gittest.Exec(t, cfg, "-C", sandboxRepoPath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "-m", "Applying received raw diff")
- expectedTreeStructure := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "ls-tree", "-r", rightCommit)
- actualTreeStructure := testhelper.MustRunCommand(t, nil, "git", "-C", sandboxRepoPath, "ls-tree", "-r", "HEAD")
+ expectedTreeStructure := gittest.Exec(t, cfg, "-C", repoPath, "ls-tree", "-r", rightCommit)
+ actualTreeStructure := gittest.Exec(t, cfg, "-C", sandboxRepoPath, "ls-tree", "-r", "HEAD")
require.Equal(t, expectedTreeStructure, actualTreeStructure)
}
@@ -100,7 +100,7 @@ func TestFailedRawDiffRequestDueToValidations(t *testing.T) {
}
func TestSuccessfulRawPatchRequest(t *testing.T) {
- _, repo, repoPath, client := setupDiffService(t)
+ cfg, repo, repoPath, client := setupDiffService(t)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -117,15 +117,15 @@ func TestSuccessfulRawPatchRequest(t *testing.T) {
return response.GetData(), err
})
- _, sandboxRepoPath, cleanupFn := gittest.CloneRepoWithWorktree(t)
+ _, sandboxRepoPath, cleanupFn := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer cleanupFn()
- testhelper.MustRunCommand(t, nil, "git", "-C", sandboxRepoPath, "reset", "--hard", leftCommit)
+ gittest.Exec(t, cfg, "-C", sandboxRepoPath, "reset", "--hard", leftCommit)
- testhelper.MustRunCommand(t, reader, "git", "-C", sandboxRepoPath, "am")
+ gittest.ExecStream(t, cfg, reader, "-C", sandboxRepoPath, "am")
- expectedTreeStructure := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "ls-tree", "-r", rightCommit)
- actualTreeStructure := testhelper.MustRunCommand(t, nil, "git", "-C", sandboxRepoPath, "ls-tree", "-r", "HEAD")
+ expectedTreeStructure := gittest.Exec(t, cfg, "-C", repoPath, "ls-tree", "-r", rightCommit)
+ actualTreeStructure := gittest.Exec(t, cfg, "-C", sandboxRepoPath, "ls-tree", "-r", "HEAD")
require.Equal(t, expectedTreeStructure, actualTreeStructure)
}
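The repeated rewrites from testhelper.MustRunCommand to gittest.Exec / gittest.ExecStream all follow one pattern: Exec presumably runs git using the configuration's git binary and returns stdout, while ExecStream additionally feeds a reader to git's stdin. A minimal hypothetical test under those assumed signatures (inferred from the call sites in this diff, not copied from gittest; assumes the usual bytes, testing, require, gittest and testcfg imports) looks like:

	func TestGittestExecSketch(t *testing.T) {
		cfg, _, repoPath := testcfg.BuildWithRepo(t)

		// Exec returns the command's stdout as []byte.
		tree := gittest.Exec(t, cfg, "-C", repoPath, "ls-tree", "-r", "HEAD")
		require.NotEmpty(t, tree)

		// ExecStream pipes the given reader to git's stdin before running it.
		gittest.ExecStream(t, cfg, bytes.NewReader(tree), "-C", repoPath, "hash-object", "--stdin")
	}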
diff --git a/internal/gitaly/service/diff/server.go b/internal/gitaly/service/diff/server.go
index 48faf8db1..72310b068 100644
--- a/internal/gitaly/service/diff/server.go
+++ b/internal/gitaly/service/diff/server.go
@@ -2,6 +2,9 @@ package diff
import (
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -14,14 +17,20 @@ type server struct {
cfg config.Cfg
locator storage.Locator
gitCmdFactory git.CommandFactory
+ catfileCache catfile.Cache
}
// NewServer creates a new instance of a gRPC DiffServer
-func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory) gitalypb.DiffServiceServer {
+func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, catfileCache catfile.Cache) gitalypb.DiffServiceServer {
return &server{
MsgSizeThreshold: msgSizeThreshold,
cfg: cfg,
locator: locator,
gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
}
}
+
+func (s *server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
+}
diff --git a/internal/gitaly/service/diff/testhelper_test.go b/internal/gitaly/service/diff/testhelper_test.go
index 00be98713..4a0f69c8e 100644
--- a/internal/gitaly/service/diff/testhelper_test.go
+++ b/internal/gitaly/service/diff/testhelper_test.go
@@ -1,15 +1,15 @@
package diff
import (
- "net"
"os"
"testing"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
)
@@ -25,39 +25,20 @@ func testMain(m *testing.M) int {
return m.Run()
}
-func setupDiffService(t testing.TB) (config.Cfg, *gitalypb.Repository, string, gitalypb.DiffServiceClient) {
+func setupDiffService(t testing.TB, opt ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, gitalypb.DiffServiceClient) {
cfg, repo, repoPath := testcfg.BuildWithRepo(t)
- cfg.SocketPath = runDiffServer(t, cfg)
- client, conn := newDiffClient(t, cfg.SocketPath)
- t.Cleanup(func() { conn.Close() })
-
- return cfg, repo, repoPath, client
-}
-
-func runDiffServer(t testing.TB, cfg config.Cfg) string {
- t.Helper()
-
- server := testhelper.NewTestGrpcServer(t, nil, nil)
- t.Cleanup(server.Stop)
-
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
- listener, err := net.Listen("unix", serverSocketPath)
- require.NoError(t, err)
-
- gitalypb.RegisterDiffServiceServer(server, NewServer(cfg, config.NewLocator(cfg), git.NewExecCommandFactory(cfg)))
-
- go server.Serve(listener)
-
- return "unix://" + serverSocketPath
-}
-
-func newDiffClient(t testing.TB, serverSocketPath string) (gitalypb.DiffServiceClient, *grpc.ClientConn) {
- connOpts := []grpc.DialOption{
- grpc.WithInsecure(),
- }
-
- conn, err := grpc.Dial(serverSocketPath, connOpts...)
+ addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterDiffServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ }, opt...)
+
+ conn, err := grpc.Dial(addr, grpc.WithInsecure())
require.NoError(t, err)
+ t.Cleanup(func() { testhelper.MustClose(t, conn) })
- return gitalypb.NewDiffServiceClient(conn), conn
+ return cfg, repo, repoPath, gitalypb.NewDiffServiceClient(conn)
}
diff --git a/internal/gitaly/service/hook/pack_objects_test.go b/internal/gitaly/service/hook/pack_objects_test.go
index 3c51806b0..5e21cbfe9 100644
--- a/internal/gitaly/service/hook/pack_objects_test.go
+++ b/internal/gitaly/service/hook/pack_objects_test.go
@@ -10,6 +10,7 @@ import (
"github.com/sirupsen/logrus"
"github.com/sirupsen/logrus/hooks/test"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/streamcache"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -108,10 +109,11 @@ func TestServer_PackObjectsHook(t *testing.T) {
}
require.Equal(t, io.EOF, err)
- testhelper.MustRunCommand(
+ gittest.ExecStream(
t,
+ cfg,
bytes.NewReader(stdout),
- "git", "-C", repoPath, "index-pack", "--stdin", "--fix-thin",
+ "-C", repoPath, "index-pack", "--stdin", "--fix-thin",
)
for _, msg := range []string{"served bytes", "generated bytes"} {
@@ -215,10 +217,11 @@ func TestServer_PackObjectsHook_separateContext(t *testing.T) {
}
require.Equal(t, io.EOF, err)
- testhelper.MustRunCommand(
+ gittest.ExecStream(
t,
+ cfg,
bytes.NewReader(stdout),
- "git", "-C", repoPath, "index-pack", "--stdin", "--fix-thin",
+ "-C", repoPath, "index-pack", "--stdin", "--fix-thin",
)
}
@@ -259,10 +262,11 @@ func TestServer_PackObjectsHook_usesCache(t *testing.T) {
}
require.Equal(t, io.EOF, err)
- testhelper.MustRunCommand(
+ gittest.ExecStream(
t,
+ cfg,
bytes.NewReader(stdout),
- "git", "-C", repoPath, "index-pack", "--stdin", "--fix-thin",
+ "-C", repoPath, "index-pack", "--stdin", "--fix-thin",
)
}
diff --git a/internal/gitaly/service/hook/post_receive_test.go b/internal/gitaly/service/hook/post_receive_test.go
index e7bad14b1..342e86b86 100644
--- a/internal/gitaly/service/hook/post_receive_test.go
+++ b/internal/gitaly/service/hook/post_receive_test.go
@@ -10,13 +10,14 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- gitalyhook "gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/prometheus"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
"google.golang.org/grpc/codes"
@@ -66,7 +67,7 @@ func TestHooksMissingStdin(t *testing.T) {
},
}
- api, err := gitalyhook.NewGitlabAPI(cfg.Gitlab, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(cfg.Gitlab, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
testCases := []struct {
@@ -87,7 +88,7 @@ func TestHooksMissingStdin(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabAPI(api))
+ serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabClient(gitlabClient))
client, conn := newHooksClient(t, serverSocketPath)
defer conn.Close()
@@ -98,12 +99,12 @@ func TestHooksMissingStdin(t *testing.T) {
hooksPayload, err := git.NewHooksPayload(
cfg,
repo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234,
Node: "node-1",
Primary: tc.primary,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
@@ -217,10 +218,10 @@ To create a merge request for okay, visit:
},
}
- api, err := gitalyhook.NewGitlabAPI(cfg.Gitlab, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(cfg.Gitlab, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
- serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabAPI(api))
+ serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabClient(gitlabClient))
client, conn := newHooksClient(t, serverSocketPath)
defer conn.Close()
diff --git a/internal/gitaly/service/hook/pre_receive_test.go b/internal/gitaly/service/hook/pre_receive_test.go
index dc9d03c40..bf95a0fce 100644
--- a/internal/gitaly/service/hook/pre_receive_test.go
+++ b/internal/gitaly/service/hook/pre_receive_test.go
@@ -15,13 +15,14 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- gitalyhook "gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/prometheus"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
"google.golang.org/grpc/codes"
@@ -132,10 +133,10 @@ func TestPreReceiveHook_GitlabAPIAccess(t *testing.T) {
SecretFile: secretFilePath,
}
- gitlabAPI, err := gitalyhook.NewGitlabAPI(gitlabConfig, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(gitlabConfig, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
- serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabAPI(gitlabAPI))
+ serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabClient(gitlabClient))
client, conn := newHooksClient(t, serverSocketPath)
defer conn.Close()
@@ -248,10 +249,10 @@ func TestPreReceive_APIErrors(t *testing.T) {
SecretFile: secretFilePath,
}
- gitlabAPI, err := gitalyhook.NewGitlabAPI(gitlabConfig, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(gitlabConfig, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
- serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabAPI(gitlabAPI))
+ serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabClient(gitlabClient))
client, conn := newHooksClient(t, serverSocketPath)
defer conn.Close()
@@ -321,10 +322,10 @@ exit %d
SecretFile: secretFilePath,
}
- gitlabAPI, err := gitalyhook.NewGitlabAPI(gitlabConfig, cfg.TLS)
+ gitlabClient, err := gitlab.NewHTTPClient(gitlabConfig, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
- serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabAPI(gitlabAPI))
+ serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabClient(gitlabClient))
client, conn := newHooksClient(t, serverSocketPath)
defer conn.Close()
@@ -430,7 +431,7 @@ func TestPreReceiveHook_Primary(t *testing.T) {
for i, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], fmt.Sprintf("repo-%d", i))
+ testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], fmt.Sprintf("repo-%d", i))
defer cleanupFn()
mux := http.NewServeMux()
@@ -446,13 +447,13 @@ func TestPreReceiveHook_Primary(t *testing.T) {
gittest.WriteCustomHook(t, testRepoPath, "pre-receive", []byte(fmt.Sprintf("#!/bin/bash\nexit %d", tc.hookExitCode)))
- gitlabAPI, err := gitalyhook.NewGitlabAPI(config.Gitlab{
+ gitlabClient, err := gitlab.NewHTTPClient(config.Gitlab{
URL: srv.URL,
SecretFile: secretFilePath,
- }, cfg.TLS)
+ }, cfg.TLS, prometheus.Config{})
require.NoError(t, err)
- serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabAPI(gitlabAPI))
+ serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithGitLabClient(gitlabClient))
client, conn := newHooksClient(t, serverSocketPath)
defer conn.Close()
@@ -463,12 +464,12 @@ func TestPreReceiveHook_Primary(t *testing.T) {
hooksPayload, err := git.NewHooksPayload(
cfg,
testRepo,
- &metadata.Transaction{
+ &txinfo.Transaction{
ID: 1234,
Node: "node-1",
Primary: tc.primary,
},
- &metadata.PraefectServer{
+ &txinfo.PraefectServer{
SocketPath: "/path/to/socket",
Token: "secret",
},
diff --git a/internal/gitaly/service/hook/reference_transaction_test.go b/internal/gitaly/service/hook/reference_transaction_test.go
index 9b04a11fb..909c0b974 100644
--- a/internal/gitaly/service/hook/reference_transaction_test.go
+++ b/internal/gitaly/service/hook/reference_transaction_test.go
@@ -11,10 +11,10 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
@@ -51,148 +51,143 @@ func TestReferenceTransactionHookInvalidArgument(t *testing.T) {
}
func TestReferenceTransactionHook(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.BackchannelVoting,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- testCases := []struct {
- desc string
- stdin []byte
- state gitalypb.ReferenceTransactionHookRequest_State
- voteResponse gitalypb.VoteTransactionResponse_TransactionState
- expectedCode codes.Code
- expectedReftxHash []byte
- }{
- {
- desc: "hook triggers transaction with default state",
- stdin: []byte("foobar"),
- voteResponse: gitalypb.VoteTransactionResponse_COMMIT,
- expectedCode: codes.OK,
- expectedReftxHash: []byte("foobar"),
- },
- {
- desc: "hook triggers transaction with explicit prepared state",
- stdin: []byte("foobar"),
- state: gitalypb.ReferenceTransactionHookRequest_PREPARED,
- voteResponse: gitalypb.VoteTransactionResponse_COMMIT,
- expectedCode: codes.OK,
- expectedReftxHash: []byte("foobar"),
- },
- {
- desc: "hook does not trigger transaction with aborted state",
- stdin: []byte("foobar"),
- state: gitalypb.ReferenceTransactionHookRequest_ABORTED,
- expectedCode: codes.OK,
- },
- {
- desc: "hook does not trigger transaction with committed state",
- stdin: []byte("foobar"),
- state: gitalypb.ReferenceTransactionHookRequest_COMMITTED,
- expectedCode: codes.OK,
- },
- {
- desc: "hook fails with failed vote",
- stdin: []byte("foobar"),
- voteResponse: gitalypb.VoteTransactionResponse_ABORT,
- expectedCode: codes.Aborted,
- expectedReftxHash: []byte("foobar"),
- },
- {
- desc: "hook fails with stopped vote",
- stdin: []byte("foobar"),
- voteResponse: gitalypb.VoteTransactionResponse_STOP,
- expectedCode: codes.FailedPrecondition,
- expectedReftxHash: []byte("foobar"),
- },
- }
-
- transactionServer := &testTransactionServer{}
- grpcServer := grpc.NewServer()
- gitalypb.RegisterRefTransactionServer(grpcServer, transactionServer)
-
- listener, err := net.Listen("tcp", "127.0.0.1:0")
- require.NoError(t, err)
-
- backchannelConn, err := grpc.Dial(listener.Addr().String(), grpc.WithInsecure())
- require.NoError(t, err)
- defer backchannelConn.Close()
-
- registry := backchannel.NewRegistry()
- backchannelID := registry.RegisterBackchannel(backchannelConn)
-
- errQ := make(chan error)
- go func() {
- errQ <- grpcServer.Serve(listener)
- }()
- defer func() {
- grpcServer.Stop()
- require.NoError(t, <-errQ)
- }()
-
- for _, tc := range testCases {
- t.Run(tc.desc, func(t *testing.T) {
- cfg, repo, _ := testcfg.BuildWithRepo(t)
-
- var reftxHash []byte
- transactionServer.handler = func(in *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- reftxHash = in.ReferenceUpdatesHash
- return &gitalypb.VoteTransactionResponse{
- State: tc.voteResponse,
- }, nil
- }
-
- serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithBackchannelRegistry(registry))
-
- praefectServer := &metadata.PraefectServer{ListenAddr: "tcp://" + listener.Addr().String()}
- if featureflag.IsEnabled(ctx, featureflag.BackchannelVoting) {
- praefectServer = &metadata.PraefectServer{BackchannelID: backchannelID}
- }
-
- hooksPayload, err := git.NewHooksPayload(
- cfg,
- repo,
- &metadata.Transaction{
- ID: 1234,
- Node: "node-1",
- },
- praefectServer,
- nil,
- git.ReferenceTransactionHook,
- featureflag.RawFromContext(ctx),
- ).Env()
- require.NoError(t, err)
-
- environment := []string{
- hooksPayload,
- }
-
- client, conn := newHooksClient(t, serverSocketPath)
- defer conn.Close()
-
- stream, err := client.ReferenceTransactionHook(ctx)
- require.NoError(t, err)
- require.NoError(t, stream.Send(&gitalypb.ReferenceTransactionHookRequest{
- Repository: repo,
- State: tc.state,
- EnvironmentVariables: environment,
- }))
- require.NoError(t, stream.Send(&gitalypb.ReferenceTransactionHookRequest{
- Stdin: tc.stdin,
- }))
- require.NoError(t, stream.CloseSend())
-
- resp, err := stream.Recv()
- require.Equal(t, helper.GrpcCode(err), tc.expectedCode)
- if tc.expectedCode == codes.OK {
- require.Equal(t, resp.GetExitStatus().GetValue(), int32(0))
- }
-
- var expectedReftxHash []byte
- if tc.expectedReftxHash != nil {
- hash := sha1.Sum(tc.expectedReftxHash)
- expectedReftxHash = hash[:]
- }
- require.Equal(t, expectedReftxHash[:], reftxHash)
- })
- }
- })
+ testCases := []struct {
+ desc string
+ stdin []byte
+ state gitalypb.ReferenceTransactionHookRequest_State
+ voteResponse gitalypb.VoteTransactionResponse_TransactionState
+ expectedCode codes.Code
+ expectedReftxHash []byte
+ }{
+ {
+ desc: "hook triggers transaction with default state",
+ stdin: []byte("foobar"),
+ voteResponse: gitalypb.VoteTransactionResponse_COMMIT,
+ expectedCode: codes.OK,
+ expectedReftxHash: []byte("foobar"),
+ },
+ {
+ desc: "hook triggers transaction with explicit prepared state",
+ stdin: []byte("foobar"),
+ state: gitalypb.ReferenceTransactionHookRequest_PREPARED,
+ voteResponse: gitalypb.VoteTransactionResponse_COMMIT,
+ expectedCode: codes.OK,
+ expectedReftxHash: []byte("foobar"),
+ },
+ {
+ desc: "hook does not trigger transaction with aborted state",
+ stdin: []byte("foobar"),
+ state: gitalypb.ReferenceTransactionHookRequest_ABORTED,
+ expectedCode: codes.OK,
+ },
+ {
+ desc: "hook triggers transaction with committed state",
+ stdin: []byte("foobar"),
+ state: gitalypb.ReferenceTransactionHookRequest_COMMITTED,
+ expectedCode: codes.OK,
+ expectedReftxHash: []byte("foobar"),
+ },
+ {
+ desc: "hook fails with failed vote",
+ stdin: []byte("foobar"),
+ voteResponse: gitalypb.VoteTransactionResponse_ABORT,
+ expectedCode: codes.Aborted,
+ expectedReftxHash: []byte("foobar"),
+ },
+ {
+ desc: "hook fails with stopped vote",
+ stdin: []byte("foobar"),
+ voteResponse: gitalypb.VoteTransactionResponse_STOP,
+ expectedCode: codes.FailedPrecondition,
+ expectedReftxHash: []byte("foobar"),
+ },
+ }
+
+ transactionServer := &testTransactionServer{}
+ grpcServer := grpc.NewServer()
+ gitalypb.RegisterRefTransactionServer(grpcServer, transactionServer)
+
+ listener, err := net.Listen("tcp", "127.0.0.1:0")
+ require.NoError(t, err)
+
+ backchannelConn, err := grpc.Dial(listener.Addr().String(), grpc.WithInsecure())
+ require.NoError(t, err)
+ defer backchannelConn.Close()
+
+ registry := backchannel.NewRegistry()
+ backchannelID := registry.RegisterBackchannel(backchannelConn)
+
+ errQ := make(chan error)
+ go func() {
+ errQ <- grpcServer.Serve(listener)
+ }()
+ defer func() {
+ grpcServer.Stop()
+ require.NoError(t, <-errQ)
+ }()
+
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ cfg, repo, _ := testcfg.BuildWithRepo(t)
+
+ var reftxHash []byte
+ transactionServer.handler = func(in *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ reftxHash = in.ReferenceUpdatesHash
+ return &gitalypb.VoteTransactionResponse{
+ State: tc.voteResponse,
+ }, nil
+ }
+
+ serverSocketPath := runHooksServer(t, cfg, nil, testserver.WithBackchannelRegistry(registry))
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ hooksPayload, err := git.NewHooksPayload(
+ cfg,
+ repo,
+ &txinfo.Transaction{
+ ID: 1234,
+ Node: "node-1",
+ },
+ &txinfo.PraefectServer{BackchannelID: backchannelID},
+ nil,
+ git.ReferenceTransactionHook,
+ featureflag.RawFromContext(ctx),
+ ).Env()
+ require.NoError(t, err)
+
+ environment := []string{
+ hooksPayload,
+ }
+
+ client, conn := newHooksClient(t, serverSocketPath)
+ defer conn.Close()
+
+ stream, err := client.ReferenceTransactionHook(ctx)
+ require.NoError(t, err)
+ require.NoError(t, stream.Send(&gitalypb.ReferenceTransactionHookRequest{
+ Repository: repo,
+ State: tc.state,
+ EnvironmentVariables: environment,
+ }))
+ require.NoError(t, stream.Send(&gitalypb.ReferenceTransactionHookRequest{
+ Stdin: tc.stdin,
+ }))
+ require.NoError(t, stream.CloseSend())
+
+ resp, err := stream.Recv()
+ require.Equal(t, helper.GrpcCode(err), tc.expectedCode)
+ if tc.expectedCode == codes.OK {
+ require.Equal(t, resp.GetExitStatus().GetValue(), int32(0))
+ }
+
+ var expectedReftxHash []byte
+ if tc.expectedReftxHash != nil {
+ hash := sha1.Sum(tc.expectedReftxHash)
+ expectedReftxHash = hash[:]
+ }
+ require.Equal(t, expectedReftxHash[:], reftxHash)
+ })
+ }
}
diff --git a/internal/gitaly/service/hook/server_test.go b/internal/gitaly/service/hook/server_test.go
index 07aefae34..e10736249 100644
--- a/internal/gitaly/service/hook/server_test.go
+++ b/internal/gitaly/service/hook/server_test.go
@@ -7,8 +7,9 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- gitalyhook "gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/streamcache"
)
@@ -43,7 +44,7 @@ func TestNewServer(t *testing.T) {
cfg := tc.cfg
poc := NewServer(
cfg,
- gitalyhook.NewManager(config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), gitalyhook.GitlabAPIStub, cfg),
+ hook.NewManager(config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), gitlab.NewMockClient(), cfg),
git.NewExecCommandFactory(cfg),
).(*server).packObjectsCache
diff --git a/internal/gitaly/service/hook/testhelper_test.go b/internal/gitaly/service/hook/testhelper_test.go
index 3522ce232..195355ce0 100644
--- a/internal/gitaly/service/hook/testhelper_test.go
+++ b/internal/gitaly/service/hook/testhelper_test.go
@@ -58,7 +58,7 @@ func runHooksServer(t testing.TB, cfg config.Cfg, opts []serverOption, serverOpt
return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
hookServer := NewServer(
deps.GetCfg(),
- gitalyhook.NewManager(deps.GetLocator(), deps.GetTxManager(), deps.GetGitlabAPI(), deps.GetCfg()),
+ gitalyhook.NewManager(deps.GetLocator(), deps.GetTxManager(), deps.GetGitlabClient(), deps.GetCfg()),
deps.GetGitCmdFactory(),
)
for _, opt := range opts {
diff --git a/internal/gitaly/service/internalgitaly/testhelper_test.go b/internal/gitaly/service/internalgitaly/testhelper_test.go
index 6d73879af..862e5e327 100644
--- a/internal/gitaly/service/internalgitaly/testhelper_test.go
+++ b/internal/gitaly/service/internalgitaly/testhelper_test.go
@@ -1,14 +1,16 @@
package internalgitaly
import (
- "net"
"os"
"testing"
+ "github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
- "google.golang.org/grpc/reflection"
)
func TestMain(m *testing.M) {
@@ -22,31 +24,13 @@ func testMain(m *testing.M) int {
return m.Run()
}
-func runInternalGitalyServer(t *testing.T, srv gitalypb.InternalGitalyServer) (*grpc.Server, string) {
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
- grpcServer := testhelper.NewTestGrpcServer(t, nil, nil)
+func setupInternalGitalyService(t *testing.T, cfg config.Cfg, internalService gitalypb.InternalGitalyServer) gitalypb.InternalGitalyClient {
+ addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterInternalGitalyServer(srv, internalService)
+ }, testserver.WithDisablePraefect())
+ conn, err := grpc.Dial(addr, grpc.WithInsecure())
+ require.NoError(t, err)
+ t.Cleanup(func() { testhelper.MustClose(t, conn) })
- listener, err := net.Listen("unix", serverSocketPath)
- if err != nil {
- t.Fatal(err)
- }
-
- gitalypb.RegisterInternalGitalyServer(grpcServer, srv)
- reflection.Register(grpcServer)
-
- go grpcServer.Serve(listener)
-
- return grpcServer, "unix://" + serverSocketPath
-}
-
-func newInternalGitalyClient(t *testing.T, serverSocketPath string) (gitalypb.InternalGitalyClient, *grpc.ClientConn) {
- connOpts := []grpc.DialOption{
- grpc.WithInsecure(),
- }
- conn, err := grpc.Dial(serverSocketPath, connOpts...)
- if err != nil {
- t.Fatal(err)
- }
-
- return gitalypb.NewInternalGitalyClient(conn), conn
+ return gitalypb.NewInternalGitalyClient(conn)
}
diff --git a/internal/gitaly/service/internalgitaly/walkrepos_test.go b/internal/gitaly/service/internalgitaly/walkrepos_test.go
index a55886217..0bf36cc4c 100644
--- a/internal/gitaly/service/internalgitaly/walkrepos_test.go
+++ b/internal/gitaly/service/internalgitaly/walkrepos_test.go
@@ -11,6 +11,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -35,16 +36,15 @@ func (w *streamWrapper) Send(resp *gitalypb.WalkReposResponse) error {
}
func TestWalkRepos(t *testing.T) {
- testRoot := testhelper.TempDir(t)
-
- storageName := "default"
- storageRoot := filepath.Join(testRoot, "storage")
+ cfg := testcfg.Build(t)
+ storageName := cfg.Storages[0].Name
+ storageRoot := cfg.Storages[0].Path
// file walk happens lexicographically, so we delete a repository in the middle
// of the sequence to ensure the walk proceeds normally
- testRepo1 := gittest.CloneRepoAtStorageRoot(t, storageRoot, "a")
- deletedRepo := gittest.CloneRepoAtStorageRoot(t, storageRoot, "b")
- testRepo2 := gittest.CloneRepoAtStorageRoot(t, storageRoot, "c")
+ testRepo1 := gittest.CloneRepoAtStorageRoot(t, cfg, storageRoot, "a")
+ deletedRepo := gittest.CloneRepoAtStorageRoot(t, cfg, storageRoot, "b")
+ testRepo2 := gittest.CloneRepoAtStorageRoot(t, cfg, storageRoot, "c")
// to test a directory being deleted during a walk, we must delete a directory after
// the file walk has started. To achieve that, we wrap the server to pass down a wrapped
@@ -65,11 +65,7 @@ func TestWalkRepos(t *testing.T) {
},
}
- server, serverSocketPath := runInternalGitalyServer(t, wsrv)
- defer server.Stop()
-
- client, conn := newInternalGitalyClient(t, serverSocketPath)
- defer conn.Close()
+ client := setupInternalGitalyService(t, cfg, wsrv)
ctx, cancel := testhelper.Context()
defer cancel()
diff --git a/internal/gitaly/service/namespace/namespace_test.go b/internal/gitaly/service/namespace/namespace_test.go
index e8baeeeed..2f33eaf6f 100644
--- a/internal/gitaly/service/namespace/namespace_test.go
+++ b/internal/gitaly/service/namespace/namespace_test.go
@@ -9,6 +9,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
@@ -27,7 +28,7 @@ func testMain(m *testing.M) int {
}
func TestNamespaceExists(t *testing.T) {
- cfg, client := setupNamespaceService(t)
+ cfg, client := setupNamespaceService(t, testserver.WithDisablePraefect())
existingStorage := cfg.Storages[0]
ctx, cancel := testhelper.Context()
@@ -209,7 +210,7 @@ func TestRemoveNamespace(t *testing.T) {
if tc.errorCode == codes.OK {
require.Equal(t, existingStorage.Name, tc.request.StorageName, "sanity check")
- testhelper.AssertPathNotExists(t, filepath.Join(existingStorage.Path, tc.request.Name))
+ require.NoFileExists(t, filepath.Join(existingStorage.Path, tc.request.Name))
}
})
}
diff --git a/internal/gitaly/service/namespace/testhelper_test.go b/internal/gitaly/service/namespace/testhelper_test.go
index fed1f8461..4411cb8c8 100644
--- a/internal/gitaly/service/namespace/testhelper_test.go
+++ b/internal/gitaly/service/namespace/testhelper_test.go
@@ -1,57 +1,29 @@
package namespace
import (
- "net"
"testing"
+ "github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/storage"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
- "google.golang.org/grpc/reflection"
)
-func setupNamespaceService(t testing.TB) (config.Cfg, gitalypb.NamespaceServiceClient) {
+func setupNamespaceService(t testing.TB, opts ...testserver.GitalyServerOpt) (config.Cfg, gitalypb.NamespaceServiceClient) {
cfgBuilder := testcfg.NewGitalyCfgBuilder(testcfg.WithStorages("default", "other"))
cfg := cfgBuilder.Build(t)
- locator := config.NewLocator(cfg)
- server, serverSocketPath := runNamespaceServer(t, locator)
- t.Cleanup(server.Stop)
+ addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterNamespaceServiceServer(srv, NewServer(deps.GetLocator()))
+ }, opts...)
- client, conn := newNamespaceClient(t, serverSocketPath)
- t.Cleanup(func() { conn.Close() })
+ conn, err := grpc.Dial(addr, grpc.WithInsecure())
+ require.NoError(t, err)
+ t.Cleanup(func() { testhelper.MustClose(t, conn) })
- return cfg, client
-}
-
-func runNamespaceServer(t testing.TB, locator storage.Locator) (*grpc.Server, string) {
- server := testhelper.NewTestGrpcServer(t, nil, nil)
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
-
- listener, err := net.Listen("unix", serverSocketPath)
- if err != nil {
- t.Fatal(err)
- }
-
- gitalypb.RegisterNamespaceServiceServer(server, NewServer(locator))
- reflection.Register(server)
-
- go server.Serve(listener)
-
- return server, "unix://" + serverSocketPath
-}
-
-func newNamespaceClient(t testing.TB, serverSocketPath string) (gitalypb.NamespaceServiceClient, *grpc.ClientConn) {
- connOpts := []grpc.DialOption{
- grpc.WithInsecure(),
- }
- conn, err := grpc.Dial(serverSocketPath, connOpts...)
- if err != nil {
- t.Fatal(err)
- }
-
- return gitalypb.NewNamespaceServiceClient(conn), conn
+ return cfg, gitalypb.NewNamespaceServiceClient(conn)
}
diff --git a/internal/gitaly/service/objectpool/alternates_test.go b/internal/gitaly/service/objectpool/alternates_test.go
index f4ebf689b..aafcfea4d 100644
--- a/internal/gitaly/service/objectpool/alternates_test.go
+++ b/internal/gitaly/service/objectpool/alternates_test.go
@@ -21,7 +21,7 @@ func TestDisconnectGitAlternates(t *testing.T) {
defer cancel()
gitCmdFactory := git.NewExecCommandFactory(cfg)
- pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -29,7 +29,7 @@ func TestDisconnectGitAlternates(t *testing.T) {
require.NoError(t, pool.Create(ctx, repo))
require.NoError(t, pool.Link(ctx, repo))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "gc")
+ gittest.Exec(t, cfg, "-C", repoPath, "gc")
existingObjectID := "55bc176024cfa3baaceb71db584c7e5df900ea65"
@@ -55,24 +55,24 @@ func TestDisconnectGitAlternates(t *testing.T) {
// Check that the object can still be found, even though
// objects/info/alternates is gone. This is the purpose of
// DisconnectGitAlternates.
- testhelper.AssertPathNotExists(t, altPath)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "cat-file", "-e", existingObjectID)
+ require.NoFileExists(t, altPath)
+ gittest.Exec(t, cfg, "-C", repoPath, "cat-file", "-e", existingObjectID)
}
func TestDisconnectGitAlternatesNoAlternates(t *testing.T) {
- _, repo, repoPath, locator, client := setup(t)
+ cfg, repo, repoPath, locator, client := setup(t)
ctx, cancel := testhelper.Context()
defer cancel()
altPath, err := locator.InfoAlternatesPath(repo)
require.NoError(t, err, "find info/alternates")
- testhelper.AssertPathNotExists(t, altPath)
+ require.NoFileExists(t, altPath)
_, err = client.DisconnectGitAlternates(ctx, &gitalypb.DisconnectGitAlternatesRequest{Repository: repo})
require.NoError(t, err, "call DisconnectGitAlternates on repository without alternates")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "fsck")
+ gittest.Exec(t, cfg, "-C", repoPath, "fsck")
}
func TestDisconnectGitAlternatesUnexpectedAlternates(t *testing.T) {
@@ -92,7 +92,7 @@ func TestDisconnectGitAlternatesUnexpectedAlternates(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
defer cleanupFn()
altPath, err := locator.InfoAlternatesPath(repo)
@@ -103,8 +103,7 @@ func TestDisconnectGitAlternatesUnexpectedAlternates(t *testing.T) {
_, err = client.DisconnectGitAlternates(ctx, &gitalypb.DisconnectGitAlternatesRequest{Repository: repo})
require.Error(t, err, "call DisconnectGitAlternates on repository with unexpected objects/info/alternates")
- contentAfterRPC, err := ioutil.ReadFile(altPath)
- require.NoError(t, err, "read back objects/info/alternates")
+ contentAfterRPC := testhelper.MustReadFile(t, altPath)
require.Equal(t, tc.altContent, string(contentAfterRPC), "objects/info/alternates content should not have changed")
})
}
@@ -142,8 +141,7 @@ func TestRemoveAlternatesIfOk(t *testing.T) {
func assertAlternates(t *testing.T, altPath string, altContent string) {
t.Helper()
- actualContent, err := ioutil.ReadFile(altPath)
- require.NoError(t, err, "read %s after fsck failure", altPath)
+ actualContent := testhelper.MustReadFile(t, altPath)
require.Equal(t, altContent, string(actualContent), "%s content after fsck failure", altPath)
}
diff --git a/internal/gitaly/service/objectpool/create.go b/internal/gitaly/service/objectpool/create.go
index 7b614c8ff..1acf5a616 100644
--- a/internal/gitaly/service/objectpool/create.go
+++ b/internal/gitaly/service/objectpool/create.go
@@ -67,7 +67,7 @@ func (s *server) poolForRequest(req poolRequest) (*objectpool.ObjectPool, error)
return nil, errMissingPool
}
- pool, err := objectpool.NewObjectPool(s.cfg, s.locator, s.gitCmdFactory, poolRepo.GetStorageName(), poolRepo.GetRelativePath())
+ pool, err := objectpool.NewObjectPool(s.cfg, s.locator, s.gitCmdFactory, s.catfileCache, poolRepo.GetStorageName(), poolRepo.GetRelativePath())
if err != nil {
if err == objectpool.ErrInvalidPoolDir {
return nil, errInvalidPoolDir
diff --git a/internal/gitaly/service/objectpool/create_test.go b/internal/gitaly/service/objectpool/create_test.go
index 8171546a8..d45143589 100644
--- a/internal/gitaly/service/objectpool/create_test.go
+++ b/internal/gitaly/service/objectpool/create_test.go
@@ -1,7 +1,6 @@
package objectpool
import (
- "os"
"path/filepath"
"strings"
"testing"
@@ -12,6 +11,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/objectpool"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/status"
)
@@ -22,7 +22,7 @@ func TestCreate(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
poolReq := &gitalypb.CreateObjectPoolRequest{
@@ -40,11 +40,10 @@ func TestCreate(t *testing.T) {
require.True(t, pool.IsValid())
// No hooks
- _, err = os.Stat(filepath.Join(pool.FullPath(), "hooks"))
- assert.True(t, os.IsNotExist(err))
+ assert.NoDirExists(t, filepath.Join(pool.FullPath(), "hooks"))
// No problems
- out := testhelper.MustRunCommand(t, nil, "git", "-C", pool.FullPath(), "cat-file", "-s", "55bc176024cfa3baaceb71db584c7e5df900ea65")
+ out := gittest.Exec(t, cfg, "-C", pool.FullPath(), "cat-file", "-s", "55bc176024cfa3baaceb71db584c7e5df900ea65")
assert.Equal(t, "282\n", string(out))
// Making the same request twice, should result in an error
@@ -54,14 +53,14 @@ func TestCreate(t *testing.T) {
}
func TestUnsuccessfulCreate(t *testing.T) {
- cfg, repo, _, locator, client := setup(t)
+ cfg, repo, _, locator, client := setup(t, testserver.WithDisablePraefect())
ctx, cancel := testhelper.Context()
defer cancel()
validPoolPath := gittest.NewObjectPoolName(t)
storageName := repo.GetStorageName()
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), storageName, validPoolPath)
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, storageName, validPoolPath)
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -155,7 +154,7 @@ func TestDelete(t *testing.T) {
defer cancel()
validPoolPath := gittest.NewObjectPoolName(t)
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), validPoolPath)
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), validPoolPath)
require.NoError(t, err)
require.NoError(t, pool.Create(ctx, repo))
diff --git a/internal/gitaly/service/objectpool/fetch_into_object_pool.go b/internal/gitaly/service/objectpool/fetch_into_object_pool.go
index 1680afb1e..301884cde 100644
--- a/internal/gitaly/service/objectpool/fetch_into_object_pool.go
+++ b/internal/gitaly/service/objectpool/fetch_into_object_pool.go
@@ -16,7 +16,7 @@ func (s *server) FetchIntoObjectPool(ctx context.Context, req *gitalypb.FetchInt
return nil, helper.ErrInvalidArgument(err)
}
- objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, req.GetObjectPool())
+ objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, s.catfileCache, req.GetObjectPool())
if err != nil {
return nil, helper.ErrInvalidArgument(fmt.Errorf("object pool invalid: %v", err))
}
diff --git a/internal/gitaly/service/objectpool/fetch_into_object_pool_test.go b/internal/gitaly/service/objectpool/fetch_into_object_pool_test.go
index 3df38142a..0bdce9030 100644
--- a/internal/gitaly/service/objectpool/fetch_into_object_pool_test.go
+++ b/internal/gitaly/service/objectpool/fetch_into_object_pool_test.go
@@ -15,6 +15,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/hooks"
"gitlab.com/gitlab-org/gitaly/internal/git/objectpool"
@@ -23,6 +24,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/labkit/log"
+ "google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
@@ -33,9 +35,9 @@ func TestFetchIntoObjectPool_Success(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- repoCommit := gittest.CreateCommit(t, cfg, repoPath, t.Name(), &gittest.CreateCommitOpts{Message: t.Name()})
+ repoCommit := gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch(t.Name()))
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -53,13 +55,13 @@ func TestFetchIntoObjectPool_Success(t *testing.T) {
require.True(t, pool.IsValid(), "ensure underlying repository is valid")
// No problems
- testhelper.MustRunCommand(t, nil, "git", "-C", pool.FullPath(), "fsck")
+ gittest.Exec(t, cfg, "-C", pool.FullPath(), "fsck")
packFiles, err := filepath.Glob(filepath.Join(pool.FullPath(), "objects", "pack", "pack-*.pack"))
require.NoError(t, err)
require.Len(t, packFiles, 1, "ensure commits got packed")
- packContents := testhelper.MustRunCommand(t, nil, "git", "-C", pool.FullPath(), "verify-pack", "-v", packFiles[0])
+ packContents := gittest.Exec(t, cfg, "-C", pool.FullPath(), "verify-pack", "-v", packFiles[0])
require.Contains(t, string(packContents), repoCommit)
_, err = client.FetchIntoObjectPool(ctx, req)
@@ -90,7 +92,7 @@ func TestFetchIntoObjectPool_hooks(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -114,24 +116,29 @@ func TestFetchIntoObjectPool_hooks(t *testing.T) {
}
_, err = client.FetchIntoObjectPool(ctx, req)
- require.Equal(t, status.Error(codes.Internal, "exit status 128"), err)
+ require.Equal(t, status.Error(codes.Internal, "fetch into object pool: exit status 128, stderr: \"fatal: ref updates aborted by hook\\n\""), err)
}
func TestFetchIntoObjectPool_CollectLogStatistics(t *testing.T) {
- defer func(old func(tb testing.TB) *logrus.Logger) { testhelper.NewTestLogger = old }(testhelper.NewTestLogger)
+ cfg, repo, _ := testcfg.BuildWithRepo(t)
+ testhelper.ConfigureGitalyHooksBin(t, cfg)
+
+ locator := config.NewLocator(cfg)
logBuffer := &bytes.Buffer{}
- testhelper.NewTestLogger = func(tb testing.TB) *logrus.Logger {
- return &logrus.Logger{Out: logBuffer, Formatter: &logrus.JSONFormatter{}, Level: logrus.InfoLevel}
- }
+ logger := &logrus.Logger{Out: logBuffer, Formatter: &logrus.JSONFormatter{}, Level: logrus.InfoLevel}
+ serverSocketPath := runObjectPoolServer(t, cfg, locator, logger)
- cfg, repo, _, locator, client := setup(t)
+ conn, err := grpc.Dial(serverSocketPath, grpc.WithInsecure())
+ require.NoError(t, err)
+ t.Cleanup(func() { testhelper.MustClose(t, conn) })
+ client := gitalypb.NewObjectPoolServiceClient(conn)
ctx, cancel := testhelper.Context()
defer cancel()
ctx = ctxlogrus.ToContext(ctx, log.WithField("test", "logging"))
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -167,12 +174,12 @@ func TestFetchIntoObjectPool_Failure(t *testing.T) {
locator := config.NewLocator(cfg)
gitCmdFactory := git.NewExecCommandFactory(cfg)
- server := NewServer(cfg, locator, gitCmdFactory)
+ server := NewServer(cfg, locator, gitCmdFactory, catfile.NewCache(cfg))
ctx, cancel := testhelper.Context()
defer cancel()
- pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, repos[0].StorageName, gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, nil, repos[0].StorageName, gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
diff --git a/internal/gitaly/service/objectpool/get.go b/internal/gitaly/service/objectpool/get.go
index 22ab81898..1f12de05d 100644
--- a/internal/gitaly/service/objectpool/get.go
+++ b/internal/gitaly/service/objectpool/get.go
@@ -15,7 +15,7 @@ func (s *server) GetObjectPool(ctx context.Context, in *gitalypb.GetObjectPoolRe
return nil, helper.ErrInternal(errors.New("repository is empty"))
}
- objectPool, err := objectpool.FromRepo(s.cfg, s.locator, s.gitCmdFactory, in.GetRepository())
+ objectPool, err := objectpool.FromRepo(s.cfg, s.locator, s.gitCmdFactory, s.catfileCache, in.GetRepository())
if err != nil {
ctxlogrus.Extract(ctx).
diff --git a/internal/gitaly/service/objectpool/get_test.go b/internal/gitaly/service/objectpool/get_test.go
index 5d58fc8ff..c5f13a622 100644
--- a/internal/gitaly/service/objectpool/get_test.go
+++ b/internal/gitaly/service/objectpool/get_test.go
@@ -19,7 +19,7 @@ func TestGetObjectPoolSuccess(t *testing.T) {
relativePoolPath := gittest.NewObjectPoolName(t)
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), relativePoolPath)
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), relativePoolPath)
require.NoError(t, err)
poolCtx, cancel := testhelper.Context()
diff --git a/internal/gitaly/service/objectpool/link_test.go b/internal/gitaly/service/objectpool/link_test.go
index 2ca1dfd36..816d67297 100644
--- a/internal/gitaly/service/objectpool/link_test.go
+++ b/internal/gitaly/service/objectpool/link_test.go
@@ -13,19 +13,20 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/objectpool"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
func TestLink(t *testing.T) {
- cfg, repo, _, locator, client := setup(t)
+ cfg, repo, _, locator, client := setup(t, testserver.WithDisablePraefect())
ctx, cancel := testhelper.Context()
defer cancel()
- localRepo := localrepo.New(git.NewExecCommandFactory(cfg), repo, cfg)
+ localRepo := localrepo.NewTestRepo(t, cfg, repo)
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
require.NoError(t, pool.Remove(ctx), "make sure pool does not exist at start of test")
@@ -33,7 +34,8 @@ func TestLink(t *testing.T) {
// Mock object in the pool, which should be available to the pool members
// after linking
- poolCommitID := gittest.CreateCommit(t, cfg, pool.FullPath(), "pool-test-branch", nil)
+ poolCommitID := gittest.WriteCommit(t, cfg, pool.FullPath(),
+ gittest.WithBranch("pool-test-branch"))
testCases := []struct {
desc string
@@ -80,7 +82,7 @@ func TestLink(t *testing.T) {
commit, err := localRepo.ReadCommit(ctx, git.Revision(poolCommitID))
require.NoError(t, err)
require.NotNil(t, commit)
- require.Equal(t, poolCommitID, commit.Id)
+ require.Equal(t, poolCommitID.String(), commit.Id)
})
}
}
@@ -91,7 +93,7 @@ func TestLinkIdempotent(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -116,7 +118,7 @@ func TestLinkNoClobber(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -125,7 +127,7 @@ func TestLinkNoClobber(t *testing.T) {
require.NoError(t, pool.Create(ctx, repo))
alternatesFile := filepath.Join(repoPath, "objects/info/alternates")
- testhelper.AssertPathNotExists(t, alternatesFile)
+ require.NoFileExists(t, alternatesFile)
contentBefore := "mock/objects\n"
require.NoError(t, ioutil.WriteFile(alternatesFile, []byte(contentBefore), 0644))
@@ -138,9 +140,7 @@ func TestLinkNoClobber(t *testing.T) {
_, err = client.LinkRepositoryToObjectPool(ctx, request)
require.Error(t, err)
- contentAfter, err := ioutil.ReadFile(alternatesFile)
- require.NoError(t, err)
-
+ contentAfter := testhelper.MustReadFile(t, alternatesFile)
require.Equal(t, contentBefore, string(contentAfter), "contents of existing alternates file should not have changed")
}
@@ -150,7 +150,7 @@ func TestLinkNoPool(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
// intentionally do not call pool.Create
defer func() {
@@ -172,16 +172,16 @@ func TestLinkNoPool(t *testing.T) {
}
func TestUnlink(t *testing.T) {
- cfg, repo, _, locator, client := setup(t)
+ cfg, repo, _, locator, client := setup(t, testserver.WithDisablePraefect())
ctx, cancel := testhelper.Context()
defer cancel()
- deletedRepo, deletedRepoPath, removeDeletedRepo := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "todelete")
+ deletedRepo, deletedRepoPath, removeDeletedRepo := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "todelete")
defer removeDeletedRepo()
gitCmdFactory := git.NewExecCommandFactory(cfg)
- pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -192,17 +192,17 @@ func TestUnlink(t *testing.T) {
require.NoError(t, pool.Link(ctx, deletedRepo))
removeDeletedRepo()
- testhelper.AssertPathNotExists(t, deletedRepoPath)
+ require.NoFileExists(t, deletedRepoPath)
- pool2, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool2, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
require.NoError(t, pool2.Create(ctx, repo), "create pool 2")
defer func() {
require.NoError(t, pool2.Remove(ctx))
}()
- require.False(t, gittest.RemoteExists(t, pool.FullPath(), repo.GlRepository), "sanity check: remote exists in pool")
- require.False(t, gittest.RemoteExists(t, pool.FullPath(), deletedRepo.GlRepository), "sanity check: remote exists in pool")
+ require.False(t, gittest.RemoteExists(t, cfg, pool.FullPath(), repo.GlRepository), "sanity check: remote exists in pool")
+ require.False(t, gittest.RemoteExists(t, cfg, pool.FullPath(), deletedRepo.GlRepository), "sanity check: remote exists in pool")
testCases := []struct {
desc string
@@ -276,7 +276,7 @@ func TestUnlink(t *testing.T) {
require.NoError(t, err, "call UnlinkRepositoryFromObjectPool")
remoteName := tc.req.Repository.GlRepository
- require.False(t, gittest.RemoteExists(t, pool.FullPath(), remoteName), "remote should no longer exist in pool")
+ require.False(t, gittest.RemoteExists(t, cfg, pool.FullPath(), remoteName), "remote should no longer exist in pool")
})
}
}
@@ -287,7 +287,7 @@ func TestUnlinkIdempotent(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, git.NewExecCommandFactory(cfg), nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
diff --git a/internal/gitaly/service/objectpool/reduplicate_test.go b/internal/gitaly/service/objectpool/reduplicate_test.go
index a10517920..abc928033 100644
--- a/internal/gitaly/service/objectpool/reduplicate_test.go
+++ b/internal/gitaly/service/objectpool/reduplicate_test.go
@@ -19,7 +19,7 @@ func TestReduplicate(t *testing.T) {
defer cancel()
gitCmdFactory := git.NewExecCommandFactory(cfg)
- pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(cfg, locator, gitCmdFactory, nil, repo.GetStorageName(), gittest.NewObjectPoolName(t))
require.NoError(t, err)
defer func() {
require.NoError(t, pool.Remove(ctx))
@@ -27,7 +27,7 @@ func TestReduplicate(t *testing.T) {
require.NoError(t, pool.Create(ctx, repo))
require.NoError(t, pool.Link(ctx, repo))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "gc")
+ gittest.Exec(t, cfg, "-C", repoPath, "gc")
existingObjectID := "55bc176024cfa3baaceb71db584c7e5df900ea65"
@@ -47,5 +47,5 @@ func TestReduplicate(t *testing.T) {
require.NoError(t, err)
require.NoError(t, pool.Unlink(ctx, repo))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "cat-file", "-e", existingObjectID)
+ gittest.Exec(t, cfg, "-C", repoPath, "cat-file", "-e", existingObjectID)
}
diff --git a/internal/gitaly/service/objectpool/server.go b/internal/gitaly/service/objectpool/server.go
index e281b3335..ab183d6b1 100644
--- a/internal/gitaly/service/objectpool/server.go
+++ b/internal/gitaly/service/objectpool/server.go
@@ -2,6 +2,7 @@ package objectpool
import (
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -11,9 +12,20 @@ type server struct {
cfg config.Cfg
locator storage.Locator
gitCmdFactory git.CommandFactory
+ catfileCache catfile.Cache
}
// NewServer creates a new instance of a gRPC repo server
-func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory) gitalypb.ObjectPoolServiceServer {
- return &server{cfg: cfg, locator: locator, gitCmdFactory: gitCmdFactory}
+func NewServer(
+ cfg config.Cfg,
+ locator storage.Locator,
+ gitCmdFactory git.CommandFactory,
+ catfileCache catfile.Cache,
+) gitalypb.ObjectPoolServiceServer {
+ return &server{
+ cfg: cfg,
+ locator: locator,
+ gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
+ }
}
diff --git a/internal/gitaly/service/objectpool/testhelper_test.go b/internal/gitaly/service/objectpool/testhelper_test.go
index a55b10926..b0bbd6484 100644
--- a/internal/gitaly/service/objectpool/testhelper_test.go
+++ b/internal/gitaly/service/objectpool/testhelper_test.go
@@ -1,21 +1,18 @@
package objectpool
import (
- "context"
- "net"
"os"
"testing"
+ "github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/backchannel"
- "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
hookservice "gitlab.com/gitlab-org/gitaly/internal/gitaly/service/hook"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
)
@@ -31,7 +28,7 @@ func testMain(m *testing.M) int {
return m.Run()
}
-func setup(t *testing.T) (config.Cfg, *gitalypb.Repository, string, storage.Locator, gitalypb.ObjectPoolServiceClient) {
+func setup(t *testing.T, opts ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, storage.Locator, gitalypb.ObjectPoolServiceClient) {
t.Helper()
cfg, repo, repoPath := testcfg.BuildWithRepo(t)
@@ -39,51 +36,27 @@ func setup(t *testing.T) (config.Cfg, *gitalypb.Repository, string, storage.Loca
testhelper.ConfigureGitalyHooksBin(t, cfg)
locator := config.NewLocator(cfg)
- server, serverSocketPath := runObjectPoolServer(t, cfg, locator)
- t.Cleanup(server.Stop)
+ addr := runObjectPoolServer(t, cfg, locator, testhelper.DiscardTestLogger(t), opts...)
- client, conn := newObjectPoolClient(t, serverSocketPath)
- t.Cleanup(func() { conn.Close() })
-
- return cfg, repo, repoPath, locator, client
-}
-
-func runObjectPoolServer(t *testing.T, cfg config.Cfg, locator storage.Locator) (*grpc.Server, string) {
- server := testhelper.NewTestGrpcServer(t, nil, nil)
-
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
- listener, err := net.Listen("unix", serverSocketPath)
- require.NoError(t, err)
-
- internalListener, err := net.Listen("unix", cfg.GitalyInternalSocketPath())
+ conn, err := grpc.Dial(addr, grpc.WithInsecure())
require.NoError(t, err)
+ t.Cleanup(func() { testhelper.MustClose(t, conn) })
- txManager := transaction.NewManager(cfg, backchannel.NewRegistry())
- hookManager := hook.NewManager(locator, txManager, hook.GitlabAPIStub, cfg)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
-
- gitalypb.RegisterObjectPoolServiceServer(server, NewServer(cfg, locator, gitCmdFactory))
- gitalypb.RegisterHookServiceServer(server, hookservice.NewServer(cfg, hookManager, gitCmdFactory))
-
- go server.Serve(listener)
- go server.Serve(internalListener)
-
- return server, serverSocketPath
+ return cfg, repo, repoPath, locator, gitalypb.NewObjectPoolServiceClient(conn)
}
-func newObjectPoolClient(t *testing.T, serverSocketPath string) (gitalypb.ObjectPoolServiceClient, *grpc.ClientConn) {
- connOpts := []grpc.DialOption{
- grpc.WithInsecure(),
- grpc.WithContextDialer(func(ctx context.Context, addr string) (conn net.Conn, err error) {
- d := net.Dialer{}
- return d.DialContext(ctx, "unix", addr)
- }),
- }
-
- conn, err := grpc.Dial(serverSocketPath, connOpts...)
- if err != nil {
- t.Fatal(err)
- }
-
- return gitalypb.NewObjectPoolServiceClient(conn), conn
+func runObjectPoolServer(t *testing.T, cfg config.Cfg, locator storage.Locator, logger *logrus.Logger, opts ...testserver.GitalyServerOpt) string {
+ return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterObjectPoolServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(
+ deps.GetCfg(),
+ deps.GetHookManager(),
+ deps.GetGitCmdFactory(),
+ ))
+ }, append(opts, testserver.WithLocator(locator), testserver.WithLogger(logger))...)
}
diff --git a/internal/gitaly/service/operations/apply_patch_test.go b/internal/gitaly/service/operations/apply_patch_test.go
index 1e94eae05..01f9aafac 100644
--- a/internal/gitaly/service/operations/apply_patch_test.go
+++ b/internal/gitaly/service/operations/apply_patch_test.go
@@ -3,7 +3,6 @@ package operations
import (
"fmt"
"io"
- "io/ioutil"
"os"
"strings"
"testing"
@@ -17,6 +16,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
"google.golang.org/grpc/codes"
@@ -28,7 +28,7 @@ func testSuccessfulUserApplyPatch(t *testing.T, cfg config.Cfg, rubySrv *rubyser
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
testPatchReadme := "testdata/0001-A-commit-from-a-patch.patch"
testPatchFeature := "testdata/0001-This-does-not-apply-to-the-feature-branch.patch"
@@ -68,7 +68,7 @@ func testSuccessfulUserApplyPatch(t *testing.T, cfg config.Cfg, rubySrv *rubyser
stream, err := client.UserApplyPatch(ctx)
require.NoError(t, err)
- headerRequest := applyPatchHeaderRequest(repoProto, testhelper.TestUser, testCase.branchName)
+ headerRequest := applyPatchHeaderRequest(repoProto, gittest.TestUser, testCase.branchName)
require.NoError(t, stream.Send(headerRequest))
writer := streamio.NewWriter(func(p []byte) error {
@@ -95,7 +95,7 @@ func testSuccessfulUserApplyPatch(t *testing.T, cfg config.Cfg, rubySrv *rubyser
response.GetBranchUpdate()
require.Equal(t, testCase.branchCreated, response.GetBranchUpdate().GetBranchCreated())
- branches := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch")
+ branches := gittest.Exec(t, cfg, "-C", repoPath, "branch")
require.Contains(t, string(branches), testCase.branchName)
maxCount := fmt.Sprintf("--max-count=%d", len(testCase.commitMessages))
@@ -110,7 +110,7 @@ func testSuccessfulUserApplyPatch(t *testing.T, cfg config.Cfg, rubySrv *rubyser
"--reverse",
}
- output := testhelper.MustRunCommand(t, nil, "git", gitArgs...)
+ output := gittest.Exec(t, cfg, gitArgs...)
shas := strings.Split(string(output), "\n")
// Throw away the last element, as that's going to be
// an empty string.
@@ -125,7 +125,7 @@ func testSuccessfulUserApplyPatch(t *testing.T, cfg config.Cfg, rubySrv *rubyser
require.NotNil(t, commit)
require.Equal(t, string(commit.Subject), testCase.commitMessages[index])
require.Equal(t, string(commit.Author.Email), "patchuser@gitlab.org")
- require.Equal(t, string(commit.Committer.Email), string(testhelper.TestUser.Email))
+ require.Equal(t, string(commit.Committer.Email), string(gittest.TestUser.Email))
}
})
}
@@ -137,7 +137,7 @@ func testUserApplyPatchStableID(t *testing.T, cfg config.Cfg, rubySrv *rubyserve
ctx, cfg, repoProto, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
stream, err := client.UserApplyPatch(ctx)
require.NoError(t, err)
@@ -146,15 +146,14 @@ func testUserApplyPatchStableID(t *testing.T, cfg config.Cfg, rubySrv *rubyserve
UserApplyPatchRequestPayload: &gitalypb.UserApplyPatchRequest_Header_{
Header: &gitalypb.UserApplyPatchRequest_Header{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
TargetBranch: []byte("branch"),
Timestamp: &timestamp.Timestamp{Seconds: 1234512345},
},
},
}))
- patch, err := ioutil.ReadFile("testdata/0001-A-commit-from-a-patch.patch")
- require.NoError(t, err)
+ patch := testhelper.MustReadFile(t, "testdata/0001-A-commit-from-a-patch.patch")
require.NoError(t, stream.Send(&gitalypb.UserApplyPatchRequest{
UserApplyPatchRequestPayload: &gitalypb.UserApplyPatchRequest_Patches{
Patches: patch,
@@ -183,8 +182,8 @@ func testUserApplyPatchStableID(t *testing.T, cfg config.Cfg, rubySrv *rubyserve
Timezone: []byte("+0200"),
},
Committer: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
Date: &timestamp.Timestamp{Seconds: 1234512345},
Timezone: []byte("+0000"),
},
@@ -197,13 +196,12 @@ func testFailedPatchApplyPatch(t *testing.T, cfg config.Cfg, rubySrv *rubyserver
ctx, _, repo, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- testPatch, err := ioutil.ReadFile("testdata/0001-This-does-not-apply-to-the-feature-branch.patch")
- require.NoError(t, err)
+ testPatch := testhelper.MustReadFile(t, "testdata/0001-This-does-not-apply-to-the-feature-branch.patch")
stream, err := client.UserApplyPatch(ctx)
require.NoError(t, err)
- headerRequest := applyPatchHeaderRequest(repo, testhelper.TestUser, "feature")
+ headerRequest := applyPatchHeaderRequest(repo, gittest.TestUser, "feature")
require.NoError(t, stream.Send(headerRequest))
patchRequest := applyPatchPatchesRequest(testPatch)
@@ -214,8 +212,7 @@ func testFailedPatchApplyPatch(t *testing.T, cfg config.Cfg, rubySrv *rubyserver
}
func TestFailedValidationUserApplyPatch(t *testing.T) {
- testRepo, _, cleanupFn := gittest.CloneRepo(t)
- defer cleanupFn()
+ _, repo, _ := testcfg.BuildWithRepo(t)
testCases := []struct {
desc string
@@ -228,27 +225,27 @@ func TestFailedValidationUserApplyPatch(t *testing.T) {
desc: "missing Repository",
errorMessage: "missing Repository",
branchName: "new-branch",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
},
{
desc: "missing Branch",
errorMessage: "missing Branch",
- repo: testRepo,
- user: testhelper.TestUser,
+ repo: repo,
+ user: gittest.TestUser,
},
{
desc: "empty BranchName",
errorMessage: "missing Branch",
- repo: testRepo,
- user: testhelper.TestUser,
+ repo: repo,
+ user: gittest.TestUser,
branchName: "",
},
{
desc: "missing User",
errorMessage: "missing User",
branchName: "new-branch",
- repo: testRepo,
+ repo: repo,
},
}
diff --git a/internal/gitaly/service/operations/branches.go b/internal/gitaly/service/operations/branches.go
index b94b7a2e8..f6f3e0e00 100644
--- a/internal/gitaly/service/operations/branches.go
+++ b/internal/gitaly/service/operations/branches.go
@@ -5,7 +5,6 @@ import (
"errors"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -26,13 +25,13 @@ func (s *Server) UserCreateBranch(ctx context.Context, req *gitalypb.UserCreateB
return nil, status.Errorf(codes.InvalidArgument, "empty start point")
}
- repo := localrepo.New(s.gitCmdFactory, req.Repository, s.cfg)
+ repo := s.localrepo(req.GetRepository())
// BEGIN TODO: Uncomment if StartPoint started behaving sensibly
// like BranchName. See
// https://gitlab.com/gitlab-org/gitaly/-/issues/3331
//
- // startPointReference, err := localrepo.New(req.Repository).GetReference(ctx, "refs/heads/"+string(req.StartPoint))
+ // startPointReference, err := s.localrepo(req.GetRepository()).GetReference(ctx, "refs/heads/"+string(req.StartPoint))
// startPointCommit, err := log.GetCommit(ctx, req.Repository, startPointReference.Target)
startPointCommit, err := repo.ReadCommit(ctx, git.Revision(req.StartPoint))
// END TODO
@@ -170,7 +169,7 @@ func (s *Server) UserDeleteBranch(ctx context.Context, req *gitalypb.UserDeleteB
referenceName := git.NewReferenceNameFromBranchName(string(req.BranchName))
- referenceValue, err := localrepo.New(s.gitCmdFactory, req.Repository, s.cfg).ResolveRevision(ctx, referenceName.Revision())
+ referenceValue, err := s.localrepo(req.GetRepository()).ResolveRevision(ctx, referenceName.Revision())
if err != nil {
return nil, status.Errorf(codes.FailedPrecondition, "branch not found: %s", req.BranchName)
}
diff --git a/internal/gitaly/service/operations/branches_test.go b/internal/gitaly/service/operations/branches_test.go
index 21bbef250..76c2394c6 100644
--- a/internal/gitaly/service/operations/branches_test.go
+++ b/internal/gitaly/service/operations/branches_test.go
@@ -16,10 +16,10 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/hook"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
@@ -44,7 +44,7 @@ func TestSuccessfulCreateBranchRequest(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
startPoint := "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
startPointCommit, err := repo.ReadCommit(ctx, git.Revision(startPoint))
@@ -97,117 +97,119 @@ func TestSuccessfulCreateBranchRequest(t *testing.T) {
Repository: repoProto,
BranchName: []byte(branchName),
StartPoint: []byte(testCase.startPoint),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
response, err := client.UserCreateBranch(ctx, request)
if testCase.expectedBranch != nil {
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-D", branchName)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "branch", "-D", branchName)
}
require.NoError(t, err)
require.Equal(t, testCase.expectedBranch, response.Branch)
require.Empty(t, response.PreReceiveError)
- branches := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--", "refs/heads/"+branchName)
+ branches := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--", "refs/heads/"+branchName)
require.Contains(t, string(branches), "refs/heads/"+branchName)
})
}
}
func TestUserCreateBranchWithTransaction(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.BackchannelVoting,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- cfg, repo, repoPath := testcfg.BuildWithRepo(t)
-
- transactionServer := &testTransactionServer{}
+ cfg, repo, repoPath := testcfg.BuildWithRepo(t)
+
+ transactionServer := &testTransactionServer{}
+
+ cfg.ListenAddr = "127.0.0.1:0" // runs gitaly on the TCP address
+ addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterOperationServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ nil,
+ deps.GetHookManager(),
+ deps.GetLocator(),
+ deps.GetConnsPool(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterHookServiceServer(srv, hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
+ // Praefect proxy execution disabled as praefect runs only on the UNIX socket, but
+ // the test requires a TCP listening address.
+ }, testserver.WithDisablePraefect())
+
+ addrConfig, err := starter.ParseEndpoint(addr)
+ require.NoError(t, err)
+ _, port, err := net.SplitHostPort(addrConfig.Addr)
+ require.NoError(t, err)
- cfg.ListenAddr = "127.0.0.1:0" // runs gitaly on the TCP address
- addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterOperationServiceServer(srv, NewServer(deps.GetCfg(), nil, deps.GetHookManager(), deps.GetLocator(), deps.GetConnsPool(), deps.GetGitCmdFactory()))
- gitalypb.RegisterHookServiceServer(srv, hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
- if featureflag.IsDisabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, transactionServer)
- }
- // Praefect proxy execution disabled as praefect runs only on the UNIX socket, but
- // the test requires a TCP listening address.
- }, testserver.WithDisablePraefect())
-
- addrConfig, err := starter.ParseEndpoint(addr)
- require.NoError(t, err)
- _, port, err := net.SplitHostPort(addrConfig.Addr)
- require.NoError(t, err)
-
- testcases := []struct {
- desc string
- address string
- server metadata.PraefectServer
- }{
- {
- desc: "explicit TCP address",
- address: addr,
- server: metadata.PraefectServer{
- ListenAddr: addr,
- Token: cfg.Auth.Token,
- },
+ testcases := []struct {
+ desc string
+ address string
+ server txinfo.PraefectServer
+ }{
+ {
+ desc: "explicit TCP address",
+ address: addr,
+ server: txinfo.PraefectServer{
+ ListenAddr: addr,
+ Token: cfg.Auth.Token,
},
- {
- desc: "catch-all TCP address",
- address: addr,
- server: metadata.PraefectServer{
- ListenAddr: "tcp://0.0.0.0:" + port,
- Token: cfg.Auth.Token,
- },
+ },
+ {
+ desc: "catch-all TCP address",
+ address: addr,
+ server: txinfo.PraefectServer{
+ ListenAddr: "tcp://0.0.0.0:" + port,
+ Token: cfg.Auth.Token,
},
- {
- desc: "Unix socket",
- address: "unix://" + cfg.GitalyInternalSocketPath(),
- server: metadata.PraefectServer{
- SocketPath: "unix://" + cfg.GitalyInternalSocketPath(),
- Token: cfg.Auth.Token,
- },
+ },
+ {
+ desc: "Unix socket",
+ address: "unix://" + cfg.GitalyInternalSocketPath(),
+ server: txinfo.PraefectServer{
+ SocketPath: "unix://" + cfg.GitalyInternalSocketPath(),
+ Token: cfg.Auth.Token,
},
- }
+ },
+ }
- for _, tc := range testcases {
- t.Run(tc.desc, func(t *testing.T) {
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-D", "new-branch")
-
- client := newMuxedOperationClient(t, ctx, tc.address, cfg.Auth.Token,
- backchannel.NewClientHandshaker(
- testhelper.DiscardTestEntry(t),
- func() backchannel.Server {
- srv := grpc.NewServer()
- if featureflag.IsEnabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, transactionServer)
- }
- return srv
- },
- ),
- )
-
- ctx, err := tc.server.Inject(ctx)
- require.NoError(t, err)
- ctx, err = metadata.InjectTransaction(ctx, 1, "node", true)
- require.NoError(t, err)
- ctx = helper.IncomingToOutgoing(ctx)
+ for _, tc := range testcases {
+ t.Run(tc.desc, func(t *testing.T) {
+ defer gittest.Exec(t, cfg, "-C", repoPath, "branch", "-D", "new-branch")
- request := &gitalypb.UserCreateBranchRequest{
- Repository: repo,
- BranchName: []byte("new-branch"),
- StartPoint: []byte("c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"),
- User: testhelper.TestUser,
- }
+ ctx, cancel := testhelper.Context()
+ defer cancel()
- transactionServer.called = 0
- response, err := client.UserCreateBranch(ctx, request)
- require.NoError(t, err)
- require.Empty(t, response.PreReceiveError)
- require.Equal(t, 1, transactionServer.called)
- })
- }
- })
+ client := newMuxedOperationClient(t, ctx, tc.address, cfg.Auth.Token,
+ backchannel.NewClientHandshaker(
+ testhelper.DiscardTestEntry(t),
+ func() backchannel.Server {
+ srv := grpc.NewServer()
+ gitalypb.RegisterRefTransactionServer(srv, transactionServer)
+ return srv
+ },
+ ),
+ )
+
+ ctx, err := tc.server.Inject(ctx)
+ require.NoError(t, err)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", true)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ request := &gitalypb.UserCreateBranchRequest{
+ Repository: repo,
+ BranchName: []byte("new-branch"),
+ StartPoint: []byte("c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"),
+ User: gittest.TestUser,
+ }
+
+ transactionServer.called = 0
+ response, err := client.UserCreateBranch(ctx, request)
+ require.NoError(t, err)
+ require.Empty(t, response.PreReceiveError)
+ require.Equal(t, 2, transactionServer.called)
+ })
+ }
}
func TestSuccessfulGitHooksForUserCreateBranchRequest(t *testing.T) {
@@ -217,19 +219,19 @@ func TestSuccessfulGitHooksForUserCreateBranchRequest(t *testing.T) {
}
func testSuccessfulGitHooksForUserCreateBranchRequest(t *testing.T, ctx context.Context) {
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
branchName := "new-branch"
request := &gitalypb.UserCreateBranchRequest{
Repository: repo,
BranchName: []byte(branchName),
StartPoint: []byte("c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
for _, hookName := range GitlabHooks {
t.Run(hookName, func(t *testing.T) {
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-D", branchName)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "branch", "-D", branchName)
hookOutputTempPath := gittest.WriteEnvToCustomHook(t, repoPath, hookName)
@@ -238,7 +240,7 @@ func testSuccessfulGitHooksForUserCreateBranchRequest(t *testing.T, ctx context.
require.Empty(t, response.PreReceiveError)
output := string(testhelper.MustReadFile(t, hookOutputTempPath))
- require.Contains(t, output, "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, output, "GL_USERNAME="+gittest.TestUser.GlUsername)
})
}
}
@@ -249,7 +251,7 @@ func TestSuccessfulCreateBranchRequestWithStartPointRefPrefix(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
testCases := []struct {
desc string
@@ -268,20 +270,20 @@ func TestSuccessfulCreateBranchRequestWithStartPointRefPrefix(t *testing.T) {
branchName: "topic",
startPoint: "heads/master",
startPointCommit: "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0", // TODO: see below
- user: testhelper.TestUser,
+ user: gittest.TestUser,
},
{
desc: "the StartPoint parameter does DWYM references (boo!) 2",
branchName: "topic2",
startPoint: "refs/heads/master",
startPointCommit: "c642fe9b8b9f28f9225d7ea953fe14e74748d53b", // TODO: see below
- user: testhelper.TestUser,
+ user: gittest.TestUser,
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/heads/"+testCase.startPoint,
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/"+testCase.startPoint,
testCase.startPointCommit,
git.ZeroOID.String(),
)
@@ -310,7 +312,7 @@ func TestSuccessfulCreateBranchRequestWithStartPointRefPrefix(t *testing.T) {
},
}
require.Equal(t, responseOk, response)
- branches := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--", "refs/heads/"+testCase.branchName)
+ branches := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--", "refs/heads/"+testCase.branchName)
require.Contains(t, string(branches), "refs/heads/"+testCase.branchName)
})
}
@@ -326,7 +328,7 @@ func TestFailedUserCreateBranchDueToHooks(t *testing.T) {
Repository: repo,
BranchName: []byte("new-branch"),
StartPoint: []byte("c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
// Write a hook that will fail with the environment as the error message
// so we can check that string for our env variables.
@@ -337,7 +339,7 @@ func TestFailedUserCreateBranchDueToHooks(t *testing.T) {
response, err := client.UserCreateBranch(ctx, request)
require.Nil(t, err)
- require.Contains(t, response.PreReceiveError, "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, response.PreReceiveError, "GL_USERNAME="+gittest.TestUser.GlUsername)
}
}
@@ -358,7 +360,7 @@ func TestFailedUserCreateBranchRequest(t *testing.T) {
desc: "empty start_point",
branchName: "shiny-new-branch",
startPoint: "",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Error(codes.InvalidArgument, "empty start point"),
},
{
@@ -372,7 +374,7 @@ func TestFailedUserCreateBranchRequest(t *testing.T) {
desc: "non-existing starting point",
branchName: "new-branch",
startPoint: "i-dont-exist",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Errorf(codes.FailedPrecondition, "revspec '%s' not found", "i-dont-exist"),
},
@@ -380,7 +382,7 @@ func TestFailedUserCreateBranchRequest(t *testing.T) {
desc: "branch exists",
branchName: "master",
startPoint: "master",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Errorf(codes.FailedPrecondition, "Could not update %s. Please refresh and try again.", "master"),
},
}
@@ -408,7 +410,7 @@ func TestSuccessfulUserDeleteBranchRequest(t *testing.T) {
}
func testSuccessfulUserDeleteBranchRequest(t *testing.T, ctx context.Context) {
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
testCases := []struct {
desc string
@@ -422,28 +424,28 @@ func testSuccessfulUserDeleteBranchRequest(t *testing.T, ctx context.Context) {
desc: "simple successful deletion",
branchNameInput: "to-attempt-to-delete-soon-branch",
branchCommit: "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: &gitalypb.UserDeleteBranchResponse{},
},
{
desc: "partially prefixed successful deletion",
branchNameInput: "heads/to-attempt-to-delete-soon-branch",
branchCommit: "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: &gitalypb.UserDeleteBranchResponse{},
},
{
desc: "branch with refs/heads/ prefix",
branchNameInput: "refs/heads/branch",
branchCommit: "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: &gitalypb.UserDeleteBranchResponse{},
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", testCase.branchNameInput, testCase.branchCommit)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", testCase.branchNameInput, testCase.branchCommit)
response, err := client.UserDeleteBranch(ctx, &gitalypb.UserDeleteBranchRequest{
Repository: repo,
@@ -453,7 +455,7 @@ func testSuccessfulUserDeleteBranchRequest(t *testing.T, ctx context.Context) {
require.NoError(t, err)
testhelper.ProtoEqual(t, testCase.response, response)
- refs := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--", "refs/heads/"+testCase.branchNameInput)
+ refs := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--", "refs/heads/"+testCase.branchNameInput)
require.NotContains(t, string(refs), testCase.branchCommit, "branch deleted from refs")
})
}
@@ -463,19 +465,19 @@ func TestSuccessfulGitHooksForUserDeleteBranchRequest(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
branchNameInput := "to-be-deleted-soon-branch"
request := &gitalypb.UserDeleteBranchRequest{
Repository: repo,
BranchName: []byte(branchNameInput),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
for _, hookName := range GitlabHooks {
t.Run(hookName, func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", branchNameInput)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", branchNameInput)
hookOutputTempPath := gittest.WriteEnvToCustomHook(t, repoPath, hookName)
@@ -483,70 +485,69 @@ func TestSuccessfulGitHooksForUserDeleteBranchRequest(t *testing.T) {
require.NoError(t, err)
output := testhelper.MustReadFile(t, hookOutputTempPath)
- require.Contains(t, string(output), "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, string(output), "GL_USERNAME="+gittest.TestUser.GlUsername)
})
}
}
func TestUserDeleteBranch_transaction(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.BackchannelVoting,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- cfg, repo, repoPath := testcfg.BuildWithRepo(t)
-
- // This creates a new branch "delete-me" which exists both in the packed-refs file and as a
- // loose reference. Git will create two reference transactions for this: one transaction to
- // delete the packed-refs reference, and one to delete the loose ref. But given that we want
- // to be independent of how well-packed refs are, we expect to get a single transactional
- // vote, only.
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/heads/delete-me", "master~")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "pack-refs", "--all")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/heads/delete-me", "master")
-
- transactionServer := &testTransactionServer{}
-
- testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterOperationServiceServer(srv, NewServer(deps.GetCfg(), nil, deps.GetHookManager(), deps.GetLocator(), deps.GetConnsPool(), deps.GetGitCmdFactory()))
- // We're setting up the RefTransaction server on the same server as the OperationService.
- // Typically it would be hosted on Praefect, but in order to make the already-complex test
- // setup not even more complex we just reuse the same GRPC server.
- if featureflag.IsDisabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, transactionServer)
- }
- })
+ cfg, repo, repoPath := testcfg.BuildWithRepo(t)
+
+ // This creates a new branch "delete-me" which exists both in the packed-refs file and as a
+ // loose reference. Git will create two reference transactions for this: one transaction to
+ // delete the packed-refs reference, and one to delete the loose ref. But given that we want
+ // to be independent of how well-packed refs are, we expect to get a single transactional
+ // vote, only.
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/delete-me", "master~")
+ gittest.Exec(t, cfg, "-C", repoPath, "pack-refs", "--all")
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/delete-me", "master")
+
+ transactionServer := &testTransactionServer{}
+
+ testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterOperationServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ nil,
+ deps.GetHookManager(),
+ deps.GetLocator(),
+ deps.GetConnsPool(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ })
- praefect := metadata.PraefectServer{
- SocketPath: fmt.Sprintf("unix://" + cfg.GitalyInternalSocketPath()),
- Token: cfg.Auth.Token,
- }
+ praefect := txinfo.PraefectServer{
+ SocketPath: fmt.Sprintf("unix://" + cfg.GitalyInternalSocketPath()),
+ Token: cfg.Auth.Token,
+ }
- ctx, err := praefect.Inject(ctx)
- require.NoError(t, err)
- ctx, err = metadata.InjectTransaction(ctx, 1, "node", true)
- require.NoError(t, err)
- ctx = helper.IncomingToOutgoing(ctx)
-
- client := newMuxedOperationClient(t, ctx, fmt.Sprintf("unix://"+cfg.GitalyInternalSocketPath()), cfg.Auth.Token,
- backchannel.NewClientHandshaker(
- testhelper.DiscardTestEntry(t),
- func() backchannel.Server {
- srv := grpc.NewServer()
- if featureflag.IsEnabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, transactionServer)
- }
- return srv
- },
- ),
- )
+ ctx, cancel := testhelper.Context()
+ defer cancel()
- _, err = client.UserDeleteBranch(ctx, &gitalypb.UserDeleteBranchRequest{
- Repository: repo,
- BranchName: []byte("delete-me"),
- User: testhelper.TestUser,
- })
- require.NoError(t, err)
- require.Equal(t, 1, transactionServer.called)
+ ctx, err := praefect.Inject(ctx)
+ require.NoError(t, err)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", true)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ client := newMuxedOperationClient(t, ctx, fmt.Sprintf("unix://"+cfg.GitalyInternalSocketPath()), cfg.Auth.Token,
+ backchannel.NewClientHandshaker(
+ testhelper.DiscardTestEntry(t),
+ func() backchannel.Server {
+ srv := grpc.NewServer()
+ gitalypb.RegisterRefTransactionServer(srv, transactionServer)
+ return srv
+ },
+ ),
+ )
+
+ _, err = client.UserDeleteBranch(ctx, &gitalypb.UserDeleteBranchRequest{
+ Repository: repo,
+ BranchName: []byte("delete-me"),
+ User: gittest.TestUser,
})
+ require.NoError(t, err)
+ require.Equal(t, 2, transactionServer.called)
}
func TestFailedUserDeleteBranchDueToValidation(t *testing.T) {
@@ -574,7 +575,7 @@ func TestFailedUserDeleteBranchDueToValidation(t *testing.T) {
desc: "empty branch name",
request: &gitalypb.UserDeleteBranchRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
},
response: nil,
err: status.Error(codes.InvalidArgument, "Bad Request (empty branch name)"),
@@ -583,7 +584,7 @@ func TestFailedUserDeleteBranchDueToValidation(t *testing.T) {
desc: "non-existent branch name",
request: &gitalypb.UserDeleteBranchRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
BranchName: []byte("i-do-not-exist"),
},
response: nil,
@@ -604,15 +605,15 @@ func TestFailedUserDeleteBranchDueToHooks(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
branchNameInput := "to-be-deleted-soon-branch"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", branchNameInput)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", branchNameInput)
request := &gitalypb.UserDeleteBranchRequest{
Repository: repo,
BranchName: []byte(branchNameInput),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
hookContent := []byte("#!/bin/sh\necho GL_ID=$GL_ID\nexit 1")
@@ -623,9 +624,9 @@ func TestFailedUserDeleteBranchDueToHooks(t *testing.T) {
response, err := client.UserDeleteBranch(ctx, request)
require.NoError(t, err)
- require.Contains(t, response.PreReceiveError, "GL_ID="+testhelper.TestUser.GlId)
+ require.Contains(t, response.PreReceiveError, "GL_ID="+gittest.TestUser.GlId)
- branches := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--", "refs/heads/"+branchNameInput)
+ branches := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--", "refs/heads/"+branchNameInput)
require.Contains(t, string(branches), branchNameInput, "branch name does not exist in branches list")
})
}
@@ -635,7 +636,7 @@ func TestBranchHookOutput(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
testCases := []struct {
desc string
@@ -682,12 +683,12 @@ func TestBranchHookOutput(t *testing.T) {
Repository: repo,
BranchName: []byte(branchNameInput),
StartPoint: []byte("master"),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
deleteRequest := &gitalypb.UserDeleteBranchRequest{
Repository: repo,
BranchName: []byte(branchNameInput),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
gittest.WriteCustomHook(t, repoPath, hookName, []byte(testCase.hookContent))
@@ -696,8 +697,8 @@ func TestBranchHookOutput(t *testing.T) {
require.NoError(t, err)
require.Equal(t, testCase.output, createResponse.PreReceiveError)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", branchNameInput)
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-d", branchNameInput)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", branchNameInput)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "branch", "-d", branchNameInput)
deleteResponse, err := client.UserDeleteBranch(ctx, deleteRequest)
require.NoError(t, err)
diff --git a/internal/gitaly/service/operations/cherry_pick.go b/internal/gitaly/service/operations/cherry_pick.go
index 95526b2f5..64c024e65 100644
--- a/internal/gitaly/service/operations/cherry_pick.go
+++ b/internal/gitaly/service/operations/cherry_pick.go
@@ -8,11 +8,8 @@ import (
"github.com/golang/protobuf/ptypes"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git2go"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -23,30 +20,12 @@ func (s *Server) UserCherryPick(ctx context.Context, req *gitalypb.UserCherryPic
return nil, status.Errorf(codes.InvalidArgument, "UserCherryPick: %v", err)
}
- if featureflag.IsEnabled(ctx, featureflag.GoUserCherryPick) {
- return s.userCherryPick(ctx, req)
- }
-
- client, err := s.ruby.OperationServiceClient(ctx)
- if err != nil {
- return nil, err
- }
-
- clientCtx, err := rubyserver.SetHeaders(ctx, s.locator, req.GetRepository())
- if err != nil {
- return nil, err
- }
-
- return client.UserCherryPick(clientCtx, req)
-}
-
-func (s *Server) userCherryPick(ctx context.Context, req *gitalypb.UserCherryPickRequest) (*gitalypb.UserCherryPickResponse, error) {
startRevision, err := s.fetchStartRevision(ctx, req)
if err != nil {
return nil, err
}
- localRepo := localrepo.New(s.gitCmdFactory, req.Repository, s.cfg)
+ localRepo := s.localrepo(req.GetRepository())
repoHadBranches, err := localRepo.HasBranches(ctx)
if err != nil {
return nil, err
diff --git a/internal/gitaly/service/operations/cherry_pick_test.go b/internal/gitaly/service/operations/cherry_pick_test.go
index a6f87b25b..bc5b5bed6 100644
--- a/internal/gitaly/service/operations/cherry_pick_test.go
+++ b/internal/gitaly/service/operations/cherry_pick_test.go
@@ -1,7 +1,6 @@
package operations
import (
- "context"
"fmt"
"testing"
@@ -10,25 +9,21 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
-func testServerUserCherryPickSuccessful(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickSuccessfulFeatured)
-}
+func TestServer_UserCherryPick_successful(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "cherry-picking-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
masterHeadCommit, err := repo.ReadCommit(ctx, "master")
require.NoError(t, err)
@@ -36,10 +31,10 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
cherryPickedCommit, err := repo.ReadCommit(ctx, "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab")
require.NoError(t, err)
- testRepoCopy, testRepoCopyPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "read-only") // read-only repo
+ testRepoCopy, testRepoCopyPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "read-only") // read-only repo
defer cleanup()
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoCopyPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", testRepoCopyPath, "branch", destinationBranch, "master")
testCases := []struct {
desc string
@@ -50,7 +45,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
desc: "branch exists",
request: &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -61,7 +56,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
desc: "nonexistent branch + start_repository == repository",
request: &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte("to-be-cherry-picked-into-1"),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -73,7 +68,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
desc: "nonexistent branch + start_repository != repository",
request: &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte("to-be-cherry-picked-into-2"),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -86,7 +81,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
desc: "nonexistent branch + empty start_repository",
request: &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte("to-be-cherry-picked-into-3"),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -98,7 +93,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
desc: "branch exists with dry run",
request: &gitalypb.UserCherryPickRequest{
Repository: testRepoCopy,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -110,7 +105,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
desc: "nonexistent branch + start_repository == repository with dry run",
request: &gitalypb.UserCherryPickRequest{
Repository: testRepoCopy,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte("to-be-cherry-picked-into-1"),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -123,7 +118,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
desc: "nonexistent branch + start_repository != repository with dry run",
request: &gitalypb.UserCherryPickRequest{
Repository: testRepoCopy,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte("to-be-cherry-picked-into-2"),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -137,7 +132,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
desc: "nonexistent branch + empty start_repository with dry run",
request: &gitalypb.UserCherryPickRequest{
Repository: testRepoCopy,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte("to-be-cherry-picked-into-3"),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -153,7 +148,7 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
response, err := client.UserCherryPick(ctx, testCase.request)
require.NoError(t, err)
- testRepo := localrepo.New(git.NewExecCommandFactory(cfg), testCase.request.Repository, cfg)
+ testRepo := localrepo.NewTestRepo(t, cfg, testCase.request.Repository)
headCommit, err := testRepo.ReadCommit(ctx, git.Revision(testCase.request.BranchName))
require.NoError(t, err)
@@ -174,24 +169,23 @@ func testServerUserCherryPickSuccessfulFeatured(t *testing.T, ctx context.Contex
}
}
-func testServerUserCherryPickSuccessfulGitHooks(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickSuccessfulGitHooksFeatured)
-}
+func TestServer_UserCherryPick_successfulGitHooks(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickSuccessfulGitHooksFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "cherry-picking-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
cherryPickedCommit, err := repo.ReadCommit(ctx, "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab")
require.NoError(t, err)
request := &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -209,28 +203,27 @@ func testServerUserCherryPickSuccessfulGitHooksFeatured(t *testing.T, ctx contex
for _, file := range hookOutputFiles {
output := string(testhelper.MustReadFile(t, file))
- require.Contains(t, output, "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, output, "GL_USERNAME="+gittest.TestUser.GlUsername)
}
}
-func testServerUserCherryPickStableID(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickStableIDFeatured)
-}
+func TestServer_UserCherryPick_stableID(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickStableIDFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "cherry-picking-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
commitToPick, err := repo.ReadCommit(ctx, "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab")
require.NoError(t, err)
request := &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: commitToPick,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + commitToPick.Id),
@@ -260,8 +253,8 @@ func testServerUserCherryPickStableIDFeatured(t *testing.T, ctx context.Context,
Timezone: []byte("+0200"),
},
Committer: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
Date: &timestamp.Timestamp{
Seconds: 12345,
},
@@ -270,14 +263,13 @@ func testServerUserCherryPickStableIDFeatured(t *testing.T, ctx context.Context,
}, pickedCommit)
}
-func testServerUserCherryPickFailedValidations(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickFailedValidationsFeatured)
-}
+func TestServer_UserCherryPick_failedValidations(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickFailedValidationsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, _, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
cherryPickedCommit, err := repo.ReadCommit(ctx, "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab")
require.NoError(t, err)
@@ -304,7 +296,7 @@ func testServerUserCherryPickFailedValidationsFeatured(t *testing.T, ctx context
desc: "empty commit",
request: &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: nil,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -315,7 +307,7 @@ func testServerUserCherryPickFailedValidationsFeatured(t *testing.T, ctx context
desc: "empty branch name",
request: &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: nil,
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -326,7 +318,7 @@ func testServerUserCherryPickFailedValidationsFeatured(t *testing.T, ctx context
desc: "empty message",
request: &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte(destinationBranch),
Message: nil,
@@ -343,24 +335,23 @@ func testServerUserCherryPickFailedValidationsFeatured(t *testing.T, ctx context
}
}
-func testServerUserCherryPickFailedWithPreReceiveError(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickFailedWithPreReceiveErrorFeatured)
-}
+func TestServer_UserCherryPick_failedWithPreReceiveError(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickFailedWithPreReceiveErrorFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "cherry-picking-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
cherryPickedCommit, err := repo.ReadCommit(ctx, "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab")
require.NoError(t, err)
request := &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -374,22 +365,21 @@ func testServerUserCherryPickFailedWithPreReceiveErrorFeatured(t *testing.T, ctx
response, err := client.UserCherryPick(ctx, request)
require.NoError(t, err)
- require.Contains(t, response.PreReceiveError, "GL_ID="+testhelper.TestUser.GlId)
+ require.Contains(t, response.PreReceiveError, "GL_ID="+gittest.TestUser.GlId)
})
}
}
-func testServerUserCherryPickFailedWithCreateTreeError(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickFailedWithCreateTreeErrorFeatured)
-}
+func TestServer_UserCherryPick_failedWithCreateTreeError(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickFailedWithCreateTreeErrorFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "cherry-picking-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
// This commit already exists in master
cherryPickedCommit, err := repo.ReadCommit(ctx, "4a24d82dbca5c11c61556f3b35ca472b7463187e")
@@ -397,7 +387,7 @@ func testServerUserCherryPickFailedWithCreateTreeErrorFeatured(t *testing.T, ctx
request := &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -409,26 +399,25 @@ func testServerUserCherryPickFailedWithCreateTreeErrorFeatured(t *testing.T, ctx
require.Equal(t, gitalypb.UserCherryPickResponse_EMPTY, response.CreateTreeErrorCode)
}
-func testServerUserCherryPickFailedWithCommitError(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickFailedWithCommitErrorFeatured)
-}
+func TestServer_UserCherryPick_failedWithCommitError(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickFailedWithCommitErrorFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
sourceBranch := "cherry-pick-src"
destinationBranch := "cherry-picking-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", sourceBranch, "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", sourceBranch, "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab")
cherryPickedCommit, err := repo.ReadCommit(ctx, git.Revision(sourceBranch))
require.NoError(t, err)
request := &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte(sourceBranch),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -440,17 +429,16 @@ func testServerUserCherryPickFailedWithCommitErrorFeatured(t *testing.T, ctx con
require.Equal(t, "Branch diverged", response.CommitError)
}
-func testServerUserCherryPickFailedWithConflict(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickFailedWithConflictFeatured)
-}
+func TestServer_UserCherryPick_failedWithConflict(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickFailedWithConflictFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "cherry-picking-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "conflict_branch_a")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "conflict_branch_a")
// This commit cannot be applied to the destinationBranch above
cherryPickedCommit, err := repo.ReadCommit(ctx, git.Revision("f0f390655872bb2772c85a0128b2fbc2d88670cb"))
@@ -458,7 +446,7 @@ func testServerUserCherryPickFailedWithConflictFeatured(t *testing.T, ctx contex
request := &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: cherryPickedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + cherryPickedCommit.Id),
@@ -470,14 +458,13 @@ func testServerUserCherryPickFailedWithConflictFeatured(t *testing.T, ctx contex
require.Equal(t, gitalypb.UserCherryPickResponse_CONFLICT, response.CreateTreeErrorCode)
}
-func testServerUserCherryPickSuccessfulWithGivenCommits(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserCherryPick, cfg, rubySrv, testServerUserCherryPickSuccessfulWithGivenCommitsFeatured)
-}
+func TestServer_UserCherryPick_successfulWithGivenCommits(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserCherryPickSuccessfulWithGivenCommitsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
testCases := []struct {
desc string
@@ -495,14 +482,14 @@ func testServerUserCherryPickSuccessfulWithGivenCommitsFeatured(t *testing.T, ct
t.Run(testCase.desc, func(t *testing.T) {
destinationBranch := fmt.Sprintf("cherry-picking-%d", i)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, testCase.startRevision.String())
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, testCase.startRevision.String())
commit, err := repo.ReadCommit(ctx, testCase.cherryRevision)
require.NoError(t, err)
request := &gitalypb.UserCherryPickRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: commit,
BranchName: []byte(destinationBranch),
Message: []byte("Cherry-picking " + testCase.cherryRevision.String()),
diff --git a/internal/gitaly/service/operations/commit_files.go b/internal/gitaly/service/operations/commit_files.go
index 16f8f2562..4dedd8b29 100644
--- a/internal/gitaly/service/operations/commit_files.go
+++ b/internal/gitaly/service/operations/commit_files.go
@@ -138,7 +138,7 @@ func (s *Server) userCommitFiles(ctx context.Context, header *gitalypb.UserCommi
remoteRepo = nil
}
- localRepo := localrepo.New(s.gitCmdFactory, header.Repository, s.cfg)
+ localRepo := s.localrepo(header.GetRepository())
targetBranchName := git.NewReferenceNameFromBranchName(string(header.BranchName))
targetBranchCommit, err := localRepo.ResolveRevision(ctx, targetBranchName.Revision()+"^{commit}")
diff --git a/internal/gitaly/service/operations/commit_files_test.go b/internal/gitaly/service/operations/commit_files_test.go
index c769d7460..700c411db 100644
--- a/internal/gitaly/service/operations/commit_files_test.go
+++ b/internal/gitaly/service/operations/commit_files_test.go
@@ -46,7 +46,7 @@ func TestUserCommitFiles(t *testing.T) {
// repository there on every test run. This allows us to use deterministic
// paths in the tests.
- startRepo, startRepoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ startRepo, startRepoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
pathToStorage := strings.TrimSuffix(startRepoPath, startRepo.RelativePath)
@@ -880,7 +880,7 @@ func TestUserCommitFiles(t *testing.T) {
} {
t.Run(tc.desc, func(t *testing.T) {
defer os.RemoveAll(repoPath)
- testhelper.MustRunCommand(t, nil, "git", "init", "--bare", repoPath)
+ gittest.Exec(t, cfg, "init", "--bare", repoPath)
const branch = "master"
@@ -894,7 +894,7 @@ func TestUserCommitFiles(t *testing.T) {
for i, step := range tc.steps {
headerRequest := headerRequest(
repo,
- testhelper.TestUser,
+ gittest.TestUser,
branch,
[]byte("commit message"),
)
@@ -929,7 +929,7 @@ func TestUserCommitFiles(t *testing.T) {
require.Equal(t, step.branchCreated, resp.BranchUpdate.BranchCreated, "step %d", i+1)
require.Equal(t, step.repoCreated, resp.BranchUpdate.RepoCreated, "step %d", i+1)
- gittest.RequireTree(t, repoPath, branch, step.treeEntries)
+ gittest.RequireTree(t, cfg, repoPath, branch, step.treeEntries)
}
})
}
@@ -941,9 +941,9 @@ func TestUserCommitFilesStableCommitID(t *testing.T) {
ctx, cfg, _, _, client := setupOperationsService(t, ctx)
- repoProto, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repoProto, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer cleanup()
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
for key, values := range testhelper.GitalyServersMetadataFromCfg(t, cfg) {
for _, value := range values {
@@ -954,7 +954,7 @@ func TestUserCommitFilesStableCommitID(t *testing.T) {
stream, err := client.UserCommitFiles(ctx)
require.NoError(t, err)
- headerRequest := headerRequest(repoProto, testhelper.TestUser, "master", []byte("commit message"))
+ headerRequest := headerRequest(repoProto, gittest.TestUser, "master", []byte("commit message"))
setAuthorAndEmail(headerRequest, []byte("Author Name"), []byte("author.email@example.com"))
setTimestamp(t, headerRequest, time.Unix(12345, 0))
require.NoError(t, stream.Send(headerRequest))
@@ -967,7 +967,7 @@ func TestUserCommitFilesStableCommitID(t *testing.T) {
require.Equal(t, resp.BranchUpdate.CommitId, "4f0ca1fbf05e04dbd5f68d14677034e0afee58ff")
require.True(t, resp.BranchUpdate.BranchCreated)
require.True(t, resp.BranchUpdate.RepoCreated)
- gittest.RequireTree(t, repoPath, "refs/heads/master", []gittest.TreeEntry{
+ gittest.RequireTree(t, cfg, repoPath, "refs/heads/master", []gittest.TreeEntry{
{Mode: "100644", Path: "file.txt", Content: "content"},
})
@@ -986,8 +986,8 @@ func TestUserCommitFilesStableCommitID(t *testing.T) {
Timezone: []byte("+0000"),
},
Committer: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
Date: &timestamp.Timestamp{Seconds: 12345},
Timezone: []byte("+0000"),
},
@@ -1000,7 +1000,7 @@ func TestSuccessfulUserCommitFilesRequest(t *testing.T) {
ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
- newRepo, newRepoPath, newRepoCleanupFn := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ newRepo, newRepoPath, newRepoCleanupFn := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer newRepoCleanupFn()
filePath := "héllo/wörld"
@@ -1052,7 +1052,7 @@ func TestSuccessfulUserCommitFilesRequest(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- headerRequest := headerRequest(tc.repo, testhelper.TestUser, tc.branchName, commitFilesMessage)
+ headerRequest := headerRequest(tc.repo, gittest.TestUser, tc.branchName, commitFilesMessage)
setAuthorAndEmail(headerRequest, authorName, authorEmail)
actionsRequest1 := createFileHeaderRequest(filePath)
@@ -1073,18 +1073,18 @@ func TestSuccessfulUserCommitFilesRequest(t *testing.T) {
require.Equal(t, tc.repoCreated, resp.GetBranchUpdate().GetRepoCreated())
require.Equal(t, tc.branchCreated, resp.GetBranchUpdate().GetBranchCreated())
- headCommit, err := localrepo.New(git.NewExecCommandFactory(cfg), tc.repo, cfg).ReadCommit(ctx, git.Revision(tc.branchName))
+ headCommit, err := localrepo.NewTestRepo(t, cfg, tc.repo).ReadCommit(ctx, git.Revision(tc.branchName))
require.NoError(t, err)
require.Equal(t, authorName, headCommit.Author.Name)
- require.Equal(t, testhelper.TestUser.Name, headCommit.Committer.Name)
+ require.Equal(t, gittest.TestUser.Name, headCommit.Committer.Name)
require.Equal(t, authorEmail, headCommit.Author.Email)
- require.Equal(t, testhelper.TestUser.Email, headCommit.Committer.Email)
+ require.Equal(t, gittest.TestUser.Email, headCommit.Committer.Email)
require.Equal(t, commitFilesMessage, headCommit.Subject)
- fileContent := testhelper.MustRunCommand(t, nil, "git", "-C", tc.repoPath, "show", headCommit.GetId()+":"+filePath)
+ fileContent := gittest.Exec(t, cfg, "-C", tc.repoPath, "show", headCommit.GetId()+":"+filePath)
require.Equal(t, "My content", string(fileContent))
- commitInfo := testhelper.MustRunCommand(t, nil, "git", "-C", tc.repoPath, "show", headCommit.GetId())
+ commitInfo := gittest.Exec(t, cfg, "-C", tc.repoPath, "show", headCommit.GetId())
expectedFilemode := "100644"
if tc.executeFilemode {
expectedFilemode = "100755"
@@ -1116,11 +1116,11 @@ func TestSuccessfulUserCommitFilesRequestMove(t *testing.T) {
{content: "foo", infer: true},
} {
t.Run(strconv.Itoa(i), func(t *testing.T) {
- testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
defer cleanupFn()
- origFileContent := testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "show", branchName+":"+previousFilePath)
- headerRequest := headerRequest(testRepo, testhelper.TestUser, branchName, commitFilesMessage)
+ origFileContent := gittest.Exec(t, cfg, "-C", testRepoPath, "show", branchName+":"+previousFilePath)
+ headerRequest := headerRequest(testRepo, gittest.TestUser, branchName, commitFilesMessage)
setAuthorAndEmail(headerRequest, authorName, authorEmail)
actionsRequest1 := moveFileHeaderRequest(previousFilePath, filePath, tc.infer)
@@ -1140,7 +1140,7 @@ func TestSuccessfulUserCommitFilesRequestMove(t *testing.T) {
update := resp.GetBranchUpdate()
require.NotNil(t, update)
- fileContent := testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "show", update.CommitId+":"+filePath)
+ fileContent := gittest.Exec(t, cfg, "-C", testRepoPath, "show", update.CommitId+":"+filePath)
if tc.infer {
require.Equal(t, string(origFileContent), string(fileContent))
@@ -1157,7 +1157,7 @@ func TestSuccessfulUserCommitFilesRequestForceCommit(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
authorName := []byte("Jane Doe")
authorEmail := []byte("janedoe@gitlab.com")
@@ -1170,11 +1170,11 @@ func TestSuccessfulUserCommitFilesRequestForceCommit(t *testing.T) {
targetBranchCommit, err := repo.ReadCommit(ctx, git.Revision(targetBranchName))
require.NoError(t, err)
- mergeBaseOut := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "merge-base", targetBranchCommit.Id, startBranchCommit.Id)
+ mergeBaseOut := gittest.Exec(t, cfg, "-C", repoPath, "merge-base", targetBranchCommit.Id, startBranchCommit.Id)
mergeBaseID := text.ChompBytes(mergeBaseOut)
require.NotEqual(t, mergeBaseID, targetBranchCommit.Id, "expected %s not to be an ancestor of %s", targetBranchCommit.Id, startBranchCommit.Id)
- headerRequest := headerRequest(repoProto, testhelper.TestUser, targetBranchName, commitFilesMessage)
+ headerRequest := headerRequest(repoProto, gittest.TestUser, targetBranchName, commitFilesMessage)
setAuthorAndEmail(headerRequest, authorName, authorEmail)
setStartBranchName(headerRequest, startBranchName)
setForce(headerRequest, true)
@@ -1202,14 +1202,14 @@ func TestSuccessfulUserCommitFilesRequestStartSha(t *testing.T) {
ctx, cfg, repoProto, _, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
targetBranchName := "new"
startCommit, err := repo.ReadCommit(ctx, "master")
require.NoError(t, err)
- headerRequest := headerRequest(repoProto, testhelper.TestUser, targetBranchName, commitFilesMessage)
+ headerRequest := headerRequest(repoProto, gittest.TestUser, targetBranchName, commitFilesMessage)
setStartSha(headerRequest, startCommit.Id)
stream, err := client.UserCommitFiles(ctx)
@@ -1250,20 +1250,18 @@ func testSuccessfulUserCommitFilesRemoteRepositoryRequest(setHeader func(header
ctx, cfg, repoProto, _, client := setupOperationsService(t, ctx)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- repo := localrepo.New(gitCmdFactory, repoProto, cfg)
-
- newRepoProto, _, newRepoCleanupFn := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ newRepoProto, _, newRepoCleanupFn := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer newRepoCleanupFn()
- newRepo := localrepo.New(gitCmdFactory, newRepoProto, cfg)
+ newRepo := localrepo.NewTestRepo(t, cfg, newRepoProto)
targetBranchName := "new"
startCommit, err := repo.ReadCommit(ctx, "master")
require.NoError(t, err)
- headerRequest := headerRequest(newRepoProto, testhelper.TestUser, targetBranchName, commitFilesMessage)
+ headerRequest := headerRequest(newRepoProto, gittest.TestUser, targetBranchName, commitFilesMessage)
setHeader(headerRequest)
setStartRepository(headerRequest, repoProto)
@@ -1291,9 +1289,9 @@ func TestSuccessfulUserCommitFilesRequestWithSpecialCharactersInSignature(t *tes
ctx, cfg, _, _, client := setupOperationsService(t, ctx)
- repoProto, _, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repoProto, _, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer cleanup()
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
targetBranchName := "master"
@@ -1304,12 +1302,12 @@ func TestSuccessfulUserCommitFilesRequestWithSpecialCharactersInSignature(t *tes
}{
{
desc: "special characters at start and end",
- user: &gitalypb.User{Name: []byte(".,:;<>\"'\nJane Doe.,:;<>'\"\n"), Email: []byte(".,:;<>'\"\njanedoe@gitlab.com.,:;<>'\"\n"), GlId: testhelper.GlID},
+ user: &gitalypb.User{Name: []byte(".,:;<>\"'\nJane Doe.,:;<>'\"\n"), Email: []byte(".,:;<>'\"\njanedoe@gitlab.com.,:;<>'\"\n"), GlId: gittest.GlID},
author: &gitalypb.CommitAuthor{Name: []byte("Jane Doe"), Email: []byte("janedoe@gitlab.com")},
},
{
desc: "special characters in the middle",
- user: &gitalypb.User{Name: []byte("Ja<ne\n D>oe"), Email: []byte("ja<ne\ndoe>@gitlab.com"), GlId: testhelper.GlID},
+ user: &gitalypb.User{Name: []byte("Ja<ne\n D>oe"), Email: []byte("ja<ne\ndoe>@gitlab.com"), GlId: gittest.GlID},
author: &gitalypb.CommitAuthor{Name: []byte("Jane Doe"), Email: []byte("janedoe@gitlab.com")},
},
}
@@ -1345,7 +1343,7 @@ func TestFailedUserCommitFilesRequestDueToHooks(t *testing.T) {
branchName := "feature"
filePath := "my/file.txt"
- headerRequest := headerRequest(repoProto, testhelper.TestUser, branchName, commitFilesMessage)
+ headerRequest := headerRequest(repoProto, gittest.TestUser, branchName, commitFilesMessage)
actionsRequest1 := createFileHeaderRequest(filePath)
actionsRequest2 := actionContentRequest("My content")
hookContent := []byte("#!/bin/sh\nprintenv | paste -sd ' ' -\nexit 1")
@@ -1363,8 +1361,8 @@ func TestFailedUserCommitFilesRequestDueToHooks(t *testing.T) {
resp, err := stream.CloseAndRecv()
require.NoError(t, err)
- require.Contains(t, resp.PreReceiveError, "GL_ID="+testhelper.TestUser.GlId)
- require.Contains(t, resp.PreReceiveError, "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, resp.PreReceiveError, "GL_ID="+gittest.TestUser.GlId)
+ require.Contains(t, resp.PreReceiveError, "GL_USERNAME="+gittest.TestUser.GlUsername)
})
}
}
@@ -1383,7 +1381,7 @@ func TestFailedUserCommitFilesRequestDueToIndexError(t *testing.T) {
{
desc: "file already exists",
requests: []*gitalypb.UserCommitFilesRequest{
- headerRequest(repo, testhelper.TestUser, "feature", commitFilesMessage),
+ headerRequest(repo, gittest.TestUser, "feature", commitFilesMessage),
createFileHeaderRequest("README.md"),
actionContentRequest("This file already exists"),
},
@@ -1392,7 +1390,7 @@ func TestFailedUserCommitFilesRequestDueToIndexError(t *testing.T) {
{
desc: "file doesn't exists",
requests: []*gitalypb.UserCommitFilesRequest{
- headerRequest(repo, testhelper.TestUser, "feature", commitFilesMessage),
+ headerRequest(repo, gittest.TestUser, "feature", commitFilesMessage),
chmodFileHeaderRequest("documents/story.txt", true),
},
indexError: "A file with this name doesn't exist",
@@ -1400,7 +1398,7 @@ func TestFailedUserCommitFilesRequestDueToIndexError(t *testing.T) {
{
desc: "dir already exists",
requests: []*gitalypb.UserCommitFilesRequest{
- headerRequest(repo, testhelper.TestUser, "utf-dir", commitFilesMessage),
+ headerRequest(repo, gittest.TestUser, "utf-dir", commitFilesMessage),
actionRequest(&gitalypb.UserCommitFilesAction{
UserCommitFilesActionPayload: &gitalypb.UserCommitFilesAction_Header{
Header: &gitalypb.UserCommitFilesActionHeader{
@@ -1446,7 +1444,7 @@ func TestFailedUserCommitFilesRequest(t *testing.T) {
}{
{
desc: "empty Repository",
- req: headerRequest(nil, testhelper.TestUser, branchName, commitFilesMessage),
+ req: headerRequest(nil, gittest.TestUser, branchName, commitFilesMessage),
},
{
desc: "empty User",
@@ -1454,15 +1452,15 @@ func TestFailedUserCommitFilesRequest(t *testing.T) {
},
{
desc: "empty BranchName",
- req: headerRequest(repo, testhelper.TestUser, "", commitFilesMessage),
+ req: headerRequest(repo, gittest.TestUser, "", commitFilesMessage),
},
{
desc: "empty CommitMessage",
- req: headerRequest(repo, testhelper.TestUser, branchName, nil),
+ req: headerRequest(repo, gittest.TestUser, branchName, nil),
},
{
desc: "invalid object ID: \"foobar\"",
- req: setStartSha(headerRequest(repo, testhelper.TestUser, branchName, commitFilesMessage), "foobar"),
+ req: setStartSha(headerRequest(repo, gittest.TestUser, branchName, commitFilesMessage), "foobar"),
},
{
desc: "failed to parse signature - Signature cannot have an empty name or email",
diff --git a/internal/gitaly/service/operations/merge.go b/internal/gitaly/service/operations/merge.go
index 603b4706f..86b7bb82e 100644
--- a/internal/gitaly/service/operations/merge.go
+++ b/internal/gitaly/service/operations/merge.go
@@ -9,7 +9,6 @@ import (
"github.com/golang/protobuf/ptypes"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git2go"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -55,7 +54,7 @@ func (s *Server) UserMergeBranch(stream gitalypb.OperationService_UserMergeBranc
referenceName := git.NewReferenceNameFromBranchName(string(firstRequest.Branch))
- revision, err := localrepo.New(s.gitCmdFactory, repo, s.cfg).ResolveRevision(ctx, referenceName.Revision())
+ revision, err := s.localrepo(repo).ResolveRevision(ctx, referenceName.Revision())
if err != nil {
return err
}
@@ -157,7 +156,7 @@ func (s *Server) UserFFBranch(ctx context.Context, in *gitalypb.UserFFBranchRequ
referenceName := git.NewReferenceNameFromBranchName(string(in.Branch))
- repo := localrepo.New(s.gitCmdFactory, in.Repository, s.cfg)
+ repo := s.localrepo(in.GetRepository())
revision, err := repo.ResolveRevision(ctx, referenceName.Revision())
if err != nil {
return nil, helper.ErrInvalidArgument(err)
@@ -239,7 +238,7 @@ func (s *Server) UserMergeToRef(ctx context.Context, request *gitalypb.UserMerge
return nil, err
}
- repo := localrepo.New(s.gitCmdFactory, request.Repository, s.cfg)
+ repo := s.localrepo(request.GetRepository())
revision := git.Revision(request.Branch)
if request.FirstParentRef != nil {
diff --git a/internal/gitaly/service/operations/merge_test.go b/internal/gitaly/service/operations/merge_test.go
index 1ef299929..6ddc34aca 100644
--- a/internal/gitaly/service/operations/merge_test.go
+++ b/internal/gitaly/service/operations/merge_test.go
@@ -46,12 +46,12 @@ func TestSuccessfulMerge(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
mergeBidi, err := client.UserMergeBranch(ctx)
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
hooks := GitlabHooks
hookTempfiles := make([]string, len(hooks))
@@ -70,7 +70,7 @@ func TestSuccessfulMerge(t *testing.T) {
mergeCommitMessage := "Merged by Gitaly"
firstRequest := &gitalypb.UserMergeBranchRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
CommitId: commitToMerge,
Branch: []byte(mergeBranchName),
Message: []byte(mergeCommitMessage),
@@ -104,13 +104,12 @@ func TestSuccessfulMerge(t *testing.T) {
require.True(t, strings.HasPrefix(string(commit.Body), mergeCommitMessage), "expected %q to start with %q", commit.Body, mergeCommitMessage)
author := commit.Author
- require.Equal(t, testhelper.TestUser.Name, author.Name)
- require.Equal(t, testhelper.TestUser.Email, author.Email)
+ require.Equal(t, gittest.TestUser.Name, author.Name)
+ require.Equal(t, gittest.TestUser.Email, author.Email)
- expectedGlID := "GL_ID=" + testhelper.TestUser.GlId
+ expectedGlID := "GL_ID=" + gittest.TestUser.GlId
for i, h := range hooks {
- hookEnv, err := ioutil.ReadFile(hookTempfiles[i])
- require.NoError(t, err)
+ hookEnv := testhelper.MustReadFile(t, hookTempfiles[i])
lines := strings.Split(string(hookEnv), "\n")
require.Contains(t, lines, expectedGlID, "expected env of hook %q to contain %q", h, expectedGlID)
@@ -128,16 +127,16 @@ func TestSuccessfulMerge_stableMergeIDs(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
mergeBidi, err := client.UserMergeBranch(ctx)
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
firstRequest := &gitalypb.UserMergeBranchRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
CommitId: commitToMerge,
Branch: []byte(mergeBranchName),
Message: []byte("Merged by Gitaly"),
@@ -175,15 +174,15 @@ func TestSuccessfulMerge_stableMergeIDs(t *testing.T) {
},
TreeId: "86ec18bfe87ad42a782fdabd8310f9b7ac750f51",
Author: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
// Nanoseconds get ignored because commit timestamps aren't that granular.
Date: &timestamp.Timestamp{Seconds: 12},
Timezone: []byte("+0000"),
},
Committer: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
// Nanoseconds get ignored because commit timestamps aren't that granular.
Date: &timestamp.Timestamp{Seconds: 12},
Timezone: []byte("+0000"),
@@ -197,13 +196,13 @@ func TestAbortedMerge(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
firstRequest := &gitalypb.UserMergeBranchRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
CommitId: commitToMerge,
Branch: []byte(mergeBranchName),
Message: []byte("foobar"),
@@ -260,17 +259,17 @@ func TestFailedMergeConcurrentUpdate(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
mergeBidi, err := client.UserMergeBranch(ctx)
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
mergeCommitMessage := "Merged by Gitaly"
firstRequest := &gitalypb.UserMergeBranchRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
CommitId: commitToMerge,
Branch: []byte(mergeBranchName),
Message: []byte(mergeCommitMessage),
@@ -281,7 +280,8 @@ func TestFailedMergeConcurrentUpdate(t *testing.T) {
require.NoError(t, err, "receive first response")
// This concurrent update of the branch we are merging into should make the merge fail.
- concurrentCommitID := gittest.CreateCommit(t, cfg, repoPath, mergeBranchName, nil)
+ concurrentCommitID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithBranch(mergeBranchName))
require.NotEqual(t, firstResponse.CommitId, concurrentCommitID)
require.NoError(t, mergeBidi.Send(&gitalypb.UserMergeBranchRequest{Apply: true}), "apply merge")
@@ -293,7 +293,7 @@ func TestFailedMergeConcurrentUpdate(t *testing.T) {
commit, err := repo.ReadCommit(ctx, git.Revision(mergeBranchName))
require.NoError(t, err, "get commit after RPC finished")
- require.Equal(t, commit.Id, concurrentCommitID, "RPC should not have trampled concurrent update")
+ require.Equal(t, commit.Id, concurrentCommitID.String(), "RPC should not have trampled concurrent update")
}
func TestUserMergeBranch_ambiguousReference(t *testing.T) {
@@ -302,12 +302,12 @@ func TestUserMergeBranch_ambiguousReference(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
merge, err := client.UserMergeBranch(ctx)
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
masterOID, err := repo.ResolveRevision(ctx, "refs/heads/master")
require.NoError(t, err)
@@ -328,7 +328,7 @@ func TestUserMergeBranch_ambiguousReference(t *testing.T) {
mergeCommitMessage := "Merged by Gitaly"
firstRequest := &gitalypb.UserMergeBranchRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
CommitId: commitToMerge,
Branch: []byte(mergeBranchName),
Message: []byte(mergeCommitMessage),
@@ -354,8 +354,8 @@ func TestUserMergeBranch_ambiguousReference(t *testing.T) {
require.Equal(t, gitalypb.OperationBranchUpdate{CommitId: commit.Id}, *(response.BranchUpdate))
require.Equal(t, mergeCommitMessage, string(commit.Body))
- require.Equal(t, testhelper.TestUser.Name, commit.Author.Name)
- require.Equal(t, testhelper.TestUser.Email, commit.Author.Email)
+ require.Equal(t, gittest.TestUser.Name, commit.Author.Name)
+ require.Equal(t, gittest.TestUser.Email, commit.Author.Email)
require.Equal(t, []string{mergeBranchHeadBefore, commitToMerge}, commit.ParentIds)
}
@@ -363,9 +363,9 @@ func TestFailedMergeDueToHooks(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
hookContent := []byte("#!/bin/sh\necho 'failure'\nexit 1")
@@ -379,7 +379,7 @@ func TestFailedMergeDueToHooks(t *testing.T) {
mergeCommitMessage := "Merged by Gitaly"
firstRequest := &gitalypb.UserMergeBranchRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
CommitId: commitToMerge,
Branch: []byte(mergeBranchName),
Message: []byte(mergeCommitMessage),
@@ -402,7 +402,7 @@ func TestFailedMergeDueToHooks(t *testing.T) {
return err
})
- currentBranchHead := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", mergeBranchName)
+ currentBranchHead := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", mergeBranchName)
require.Equal(t, mergeBranchHeadBefore, text.ChompBytes(currentBranchHead), "branch head updated")
})
}
@@ -412,7 +412,7 @@ func TestSuccessfulUserFFBranchRequest(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
commitID := "cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
branchName := "test-ff-target-branch"
@@ -420,7 +420,7 @@ func TestSuccessfulUserFFBranchRequest(t *testing.T) {
Repository: repo,
CommitId: commitID,
Branch: []byte(branchName),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
expectedResponse := &gitalypb.UserFFBranchResponse{
BranchUpdate: &gitalypb.OperationBranchUpdate{
@@ -430,12 +430,12 @@ func TestSuccessfulUserFFBranchRequest(t *testing.T) {
},
}
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-f", branchName, "6d394385cf567f80a8fd85055db1ab4c5295806f")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "-f", branchName, "6d394385cf567f80a8fd85055db1ab4c5295806f")
resp, err := client.UserFFBranch(ctx, request)
require.NoError(t, err)
testhelper.ProtoEqual(t, expectedResponse, resp)
- newBranchHead := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", branchName)
+ newBranchHead := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", branchName)
require.Equal(t, commitID, text.ChompBytes(newBranchHead), "branch head not updated")
}
@@ -443,12 +443,12 @@ func TestFailedUserFFBranchRequest(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
commitID := "cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
branchName := "test-ff-target-branch"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-f", branchName, "6d394385cf567f80a8fd85055db1ab4c5295806f")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "-f", branchName, "6d394385cf567f80a8fd85055db1ab4c5295806f")
testCases := []struct {
desc string
@@ -460,7 +460,7 @@ func TestFailedUserFFBranchRequest(t *testing.T) {
}{
{
desc: "empty repository",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(branchName),
commitID: commitID,
code: codes.InvalidArgument,
@@ -475,14 +475,14 @@ func TestFailedUserFFBranchRequest(t *testing.T) {
{
desc: "empty commit",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(branchName),
code: codes.InvalidArgument,
},
{
desc: "non-existing commit",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(branchName),
commitID: "f001",
code: codes.InvalidArgument,
@@ -490,14 +490,14 @@ func TestFailedUserFFBranchRequest(t *testing.T) {
{
desc: "empty branch",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
commitID: commitID,
code: codes.InvalidArgument,
},
{
desc: "non-existing branch",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte("this-isnt-real"),
commitID: commitID,
code: codes.InvalidArgument,
@@ -505,7 +505,7 @@ func TestFailedUserFFBranchRequest(t *testing.T) {
{
desc: "commit is not a descendant of branch head",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(branchName),
commitID: "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863",
code: codes.FailedPrecondition,
@@ -530,7 +530,7 @@ func TestFailedUserFFBranchDueToHooks(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
commitID := "cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
branchName := "test-ff-target-branch"
@@ -538,10 +538,10 @@ func TestFailedUserFFBranchDueToHooks(t *testing.T) {
Repository: repo,
CommitId: commitID,
Branch: []byte(branchName),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-f", branchName, "6d394385cf567f80a8fd85055db1ab4c5295806f")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "-f", branchName, "6d394385cf567f80a8fd85055db1ab4c5295806f")
hookContent := []byte("#!/bin/sh\necho 'failure'\nexit 1")
@@ -560,7 +560,7 @@ func TestUserFFBranch_ambiguousReference(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
branchName := "test-ff-target-branch"
@@ -573,17 +573,17 @@ func TestUserFFBranch_ambiguousReference(t *testing.T) {
// old revision when calling git-update-ref. As a result, the
// update would've failed as the branch's current revision
// didn't match the specified old revision.
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath,
+ gittest.Exec(t, cfg, "-C", repoPath,
"branch", branchName,
"6d394385cf567f80a8fd85055db1ab4c5295806f")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", branchName, "6d394385cf567f80a8fd85055db1ab4c5295806f~")
+ gittest.Exec(t, cfg, "-C", repoPath, "tag", branchName, "6d394385cf567f80a8fd85055db1ab4c5295806f~")
commitID := "cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
request := &gitalypb.UserFFBranchRequest{
Repository: repo,
CommitId: commitID,
Branch: []byte(branchName),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
expectedResponse := &gitalypb.UserFFBranchResponse{
BranchUpdate: &gitalypb.OperationBranchUpdate{
@@ -596,7 +596,7 @@ func TestUserFFBranch_ambiguousReference(t *testing.T) {
resp, err := client.UserFFBranch(ctx, request)
require.NoError(t, err)
testhelper.ProtoEqual(t, expectedResponse, resp)
- newBranchHead := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "refs/heads/"+branchName)
+ newBranchHead := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "refs/heads/"+branchName)
require.Equal(t, commitID, text.ChompBytes(newBranchHead), "branch head not updated")
}
@@ -606,9 +606,9 @@ func TestSuccessfulUserMergeToRefRequest(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
existingTargetRef := []byte("refs/merge-requests/x/written")
emptyTargetRef := []byte("refs/merge-requests/x/merge")
@@ -631,7 +631,7 @@ func TestSuccessfulUserMergeToRefRequest(t *testing.T) {
}{
{
desc: "empty target ref merge",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
targetRef: emptyTargetRef,
emptyRef: true,
sourceSha: commitToMerge,
@@ -640,7 +640,7 @@ func TestSuccessfulUserMergeToRefRequest(t *testing.T) {
},
{
desc: "existing target ref",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
targetRef: existingTargetRef,
emptyRef: false,
sourceSha: commitToMerge,
@@ -649,7 +649,7 @@ func TestSuccessfulUserMergeToRefRequest(t *testing.T) {
},
{
desc: "branch is specified and firstParentRef is empty",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(mergeBranchName),
targetRef: existingTargetRef,
emptyRef: false,
@@ -690,8 +690,8 @@ func TestSuccessfulUserMergeToRefRequest(t *testing.T) {
// Asserts author
author := commit.Author
- require.Equal(t, testhelper.TestUser.Name, author.Name)
- require.Equal(t, testhelper.TestUser.Email, author.Email)
+ require.Equal(t, gittest.TestUser.Name, author.Name)
+ require.Equal(t, gittest.TestUser.Email, author.Email)
require.Equal(t, resp.CommitId, commit.Id)
@@ -710,11 +710,11 @@ func TestConflictsOnUserMergeToRefRequest(t *testing.T) {
ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, "824be604a34828eb682305f0d963056cfac87b2d")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, "824be604a34828eb682305f0d963056cfac87b2d")
request := &gitalypb.UserMergeToRefRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
TargetRef: []byte("refs/merge-requests/x/written"),
SourceSha: "1450cd639e0bc6721eb02800169e464f212cde06",
Message: []byte("message1"),
@@ -753,13 +753,13 @@ func TestUserMergeToRef_stableMergeID(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
response, err := client.UserMergeToRef(ctx, &gitalypb.UserMergeToRefRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
FirstParentRef: []byte("refs/heads/" + mergeBranchName),
TargetRef: []byte("refs/merge-requests/x/written"),
SourceSha: "1450cd639e0bc6721eb02800169e464f212cde06",
@@ -782,15 +782,15 @@ func TestUserMergeToRef_stableMergeID(t *testing.T) {
},
TreeId: "3d3c2dd807abaf36d7bd5334bf3f8c5cf61bad75",
Author: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
// Nanoseconds get ignored because commit timestamps aren't that granular.
Date: &timestamp.Timestamp{Seconds: 12},
Timezone: []byte("+0000"),
},
Committer: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
// Nanoseconds get ignored because commit timestamps aren't that granular.
Date: &timestamp.Timestamp{Seconds: 12},
Timezone: []byte("+0000"),
@@ -802,9 +802,9 @@ func TestFailedUserMergeToRefRequest(t *testing.T) {
ctx, cleanup := testhelper.Context()
defer cleanup()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
validTargetRef := []byte("refs/merge-requests/x/merge")
@@ -819,7 +819,7 @@ func TestFailedUserMergeToRefRequest(t *testing.T) {
}{
{
desc: "empty repository",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(branchName),
sourceSha: commitToMerge,
targetRef: validTargetRef,
@@ -836,7 +836,7 @@ func TestFailedUserMergeToRefRequest(t *testing.T) {
{
desc: "empty source SHA",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(branchName),
targetRef: validTargetRef,
code: codes.InvalidArgument,
@@ -844,7 +844,7 @@ func TestFailedUserMergeToRefRequest(t *testing.T) {
{
desc: "non-existing commit",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(branchName),
sourceSha: "f001",
targetRef: validTargetRef,
@@ -853,7 +853,7 @@ func TestFailedUserMergeToRefRequest(t *testing.T) {
{
desc: "empty branch and first parent ref",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
sourceSha: commitToMerge,
targetRef: validTargetRef,
code: codes.InvalidArgument,
@@ -861,7 +861,7 @@ func TestFailedUserMergeToRefRequest(t *testing.T) {
{
desc: "invalid target ref",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte(branchName),
sourceSha: commitToMerge,
targetRef: []byte("refs/heads/branch"),
@@ -870,7 +870,7 @@ func TestFailedUserMergeToRefRequest(t *testing.T) {
{
desc: "non-existing branch",
repo: repo,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
branch: []byte("this-isnt-real"),
sourceSha: commitToMerge,
targetRef: validTargetRef,
@@ -897,9 +897,9 @@ func TestUserMergeToRefIgnoreHooksRequest(t *testing.T) {
ctx, cleanup := testhelper.Context()
defer cleanup()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", mergeBranchName, mergeBranchHeadBefore)
targetRef := []byte("refs/merge-requests/x/merge")
mergeCommitMessage := "Merged by Gitaly"
@@ -909,7 +909,7 @@ func TestUserMergeToRefIgnoreHooksRequest(t *testing.T) {
SourceSha: commitToMerge,
Branch: []byte(mergeBranchName),
TargetRef: targetRef,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Message: []byte(mergeCommitMessage),
}
diff --git a/internal/gitaly/service/operations/rebase.go b/internal/gitaly/service/operations/rebase.go
index 00ed7c97f..67a1cee2b 100644
--- a/internal/gitaly/service/operations/rebase.go
+++ b/internal/gitaly/service/operations/rebase.go
@@ -4,11 +4,18 @@ package operations
import (
"errors"
+ "fmt"
+ "time"
+ "github.com/golang/protobuf/ptypes"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git2go"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "google.golang.org/grpc/codes"
+ "google.golang.org/grpc/status"
)
func (s *Server) UserRebaseConfirmable(stream gitalypb.OperationService_UserRebaseConfirmableServer) error {
@@ -26,6 +33,10 @@ func (s *Server) UserRebaseConfirmable(stream gitalypb.OperationService_UserReba
return helper.ErrInvalidArgumentf("UserRebaseConfirmable: %v", err)
}
+ if featureflag.IsEnabled(stream.Context(), featureflag.GoUserRebaseConfirmable) {
+ return s.userRebaseConfirmableGo(stream, header)
+ }
+
if err := s.userRebaseConfirmable(stream, firstRequest, header.GetRepository()); err != nil {
return helper.ErrInternal(err)
}
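
The hunk above gates the new in-process Go port of UserRebaseConfirmable behind the GoUserRebaseConfirmable feature flag, keeping the Ruby sidecar as the fallback path. Below is a minimal, self-contained sketch of that gating pattern only; isEnabled and the flag constant are stand-ins, since the real featureflag package reads flags from incoming gRPC metadata rather than plain context values.

    package main

    import (
        "context"
        "fmt"
    )

    type flagKey string

    const goUserRebaseConfirmable flagKey = "go_user_rebase_confirmable"

    // isEnabled is a stand-in for featureflag.IsEnabled used in the diff above;
    // the real helper reads flags from incoming gRPC metadata, not context values.
    func isEnabled(ctx context.Context, flag flagKey) bool {
        enabled, ok := ctx.Value(flag).(bool)
        return ok && enabled
    }

    // userRebaseConfirmable routes to the new Go port when the flag is on and
    // otherwise keeps the legacy Ruby path, mirroring the gating added to rebase.go.
    func userRebaseConfirmable(ctx context.Context) string {
        if isEnabled(ctx, goUserRebaseConfirmable) {
            return "handled by the Go implementation"
        }
        return "proxied to the Ruby sidecar"
    }

    func main() {
        ctx := context.WithValue(context.Background(), goUserRebaseConfirmable, true)
        fmt.Println(userRebaseConfirmable(ctx)) // handled by the Go implementation
    }

Flipping the flag per request allows a gradual rollout of the Go path and lets the tests exercise both implementations, which is what the testWithFeature wrappers in the test diff below rely on.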
@@ -33,6 +44,91 @@ func (s *Server) UserRebaseConfirmable(stream gitalypb.OperationService_UserReba
return nil
}
+func (s *Server) userRebaseConfirmableGo(stream gitalypb.OperationService_UserRebaseConfirmableServer, header *gitalypb.UserRebaseConfirmableRequest_Header) error {
+ ctx := stream.Context()
+
+ repo := header.Repository
+ repoPath, err := s.locator.GetPath(repo)
+ if err != nil {
+ return err
+ }
+
+ branch := git.NewReferenceNameFromBranchName(string(header.Branch))
+ oldrev, err := git.NewObjectIDFromHex(header.BranchSha)
+ if err != nil {
+ return helper.ErrNotFound(err)
+ }
+
+ remoteFetch := rebaseRemoteFetch{header: header}
+ startRevision, err := s.fetchStartRevision(ctx, remoteFetch)
+ if err != nil {
+ return status.Error(codes.Internal, err.Error())
+ }
+
+ committer := git2go.NewSignature(string(header.User.Name), string(header.User.Email), time.Now())
+ if header.Timestamp != nil {
+ committer.When, err = ptypes.Timestamp(header.Timestamp)
+ if err != nil {
+ return helper.ErrInvalidArgumentf("parse timestamp: %w", err)
+ }
+ }
+
+ newrev, err := git2go.RebaseCommand{
+ Repository: repoPath,
+ Committer: committer,
+ BranchName: string(header.Branch),
+ UpstreamRevision: startRevision.String(),
+ }.Run(ctx, s.cfg)
+ if err != nil {
+ return stream.Send(&gitalypb.UserRebaseConfirmableResponse{
+ GitError: err.Error(),
+ })
+ }
+
+ if err := stream.Send(&gitalypb.UserRebaseConfirmableResponse{
+ UserRebaseConfirmableResponsePayload: &gitalypb.UserRebaseConfirmableResponse_RebaseSha{
+ RebaseSha: newrev.String(),
+ },
+ }); err != nil {
+ return fmt.Errorf("send rebase sha: %w", err)
+ }
+
+ secondRequest, err := stream.Recv()
+ if err != nil {
+ return helper.ErrInternalf("recv: %w", err)
+ }
+
+ if !secondRequest.GetApply() {
+ return helper.ErrPreconditionFailedf("rebase aborted by client")
+ }
+
+ if err := s.updateReferenceWithHooks(
+ ctx,
+ header.Repository,
+ header.User,
+ branch,
+ newrev,
+ oldrev,
+ header.GitPushOptions...); err != nil {
+ switch {
+ case errors.As(err, &preReceiveError{}):
+ return stream.Send(&gitalypb.UserRebaseConfirmableResponse{
+ PreReceiveError: err.Error(),
+ })
+ case errors.Is(err, git2go.ErrInvalidArgument):
+ return fmt.Errorf("update ref: %w", err)
+ }
+
+ return err
+ }
+
+ return stream.Send(&gitalypb.UserRebaseConfirmableResponse{
+ UserRebaseConfirmableResponsePayload: &gitalypb.UserRebaseConfirmableResponse_RebaseApplied{
+ RebaseApplied: true,
+ },
+ })
+}
+
func (s *Server) userRebaseConfirmable(stream gitalypb.OperationService_UserRebaseConfirmableServer, firstRequest *gitalypb.UserRebaseConfirmableRequest, repository *gitalypb.Repository) error {
ctx := stream.Context()
client, err := s.ruby.OperationServiceClient(ctx)
@@ -113,3 +209,26 @@ func validateUserRebaseConfirmableHeader(header *gitalypb.UserRebaseConfirmableR
return nil
}
+
+// rebaseRemoteFetch is an intermediate type that implements the
+// `requestFetchingStartRevision` interface. This allows us to use
+// `fetchStartRevision` to get the revision to rebase onto.
+type rebaseRemoteFetch struct {
+ header *gitalypb.UserRebaseConfirmableRequest_Header
+}
+
+func (r rebaseRemoteFetch) GetRepository() *gitalypb.Repository {
+ return r.header.GetRepository()
+}
+
+func (r rebaseRemoteFetch) GetBranchName() []byte {
+ return r.header.GetBranch()
+}
+
+func (r rebaseRemoteFetch) GetStartRepository() *gitalypb.Repository {
+ return r.header.GetRemoteRepository()
+}
+
+func (r rebaseRemoteFetch) GetStartBranchName() []byte {
+ return r.header.GetRemoteBranch()
+}
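
The rebaseRemoteFetch type added at the end of this file is a small adapter: the rebase header exposes GetBranch/GetRemoteBranch, while the shared fetchStartRevision helper expects the requestFetchingStartRevision method set (GetBranchName, GetStartBranchName, and so on). The sketch below reproduces that adapter idea with simplified stand-in types; apart from rebaseRemoteFetch itself, none of the names are the real Gitaly declarations.

    package main

    import "fmt"

    // startRevisionRequest approximates the requestFetchingStartRevision interface
    // used by fetchStartRevision (method set inferred from revert.go and the
    // adapter above), reduced to two methods for brevity.
    type startRevisionRequest interface {
        GetBranchName() []byte
        GetStartBranchName() []byte
    }

    // rebaseHeader stands in for UserRebaseConfirmableRequest_Header, whose
    // getters (GetBranch, GetRemoteBranch) do not match the interface directly.
    type rebaseHeader struct {
        branch       []byte
        remoteBranch []byte
    }

    func (h *rebaseHeader) GetBranch() []byte       { return h.branch }
    func (h *rebaseHeader) GetRemoteBranch() []byte { return h.remoteBranch }

    // rebaseRemoteFetch adapts the header to the shared interface, which is
    // exactly the role the new type in rebase.go plays.
    type rebaseRemoteFetch struct{ header *rebaseHeader }

    func (r rebaseRemoteFetch) GetBranchName() []byte      { return r.header.GetBranch() }
    func (r rebaseRemoteFetch) GetStartBranchName() []byte { return r.header.GetRemoteBranch() }

    // fetchStartRevision only needs the interface, so reverts and now rebases
    // can share the same revision-resolution code.
    func fetchStartRevision(req startRevisionRequest) string {
        return fmt.Sprintf("fetch %s starting from %s", req.GetBranchName(), req.GetStartBranchName())
    }

    func main() {
        h := &rebaseHeader{branch: []byte("feature"), remoteBranch: []byte("master")}
        fmt.Println(fetchStartRevision(rebaseRemoteFetch{header: h}))
    }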
diff --git a/internal/gitaly/service/operations/rebase_test.go b/internal/gitaly/service/operations/rebase_test.go
index f01de6ac1..7e48da3d4 100644
--- a/internal/gitaly/service/operations/rebase_test.go
+++ b/internal/gitaly/service/operations/rebase_test.go
@@ -5,6 +5,7 @@ package operations
import (
"context"
"fmt"
+ "io"
"strings"
"testing"
"time"
@@ -18,9 +19,11 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
@@ -30,20 +33,21 @@ var (
)
func testSuccessfulUserRebaseConfirmableRequest(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testSuccessfulUserRebaseConfirmableRequestFeatured)
+}
+func testSuccessfulUserRebaseConfirmableRequestFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
pushOptions := []string{"ci.skip", "test=value"}
- cfg.Gitlab.URL = setupAndStartGitlabServer(t, testhelper.GlID, "project-1", cfg, pushOptions...)
+ cfg.Gitlab.URL = setupAndStartGitlabServer(t, gittest.GlID, "project-1", cfg, pushOptions...)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- repoCopyProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ repoCopyProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
defer cleanup()
- branchSha := getBranchSha(t, repoPath, rebaseBranchName)
+ branchSha := getBranchSha(t, cfg, repoPath, rebaseBranchName)
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
@@ -51,7 +55,7 @@ func testSuccessfulUserRebaseConfirmableRequest(t *testing.T, cfg config.Cfg, ru
preReceiveHookOutputPath := gittest.WriteEnvToCustomHook(t, repoPath, "pre-receive")
postReceiveHookOutputPath := gittest.WriteEnvToCustomHook(t, repoPath, "post-receive")
- headerRequest := buildHeaderRequest(repoProto, testhelper.TestUser, "1", rebaseBranchName, branchSha, repoCopyProto, "master")
+ headerRequest := buildHeaderRequest(repoProto, gittest.TestUser, "1", rebaseBranchName, branchSha, repoCopyProto, "master")
headerRequest.GetHeader().GitPushOptions = pushOptions
require.NoError(t, rebaseStream.Send(headerRequest), "send header")
@@ -67,12 +71,10 @@ func testSuccessfulUserRebaseConfirmableRequest(t *testing.T, cfg config.Cfg, ru
secondResponse, err := rebaseStream.Recv()
require.NoError(t, err, "receive second response")
- testhelper.ReceiveEOFWithTimeout(t, func() error {
- _, err = rebaseStream.Recv()
- return err
- })
+ _, err = rebaseStream.Recv()
+ require.Equal(t, io.EOF, err)
- newBranchSha := getBranchSha(t, repoPath, rebaseBranchName)
+ newBranchSha := getBranchSha(t, cfg, repoPath, rebaseBranchName)
require.NotEqual(t, newBranchSha, branchSha)
require.Equal(t, newBranchSha, firstResponse.GetRebaseSha())
@@ -88,12 +90,13 @@ func testSuccessfulUserRebaseConfirmableRequest(t *testing.T, cfg config.Cfg, ru
}
func testUserRebaseConfirmableTransaction(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testUserRebaseConfirmableTransactionFeatured)
+}
+func testUserRebaseConfirmableTransactionFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
var voteCount int
txManager := &transaction.MockManager{
- VoteFn: func(context.Context, metadata.Transaction, metadata.PraefectServer, transaction.Vote) error {
+ VoteFn: func(context.Context, txinfo.Transaction, txinfo.PraefectServer, voting.Vote) error {
voteCount++
return nil
},
@@ -105,9 +108,9 @@ func testUserRebaseConfirmableTransaction(t *testing.T, cfg config.Cfg, rubySrv
testserver.WithDisablePraefect(),
testserver.WithTransactionManager(txManager),
)
- cfg.Gitlab.URL = setupAndStartGitlabServer(t, testhelper.GlID, "project-1", cfg)
+ cfg.Gitlab.URL = setupAndStartGitlabServer(t, gittest.GlID, "project-1", cfg)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
for _, tc := range []struct {
desc string
@@ -125,14 +128,14 @@ func testUserRebaseConfirmableTransaction(t *testing.T, cfg config.Cfg, rubySrv
desc: "primary votes and executes hook",
withTransaction: true,
primary: true,
- expectedVotes: 1,
+ expectedVotes: 2,
expectPreReceiveHook: true,
},
{
desc: "secondary votes but does not execute hook",
withTransaction: true,
primary: false,
- expectedVotes: 1,
+ expectedVotes: 2,
expectPreReceiveHook: false,
},
} {
@@ -146,9 +149,9 @@ func testUserRebaseConfirmableTransaction(t *testing.T, cfg config.Cfg, rubySrv
ctx = helper.OutgoingToIncoming(ctx)
var err error
- ctx, err = metadata.InjectTransaction(ctx, 1, "node", tc.primary)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", tc.primary)
require.NoError(t, err)
- ctx, err = (&metadata.PraefectServer{
+ ctx, err = (&txinfo.PraefectServer{
SocketPath: "irrelevant",
}).Inject(ctx)
require.NoError(t, err)
@@ -162,7 +165,7 @@ func testUserRebaseConfirmableTransaction(t *testing.T, cfg config.Cfg, rubySrv
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
- headerRequest := buildHeaderRequest(repoProto, testhelper.TestUser, "1", rebaseBranchName, branchSha.String(), repoProto, "master")
+ headerRequest := buildHeaderRequest(repoProto, gittest.TestUser, "1", rebaseBranchName, branchSha.String(), repoProto, "master")
require.NoError(t, rebaseStream.Send(headerRequest))
_, err = rebaseStream.Recv()
require.NoError(t, err)
@@ -172,11 +175,9 @@ func testUserRebaseConfirmableTransaction(t *testing.T, cfg config.Cfg, rubySrv
require.NoError(t, err)
require.True(t, secondResponse.GetRebaseApplied(), "the second rebase is applied")
- testhelper.ReceiveEOFWithTimeout(t, func() error {
- response, err := rebaseStream.Recv()
- require.Nil(t, response)
- return err
- })
+ response, err := rebaseStream.Recv()
+ require.Nil(t, response)
+ require.Equal(t, io.EOF, err)
require.Equal(t, tc.expectedVotes, voteCount)
if tc.expectPreReceiveHook {
@@ -189,28 +190,29 @@ func testUserRebaseConfirmableTransaction(t *testing.T, cfg config.Cfg, rubySrv
}
func testUserRebaseConfirmableStableCommitIDs(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testUserRebaseConfirmableStableCommitIDsFeatured)
+}
+func testUserRebaseConfirmableStableCommitIDsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- cfg.Gitlab.URL = setupAndStartGitlabServer(t, testhelper.GlID, "project-1", cfg)
+ cfg.Gitlab.URL = setupAndStartGitlabServer(t, gittest.GlID, "project-1", cfg)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
committerDate := &timestamp.Timestamp{Seconds: 100000000}
- parentSha := getBranchSha(t, repoPath, "master")
+ parentSha := getBranchSha(t, cfg, repoPath, "master")
require.NoError(t, rebaseStream.Send(&gitalypb.UserRebaseConfirmableRequest{
UserRebaseConfirmableRequestPayload: &gitalypb.UserRebaseConfirmableRequest_Header_{
Header: &gitalypb.UserRebaseConfirmableRequest_Header{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
RebaseId: "1",
Branch: []byte(rebaseBranchName),
- BranchSha: getBranchSha(t, repoPath, rebaseBranchName),
+ BranchSha: getBranchSha(t, cfg, repoPath, rebaseBranchName),
RemoteRepository: repoProto,
RemoteBranch: []byte("master"),
Timestamp: committerDate,
@@ -229,10 +231,8 @@ func testUserRebaseConfirmableStableCommitIDs(t *testing.T, cfg config.Cfg, ruby
require.NoError(t, err, "receive second response")
require.True(t, response.GetRebaseApplied())
- testhelper.ReceiveEOFWithTimeout(t, func() error {
- _, err = rebaseStream.Recv()
- return err
- })
+ _, err = rebaseStream.Recv()
+ require.Equal(t, io.EOF, err)
commit, err := repo.ReadCommit(ctx, git.Revision(rebaseBranchName))
require.NoError(t, err, "look up git commit")
@@ -251,8 +251,8 @@ func testUserRebaseConfirmableStableCommitIDs(t *testing.T, cfg config.Cfg, ruby
Timezone: []byte("-0600"),
},
Committer: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
// Nanoseconds get ignored because commit timestamps aren't that granular.
Date: committerDate,
Timezone: []byte("+0000"),
@@ -261,15 +261,16 @@ func testUserRebaseConfirmableStableCommitIDs(t *testing.T, cfg config.Cfg, ruby
}
func testFailedRebaseUserRebaseConfirmableRequestDueToInvalidHeader(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testFailedRebaseUserRebaseConfirmableRequestDueToInvalidHeaderFeatured)
+}
+func testFailedRebaseUserRebaseConfirmableRequestDueToInvalidHeaderFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repo, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repoCopy, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ repoCopy, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
defer cleanup()
- branchSha := getBranchSha(t, repoPath, rebaseBranchName)
+ branchSha := getBranchSha(t, cfg, repoPath, rebaseBranchName)
testCases := []struct {
desc string
@@ -277,7 +278,7 @@ func testFailedRebaseUserRebaseConfirmableRequestDueToInvalidHeader(t *testing.T
}{
{
desc: "empty Repository",
- req: buildHeaderRequest(nil, testhelper.TestUser, "1", rebaseBranchName, branchSha, repoCopy, "master"),
+ req: buildHeaderRequest(nil, gittest.TestUser, "1", rebaseBranchName, branchSha, repoCopy, "master"),
},
{
desc: "empty User",
@@ -285,23 +286,23 @@ func testFailedRebaseUserRebaseConfirmableRequestDueToInvalidHeader(t *testing.T
},
{
desc: "empty Branch",
- req: buildHeaderRequest(repo, testhelper.TestUser, "1", "", branchSha, repoCopy, "master"),
+ req: buildHeaderRequest(repo, gittest.TestUser, "1", "", branchSha, repoCopy, "master"),
},
{
desc: "empty BranchSha",
- req: buildHeaderRequest(repo, testhelper.TestUser, "1", rebaseBranchName, "", repoCopy, "master"),
+ req: buildHeaderRequest(repo, gittest.TestUser, "1", rebaseBranchName, "", repoCopy, "master"),
},
{
desc: "empty RemoteRepository",
- req: buildHeaderRequest(repo, testhelper.TestUser, "1", rebaseBranchName, branchSha, nil, "master"),
+ req: buildHeaderRequest(repo, gittest.TestUser, "1", rebaseBranchName, branchSha, nil, "master"),
},
{
desc: "empty RemoteBranch",
- req: buildHeaderRequest(repo, testhelper.TestUser, "1", rebaseBranchName, branchSha, repoCopy, ""),
+ req: buildHeaderRequest(repo, gittest.TestUser, "1", rebaseBranchName, branchSha, repoCopy, ""),
},
{
desc: "invalid branch name",
- req: buildHeaderRequest(repo, testhelper.TestUser, "1", rebaseBranchName, branchSha, repoCopy, "+dev:master"),
+ req: buildHeaderRequest(repo, gittest.TestUser, "1", rebaseBranchName, branchSha, repoCopy, "+dev:master"),
},
}
@@ -321,9 +322,10 @@ func testFailedRebaseUserRebaseConfirmableRequestDueToInvalidHeader(t *testing.T
}
func testAbortedUserRebaseConfirmable(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testAbortedUserRebaseConfirmableFeatured)
+}
+func testAbortedUserRebaseConfirmableFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, _, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
testCases := []struct {
@@ -339,15 +341,15 @@ func testAbortedUserRebaseConfirmable(t *testing.T, cfg config.Cfg, rubySrv *rub
for i, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- testRepo, testRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "repo")
+ testRepo, testRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "repo")
defer cleanup()
- testRepoCopy, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ testRepoCopy, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
defer cleanup()
- branchSha := getBranchSha(t, testRepoPath, rebaseBranchName)
+ branchSha := getBranchSha(t, cfg, testRepoPath, rebaseBranchName)
- headerRequest := buildHeaderRequest(testRepo, testhelper.TestUser, fmt.Sprintf("%v", i), rebaseBranchName, branchSha, testRepoCopy, "master")
+ headerRequest := buildHeaderRequest(testRepo, gittest.TestUser, fmt.Sprintf("%v", i), rebaseBranchName, branchSha, testRepoCopy, "master")
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
@@ -375,29 +377,30 @@ func testAbortedUserRebaseConfirmable(t *testing.T, cfg config.Cfg, rubySrv *rub
require.Error(t, err)
testhelper.RequireGrpcError(t, err, tc.code)
- newBranchSha := getBranchSha(t, testRepoPath, rebaseBranchName)
+ newBranchSha := getBranchSha(t, cfg, testRepoPath, rebaseBranchName)
require.Equal(t, newBranchSha, branchSha, "branch should not change when the rebase is aborted")
})
}
}
func testFailedUserRebaseConfirmableDueToApplyBeingFalse(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testFailedUserRebaseConfirmableDueToApplyBeingFalseFeatured)
+}
+func testFailedUserRebaseConfirmableDueToApplyBeingFalseFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- testRepoCopy, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ testRepoCopy, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
defer cleanup()
- branchSha := getBranchSha(t, repoPath, rebaseBranchName)
+ branchSha := getBranchSha(t, cfg, repoPath, rebaseBranchName)
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
- headerRequest := buildHeaderRequest(repoProto, testhelper.TestUser, "1", rebaseBranchName, branchSha, testRepoCopy, "master")
+ headerRequest := buildHeaderRequest(repoProto, gittest.TestUser, "1", rebaseBranchName, branchSha, testRepoCopy, "master")
require.NoError(t, rebaseStream.Send(headerRequest), "send header")
firstResponse, err := rebaseStream.Recv()
@@ -414,22 +417,23 @@ func testFailedUserRebaseConfirmableDueToApplyBeingFalse(t *testing.T, cfg confi
testhelper.RequireGrpcError(t, err, codes.FailedPrecondition)
require.False(t, secondResponse.GetRebaseApplied(), "the second rebase is not applied")
- newBranchSha := getBranchSha(t, repoPath, rebaseBranchName)
+ newBranchSha := getBranchSha(t, cfg, repoPath, rebaseBranchName)
require.Equal(t, branchSha, newBranchSha, "branch should not change when the rebase is not applied")
require.NotEqual(t, newBranchSha, firstResponse.GetRebaseSha(), "branch should not be the sha returned when the rebase is not applied")
}
func testFailedUserRebaseConfirmableRequestDueToPreReceiveError(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testFailedUserRebaseConfirmableRequestDueToPreReceiveErrorFeatured)
+}
+func testFailedUserRebaseConfirmableRequestDueToPreReceiveErrorFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- repoCopyProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ repoCopyProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
defer cleanup()
- branchSha := getBranchSha(t, repoPath, rebaseBranchName)
+ branchSha := getBranchSha(t, cfg, repoPath, rebaseBranchName)
hookContent := []byte("#!/bin/sh\necho 'failure'\nexit 1")
@@ -440,7 +444,7 @@ func testFailedUserRebaseConfirmableRequestDueToPreReceiveError(t *testing.T, cf
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
- headerRequest := buildHeaderRequest(repoProto, testhelper.TestUser, fmt.Sprintf("%v", i), rebaseBranchName, branchSha, repoCopyProto, "master")
+ headerRequest := buildHeaderRequest(repoProto, gittest.TestUser, fmt.Sprintf("%v", i), rebaseBranchName, branchSha, repoCopyProto, "master")
require.NoError(t, rebaseStream.Send(headerRequest), "send header")
firstResponse, err := rebaseStream.Recv()
@@ -457,12 +461,10 @@ func testFailedUserRebaseConfirmableRequestDueToPreReceiveError(t *testing.T, cf
require.NoError(t, err, "receive second response")
require.Contains(t, secondResponse.PreReceiveError, "failure")
- testhelper.ReceiveEOFWithTimeout(t, func() error {
- _, err = rebaseStream.Recv()
- return err
- })
+ _, err = rebaseStream.Recv()
+ require.Equal(t, io.EOF, err)
- newBranchSha := getBranchSha(t, repoPath, rebaseBranchName)
+ newBranchSha := getBranchSha(t, cfg, repoPath, rebaseBranchName)
require.Equal(t, branchSha, newBranchSha, "branch should not change when the rebase fails due to PreReceiveError")
require.NotEqual(t, newBranchSha, firstResponse.GetRebaseSha(), "branch should not be the sha returned when the rebase fails due to PreReceiveError")
})
@@ -470,68 +472,68 @@ func testFailedUserRebaseConfirmableRequestDueToPreReceiveError(t *testing.T, cf
}
func testFailedUserRebaseConfirmableDueToGitError(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testFailedUserRebaseConfirmableDueToGitErrorFeatured)
+}
+func testFailedUserRebaseConfirmableDueToGitErrorFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repoCopyProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ repoCopyProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
defer cleanup()
failedBranchName := "rebase-encoding-failure-trigger"
- branchSha := getBranchSha(t, repoPath, failedBranchName)
+ branchSha := getBranchSha(t, cfg, repoPath, failedBranchName)
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
- headerRequest := buildHeaderRequest(repoProto, testhelper.TestUser, "1", failedBranchName, branchSha, repoCopyProto, "master")
+ headerRequest := buildHeaderRequest(repoProto, gittest.TestUser, "1", failedBranchName, branchSha, repoCopyProto, "master")
require.NoError(t, rebaseStream.Send(headerRequest), "send header")
firstResponse, err := rebaseStream.Recv()
require.NoError(t, err, "receive first response")
- require.Contains(t, firstResponse.GitError, "CONFLICT (content): Merge conflict in README.md")
+ require.Contains(t, firstResponse.GitError, "conflict")
- testhelper.ReceiveEOFWithTimeout(t, func() error {
- _, err = rebaseStream.Recv()
- return err
- })
+ _, err = rebaseStream.Recv()
+ require.Equal(t, io.EOF, err)
- newBranchSha := getBranchSha(t, repoPath, failedBranchName)
+ newBranchSha := getBranchSha(t, cfg, repoPath, failedBranchName)
require.Equal(t, branchSha, newBranchSha, "branch should not change when the rebase fails due to GitError")
}
-func getBranchSha(t *testing.T, repoPath string, branchName string) string {
- branchSha := string(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", branchName))
+func getBranchSha(t *testing.T, cfg config.Cfg, repoPath string, branchName string) string {
+ branchSha := string(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", branchName))
return strings.TrimSpace(branchSha)
}
func testRebaseRequestWithDeletedFile(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testRebaseRequestWithDeletedFileFeatured)
+}
+func testRebaseRequestWithDeletedFileFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, _, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repoProto, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repoProto, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- repoCopyProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ repoCopyProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
defer cleanup()
branch := "rebase-delete-test"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "user.name", string(testhelper.TestUser.Name))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "user.email", string(testhelper.TestUser.Email))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "checkout", "-b", branch, "master~1")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rm", "README")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-a", "-m", "delete file")
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "user.name", string(gittest.TestUser.Name))
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "user.email", string(gittest.TestUser.Email))
+ gittest.Exec(t, cfg, "-C", repoPath, "checkout", "-b", branch, "master~1")
+ gittest.Exec(t, cfg, "-C", repoPath, "rm", "README")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-a", "-m", "delete file")
- branchSha := getBranchSha(t, repoPath, branch)
+ branchSha := getBranchSha(t, cfg, repoPath, branch)
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
- headerRequest := buildHeaderRequest(repoProto, testhelper.TestUser, "1", branch, branchSha, repoCopyProto, "master")
+ headerRequest := buildHeaderRequest(repoProto, gittest.TestUser, "1", branch, branchSha, repoCopyProto, "master")
require.NoError(t, rebaseStream.Send(headerRequest), "send header")
firstResponse, err := rebaseStream.Recv()
@@ -546,12 +548,10 @@ func testRebaseRequestWithDeletedFile(t *testing.T, cfg config.Cfg, rubySrv *rub
secondResponse, err := rebaseStream.Recv()
require.NoError(t, err, "receive second response")
- testhelper.ReceiveEOFWithTimeout(t, func() error {
- _, err = rebaseStream.Recv()
- return err
- })
+ _, err = rebaseStream.Recv()
+ require.Equal(t, io.EOF, err)
- newBranchSha := getBranchSha(t, repoPath, branch)
+ newBranchSha := getBranchSha(t, cfg, repoPath, branch)
require.NotEqual(t, newBranchSha, branchSha)
require.Equal(t, newBranchSha, firstResponse.GetRebaseSha())
@@ -560,26 +560,27 @@ func testRebaseRequestWithDeletedFile(t *testing.T, cfg config.Cfg, rubySrv *rub
}
func testRebaseOntoRemoteBranch(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cancel := testhelper.Context()
- defer cancel()
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testRebaseOntoRemoteBranchFeatured)
+}
+func testRebaseOntoRemoteBranchFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- remoteRepo, remoteRepoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ remoteRepo, remoteRepoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer cleanup()
localBranch := "master"
- localBranchHash := getBranchSha(t, repoPath, localBranch)
+ localBranchHash := getBranchSha(t, cfg, repoPath, localBranch)
remoteBranch := "remote-branch"
- testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "config", "user.name", string(testhelper.TestUser.Name))
- testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "config", "user.email", string(testhelper.TestUser.Email))
- testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "checkout", "-b", remoteBranch, "master")
- testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "rm", "README")
- testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "commit", "-a", "-m", "remove README")
- remoteBranchHash := getBranchSha(t, remoteRepoPath, remoteBranch)
+ gittest.Exec(t, cfg, "-C", remoteRepoPath, "config", "user.name", string(gittest.TestUser.Name))
+ gittest.Exec(t, cfg, "-C", remoteRepoPath, "config", "user.email", string(gittest.TestUser.Email))
+ gittest.Exec(t, cfg, "-C", remoteRepoPath, "checkout", "-b", remoteBranch, "master")
+ gittest.Exec(t, cfg, "-C", remoteRepoPath, "rm", "README")
+ gittest.Exec(t, cfg, "-C", remoteRepoPath, "commit", "-a", "-m", "remove README")
+ remoteBranchHash := getBranchSha(t, cfg, remoteRepoPath, remoteBranch)
rebaseStream, err := client.UserRebaseConfirmable(ctx)
require.NoError(t, err)
@@ -587,7 +588,7 @@ func testRebaseOntoRemoteBranch(t *testing.T, cfg config.Cfg, rubySrv *rubyserve
_, err = repo.ReadCommit(ctx, git.Revision(remoteBranchHash))
require.Equal(t, localrepo.ErrObjectNotFound, err, "remote commit does not yet exist in local repository")
- headerRequest := buildHeaderRequest(repoProto, testhelper.TestUser, "1", localBranch, localBranchHash, remoteRepo, remoteBranch)
+ headerRequest := buildHeaderRequest(repoProto, gittest.TestUser, "1", localBranch, localBranchHash, remoteRepo, remoteBranch)
require.NoError(t, rebaseStream.Send(headerRequest), "send header")
firstResponse, err := rebaseStream.Recv()
@@ -602,12 +603,10 @@ func testRebaseOntoRemoteBranch(t *testing.T, cfg config.Cfg, rubySrv *rubyserve
secondResponse, err := rebaseStream.Recv()
require.NoError(t, err, "receive second response")
- testhelper.ReceiveEOFWithTimeout(t, func() error {
- _, err = rebaseStream.Recv()
- return err
- })
+ _, err = rebaseStream.Recv()
+ require.Equal(t, io.EOF, err)
- rebasedBranchHash := getBranchSha(t, repoPath, localBranch)
+ rebasedBranchHash := getBranchSha(t, cfg, repoPath, localBranch)
require.NotEqual(t, rebasedBranchHash, localBranchHash)
require.Equal(t, rebasedBranchHash, firstResponse.GetRebaseSha())
@@ -615,6 +614,56 @@ func testRebaseOntoRemoteBranch(t *testing.T, cfg config.Cfg, rubySrv *rubyserve
require.True(t, secondResponse.GetRebaseApplied(), "the second rebase is applied")
}
+func testRebaseFailedWithCode(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
+ testWithFeature(t, featureflag.GoUserRebaseConfirmable, cfg, rubySrv, testRebaseFailedWithCodeFeatured)
+}
+
+func testRebaseFailedWithCodeFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
+ ctx, _, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+
+ branchSha := getBranchSha(t, cfg, repoPath, rebaseBranchName)
+
+ testCases := []struct {
+ desc string
+ buildHeaderRequest func() *gitalypb.UserRebaseConfirmableRequest
+ expectedCode codes.Code
+ }{
+ {
+ desc: "non-existing storage",
+ buildHeaderRequest: func() *gitalypb.UserRebaseConfirmableRequest {
+ repo := *repoProto
+ repo.StorageName = "@this-storage-does-not-exist"
+
+ return buildHeaderRequest(&repo, gittest.TestUser, "1", rebaseBranchName, branchSha, &repo, "master")
+ },
+ expectedCode: codes.InvalidArgument,
+ },
+ {
+ desc: "missing repository path",
+ buildHeaderRequest: func() *gitalypb.UserRebaseConfirmableRequest {
+ repo := *repoProto
+ repo.RelativePath = ""
+
+ return buildHeaderRequest(&repo, gittest.TestUser, "1", rebaseBranchName, branchSha, &repo, "master")
+ },
+ expectedCode: codes.InvalidArgument,
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ rebaseStream, err := client.UserRebaseConfirmable(ctx)
+ require.NoError(t, err)
+
+ headerRequest := tc.buildHeaderRequest()
+ require.NoError(t, rebaseStream.Send(headerRequest), "send header")
+
+ _, err = rebaseStream.Recv()
+ testhelper.RequireGrpcError(t, err, tc.expectedCode)
+ })
+ }
+}
+
func rebaseRecvTimeout(bidi gitalypb.OperationService_UserRebaseConfirmableClient, timeout time.Duration) (*gitalypb.UserRebaseConfirmableResponse, error) {
type responseError struct {
response *gitalypb.UserRebaseConfirmableResponse
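
Throughout the test changes above, testhelper.ReceiveEOFWithTimeout is replaced by a plain Recv call that is expected to return io.EOF once the server has sent its final response. A compact illustration of that assertion style, using a hypothetical fakeStream in place of the generated gRPC client stream:

    package main

    import (
        "errors"
        "fmt"
        "io"
    )

    // fakeStream mimics the client side of a server stream: Recv returns queued
    // responses and then io.EOF, the behaviour the rewritten tests assert with
    // require.Equal(t, io.EOF, err) instead of ReceiveEOFWithTimeout.
    type fakeStream struct{ responses []string }

    func (s *fakeStream) Recv() (string, error) {
        if len(s.responses) == 0 {
            return "", io.EOF
        }
        resp := s.responses[0]
        s.responses = s.responses[1:]
        return resp, nil
    }

    func main() {
        stream := &fakeStream{responses: []string{"rebase_sha", "rebase_applied"}}

        for {
            resp, err := stream.Recv()
            if errors.Is(err, io.EOF) {
                fmt.Println("stream closed cleanly")
                return
            }
            fmt.Println("got response:", resp)
        }
    }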
diff --git a/internal/gitaly/service/operations/revert.go b/internal/gitaly/service/operations/revert.go
index 79f2cfffe..aba4beaeb 100644
--- a/internal/gitaly/service/operations/revert.go
+++ b/internal/gitaly/service/operations/revert.go
@@ -8,12 +8,9 @@ import (
"github.com/golang/protobuf/ptypes"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/remoterepo"
"gitlab.com/gitlab-org/gitaly/internal/git2go"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -21,16 +18,13 @@ func (s *Server) UserRevert(ctx context.Context, req *gitalypb.UserRevertRequest
if err := validateCherryPickOrRevertRequest(req); err != nil {
return nil, helper.ErrInvalidArgument(err)
}
- if featureflag.IsDisabled(ctx, featureflag.GoUserRevert) {
- return s.rubyUserRevert(ctx, req)
- }
startRevision, err := s.fetchStartRevision(ctx, req)
if err != nil {
return nil, err
}
- localRepo := localrepo.New(s.gitCmdFactory, req.Repository, s.cfg)
+ localRepo := s.localrepo(req.GetRepository())
repoHadBranches, err := localRepo.HasBranches(ctx)
if err != nil {
return nil, err
@@ -129,20 +123,6 @@ func (s *Server) UserRevert(ctx context.Context, req *gitalypb.UserRevertRequest
}, nil
}
-func (s *Server) rubyUserRevert(ctx context.Context, req *gitalypb.UserRevertRequest) (*gitalypb.UserRevertResponse, error) {
- client, err := s.ruby.OperationServiceClient(ctx)
- if err != nil {
- return nil, err
- }
-
- clientCtx, err := rubyserver.SetHeaders(ctx, s.locator, req.GetRepository())
- if err != nil {
- return nil, err
- }
-
- return client.UserRevert(clientCtx, req)
-}
-
type requestFetchingStartRevision interface {
GetRepository() *gitalypb.Repository
GetBranchName() []byte
@@ -174,7 +154,7 @@ func (s *Server) fetchStartRevision(ctx context.Context, req requestFetchingStar
return startRevision, nil
}
- localRepo := localrepo.New(s.gitCmdFactory, req.GetRepository(), s.cfg)
+ localRepo := s.localrepo(req.GetRepository())
_, err = localRepo.ResolveRevision(ctx, startRevision.Revision()+"^{commit}")
if errors.Is(err, git.ErrReferenceNotFound) {
diff --git a/internal/gitaly/service/operations/revert_test.go b/internal/gitaly/service/operations/revert_test.go
index 84266ce0f..6d45072eb 100644
--- a/internal/gitaly/service/operations/revert_test.go
+++ b/internal/gitaly/service/operations/revert_test.go
@@ -1,7 +1,6 @@
package operations
import (
- "context"
"testing"
"github.com/golang/protobuf/ptypes/timestamp"
@@ -9,25 +8,21 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
-func testServerUserRevertSuccessful(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertSuccessfulFeatured)
-}
+func TestServer_UserRevert_successful(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "revert-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
masterHeadCommit, err := repo.ReadCommit(ctx, "master")
require.NoError(t, err)
@@ -35,10 +30,10 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
revertedCommit, err := repo.ReadCommit(ctx, "d59c60028b053793cecfb4022de34602e1a9218e")
require.NoError(t, err)
- testRepoCopy, testRepoCopyPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "read-only") // read-only repo
+ testRepoCopy, testRepoCopyPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "read-only") // read-only repo
defer cleanup()
- testhelper.MustRunCommand(t, nil, "git", "-C", testRepoCopyPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", testRepoCopyPath, "branch", destinationBranch, "master")
testCases := []struct {
desc string
@@ -49,7 +44,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
desc: "branch exists",
request: &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -60,7 +55,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
desc: "nonexistent branch + start_repository == repository",
request: &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte("to-be-reverted-into-1"),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -72,7 +67,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
desc: "nonexistent branch + start_repository != repository",
request: &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte("to-be-reverted-into-2"),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -85,7 +80,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
desc: "nonexistent branch + empty start_repository",
request: &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte("to-be-reverted-into-3"),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -97,7 +92,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
desc: "branch exists with dry run",
request: &gitalypb.UserRevertRequest{
Repository: testRepoCopy,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -109,7 +104,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
desc: "nonexistent branch + start_repository == repository with dry run",
request: &gitalypb.UserRevertRequest{
Repository: testRepoCopy,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte("to-be-reverted-into-1"),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -122,7 +117,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
desc: "nonexistent branch + start_repository != repository with dry run",
request: &gitalypb.UserRevertRequest{
Repository: testRepoCopy,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte("to-be-reverted-into-2"),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -136,7 +131,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
desc: "nonexistent branch + empty start_repository with dry run",
request: &gitalypb.UserRevertRequest{
Repository: testRepoCopy,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte("to-be-reverted-into-3"),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -152,7 +147,7 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
response, err := client.UserRevert(ctx, testCase.request)
require.NoError(t, err)
- testCaseRepo := localrepo.New(git.NewExecCommandFactory(cfg), testCase.request.Repository, cfg)
+ testCaseRepo := localrepo.NewTestRepo(t, cfg, testCase.request.Repository)
headCommit, err := testCaseRepo.ReadCommit(ctx, git.Revision(testCase.request.BranchName))
require.NoError(t, err)
@@ -174,21 +169,20 @@ func testServerUserRevertSuccessfulFeatured(t *testing.T, ctx context.Context, c
}
}
-func testServerUserRevertStableID(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertStableIDFeatured)
-}
+func TestServer_UserRevert_stableID(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertStableIDFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, _, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
commitToRevert, err := repo.ReadCommit(ctx, "d59c60028b053793cecfb4022de34602e1a9218e")
require.NoError(t, err)
response, err := client.UserRevert(ctx, &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: commitToRevert,
BranchName: []byte("master"),
Message: []byte("Reverting commit"),
@@ -229,16 +223,13 @@ func testServerUserRevertStableIDFeatured(t *testing.T, ctx context.Context, cfg
}, revertedCommit)
}
-func testServerUserRevertSuccessfulIntoEmptyRepo(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertSuccessfulIntoNewRepo)
-}
+func TestServer_UserRevert_successfulIntoEmptyRepo(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertSuccessfulIntoNewRepo(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, startRepoProto, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, startRepoProto, _, client := setupOperationsService(t, ctx)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
-
- startRepo := localrepo.New(gitCmdFactory, startRepoProto, cfg)
+ startRepo := localrepo.NewTestRepo(t, cfg, startRepoProto)
revertedCommit, err := startRepo.ReadCommit(ctx, "d59c60028b053793cecfb4022de34602e1a9218e")
require.NoError(t, err)
@@ -246,13 +237,13 @@ func testServerUserRevertSuccessfulIntoNewRepo(t *testing.T, ctx context.Context
masterHeadCommit, err := startRepo.ReadCommit(ctx, "master")
require.NoError(t, err)
- repoProto, _, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repoProto, _, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer cleanup()
- repo := localrepo.New(gitCmdFactory, repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
request := &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte("dst-branch"),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -279,24 +270,23 @@ func testServerUserRevertSuccessfulIntoNewRepo(t *testing.T, ctx context.Context
require.Equal(t, masterHeadCommit.Id, headCommit.ParentIds[0])
}
-func testServerUserRevertSuccessfulGitHooks(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertSuccessfulGitHooksFeatured)
-}
+func TestServer_UserRevert_successfulGitHooks(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertSuccessfulGitHooksFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "revert-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
revertedCommit, err := repo.ReadCommit(ctx, "d59c60028b053793cecfb4022de34602e1a9218e")
require.NoError(t, err)
request := &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -314,18 +304,17 @@ func testServerUserRevertSuccessfulGitHooksFeatured(t *testing.T, ctx context.Co
for _, file := range hookOutputFiles {
output := string(testhelper.MustReadFile(t, file))
- require.Contains(t, output, "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, output, "GL_USERNAME="+gittest.TestUser.GlUsername)
}
}
-func testServerUserRevertFailuedDueToValidations(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertFailuedDueToValidationsFeatured)
-}
+func TestServer_UserRevert_failuedDueToValidations(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertFailuedDueToValidationsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, _, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
revertedCommit, err := repo.ReadCommit(ctx, "d59c60028b053793cecfb4022de34602e1a9218e")
require.NoError(t, err)
@@ -352,7 +341,7 @@ func testServerUserRevertFailuedDueToValidationsFeatured(t *testing.T, ctx conte
desc: "empty commit",
request: &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: nil,
BranchName: []byte(destinationBranch),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -363,7 +352,7 @@ func testServerUserRevertFailuedDueToValidationsFeatured(t *testing.T, ctx conte
desc: "empty branch name",
request: &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: nil,
Message: []byte("Reverting " + revertedCommit.Id),
@@ -374,7 +363,7 @@ func testServerUserRevertFailuedDueToValidationsFeatured(t *testing.T, ctx conte
desc: "empty message",
request: &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte(destinationBranch),
Message: nil,
@@ -391,24 +380,23 @@ func testServerUserRevertFailuedDueToValidationsFeatured(t *testing.T, ctx conte
}
}
-func testServerUserRevertFailedDueToPreReceiveError(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertFailedDueToPreReceiveErrorFeatured)
-}
+func TestServer_UserRevert_failedDueToPreReceiveError(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertFailedDueToPreReceiveErrorFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "revert-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
revertedCommit, err := repo.ReadCommit(ctx, "d59c60028b053793cecfb4022de34602e1a9218e")
require.NoError(t, err)
request := &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -422,22 +410,21 @@ func testServerUserRevertFailedDueToPreReceiveErrorFeatured(t *testing.T, ctx co
response, err := client.UserRevert(ctx, request)
require.NoError(t, err)
- require.Contains(t, response.PreReceiveError, "GL_ID="+testhelper.TestUser.GlId)
+ require.Contains(t, response.PreReceiveError, "GL_ID="+gittest.TestUser.GlId)
})
}
}
-func testServerUserRevertFailedDueToCreateTreeErrorConflict(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertFailedDueToCreateTreeErrorConflictFeatured)
-}
+func TestServer_UserRevert_failedDueToCreateTreeErrorConflict(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertFailedDueToCreateTreeErrorConflictFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "revert-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
// This revert patch of the following commit cannot be applied to the destinationBranch above
revertedCommit, err := repo.ReadCommit(ctx, "372ab6950519549b14d220271ee2322caa44d4eb")
@@ -445,7 +432,7 @@ func testServerUserRevertFailedDueToCreateTreeErrorConflictFeatured(t *testing.T
request := &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -457,24 +444,23 @@ func testServerUserRevertFailedDueToCreateTreeErrorConflictFeatured(t *testing.T
require.Equal(t, gitalypb.UserRevertResponse_CONFLICT, response.CreateTreeErrorCode)
}
-func testServerUserRevertFailedDueToCreateTreeErrorEmpty(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertFailedDueToCreateTreeErrorEmptyFeatured)
-}
+func TestServer_UserRevert_failedDueToCreateTreeErrorEmpty(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertFailedDueToCreateTreeErrorEmptyFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
destinationBranch := "revert-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
revertedCommit, err := repo.ReadCommit(ctx, "d59c60028b053793cecfb4022de34602e1a9218e")
require.NoError(t, err)
request := &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Reverting " + revertedCommit.Id),
@@ -491,26 +477,25 @@ func testServerUserRevertFailedDueToCreateTreeErrorEmptyFeatured(t *testing.T, c
require.Equal(t, gitalypb.UserRevertResponse_EMPTY, response.CreateTreeErrorCode)
}
-func testServerUserRevertFailedDueToCommitError(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testWithFeature(t, featureflag.GoUserRevert, cfg, rubySrv, testServerUserRevertFailedDueToCommitErrorFeatured)
-}
+func TestServer_UserRevert_failedDueToCommitError(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testServerUserRevertFailedDueToCommitErrorFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
sourceBranch := "revert-src"
destinationBranch := "revert-dst"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", destinationBranch, "master")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", sourceBranch, "a5391128b0ef5d21df5dd23d98557f4ef12fae20")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", destinationBranch, "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", sourceBranch, "a5391128b0ef5d21df5dd23d98557f4ef12fae20")
revertedCommit, err := repo.ReadCommit(ctx, git.Revision(sourceBranch))
require.NoError(t, err)
request := &gitalypb.UserRevertRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Commit: revertedCommit,
BranchName: []byte(destinationBranch),
Message: []byte("Reverting " + revertedCommit.Id),
diff --git a/internal/gitaly/service/operations/server.go b/internal/gitaly/service/operations/server.go
index cef135c76..1dfb11630 100644
--- a/internal/gitaly/service/operations/server.go
+++ b/internal/gitaly/service/operations/server.go
@@ -5,6 +5,9 @@ import (
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/git2go"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
@@ -20,10 +23,19 @@ type Server struct {
conns *client.Pool
git2go git2go.Executor
gitCmdFactory git.CommandFactory
+ catfileCache catfile.Cache
}
// NewServer creates a new instance of a grpc OperationServiceServer
-func NewServer(cfg config.Cfg, rs *rubyserver.Server, hookManager hook.Manager, locator storage.Locator, conns *client.Pool, gitCmdFactory git.CommandFactory) *Server {
+func NewServer(
+ cfg config.Cfg,
+ rs *rubyserver.Server,
+ hookManager hook.Manager,
+ locator storage.Locator,
+ conns *client.Pool,
+ gitCmdFactory git.CommandFactory,
+ catfileCache catfile.Cache,
+) *Server {
return &Server{
ruby: rs,
cfg: cfg,
@@ -32,5 +44,10 @@ func NewServer(cfg config.Cfg, rs *rubyserver.Server, hookManager hook.Manager,
conns: conns,
git2go: git2go.New(filepath.Join(cfg.BinDir, "gitaly-git2go"), cfg.Git.BinPath),
gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
}
}
+
+func (s *Server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
+}
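
A brief illustration of the dependency-injection pattern this hunk introduces (an editor's sketch with hypothetical types, not Gitaly's real API): the server owns the long-lived catfile cache alongside the command factory, and a single localrepo helper threads both into every per-request repository handle so individual RPC handlers stop constructing repositories themselves.

package example

type CommandFactory interface{ GitPath() string }
type ObjectCache interface{ Evict() }
type GitRepo interface{ StorageName() string }

// Repo bundles a repository with the process-wide dependencies it needs.
type Repo struct {
	factory CommandFactory
	cache   ObjectCache
	proto   GitRepo
}

type Server struct {
	gitCmdFactory CommandFactory
	objectCache   ObjectCache
}

func NewServer(factory CommandFactory, cache ObjectCache) *Server {
	return &Server{gitCmdFactory: factory, objectCache: cache}
}

// localrepo wires the server-owned dependencies into a repository handle.
func (s *Server) localrepo(proto GitRepo) *Repo {
	return &Repo{factory: s.gitCmdFactory, cache: s.objectCache, proto: proto}
}
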
diff --git a/internal/gitaly/service/operations/squash.go b/internal/gitaly/service/operations/squash.go
index da5921607..92111fecb 100644
--- a/internal/gitaly/service/operations/squash.go
+++ b/internal/gitaly/service/operations/squash.go
@@ -163,7 +163,7 @@ func (s *Server) userSquashWithDiffInFiles(ctx context.Context, req *gitalypb.Us
}
defer func(worktreeName string) {
- ctx, cancel := context.WithCancel(command.SuppressCancellation(ctx))
+ ctx, cancel := context.WithCancel(helper.SuppressCancellation(ctx))
defer cancel()
if err := s.removeWorktree(ctx, repo, worktreeName); err != nil {
@@ -263,7 +263,7 @@ func (s *Server) userSquashWithNoDiff(ctx context.Context, req *gitalypb.UserSqu
}
defer func(worktreeName string) {
- ctx, cancel := context.WithCancel(command.SuppressCancellation(ctx))
+ ctx, cancel := context.WithCancel(helper.SuppressCancellation(ctx))
defer cancel()
if err := s.removeWorktree(ctx, repo, worktreeName); err != nil {
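
The switch from command.SuppressCancellation to helper.SuppressCancellation keeps the same idea: worktree cleanup must still run even when the request context has already been cancelled. A self-contained sketch of that pattern (assumed shape, not the actual helper's implementation):

package example

import (
	"context"
	"time"
)

// valuesOnlyContext keeps the parent's values but drops its deadline and
// cancellation, so work derived from it outlives the original request.
type valuesOnlyContext struct{ context.Context }

func (valuesOnlyContext) Deadline() (time.Time, bool) { return time.Time{}, false }
func (valuesOnlyContext) Done() <-chan struct{}       { return nil }
func (valuesOnlyContext) Err() error                  { return nil }

func suppressCancellation(ctx context.Context) context.Context {
	return valuesOnlyContext{ctx}
}

// Typical use: derive a fresh cancellable context for deferred cleanup.
func cleanupAfter(ctx context.Context, cleanup func(context.Context) error) error {
	ctx, cancel := context.WithCancel(suppressCancellation(ctx))
	defer cancel()
	return cleanup(ctx)
}
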
diff --git a/internal/gitaly/service/operations/squash_test.go b/internal/gitaly/service/operations/squash_test.go
index da6b6ff3f..65b190592 100644
--- a/internal/gitaly/service/operations/squash_test.go
+++ b/internal/gitaly/service/operations/squash_test.go
@@ -14,6 +14,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -49,11 +50,11 @@ func TestSuccessfulUserSquashRequest(t *testing.T) {
func testSuccessfulUserSquashRequest(t *testing.T, ctx context.Context, start, end string) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
request := &gitalypb.UserSquashRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
Author: author,
CommitMessage: commitMessage,
@@ -70,11 +71,11 @@ func testSuccessfulUserSquashRequest(t *testing.T, ctx context.Context, start, e
require.Equal(t, []string{start}, commit.ParentIds)
require.Equal(t, author.Name, commit.Author.Name)
require.Equal(t, author.Email, commit.Author.Email)
- require.Equal(t, testhelper.TestUser.Name, commit.Committer.Name)
- require.Equal(t, testhelper.TestUser.Email, commit.Committer.Email)
+ require.Equal(t, gittest.TestUser.Name, commit.Committer.Name)
+ require.Equal(t, gittest.TestUser.Email, commit.Committer.Email)
require.Equal(t, commitMessage, commit.Subject)
- treeData := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "ls-tree", "--name-only", response.SquashSha)
+ treeData := gittest.Exec(t, cfg, "-C", repoPath, "ls-tree", "--name-only", response.SquashSha)
files := strings.Fields(text.ChompBytes(treeData))
require.Subset(t, files, []string{"VERSION", "README", "files", ".gitattributes"}, "ensure the files remain on their places")
}
@@ -85,11 +86,11 @@ func TestUserSquash_stableID(t *testing.T) {
ctx, cfg, repoProto, _, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
response, err := client.UserSquash(ctx, &gitalypb.UserSquashRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
Author: author,
CommitMessage: []byte("Squashed commit"),
@@ -118,16 +119,16 @@ func TestUserSquash_stableID(t *testing.T) {
Timezone: []byte("+0000"),
},
Committer: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
Date: &timestamp.Timestamp{Seconds: 1234512345},
Timezone: []byte("+0000"),
},
}, commit)
}
-func ensureSplitIndexExists(t *testing.T, repoDir string) bool {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoDir, "update-index", "--add")
+func ensureSplitIndexExists(t *testing.T, cfg config.Cfg, repoDir string) bool {
+ gittest.Exec(t, cfg, "-C", repoDir, "update-index", "--add")
fis, err := ioutil.ReadDir(repoDir)
require.NoError(t, err)
@@ -145,11 +146,11 @@ func TestSuccessfulUserSquashRequestWith3wayMerge(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
request := &gitalypb.UserSquashRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
Author: author,
CommitMessage: commitMessage,
@@ -167,8 +168,8 @@ func TestSuccessfulUserSquashRequestWith3wayMerge(t *testing.T) {
require.Equal(t, []string{"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"}, commit.ParentIds)
require.Equal(t, author.Name, commit.Author.Name)
require.Equal(t, author.Email, commit.Author.Email)
- require.Equal(t, testhelper.TestUser.Name, commit.Committer.Name)
- require.Equal(t, testhelper.TestUser.Email, commit.Committer.Email)
+ require.Equal(t, gittest.TestUser.Name, commit.Committer.Name)
+ require.Equal(t, gittest.TestUser.Email, commit.Committer.Email)
require.Equal(t, commitMessage, commit.Subject)
// Handle symlinks in macOS from /tmp -> /private/tmp
@@ -176,7 +177,7 @@ func TestSuccessfulUserSquashRequestWith3wayMerge(t *testing.T) {
require.NoError(t, err)
// Ensure Git metadata is cleaned up
- worktreeList := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "worktree", "list", "--porcelain"))
+ worktreeList := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "worktree", "list", "--porcelain"))
expectedOut := fmt.Sprintf("worktree %s\nbare\n", repoPath)
require.Equal(t, expectedOut, worktreeList)
@@ -190,13 +191,13 @@ func TestSplitIndex(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
- require.False(t, ensureSplitIndexExists(t, repoPath))
+ require.False(t, ensureSplitIndexExists(t, cfg, repoPath))
request := &gitalypb.UserSquashRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
Author: author,
CommitMessage: commitMessage,
@@ -207,7 +208,7 @@ func TestSplitIndex(t *testing.T) {
response, err := client.UserSquash(ctx, request)
require.NoError(t, err)
require.Empty(t, response.GetGitError())
- require.False(t, ensureSplitIndexExists(t, repoPath))
+ require.False(t, ensureSplitIndexExists(t, cfg, repoPath))
}
func TestSquashRequestWithRenamedFiles(t *testing.T) {
@@ -216,39 +217,37 @@ func TestSquashRequestWithRenamedFiles(t *testing.T) {
ctx, cfg, _, _, client := setupOperationsService(t, ctx)
- repoProto, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repoProto, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
originalFilename := "original-file.txt"
renamedFilename := "renamed-file.txt"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "testhelper.TestUser.name", string(author.Name))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "testhelper.TestUser.email", string(author.Email))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "checkout", "-b", "squash-rename-test", "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "checkout", "-b", "squash-rename-test", "master")
require.NoError(t, ioutil.WriteFile(filepath.Join(repoPath, originalFilename), []byte("This is a test"), 0644))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "add", ".")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-m", "test file")
+ gittest.Exec(t, cfg, "-C", repoPath, "add", ".")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "test file")
- startCommitID := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "HEAD"))
+ startCommitID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "HEAD"))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "mv", originalFilename, renamedFilename)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-a", "-m", "renamed test file")
+ gittest.Exec(t, cfg, "-C", repoPath, "mv", originalFilename, renamedFilename)
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-a", "-m", "renamed test file")
// Modify the original file in another branch
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "checkout", "-b", "squash-rename-branch", startCommitID)
+ gittest.Exec(t, cfg, "-C", repoPath, "checkout", "-b", "squash-rename-branch", startCommitID)
require.NoError(t, ioutil.WriteFile(filepath.Join(repoPath, originalFilename), []byte("This is a change"), 0644))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-a", "-m", "test")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-a", "-m", "test")
require.NoError(t, ioutil.WriteFile(filepath.Join(repoPath, originalFilename), []byte("This is another change"), 0644))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-a", "-m", "test")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-a", "-m", "test")
- endCommitID := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "HEAD"))
+ endCommitID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "HEAD"))
request := &gitalypb.UserSquashRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
Author: author,
CommitMessage: commitMessage,
@@ -265,8 +264,8 @@ func TestSquashRequestWithRenamedFiles(t *testing.T) {
require.Equal(t, []string{startCommitID}, commit.ParentIds)
require.Equal(t, author.Name, commit.Author.Name)
require.Equal(t, author.Email, commit.Author.Email)
- require.Equal(t, testhelper.TestUser.Name, commit.Committer.Name)
- require.Equal(t, testhelper.TestUser.Email, commit.Committer.Email)
+ require.Equal(t, gittest.TestUser.Name, commit.Committer.Name)
+ require.Equal(t, gittest.TestUser.Email, commit.Committer.Email)
require.Equal(t, commitMessage, commit.Subject)
}
@@ -280,7 +279,7 @@ func TestSuccessfulUserSquashRequestWithMissingFileOnTargetBranch(t *testing.T)
request := &gitalypb.UserSquashRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
Author: author,
CommitMessage: commitMessage,
@@ -308,9 +307,9 @@ func TestFailedUserSquashRequestDueToValidations(t *testing.T) {
desc: "empty Repository",
request: &gitalypb.UserSquashRequest{
Repository: nil,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
- Author: testhelper.TestUser,
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: startSha,
EndSha: endSha,
@@ -323,7 +322,7 @@ func TestFailedUserSquashRequestDueToValidations(t *testing.T) {
Repository: repo,
User: nil,
SquashId: "1",
- Author: testhelper.TestUser,
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: startSha,
EndSha: endSha,
@@ -334,9 +333,9 @@ func TestFailedUserSquashRequestDueToValidations(t *testing.T) {
desc: "empty SquashId",
request: &gitalypb.UserSquashRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "",
- Author: testhelper.TestUser,
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: startSha,
EndSha: endSha,
@@ -347,9 +346,9 @@ func TestFailedUserSquashRequestDueToValidations(t *testing.T) {
desc: "empty StartSha",
request: &gitalypb.UserSquashRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
- Author: testhelper.TestUser,
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: "",
EndSha: endSha,
@@ -360,9 +359,9 @@ func TestFailedUserSquashRequestDueToValidations(t *testing.T) {
desc: "empty EndSha",
request: &gitalypb.UserSquashRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
- Author: testhelper.TestUser,
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: startSha,
EndSha: "",
@@ -373,7 +372,7 @@ func TestFailedUserSquashRequestDueToValidations(t *testing.T) {
desc: "empty Author",
request: &gitalypb.UserSquashRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
Author: nil,
CommitMessage: commitMessage,
@@ -386,9 +385,9 @@ func TestFailedUserSquashRequestDueToValidations(t *testing.T) {
desc: "empty CommitMessage",
request: &gitalypb.UserSquashRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1",
- Author: testhelper.TestUser,
+ Author: gittest.TestUser,
CommitMessage: nil,
StartSha: startSha,
EndSha: endSha,
@@ -399,9 +398,9 @@ func TestFailedUserSquashRequestDueToValidations(t *testing.T) {
desc: "worktree id can't contain slashes",
request: &gitalypb.UserSquashRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
SquashId: "1/2",
- Author: testhelper.TestUser,
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: startSha,
EndSha: endSha,
@@ -435,8 +434,8 @@ func TestUserSquashWithGitError(t *testing.T) {
request: &gitalypb.UserSquashRequest{
Repository: repo,
SquashId: "1",
- User: testhelper.TestUser,
- Author: testhelper.TestUser,
+ User: gittest.TestUser,
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: "doesntexisting",
EndSha: endSha,
@@ -448,8 +447,8 @@ func TestUserSquashWithGitError(t *testing.T) {
request: &gitalypb.UserSquashRequest{
Repository: repo,
SquashId: "1",
- User: testhelper.TestUser,
- Author: testhelper.TestUser,
+ User: gittest.TestUser,
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: startSha,
EndSha: "doesntexisting",
@@ -461,8 +460,8 @@ func TestUserSquashWithGitError(t *testing.T) {
request: &gitalypb.UserSquashRequest{
Repository: repo,
SquashId: "1",
- User: &gitalypb.User{Email: testhelper.TestUser.Email},
- Author: testhelper.TestUser,
+ User: &gitalypb.User{Email: gittest.TestUser.Email},
+ Author: gittest.TestUser,
CommitMessage: commitMessage,
StartSha: startSha,
EndSha: endSha,
@@ -474,8 +473,8 @@ func TestUserSquashWithGitError(t *testing.T) {
request: &gitalypb.UserSquashRequest{
Repository: repo,
SquashId: "1",
- User: testhelper.TestUser,
- Author: &gitalypb.User{Email: testhelper.TestUser.Email},
+ User: gittest.TestUser,
+ Author: &gitalypb.User{Email: gittest.TestUser.Email},
CommitMessage: commitMessage,
StartSha: startSha,
EndSha: endSha,
diff --git a/internal/gitaly/service/operations/submodules.go b/internal/gitaly/service/operations/submodules.go
index f551626f4..828569c9b 100644
--- a/internal/gitaly/service/operations/submodules.go
+++ b/internal/gitaly/service/operations/submodules.go
@@ -11,11 +11,8 @@ import (
"github.com/golang/protobuf/ptypes"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git2go"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -28,21 +25,7 @@ func (s *Server) UserUpdateSubmodule(ctx context.Context, req *gitalypb.UserUpda
return nil, status.Errorf(codes.InvalidArgument, userUpdateSubmoduleName+": %v", err)
}
- if featureflag.IsEnabled(ctx, featureflag.GoUserUpdateSubmodule) {
- return s.userUpdateSubmodule(ctx, req)
- }
-
- client, err := s.ruby.OperationServiceClient(ctx)
- if err != nil {
- return nil, err
- }
-
- clientCtx, err := rubyserver.SetHeaders(ctx, s.locator, req.GetRepository())
- if err != nil {
- return nil, err
- }
-
- return client.UserUpdateSubmodule(clientCtx, req)
+ return s.userUpdateSubmodule(ctx, req)
}
func validateUserUpdateSubmoduleRequest(req *gitalypb.UserUpdateSubmoduleRequest) error {
@@ -78,7 +61,7 @@ func validateUserUpdateSubmoduleRequest(req *gitalypb.UserUpdateSubmoduleRequest
}
func (s *Server) userUpdateSubmodule(ctx context.Context, req *gitalypb.UserUpdateSubmoduleRequest) (*gitalypb.UserUpdateSubmoduleResponse, error) {
- repo := localrepo.New(s.gitCmdFactory, req.GetRepository(), s.cfg)
+ repo := s.localrepo(req.GetRepository())
branches, err := repo.GetBranches(ctx)
if err != nil {
return nil, fmt.Errorf("%s: get branches: %w", userUpdateSubmoduleName, err)
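
The UserUpdateSubmodule change above removes the feature-flag branch and the Ruby fallback, so the handler now always takes the Go path. A generic sketch of the resulting shape (hypothetical names, only illustrating the cleanup):

package example

import "context"

type request struct{}
type response struct{}

type server struct{}

func (s *server) goImplementation(ctx context.Context, req *request) (*response, error) {
	return &response{}, nil
}

// After the flag removal the exported handler is a thin wrapper; the flag
// check and the delegation to the legacy sidecar are gone.
func (s *server) UserUpdateSubmodule(ctx context.Context, req *request) (*response, error) {
	return s.goImplementation(ctx, req)
}
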
diff --git a/internal/gitaly/service/operations/submodules_test.go b/internal/gitaly/service/operations/submodules_test.go
index 8142f2183..8d3c6a4d7 100644
--- a/internal/gitaly/service/operations/submodules_test.go
+++ b/internal/gitaly/service/operations/submodules_test.go
@@ -2,7 +2,6 @@ package operations
import (
"bytes"
- "context"
"fmt"
"testing"
@@ -12,26 +11,18 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/lstree"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
-func testSuccessfulUserUpdateSubmoduleRequest(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets(
- []featureflag.FeatureFlag{featureflag.GoUserUpdateSubmodule},
- ).Run(t, func(t *testing.T, ctx context.Context) {
- testSuccessfulUserUpdateSubmoduleRequestFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestSuccessfulUserUpdateSubmoduleRequest(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testSuccessfulUserUpdateSubmoduleRequestFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
// This reference is created to check that we can correctly commit onto
// a branch which has a name starting with "refs/heads/".
@@ -78,7 +69,7 @@ func testSuccessfulUserUpdateSubmoduleRequestFeatured(t *testing.T, ctx context.
t.Run(testCase.desc, func(t *testing.T) {
request := &gitalypb.UserUpdateSubmoduleRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte(testCase.submodule),
CommitSha: testCase.commitSha,
Branch: []byte(testCase.branch),
@@ -92,11 +83,11 @@ func testSuccessfulUserUpdateSubmoduleRequestFeatured(t *testing.T, ctx context.
commit, err := repo.ReadCommit(ctx, git.Revision(response.BranchUpdate.CommitId))
require.NoError(t, err)
- require.Equal(t, commit.Author.Email, testhelper.TestUser.Email)
- require.Equal(t, commit.Committer.Email, testhelper.TestUser.Email)
+ require.Equal(t, commit.Author.Email, gittest.TestUser.Email)
+ require.Equal(t, commit.Committer.Email, gittest.TestUser.Email)
require.Equal(t, commit.Subject, commitMessage)
- entry := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "ls-tree", "-z", fmt.Sprintf("%s^{tree}:", response.BranchUpdate.CommitId), testCase.submodule)
+ entry := gittest.Exec(t, cfg, "-C", repoPath, "ls-tree", "-z", fmt.Sprintf("%s^{tree}:", response.BranchUpdate.CommitId), testCase.submodule)
parser := lstree.NewParser(bytes.NewReader(entry))
parsedEntry, err := parser.NextEntry()
require.NoError(t, err)
@@ -106,22 +97,16 @@ func testSuccessfulUserUpdateSubmoduleRequestFeatured(t *testing.T, ctx context.
}
}
-func testUserUpdateSubmoduleStableID(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets(
- []featureflag.FeatureFlag{featureflag.GoUserUpdateSubmodule},
- ).Run(t, func(t *testing.T, ctx context.Context) {
- testUserUpdateSubmoduleStableIDFeatured(t, ctx, cfg, rubySrv)
- })
-}
-
-func testUserUpdateSubmoduleStableIDFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, cfg, repoProto, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+func TestUserUpdateSubmoduleStableID(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ ctx, cfg, repoProto, _, client := setupOperationsService(t, ctx)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
response, err := client.UserUpdateSubmodule(ctx, &gitalypb.UserUpdateSubmoduleRequest{
Repository: repoProto,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("gitlab-grack"),
CommitSha: "41fa1bc9e0f0630ced6a8a211d60c2af425ecc2d",
Branch: []byte("master"),
@@ -144,30 +129,25 @@ func testUserUpdateSubmoduleStableIDFeatured(t *testing.T, ctx context.Context,
Body: []byte("Update Submodule message"),
BodySize: 24,
Author: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
Date: &timestamp.Timestamp{Seconds: 12345},
Timezone: []byte("+0000"),
},
Committer: &gitalypb.CommitAuthor{
- Name: testhelper.TestUser.Name,
- Email: testhelper.TestUser.Email,
+ Name: gittest.TestUser.Name,
+ Email: gittest.TestUser.Email,
Date: &timestamp.Timestamp{Seconds: 12345},
Timezone: []byte("+0000"),
},
}, commit)
}
-func testFailedUserUpdateSubmoduleRequestDueToValidations(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets(
- []featureflag.FeatureFlag{featureflag.GoUserUpdateSubmodule},
- ).Run(t, func(t *testing.T, ctx context.Context) {
- testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestFailedUserUpdateSubmoduleRequestDueToValidations(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, _, repo, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, _, repo, _, client := setupOperationsService(t, ctx)
testCases := []struct {
desc string
@@ -178,7 +158,7 @@ func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T,
desc: "empty Repository",
request: &gitalypb.UserUpdateSubmoduleRequest{
Repository: nil,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "db54006ff1c999fd485af44581dabe9b6c85a701",
Branch: []byte("some-branch"),
@@ -202,7 +182,7 @@ func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T,
desc: "empty Submodule",
request: &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: nil,
CommitSha: "db54006ff1c999fd485af44581dabe9b6c85a701",
Branch: []byte("some-branch"),
@@ -214,7 +194,7 @@ func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T,
desc: "empty CommitSha",
request: &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "",
Branch: []byte("some-branch"),
@@ -226,7 +206,7 @@ func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T,
desc: "invalid CommitSha",
request: &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "foobar",
Branch: []byte("some-branch"),
@@ -238,7 +218,7 @@ func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T,
desc: "invalid CommitSha",
request: &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "db54006ff1c999fd485a",
Branch: []byte("some-branch"),
@@ -250,7 +230,7 @@ func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T,
desc: "empty Branch",
request: &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "db54006ff1c999fd485af44581dabe9b6c85a701",
Branch: nil,
@@ -262,7 +242,7 @@ func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T,
desc: "empty CommitMessage",
request: &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "db54006ff1c999fd485af44581dabe9b6c85a701",
Branch: []byte("some-branch"),
@@ -281,20 +261,15 @@ func testFailedUserUpdateSubmoduleRequestDueToValidationsFeatured(t *testing.T,
}
}
-func testFailedUserUpdateSubmoduleRequestDueToInvalidBranch(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets(
- []featureflag.FeatureFlag{featureflag.GoUserUpdateSubmodule},
- ).Run(t, func(t *testing.T, ctx context.Context) {
- testFailedUserUpdateSubmoduleRequestDueToInvalidBranchFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestFailedUserUpdateSubmoduleRequestDueToInvalidBranch(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testFailedUserUpdateSubmoduleRequestDueToInvalidBranchFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, _, repo, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, _, repo, _, client := setupOperationsService(t, ctx)
request := &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "db54006ff1c999fd485af44581dabe9b6c85a701",
Branch: []byte("non/existent"),
@@ -306,20 +281,15 @@ func testFailedUserUpdateSubmoduleRequestDueToInvalidBranchFeatured(t *testing.T
require.Contains(t, err.Error(), "Cannot find branch")
}
-func testFailedUserUpdateSubmoduleRequestDueToInvalidSubmodule(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets(
- []featureflag.FeatureFlag{featureflag.GoUserUpdateSubmodule},
- ).Run(t, func(t *testing.T, ctx context.Context) {
- testFailedUserUpdateSubmoduleRequestDueToInvalidSubmoduleFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestFailedUserUpdateSubmoduleRequestDueToInvalidSubmodule(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testFailedUserUpdateSubmoduleRequestDueToInvalidSubmoduleFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, _, repo, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, _, repo, _, client := setupOperationsService(t, ctx)
request := &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("non-existent-submodule"),
CommitSha: "db54006ff1c999fd485af44581dabe9b6c85a701",
Branch: []byte("master"),
@@ -331,20 +301,15 @@ func testFailedUserUpdateSubmoduleRequestDueToInvalidSubmoduleFeatured(t *testin
require.Equal(t, response.CommitError, "Invalid submodule path")
}
-func testFailedUserUpdateSubmoduleRequestDueToSameReference(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets(
- []featureflag.FeatureFlag{featureflag.GoUserUpdateSubmodule},
- ).Run(t, func(t *testing.T, ctx context.Context) {
- testFailedUserUpdateSubmoduleRequestDueToSameReferenceFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestFailedUserUpdateSubmoduleRequestDueToSameReference(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testFailedUserUpdateSubmoduleRequestDueToSameReferenceFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, _, repo, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, _, repo, _, client := setupOperationsService(t, ctx)
request := &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "41fa1bc9e0f0630ced6a8a211d60c2af425ecc2d",
Branch: []byte("master"),
@@ -359,23 +324,18 @@ func testFailedUserUpdateSubmoduleRequestDueToSameReferenceFeatured(t *testing.T
require.Contains(t, response.CommitError, "is already at")
}
-func testFailedUserUpdateSubmoduleRequestDueToRepositoryEmpty(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- testhelper.NewFeatureSets(
- []featureflag.FeatureFlag{featureflag.GoUserUpdateSubmodule},
- ).Run(t, func(t *testing.T, ctx context.Context) {
- testFailedUserUpdateSubmoduleRequestDueToRepositoryEmptyFeatured(t, ctx, cfg, rubySrv)
- })
-}
+func TestFailedUserUpdateSubmoduleRequestDueToRepositoryEmpty(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testFailedUserUpdateSubmoduleRequestDueToRepositoryEmptyFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
- ctx, _, _, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
+ ctx, cfg, _, _, client := setupOperationsService(t, ctx)
- repo, _, cleanup := gittest.InitRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repo, _, cleanup := gittest.InitRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
request := &gitalypb.UserUpdateSubmoduleRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Submodule: []byte("six"),
CommitSha: "41fa1bc9e0f0630ced6a8a211d60c2af425ecc2d",
Branch: []byte("master"),
diff --git a/internal/gitaly/service/operations/tags.go b/internal/gitaly/service/operations/tags.go
index 4fc8aa907..f79d4b0ae 100644
--- a/internal/gitaly/service/operations/tags.go
+++ b/internal/gitaly/service/operations/tags.go
@@ -12,7 +12,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
@@ -29,7 +28,7 @@ func (s *Server) UserDeleteTag(ctx context.Context, req *gitalypb.UserDeleteTagR
}
referenceName := git.ReferenceName(fmt.Sprintf("refs/tags/%s", req.TagName))
- revision, err := localrepo.New(s.gitCmdFactory, req.Repository, s.cfg).ResolveRevision(ctx, referenceName.Revision())
+ revision, err := s.localrepo(req.GetRepository()).ResolveRevision(ctx, referenceName.Revision())
if err != nil {
return nil, status.Errorf(codes.FailedPrecondition, "tag not found: %s", req.TagName)
}
@@ -92,8 +91,8 @@ func (s *Server) UserCreateTag(ctx context.Context, req *gitalypb.UserCreateTagR
}
// Setup
- repo := req.GetRepository()
- catFile, err := catfile.New(ctx, s.gitCmdFactory, repo)
+ repo := s.localrepo(req.GetRepository())
+ catFile, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}
@@ -144,7 +143,7 @@ func (s *Server) UserCreateTag(ctx context.Context, req *gitalypb.UserCreateTagR
refObjectID := targetObjectID
var tagObject *gitalypb.Tag
if makingTag {
- localRepo := localrepo.New(s.gitCmdFactory, repo, s.cfg)
+ localRepo := s.localrepo(repo)
committerTime := time.Now()
if req.Timestamp != nil {
@@ -168,7 +167,7 @@ func (s *Server) UserCreateTag(ctx context.Context, req *gitalypb.UserCreateTagR
return nil, status.Error(codes.Internal, err.Error())
}
- createdTag, err := log.GetTagCatfile(ctx, catFile, tagObjectID.Revision(), string(req.TagName), false, false)
+ createdTag, err := catfile.GetTag(ctx, catFile, tagObjectID.Revision(), string(req.TagName), false, false)
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}
@@ -249,7 +248,7 @@ func (s *Server) UserCreateTag(ctx context.Context, req *gitalypb.UserCreateTagR
// Save ourselves looking this up earlier in case update-ref
// died
if peeledTargetObjectType == "commit" {
- peeledTargetCommit, err := log.GetCommitCatfile(ctx, catFile, peeledTargetObjectID.Revision())
+ peeledTargetCommit, err := catfile.GetCommit(ctx, catFile, peeledTargetObjectID.Revision())
if err != nil {
return nil, status.Error(codes.Internal, err.Error())
}
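
In tags.go the patch replaces the per-request catfile.New call with the server's shared catfileCache.BatchProcess, and reads tags and commits through the catfile package rather than the log package. A rough sketch of that access pattern (the interfaces below are stand-ins, not the real catfile API):

package example

import "context"

// Batch is a long-lived reader over a repository's object database.
type Batch interface {
	ReadObject(ctx context.Context, revision string) ([]byte, error)
}

// BatchCache hands out batch readers, reusing a cached one when possible.
type BatchCache interface {
	BatchProcess(ctx context.Context, repoPath string) (Batch, error)
}

func readTag(ctx context.Context, cache BatchCache, repoPath, tagName string) ([]byte, error) {
	batch, err := cache.BatchProcess(ctx, repoPath)
	if err != nil {
		return nil, err
	}
	return batch.ReadObject(ctx, "refs/tags/"+tagName)
}
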
diff --git a/internal/gitaly/service/operations/tags_test.go b/internal/gitaly/service/operations/tags_test.go
index 1fbdd2d6f..a218ad75b 100644
--- a/internal/gitaly/service/operations/tags_test.go
+++ b/internal/gitaly/service/operations/tags_test.go
@@ -20,10 +20,10 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
@@ -37,45 +37,42 @@ func TestSuccessfulUserDeleteTagRequest(t *testing.T) {
}
func testSuccessfulUserDeleteTagRequest(t *testing.T, ctx context.Context) {
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
tagNameInput := "to-be-deleted-soon-tag"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", tagNameInput)
+ gittest.Exec(t, cfg, "-C", repoPath, "tag", tagNameInput)
request := &gitalypb.UserDeleteTagRequest{
Repository: repo,
TagName: []byte(tagNameInput),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
_, err := client.UserDeleteTag(ctx, request)
require.NoError(t, err)
- tags := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag")
+ tags := gittest.Exec(t, cfg, "-C", repoPath, "tag")
require.NotContains(t, string(tags), tagNameInput, "tag name still exists in tags list")
}
func TestSuccessfulGitHooksForUserDeleteTagRequest(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.ReferenceTransactions,
- }).Run(t, testSuccessfulGitHooksForUserDeleteTagRequest)
-}
+ ctx, cancel := testhelper.Context()
+ defer cancel()
-func testSuccessfulGitHooksForUserDeleteTagRequest(t *testing.T, ctx context.Context) {
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
tagNameInput := "to-be-déleted-soon-tag"
request := &gitalypb.UserDeleteTagRequest{
Repository: repo,
TagName: []byte(tagNameInput),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
for _, hookName := range GitlabHooks {
t.Run(hookName, func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", tagNameInput)
+ gittest.Exec(t, cfg, "-C", repoPath, "tag", tagNameInput)
hookOutputTempPath := gittest.WriteEnvToCustomHook(t, repoPath, hookName)
@@ -83,7 +80,7 @@ func testSuccessfulGitHooksForUserDeleteTagRequest(t *testing.T, ctx context.Con
require.NoError(t, err)
output := testhelper.MustReadFile(t, hookOutputTempPath)
- require.Contains(t, string(output), "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, string(output), "GL_USERNAME="+gittest.TestUser.GlUsername)
})
}
}
@@ -167,7 +164,7 @@ func TestSuccessfulUserCreateTagRequest(t *testing.T) {
func testSuccessfulUserCreateTagRequest(t *testing.T, ctx context.Context) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
targetRevision := "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
targetRevisionCommit, err := repo.ReadCommit(ctx, git.Revision(targetRevision))
@@ -227,7 +224,7 @@ func testSuccessfulUserCreateTagRequest(t *testing.T, ctx context.Context) {
Repository: repoProto,
TagName: []byte(testCase.tagName),
TargetRevision: []byte(testCase.targetRevision),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Message: []byte(testCase.message),
}
@@ -235,175 +232,175 @@ func testSuccessfulUserCreateTagRequest(t *testing.T, ctx context.Context) {
require.NoError(t, err, "error from calling RPC")
require.Empty(t, response.PreReceiveError, "PreReceiveError must be empty, signalling the push was accepted")
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", inputTagName)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", inputTagName)
responseOk := &gitalypb.UserCreateTagResponse{
Tag: testCase.expectedTag,
}
// Fake up *.Id for annotated tags
if len(testCase.expectedTag.Id) == 0 {
- id := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", inputTagName)
+ id := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", inputTagName)
responseOk.Tag.Id = text.ChompBytes(id)
}
require.Equal(t, responseOk, response)
- tag := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag")
+ tag := gittest.Exec(t, cfg, "-C", repoPath, "tag")
require.Contains(t, string(tag), inputTagName)
})
}
}
func TestUserCreateTagWithTransaction(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.BackchannelVoting,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
-
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
-
- hooksOutputDir := testhelper.TempDir(t)
- hooksOutputPath := filepath.Join(hooksOutputDir, "output")
-
- // We're creating a set of custom hooks which simply
- // write to a file. The intention is that we want to
- // check that the hooks only run on the primary node.
- hooks := []string{"pre-receive", "update", "post-receive"}
- for _, hook := range hooks {
- gittest.WriteCustomHook(t, repoPath, hook,
- []byte(fmt.Sprintf("#!/bin/sh\necho %s >>%s\n", hook, hooksOutputPath)),
- )
- }
+ cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
- // We're creating a custom server with a fake transaction server which
- // simply returns success for every call, but tracks the number of
- // calls. The server is then injected into the client's context to make
- // it available for transactional voting. We cannot use
- // runOperationServiceServer as it puts a Praefect server in between if
- // running Praefect tests, which would break our test setup.
- transactionServer := &testTransactionServer{}
- testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterOperationServiceServer(srv, NewServer(deps.GetCfg(), nil, deps.GetHookManager(), deps.GetLocator(), deps.GetConnsPool(), deps.GetGitCmdFactory()))
- gitalypb.RegisterHookServiceServer(srv, hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
-
- if featureflag.IsDisabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, transactionServer)
- }
- })
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
- // we use new local reference to the context to prevent data race
- // that is caused by adding additional data to the context in the sub-test
- ctxCopy := ctx
- // We're using internal gitaly socket to connect to the server.
- // This is kind of a hack when running tests with Praefect:
- // if we directly connect to the server created above, then our call
- // would be intercepted by Praefect, which would in turn replace the
- // transaction information we inject further down below. So we instead
- // use internal socket so we can circumvent Praefect and just talk
- // to Gitaly directly.
- client := newMuxedOperationClient(t, ctx, "unix://"+cfg.GitalyInternalSocketPath(), cfg.Auth.Token,
- backchannel.NewClientHandshaker(
- testhelper.DiscardTestEntry(t),
- func() backchannel.Server {
- srv := grpc.NewServer()
- if featureflag.IsEnabled(ctxCopy, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, transactionServer)
- }
- return srv
- },
- ),
+ hooksOutputDir := testhelper.TempDir(t)
+ hooksOutputPath := filepath.Join(hooksOutputDir, "output")
+
+ // We're creating a set of custom hooks which simply
+ // write to a file. The intention is that we want to
+ // check that the hooks only run on the primary node.
+ hooks := []string{"pre-receive", "update", "post-receive"}
+ for _, hook := range hooks {
+ gittest.WriteCustomHook(t, repoPath, hook,
+ []byte(fmt.Sprintf("#!/bin/sh\necho %s >>%s\n", hook, hooksOutputPath)),
)
+ }
- praefectServer := &metadata.PraefectServer{
- SocketPath: "unix://" + cfg.GitalyInternalSocketPath(),
- Token: cfg.Auth.Token,
- }
+ // We're creating a custom server with a fake transaction server which
+ // simply returns success for every call, but tracks the number of
+ // calls. The server is then injected into the client's context to make
+ // it available for transactional voting. We cannot use
+ // runOperationServiceServer as it puts a Praefect server in between if
+ // running Praefect tests, which would break our test setup.
+ transactionServer := &testTransactionServer{}
+ testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterOperationServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ nil,
+ deps.GetHookManager(),
+ deps.GetLocator(),
+ deps.GetConnsPool(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterHookServiceServer(srv, hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
+ })
- for i, testCase := range []struct {
- desc string
- primary bool
- message string
- }{
- {
- desc: "primary creates a lightweight tag",
- primary: true,
- },
- {
- desc: "secondary creates a lightweight tag",
- primary: false,
- },
- {
- desc: "primary creates an annotated tag",
- primary: true,
- message: "foobar",
- },
- {
- desc: "secondary creates an annotated tag",
- primary: false,
- message: "foobar",
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ // We're using internal gitaly socket to connect to the server.
+ // This is kind of a hack when running tests with Praefect:
+ // if we directly connect to the server created above, then our call
+ // would be intercepted by Praefect, which would in turn replace the
+ // transaction information we inject further down below. So we instead
+ // use internal socket so we can circumvent Praefect and just talk
+ // to Gitaly directly.
+ client := newMuxedOperationClient(t, ctx, "unix://"+cfg.GitalyInternalSocketPath(), cfg.Auth.Token,
+ backchannel.NewClientHandshaker(
+ testhelper.DiscardTestEntry(t),
+ func() backchannel.Server {
+ srv := grpc.NewServer()
+ gitalypb.RegisterRefTransactionServer(srv, transactionServer)
+ return srv
},
- } {
- t.Run(testCase.desc, func(t *testing.T) {
- if err := os.Remove(hooksOutputPath); err != nil {
- require.True(t, os.IsNotExist(err), "error when cleaning up work area: %v", err)
- }
+ ),
+ )
- tagName := fmt.Sprintf("tag-%d", i)
- targetRevision := "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- targetCommit, err := repo.ReadCommit(ctx, git.Revision(targetRevision))
- require.NoError(t, err)
+ praefectServer := &txinfo.PraefectServer{
+ SocketPath: "unix://" + cfg.GitalyInternalSocketPath(),
+ Token: cfg.Auth.Token,
+ }
- request := &gitalypb.UserCreateTagRequest{
- Repository: repoProto,
- TagName: []byte(tagName),
- Message: []byte(testCase.message),
- TargetRevision: []byte(targetRevision),
- User: testhelper.TestUser,
- }
+ for i, testCase := range []struct {
+ desc string
+ primary bool
+ message string
+ }{
+ {
+ desc: "primary creates a lightweight tag",
+ primary: true,
+ },
+ {
+ desc: "secondary creates a lightweight tag",
+ primary: false,
+ },
+ {
+ desc: "primary creates an annotated tag",
+ primary: true,
+ message: "foobar",
+ },
+ {
+ desc: "secondary creates an annotated tag",
+ primary: false,
+ message: "foobar",
+ },
+ } {
+ t.Run(testCase.desc, func(t *testing.T) {
+ *transactionServer = testTransactionServer{}
- // We need to convert to an incoming context first in
- // order to preserve the feature flag.
- ctx = helper.OutgoingToIncoming(ctx)
- ctx, err = metadata.InjectTransaction(ctx, 1, "node", testCase.primary)
- require.NoError(t, err)
- ctx, err = praefectServer.Inject(ctx)
- require.NoError(t, err)
- ctx = helper.IncomingToOutgoing(ctx)
+ if err := os.Remove(hooksOutputPath); err != nil {
+ require.True(t, os.IsNotExist(err), "error when cleaning up work area: %v", err)
+ }
- response, err := client.UserCreateTag(ctx, request)
- require.NoError(t, err)
+ tagName := fmt.Sprintf("tag-%d", i)
+ targetRevision := "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
+ targetCommit, err := repo.ReadCommit(ctx, git.Revision(targetRevision))
+ require.NoError(t, err)
- targetOID := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "refs/tags/"+tagName))
- peeledOID := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", targetOID+"^{commit}"))
- targetOIDOK := targetOID
- if len(testCase.message) > 0 {
- targetOIDOK = peeledOID
- }
- require.Equal(t, targetOIDOK, targetRevision)
+ request := &gitalypb.UserCreateTagRequest{
+ Repository: repoProto,
+ TagName: []byte(tagName),
+ Message: []byte(testCase.message),
+ TargetRevision: []byte(targetRevision),
+ User: gittest.TestUser,
+ }
- testhelper.ProtoEqual(t, &gitalypb.UserCreateTagResponse{
- Tag: &gitalypb.Tag{
- Name: []byte(tagName),
- Message: []byte(testCase.message),
- MessageSize: int64(len(testCase.message)),
- Id: targetOID,
- TargetCommit: targetCommit,
- },
- }, response)
-
- // Only the primary node should've executed hooks.
- if testCase.primary {
- contents := testhelper.MustReadFile(t, hooksOutputPath)
- require.Equal(t, "pre-receive\nupdate\npost-receive\n", string(contents))
- } else {
- testhelper.AssertPathNotExists(t, hooksOutputPath)
- }
+ // We need to convert to an incoming context first in
+ // order to preserve the feature flag.
+ ctx = helper.OutgoingToIncoming(ctx)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", testCase.primary)
+ require.NoError(t, err)
+ ctx, err = praefectServer.Inject(ctx)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
- require.Equal(t, 1, transactionServer.called)
- transactionServer.called = 0
- })
- }
- })
+ response, err := client.UserCreateTag(ctx, request)
+ require.NoError(t, err)
+
+ targetOID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "refs/tags/"+tagName))
+ peeledOID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", targetOID+"^{commit}"))
+ targetOIDOK := targetOID
+ if len(testCase.message) > 0 {
+ targetOIDOK = peeledOID
+ }
+ require.Equal(t, targetOIDOK, targetRevision)
+
+ testhelper.ProtoEqual(t, &gitalypb.UserCreateTagResponse{
+ Tag: &gitalypb.Tag{
+ Name: []byte(tagName),
+ Message: []byte(testCase.message),
+ MessageSize: int64(len(testCase.message)),
+ Id: targetOID,
+ TargetCommit: targetCommit,
+ },
+ }, response)
+
+ // Only the primary node should've executed hooks.
+ if testCase.primary {
+ contents := testhelper.MustReadFile(t, hooksOutputPath)
+ require.Equal(t, "pre-receive\nupdate\npost-receive\n", string(contents))
+ } else {
+ require.NoFileExists(t, hooksOutputPath)
+ }
+
+ require.Equal(t, 2, transactionServer.called)
+ transactionServer.called = 0
+ })
+ }
}
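
The rewritten TestUserCreateTagWithTransaction keeps the trick described in its comments: a fake transaction server that approves every vote while counting calls, so the test can assert how many votes one RPC produced (the updated assertion expects two per tag creation) and that only the primary ran hooks. A counting stub in that spirit (hypothetical request/response types, not the gitalypb definitions):

package example

import (
	"context"
	"sync"
)

type VoteRequest struct{}
type VoteResponse struct{}

// countingTransactionServer accepts every vote and records how often it was
// asked, mirroring the role of the fake server registered in the test above.
type countingTransactionServer struct {
	mu     sync.Mutex
	called int
}

func (s *countingTransactionServer) VoteTransaction(ctx context.Context, req *VoteRequest) (*VoteResponse, error) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.called++
	return &VoteResponse{}, nil
}

func (s *countingTransactionServer) calls() int {
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.called
}
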
func TestSuccessfulUserCreateTagRequestAnnotatedLightweightDisambiguation(t *testing.T) {
@@ -481,7 +478,7 @@ func testSuccessfulUserCreateTagRequestAnnotatedLightweightDisambiguation(t *tes
Repository: repo,
TagName: []byte(tagName),
TargetRevision: []byte("c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Message: []byte(testCase.message),
}
@@ -490,7 +487,7 @@ func testSuccessfulUserCreateTagRequestAnnotatedLightweightDisambiguation(t *tes
if testCase.err != nil {
require.Equal(t, testCase.err, err)
} else {
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", tagName)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", tagName)
require.NoError(t, err)
require.Empty(t, response.PreReceiveError)
}
@@ -505,12 +502,12 @@ func TestSuccessfulUserCreateTagRequestWithParsedTargetRevision(t *testing.T) {
}
func testSuccessfulUserCreateTagRequestWithParsedTargetRevision(t *testing.T, ctx context.Context) {
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "heads/master", "master~1")
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-d", "heads/master")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "refs/heads/master", "master~2")
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-d", "refs/heads/master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "heads/master", "master~1")
+ defer gittest.Exec(t, cfg, "-C", repoPath, "branch", "-d", "heads/master")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "refs/heads/master", "master~2")
+ defer gittest.Exec(t, cfg, "-C", repoPath, "branch", "-d", "refs/heads/master")
testCases := []struct {
desc string
@@ -566,15 +563,15 @@ func testSuccessfulUserCreateTagRequestWithParsedTargetRevision(t *testing.T, ct
Repository: repo,
TagName: []byte(tagName),
TargetRevision: []byte(testCase.targetRevision),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
response, err := client.UserCreateTag(ctx, request)
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", tagName)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", tagName)
require.NoError(t, err)
require.Empty(t, response.PreReceiveError)
- parsedID := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", tagName)
+ parsedID := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", tagName)
require.Equal(t, text.ChompBytes(parsedID), response.Tag.TargetCommit.Id)
})
}
@@ -663,7 +660,7 @@ func TestSuccessfulUserCreateTagRequestToNonCommit(t *testing.T) {
Repository: repo,
TagName: []byte(testCase.tagName),
TargetRevision: []byte(testCase.targetRevision),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Message: []byte(testCase.message),
}
@@ -673,19 +670,19 @@ func TestSuccessfulUserCreateTagRequestToNonCommit(t *testing.T) {
response, err := client.UserCreateTag(ctx, request)
require.NoError(t, err)
require.Empty(t, response.PreReceiveError)
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", inputTagName)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", inputTagName)
// Fake up *.Id for annotated tags
if len(testCase.expectedTag.Id) == 0 {
- tagID := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", inputTagName)
+ tagID := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", inputTagName)
responseOk.Tag.Id = text.ChompBytes(tagID)
}
require.Equal(t, responseOk, response)
- peeledID := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", inputTagName+"^{}")
+ peeledID := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", inputTagName+"^{}")
require.Equal(t, testCase.targetRevision, text.ChompBytes(peeledID))
- objectType := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "cat-file", "-t", inputTagName)
+ objectType := gittest.Exec(t, cfg, "-C", repoPath, "cat-file", "-t", inputTagName)
require.Equal(t, testCase.expectedObjectType, text.ChompBytes(objectType))
})
}
@@ -697,7 +694,7 @@ func TestSuccessfulUserCreateTagNestedTags(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
preReceiveHook := writeAssertObjectTypePreReceiveHook(t, cfg)
@@ -747,15 +744,15 @@ func TestSuccessfulUserCreateTagNestedTags(t *testing.T) {
Repository: repoProto,
TagName: []byte(tagName),
TargetRevision: []byte(targetObject),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Message: []byte(tagMessage),
}
response, err := client.UserCreateTag(ctx, request)
require.NoError(t, err)
require.Empty(t, response.PreReceiveError)
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", tagName)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", tagName)
- createdID := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", tagName)
+ createdID := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", tagName)
createdIDStr := text.ChompBytes(createdID)
responseOk := &gitalypb.UserCreateTagResponse{
Tag: &gitalypb.Tag{
@@ -774,7 +771,7 @@ func TestSuccessfulUserCreateTagNestedTags(t *testing.T) {
}
require.Equal(t, responseOk, response)
- peeledID := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", tagName+"^{}")
+ peeledID := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", tagName+"^{}")
peeledIDStr := text.ChompBytes(peeledID)
require.Equal(t, testCase.targetObject, peeledIDStr)
@@ -791,10 +788,10 @@ func TestSuccessfulUserCreateTagNestedTags(t *testing.T) {
Repository: repoProto,
TagName: []byte(tagNameLight),
TargetRevision: []byte(createdIDStr),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
response, err = client.UserCreateTag(ctx, request)
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", tagNameLight)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", tagNameLight)
require.NoError(t, err)
require.Empty(t, response.PreReceiveError)
@@ -809,7 +806,7 @@ func TestSuccessfulUserCreateTagNestedTags(t *testing.T) {
}
require.Equal(t, responseOk, response)
- createdIDLight := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", tagNameLight)
+ createdIDLight := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", tagNameLight)
createdIDLightStr := text.ChompBytes(createdIDLight)
require.Equal(t, testCase.targetObject, createdIDLightStr)
}
@@ -828,7 +825,7 @@ func TestUserCreateTagStableTagIDs(t *testing.T) {
TagName: []byte("happy-tag"),
TargetRevision: []byte("dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82"),
Message: []byte("my message"),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
Timestamp: &timestamp.Timestamp{Seconds: 12345},
})
require.NoError(t, err)
@@ -851,7 +848,7 @@ func TestUserDeleteTagsuccessfulDeletionOfPrefixedTag(t *testing.T) {
}
func testUserDeleteTagsuccessfulDeletionOfPrefixedTag(t *testing.T, ctx context.Context) {
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
testCases := []struct {
desc string
@@ -865,7 +862,7 @@ func testUserDeleteTagsuccessfulDeletionOfPrefixedTag(t *testing.T, ctx context.
desc: "possible to delete a tag called refs/tags/something",
tagNameInput: "refs/tags/can-find-this",
tagCommit: "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: &gitalypb.UserDeleteTagResponse{},
err: nil,
},
@@ -873,7 +870,7 @@ func testUserDeleteTagsuccessfulDeletionOfPrefixedTag(t *testing.T, ctx context.
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", testCase.tagNameInput, testCase.tagCommit)
+ gittest.Exec(t, cfg, "-C", repoPath, "tag", testCase.tagNameInput, testCase.tagCommit)
request := &gitalypb.UserDeleteTagRequest{
Repository: repo,
@@ -885,7 +882,7 @@ func testUserDeleteTagsuccessfulDeletionOfPrefixedTag(t *testing.T, ctx context.
require.Equal(t, testCase.err, err)
require.Equal(t, testCase.response, response)
- refs := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--", "refs/tags/"+testCase.tagNameInput)
+ refs := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--", "refs/tags/"+testCase.tagNameInput)
require.NotContains(t, string(refs), testCase.tagCommit, "tag kept because we stripped off refs/tags/*")
})
}
@@ -897,7 +894,7 @@ func TestUserCreateTagsuccessfulCreationOfPrefixedTag(t *testing.T) {
ctx, cfg, repoProto, repoPath, client := setupOperationsService(t, ctx)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
testCases := []struct {
desc string
@@ -910,14 +907,14 @@ func TestUserCreateTagsuccessfulCreationOfPrefixedTag(t *testing.T) {
desc: "possible to create a tag called refs/tags/something",
tagNameInput: "refs/tags/can-create-this",
tagTargetRevisionInput: "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: nil,
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", testCase.tagNameInput)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", testCase.tagNameInput)
request := &gitalypb.UserCreateTagRequest{
Repository: repoProto,
@@ -941,7 +938,7 @@ func TestUserCreateTagsuccessfulCreationOfPrefixedTag(t *testing.T) {
require.Equal(t, responseOk, response)
- refs := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--", "refs/tags/"+testCase.tagNameInput)
+ refs := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--", "refs/tags/"+testCase.tagNameInput)
require.Contains(t, string(refs), testCase.tagTargetRevisionInput, "tag created, we did not strip off refs/tags/*")
})
}
@@ -951,7 +948,7 @@ func TestSuccessfulGitHooksForUserCreateTagRequest(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
projectPath := "project/path"
repo.GlProjectPath = projectPath
@@ -962,12 +959,12 @@ func TestSuccessfulGitHooksForUserCreateTagRequest(t *testing.T) {
Repository: repo,
TagName: []byte(tagName),
TargetRevision: []byte("c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
for _, hookName := range GitlabHooks {
t.Run(hookName, func(t *testing.T) {
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", tagName)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", tagName)
hookOutputTempPath := gittest.WriteEnvToCustomHook(t, repoPath, hookName)
@@ -976,7 +973,7 @@ func TestSuccessfulGitHooksForUserCreateTagRequest(t *testing.T) {
require.Empty(t, response.PreReceiveError)
output := string(testhelper.MustReadFile(t, hookOutputTempPath))
- require.Contains(t, output, "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, output, "GL_USERNAME="+gittest.TestUser.GlUsername)
require.Contains(t, output, "GL_PROJECT_PATH="+projectPath)
})
}
@@ -1007,7 +1004,7 @@ func TestFailedUserDeleteTagRequestDueToValidation(t *testing.T) {
desc: "empty tag name",
request: &gitalypb.UserDeleteTagRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
},
response: nil,
err: status.Error(codes.InvalidArgument, "empty tag name"),
@@ -1016,7 +1013,7 @@ func TestFailedUserDeleteTagRequestDueToValidation(t *testing.T) {
desc: "non-existent tag name",
request: &gitalypb.UserDeleteTagRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
TagName: []byte("i-do-not-exist"),
},
response: nil,
@@ -1026,7 +1023,7 @@ func TestFailedUserDeleteTagRequestDueToValidation(t *testing.T) {
desc: "space in tag name",
request: &gitalypb.UserDeleteTagRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
TagName: []byte("a tag"),
},
response: nil,
@@ -1036,7 +1033,7 @@ func TestFailedUserDeleteTagRequestDueToValidation(t *testing.T) {
desc: "newline in tag name",
request: &gitalypb.UserDeleteTagRequest{
Repository: repo,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
TagName: []byte("a\ntag"),
},
response: nil,
@@ -1060,16 +1057,16 @@ func TestFailedUserDeleteTagDueToHooks(t *testing.T) {
}
func testFailedUserDeleteTagDueToHooks(t *testing.T, ctx context.Context) {
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
tagNameInput := "to-be-deleted-soon-tag"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", tagNameInput)
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", tagNameInput)
+ gittest.Exec(t, cfg, "-C", repoPath, "tag", tagNameInput)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", tagNameInput)
request := &gitalypb.UserDeleteTagRequest{
Repository: repo,
TagName: []byte(tagNameInput),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
hookContent := []byte("#!/bin/sh\necho GL_ID=$GL_ID\nexit 1")
@@ -1080,9 +1077,9 @@ func testFailedUserDeleteTagDueToHooks(t *testing.T, ctx context.Context) {
response, err := client.UserDeleteTag(ctx, request)
require.Nil(t, err)
- require.Contains(t, response.PreReceiveError, "GL_ID="+testhelper.TestUser.GlId)
+ require.Contains(t, response.PreReceiveError, "GL_ID="+gittest.TestUser.GlId)
- tags := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag")
+ tags := gittest.Exec(t, cfg, "-C", repoPath, "tag")
require.Contains(t, string(tags), tagNameInput, "tag name does not exist in tags list")
})
}
@@ -1098,7 +1095,7 @@ func TestFailedUserCreateTagDueToHooks(t *testing.T) {
Repository: repo,
TagName: []byte("new-tag"),
TargetRevision: []byte("c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
hookContent := []byte("#!/bin/sh\necho GL_ID=$GL_ID\nexit 1")
@@ -1108,7 +1105,7 @@ func TestFailedUserCreateTagDueToHooks(t *testing.T) {
response, err := client.UserCreateTag(ctx, request)
require.Nil(t, err)
- require.Contains(t, response.PreReceiveError, "GL_ID="+testhelper.TestUser.GlId)
+ require.Contains(t, response.PreReceiveError, "GL_ID="+gittest.TestUser.GlId)
}
}
@@ -1130,7 +1127,7 @@ func TestFailedUserCreateTagRequestDueToTagExistence(t *testing.T) {
desc: "simple existing tag",
tagName: "v1.1.0",
targetRevision: "master",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: &gitalypb.UserCreateTagResponse{
Tag: nil,
Exists: true,
@@ -1141,7 +1138,7 @@ func TestFailedUserCreateTagRequestDueToTagExistence(t *testing.T) {
desc: "existing tag nonexisting target revision",
tagName: "v1.1.0",
targetRevision: "does-not-exist",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Errorf(codes.FailedPrecondition, "revspec '%s' not found", "does-not-exist"),
},
@@ -1183,7 +1180,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
desc: "empty target revision",
tagName: "shiny-new-tag",
targetRevision: "",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Error(codes.InvalidArgument, "empty target revision"),
},
@@ -1199,7 +1196,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
desc: "empty starting point",
tagName: "new-tag",
targetRevision: "",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Error(codes.InvalidArgument, "empty target revision"),
},
@@ -1207,7 +1204,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
desc: "non-existing starting point",
tagName: "new-tag",
targetRevision: "i-dont-exist",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Errorf(codes.FailedPrecondition, "revspec '%s' not found", "i-dont-exist"),
},
@@ -1215,7 +1212,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
desc: "space in lightweight tag name",
tagName: "a tag",
targetRevision: "master",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Errorf(codes.Unknown, "Gitlab::Git::CommitError: Could not update refs/tags/%s. Please refresh and try again.", "a tag"),
},
@@ -1224,7 +1221,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
tagName: "a tag",
targetRevision: "master",
message: "a message",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Errorf(codes.Unknown, "Gitlab::Git::CommitError: Could not update refs/tags/%s. Please refresh and try again.", "a tag"),
},
@@ -1232,7 +1229,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
desc: "newline in lightweight tag name",
tagName: "a\ntag",
targetRevision: "master",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Errorf(codes.Unknown, "Gitlab::Git::CommitError: Could not update refs/tags/%s. Please refresh and try again.", "a\ntag"),
},
@@ -1241,7 +1238,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
tagName: "a\ntag",
targetRevision: "master",
message: "a message",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Error(codes.Unknown, "Rugged::InvalidError: failed to parse signature - expected prefix doesn't match actual"),
},
@@ -1249,7 +1246,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
desc: "injection in lightweight tag name",
tagName: injectedTag,
targetRevision: "master",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Errorf(codes.Unknown, "Gitlab::Git::CommitError: Could not update refs/tags/%s. Please refresh and try again.", injectedTag),
},
@@ -1258,7 +1255,7 @@ func TestFailedUserCreateTagRequestDueToValidation(t *testing.T) {
tagName: injectedTag,
targetRevision: "master",
message: "a message",
- user: testhelper.TestUser,
+ user: gittest.TestUser,
response: nil,
err: status.Errorf(codes.Unknown, "Gitlab::Git::CommitError: Could not update refs/tags/%s. Please refresh and try again.", injectedTag),
},
@@ -1288,7 +1285,7 @@ func TestTagHookOutput(t *testing.T) {
}
func testTagHookOutput(t *testing.T, ctx context.Context) {
- ctx, _, repo, repoPath, client := setupOperationsService(t, ctx)
+ ctx, cfg, repo, repoPath, client := setupOperationsService(t, ctx)
testCases := []struct {
desc string
@@ -1335,12 +1332,12 @@ func testTagHookOutput(t *testing.T, ctx context.Context) {
Repository: repo,
TagName: []byte(tagNameInput),
TargetRevision: []byte("master"),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
deleteRequest := &gitalypb.UserDeleteTagRequest{
Repository: repo,
TagName: []byte(tagNameInput),
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
gittest.WriteCustomHook(t, repoPath, hookName, []byte(testCase.hookContent))
@@ -1355,8 +1352,8 @@ func testTagHookOutput(t *testing.T, ctx context.Context) {
}
require.Equal(t, createResponseOk, createResponse)
- defer testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", "-d", tagNameInput)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag", tagNameInput)
+ defer gittest.Exec(t, cfg, "-C", repoPath, "tag", "-d", tagNameInput)
+ gittest.Exec(t, cfg, "-C", repoPath, "tag", tagNameInput)
deleteResponse, err := client.UserDeleteTag(ctx, deleteRequest)
require.NoError(t, err)
diff --git a/internal/gitaly/service/operations/testhelper_test.go b/internal/gitaly/service/operations/testhelper_test.go
index 0708d56f2..db8338c06 100644
--- a/internal/gitaly/service/operations/testhelper_test.go
+++ b/internal/gitaly/service/operations/testhelper_test.go
@@ -61,23 +61,6 @@ func TestWithRubySidecar(t *testing.T) {
testSuccessfulUserApplyPatch,
testUserApplyPatchStableID,
testFailedPatchApplyPatch,
- testServerUserCherryPickSuccessful,
- testServerUserCherryPickSuccessfulGitHooks,
- testServerUserCherryPickStableID,
- testServerUserCherryPickFailedValidations,
- testServerUserCherryPickFailedWithPreReceiveError,
- testServerUserCherryPickFailedWithCreateTreeError,
- testServerUserCherryPickFailedWithCommitError,
- testServerUserCherryPickFailedWithConflict,
- testServerUserCherryPickSuccessfulWithGivenCommits,
- testServerUserRevertSuccessful,
- testServerUserRevertStableID,
- testServerUserRevertSuccessfulIntoEmptyRepo,
- testServerUserRevertSuccessfulGitHooks,
- testServerUserRevertFailuedDueToValidations,
- testServerUserRevertFailedDueToPreReceiveError,
- testServerUserRevertFailedDueToCreateTreeErrorConflict,
- testServerUserRevertFailedDueToCommitError,
testSuccessfulUserUpdateBranchRequestToDelete,
testSuccessfulGitHooksForUserUpdateBranchRequest,
testFailedUserUpdateBranchDueToHooks,
@@ -93,14 +76,7 @@ func TestWithRubySidecar(t *testing.T) {
testFailedUserRebaseConfirmableDueToGitError,
testRebaseRequestWithDeletedFile,
testRebaseOntoRemoteBranch,
- testSuccessfulUserUpdateSubmoduleRequest,
- testUserUpdateSubmoduleStableID,
- testFailedUserUpdateSubmoduleRequestDueToValidations,
- testFailedUserUpdateSubmoduleRequestDueToInvalidBranch,
- testFailedUserUpdateSubmoduleRequestDueToInvalidSubmodule,
- testFailedUserUpdateSubmoduleRequestDueToSameReference,
- testFailedUserUpdateSubmoduleRequestDueToRepositoryEmpty,
- testServerUserRevertFailedDueToCreateTreeErrorEmpty,
+ testRebaseFailedWithCode,
}
for _, f := range fs {
t.Run(runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name(), func(t *testing.T) {
@@ -120,7 +96,7 @@ func setupOperationsService(t testing.TB, ctx context.Context) (context.Context,
func setupOperationsServiceWithRuby(
t testing.TB, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server, options ...testserver.GitalyServerOpt,
) (context.Context, config.Cfg, *gitalypb.Repository, string, gitalypb.OperationServiceClient) {
- repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
testhelper.ConfigureGitalySSHBin(t, cfg)
@@ -143,12 +119,44 @@ func runOperationServiceServer(t testing.TB, cfg config.Cfg, rubySrv *rubyserver
t.Helper()
return testserver.RunGitalyServer(t, cfg, rubySrv, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterOperationServiceServer(srv, NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetHookManager(), deps.GetLocator(), deps.GetConnsPool(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterOperationServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetHookManager(),
+ deps.GetLocator(),
+ deps.GetConnsPool(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hook.NewServer(cfg, deps.GetHookManager(), deps.GetGitCmdFactory()))
- gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(cfg, rubySrv, deps.GetLocator(), deps.GetTxManager(), deps.GetGitCmdFactory()))
- gitalypb.RegisterRefServiceServer(srv, ref.NewServer(cfg, deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetTxManager()))
- gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(cfg, deps.GetLocator(), deps.GetGitCmdFactory(), nil))
- gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(cfg, deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
+ deps.GetCfg(),
+ rubySrv,
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterRefServiceServer(srv, ref.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ nil,
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ ))
}, options...)
}
diff --git a/internal/gitaly/service/operations/update_branches_test.go b/internal/gitaly/service/operations/update_branches_test.go
index 2db6e757f..32e89f0a3 100644
--- a/internal/gitaly/service/operations/update_branches_test.go
+++ b/internal/gitaly/service/operations/update_branches_test.go
@@ -37,7 +37,7 @@ func testSuccessfulUserUpdateBranchRequest(t *testing.T, cfg config.Cfg, rubySrv
func testSuccessfulUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
testCases := []struct {
desc string
@@ -89,7 +89,7 @@ func testSuccessfulUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Con
BranchName: []byte(testCase.updateBranchName),
Newrev: testCase.newRev,
Oldrev: testCase.oldRev,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
response, err := client.UserUpdateBranch(ctx, request)
require.NoError(t, err)
@@ -100,7 +100,7 @@ func testSuccessfulUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Con
require.NoError(t, err)
require.Equal(t, string(testCase.newRev), branchCommit.Id)
- branches := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--", "refs/heads/"+branchName)
+ branches := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--", "refs/heads/"+branchName)
require.Contains(t, string(branches), "refs/heads/"+branchName)
})
}
@@ -113,7 +113,7 @@ func testSuccessfulUserUpdateBranchRequestToDelete(t *testing.T, cfg config.Cfg,
func testSuccessfulUserUpdateBranchRequestToDeleteFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, repoPath, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
testCases := []struct {
desc string
@@ -152,7 +152,7 @@ func testSuccessfulUserUpdateBranchRequestToDeleteFeatured(t *testing.T, ctx con
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
if testCase.createBranch {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "--", testCase.updateBranchName, string(testCase.oldRev))
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "--", testCase.updateBranchName, string(testCase.oldRev))
}
responseOk := &gitalypb.UserUpdateBranchResponse{}
@@ -161,7 +161,7 @@ func testSuccessfulUserUpdateBranchRequestToDeleteFeatured(t *testing.T, ctx con
BranchName: []byte(testCase.updateBranchName),
Newrev: testCase.newRev,
Oldrev: testCase.oldRev,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
response, err := client.UserUpdateBranch(ctx, request)
require.Nil(t, err)
@@ -170,7 +170,7 @@ func testSuccessfulUserUpdateBranchRequestToDeleteFeatured(t *testing.T, ctx con
_, err = repo.ReadCommit(ctx, git.Revision(testCase.updateBranchName))
require.Equal(t, localrepo.ErrObjectNotFound, err, "expected 'not found' error got %v", err)
- refs := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--", "refs/heads/"+testCase.updateBranchName)
+ refs := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--", "refs/heads/"+testCase.updateBranchName)
require.NotContains(t, string(refs), testCase.oldRev, "branch deleted from refs")
})
}
@@ -185,7 +185,7 @@ func testSuccessfulGitHooksForUserUpdateBranchRequestFeatured(t *testing.T, ctx
for _, hookName := range GitlabHooks {
t.Run(hookName, func(t *testing.T) {
- testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "repo")
+ testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "repo")
defer cleanupFn()
hookOutputTempPath := gittest.WriteEnvToCustomHook(t, testRepoPath, hookName)
@@ -195,7 +195,7 @@ func testSuccessfulGitHooksForUserUpdateBranchRequestFeatured(t *testing.T, ctx
BranchName: []byte(updateBranchName),
Newrev: newrev,
Oldrev: oldrev,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
responseOk := &gitalypb.UserUpdateBranchResponse{}
@@ -205,7 +205,7 @@ func testSuccessfulGitHooksForUserUpdateBranchRequestFeatured(t *testing.T, ctx
require.Equal(t, responseOk, response)
output := string(testhelper.MustReadFile(t, hookOutputTempPath))
- require.Contains(t, output, "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, output, "GL_USERNAME="+gittest.TestUser.GlUsername)
})
}
}
@@ -222,7 +222,7 @@ func testFailedUserUpdateBranchDueToHooksFeatured(t *testing.T, ctx context.Cont
BranchName: []byte(updateBranchName),
Newrev: newrev,
Oldrev: oldrev,
- User: testhelper.TestUser,
+ User: gittest.TestUser,
}
// Write a hook that will fail with the environment as the error message
// so we can check that string for our env variables.
@@ -233,7 +233,7 @@ func testFailedUserUpdateBranchDueToHooksFeatured(t *testing.T, ctx context.Cont
response, err := client.UserUpdateBranch(ctx, request)
require.Nil(t, err)
- require.Contains(t, response.PreReceiveError, "GL_USERNAME="+testhelper.TestUser.GlUsername)
+ require.Contains(t, response.PreReceiveError, "GL_USERNAME="+gittest.TestUser.GlUsername)
require.Contains(t, response.PreReceiveError, "PWD="+repoPath)
responseOk := &gitalypb.UserUpdateBranchResponse{
@@ -250,7 +250,7 @@ func testFailedUserUpdateBranchRequest(t *testing.T, cfg config.Cfg, rubySrv *ru
func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cfg, repoProto, _, client := setupOperationsServiceWithRuby(t, ctx, cfg, rubySrv)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
revDoesntExist := fmt.Sprintf("%x", sha1.Sum([]byte("we need a non existent sha")))
@@ -271,7 +271,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
newrev: newrev,
oldrev: oldrev,
expectNotFoundError: true,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Error(codes.InvalidArgument, "empty branch name"),
},
{
@@ -279,7 +279,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
branchName: updateBranchName,
newrev: nil,
oldrev: oldrev,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Error(codes.InvalidArgument, "empty newrev"),
},
{
@@ -288,7 +288,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
newrev: newrev,
oldrev: nil,
gotrev: oldrev,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Error(codes.InvalidArgument, "empty oldrev"),
},
{
@@ -305,7 +305,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
newrev: newrev,
oldrev: oldrev,
expectNotFoundError: true,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Errorf(codes.FailedPrecondition, "Could not update %v. Please refresh and try again.", "i-dont-exist"),
},
{
@@ -314,7 +314,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
newrev: []byte(git.ZeroOID.String()),
oldrev: oldrev,
gotrev: []byte("3dd08961455abf80ef9115f4afdc1c6f968b503c"),
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Errorf(codes.FailedPrecondition, "Could not update %v. Please refresh and try again.", "csv"),
},
{
@@ -322,7 +322,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
branchName: updateBranchName,
newrev: []byte(revDoesntExist),
oldrev: oldrev,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Errorf(codes.FailedPrecondition, "Could not update %v. Please refresh and try again.", updateBranchName),
},
{
@@ -331,7 +331,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
newrev: newrev,
oldrev: []byte(revDoesntExist),
gotrev: oldrev,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Errorf(codes.FailedPrecondition, "Could not update %v. Please refresh and try again.", updateBranchName),
},
{
@@ -339,7 +339,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
branchName: "heads/feature",
newrev: []byte("1a35b5a77cf6af7edf6703f88e82f6aff613666f"),
oldrev: []byte("0b4bc9a49b562e85de7cc9e834518ea6828729b9"),
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Errorf(codes.FailedPrecondition, "Could not update %v. Please refresh and try again.", "heads/feature"),
},
{
@@ -347,7 +347,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
branchName: "refs/heads/crlf-diff",
newrev: []byte(git.ZeroOID.String()),
oldrev: []byte("593890758a6f845c600f38ffa05be2749211caee"),
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: status.Errorf(codes.FailedPrecondition, "Could not update %v. Please refresh and try again.", "refs/heads/crlf-diff"),
},
{
@@ -356,7 +356,7 @@ func testFailedUserUpdateBranchRequestFeatured(t *testing.T, ctx context.Context
oldrev: []byte("3dd08961455abf80ef9115f4afdc1c6f968b503c"),
newrev: []byte(git.ZeroOID.String()),
expectNotFoundError: true,
- user: testhelper.TestUser,
+ user: gittest.TestUser,
err: nil,
response: &gitalypb.UserUpdateBranchResponse{},
},
diff --git a/internal/gitaly/service/operations/update_with_hooks.go b/internal/gitaly/service/operations/update_with_hooks.go
index 446a44694..0cd9ce09c 100644
--- a/internal/gitaly/service/operations/update_with_hooks.go
+++ b/internal/gitaly/service/operations/update_with_hooks.go
@@ -12,7 +12,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -50,8 +50,9 @@ func (s *Server) updateReferenceWithHooks(
user *gitalypb.User,
reference git.ReferenceName,
newrev, oldrev git.ObjectID,
+ pushOptions ...string,
) error {
- transaction, praefect, err := metadata.TransactionMetadataFromContext(ctx)
+ transaction, praefect, err := txinfo.FromContext(ctx)
if err != nil {
return err
}
@@ -82,7 +83,7 @@ func (s *Server) updateReferenceWithHooks(
changes := fmt.Sprintf("%s %s %s\n", oldrev, newrev, reference)
var stdout, stderr bytes.Buffer
- if err := s.hookManager.PreReceiveHook(ctx, repo, nil, env, strings.NewReader(changes), &stdout, &stderr); err != nil {
+ if err := s.hookManager.PreReceiveHook(ctx, repo, pushOptions, env, strings.NewReader(changes), &stdout, &stderr); err != nil {
msg := hookErrorMessage(stdout.String(), stderr.String(), err)
return preReceiveError{message: msg}
}
@@ -95,6 +96,8 @@ func (s *Server) updateReferenceWithHooks(
return preReceiveError{message: err.Error()}
}
+ localRepo := s.localrepo(repo)
+
// We are already manually invoking the reference-transaction hook, so there is no need to
// set up hooks again here. One could argue that it would be easier to just have git handle
// execution of the reference-transaction hook. But unfortunately, it has proven to be
@@ -105,7 +108,7 @@ func (s *Server) updateReferenceWithHooks(
// is packed, which is obviously a bad thing as Gitaly nodes may be differently packed. We
// thus continue to manually drive the reference-transaction hook here, which doesn't have
// this problem.
- updater, err := updateref.New(ctx, s.cfg, s.gitCmdFactory, repo, updateref.WithDisabledTransactions())
+ updater, err := updateref.New(ctx, s.cfg, localRepo, updateref.WithDisabledTransactions())
if err != nil {
return err
}
@@ -118,7 +121,11 @@ func (s *Server) updateReferenceWithHooks(
return updateRefError{reference: reference.String()}
}
- if err := s.hookManager.PostReceiveHook(ctx, repo, nil, env, strings.NewReader(changes), &stdout, &stderr); err != nil {
+ if err := s.hookManager.ReferenceTransactionHook(ctx, hook.ReferenceTransactionCommitted, env, strings.NewReader(changes)); err != nil {
+ return preReceiveError{message: err.Error()}
+ }
+
+ if err := s.hookManager.PostReceiveHook(ctx, repo, pushOptions, env, strings.NewReader(changes), &stdout, &stderr); err != nil {
msg := hookErrorMessage(stdout.String(), stderr.String(), err)
return preReceiveError{message: msg}
}
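The hunk above makes updateReferenceWithHooks forward push options to the pre- and post-receive hooks and invoke the reference-transaction hook a second time, with the committed state, once the ref update has gone through. The standalone Go sketch below is illustrative only and uses hypothetical names; it is not part of the patch, it just spells out that ordering under those assumptions.

// Illustrative sketch (hypothetical names, not Gitaly's real API): the hook
// sequence that updateReferenceWithHooks now drives manually.
package main

import "fmt"

type state string

const (
	prepared  state = "prepared"
	committed state = "committed"
)

// runHooks mirrors the sequence in the diff: pre-receive and update hooks run
// first, the reference-transaction hook is invoked for the prepared state and,
// after the ref update succeeded, again for the committed state, and
// post-receive runs last. Push options reach pre- and post-receive only.
func runHooks(changes string, pushOptions []string) {
	fmt.Println("pre-receive:", changes, pushOptions)
	fmt.Println("update:", changes)
	fmt.Println("reference-transaction:", prepared)
	fmt.Println("ref updated via update-ref with transactions disabled")
	fmt.Println("reference-transaction:", committed)
	fmt.Println("post-receive:", changes, pushOptions)
}

func main() {
	runHooks("oldrev newrev refs/heads/master\n", []string{"ci.skip"})
}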
diff --git a/internal/gitaly/service/operations/update_with_hooks_test.go b/internal/gitaly/service/operations/update_with_hooks_test.go
index d4426c956..8cd59d1af 100644
--- a/internal/gitaly/service/operations/update_with_hooks_test.go
+++ b/internal/gitaly/service/operations/update_with_hooks_test.go
@@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
@@ -62,7 +63,7 @@ func TestUpdateReferenceWithHooks_invalidParameters(t *testing.T) {
revA, revB := git.ObjectID(strings.Repeat("a", 40)), git.ObjectID(strings.Repeat("b", 40))
- server := NewServer(cfg, nil, &mockHookManager{}, nil, nil, nil)
+ server := NewServer(cfg, nil, &mockHookManager{}, nil, nil, nil, nil)
testCases := []struct {
desc string
@@ -144,6 +145,7 @@ func TestUpdateReferenceWithHooks(t *testing.T) {
payload,
}
+ referenceTransactionCalls := 0
testCases := []struct {
desc string
preReceive func(t *testing.T, ctx context.Context, repo *gitalypb.Repository, pushOptions, env []string, stdin io.Reader, stdout, stderr io.Writer) error
@@ -182,7 +184,15 @@ func TestUpdateReferenceWithHooks(t *testing.T) {
changes, err := ioutil.ReadAll(stdin)
require.NoError(t, err)
require.Equal(t, fmt.Sprintf("%s %s refs/heads/master\n", oldRev, git.ZeroOID.String()), string(changes))
- require.Equal(t, state, hook.ReferenceTransactionPrepared)
+
+ require.Less(t, referenceTransactionCalls, 2)
+ if referenceTransactionCalls == 0 {
+ require.Equal(t, state, hook.ReferenceTransactionPrepared)
+ } else {
+ require.Equal(t, state, hook.ReferenceTransactionCommitted)
+ }
+ referenceTransactionCalls++
+
require.Equal(t, env, expectedEnv)
return nil
},
@@ -254,6 +264,7 @@ func TestUpdateReferenceWithHooks(t *testing.T) {
}
for _, tc := range testCases {
+ referenceTransactionCalls = 0
t.Run(tc.desc, func(t *testing.T) {
hookManager := &mockHookManager{
t: t,
@@ -264,7 +275,7 @@ func TestUpdateReferenceWithHooks(t *testing.T) {
}
gitCmdFactory := git.NewExecCommandFactory(cfg)
- hookServer := NewServer(cfg, nil, hookManager, nil, nil, gitCmdFactory)
+ hookServer := NewServer(cfg, nil, hookManager, nil, nil, gitCmdFactory, nil)
err := hookServer.updateReferenceWithHooks(ctx, repo, user, git.ReferenceName("refs/heads/master"), git.ZeroOID, git.ObjectID(oldRev))
if tc.expectedErr == "" {
@@ -274,12 +285,12 @@ func TestUpdateReferenceWithHooks(t *testing.T) {
}
if tc.expectedRefDeletion {
- contained, err := localrepo.New(gitCmdFactory, repo, cfg).HasRevision(ctx, git.Revision("refs/heads/master"))
+ contained, err := localrepo.NewTestRepo(t, cfg, repo).HasRevision(ctx, git.Revision("refs/heads/master"))
require.NoError(t, err)
require.False(t, contained, "branch should have been deleted")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "master", oldRev)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "master", oldRev)
} else {
- ref, err := localrepo.New(gitCmdFactory, repo, cfg).GetReference(ctx, "refs/heads/master")
+ ref, err := localrepo.NewTestRepo(t, cfg, repo).GetReference(ctx, "refs/heads/master")
require.NoError(t, err)
require.Equal(t, ref.Target, oldRev)
}
diff --git a/internal/gitaly/service/ref/branches.go b/internal/gitaly/service/ref/branches.go
index 5250786f9..a5dc4dd2a 100644
--- a/internal/gitaly/service/ref/branches.go
+++ b/internal/gitaly/service/ref/branches.go
@@ -5,7 +5,6 @@ import (
"errors"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -16,7 +15,7 @@ func (s *server) FindBranch(ctx context.Context, req *gitalypb.FindBranchRequest
return nil, status.Errorf(codes.InvalidArgument, "Branch name cannot be empty")
}
- repo := localrepo.New(s.gitCmdFactory, req.GetRepository(), s.cfg)
+ repo := s.localrepo(req.GetRepository())
branchName := git.NewReferenceNameFromBranchName(string(req.GetName()))
branchRef, err := repo.GetReference(ctx, branchName)
diff --git a/internal/gitaly/service/ref/branches_test.go b/internal/gitaly/service/ref/branches_test.go
index 20a3fe5c7..3a2bd4e37 100644
--- a/internal/gitaly/service/ref/branches_test.go
+++ b/internal/gitaly/service/ref/branches_test.go
@@ -17,7 +17,7 @@ func TestSuccessfulFindBranchRequest(t *testing.T) {
cfg, repoProto, _, client := setupRefService(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
branchesByName := make(map[git.ReferenceName]*gitalypb.Branch)
for branchName, revision := range map[git.ReferenceName]git.Revision{
diff --git a/internal/gitaly/service/ref/delete_refs.go b/internal/gitaly/service/ref/delete_refs.go
index f4aef41d0..2a2c77c3f 100644
--- a/internal/gitaly/service/ref/delete_refs.go
+++ b/internal/gitaly/service/ref/delete_refs.go
@@ -11,6 +11,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/updateref"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -21,12 +22,14 @@ func (s *server) DeleteRefs(ctx context.Context, in *gitalypb.DeleteRefsRequest)
return nil, status.Errorf(codes.InvalidArgument, "DeleteRefs: %v", err)
}
- refnames, err := s.refsToRemove(ctx, in)
+ repo := s.localrepo(in.GetRepository())
+
+ refnames, err := s.refsToRemove(ctx, repo, in)
if err != nil {
return nil, helper.ErrInternal(err)
}
- updater, err := updateref.New(ctx, s.cfg, s.gitCmdFactory, in.GetRepository())
+ updater, err := updateref.New(ctx, s.cfg, repo)
if err != nil {
if errors.Is(err, git.ErrInvalidArg) {
return nil, helper.ErrInvalidArgument(err)
@@ -34,7 +37,7 @@ func (s *server) DeleteRefs(ctx context.Context, in *gitalypb.DeleteRefsRequest)
return nil, helper.ErrInternal(err)
}
- voteHash := transaction.NewVoteHash()
+ voteHash := voting.NewVoteHash()
for _, ref := range refnames {
if err := updater.Delete(ref); err != nil {
return &gitalypb.DeleteRefsResponse{GitError: err.Error()}, nil
@@ -69,7 +72,7 @@ func (s *server) DeleteRefs(ctx context.Context, in *gitalypb.DeleteRefsRequest)
return &gitalypb.DeleteRefsResponse{}, nil
}
-func (s *server) refsToRemove(ctx context.Context, req *gitalypb.DeleteRefsRequest) ([]git.ReferenceName, error) {
+func (s *server) refsToRemove(ctx context.Context, repo *localrepo.Repo, req *gitalypb.DeleteRefsRequest) ([]git.ReferenceName, error) {
if len(req.Refs) > 0 {
refs := make([]git.ReferenceName, len(req.Refs))
for i, ref := range req.Refs {
@@ -83,7 +86,7 @@ func (s *server) refsToRemove(ctx context.Context, req *gitalypb.DeleteRefsReque
prefixes[i] = string(prefix)
}
- existingRefs, err := localrepo.New(s.gitCmdFactory, req.GetRepository(), s.cfg).GetReferences(ctx)
+ existingRefs, err := repo.GetReferences(ctx)
if err != nil {
return nil, err
}
diff --git a/internal/gitaly/service/ref/delete_refs_test.go b/internal/gitaly/service/ref/delete_refs_test.go
index d54b6ccb0..2736e35fc 100644
--- a/internal/gitaly/service/ref/delete_refs_test.go
+++ b/internal/gitaly/service/ref/delete_refs_test.go
@@ -6,17 +6,17 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
hookservice "gitlab.com/gitlab-org/gitaly/internal/gitaly/service/hook"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
@@ -45,13 +45,13 @@ func TestSuccessfulDeleteRefs(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
- repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], testCase.desc)
+ repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], testCase.desc)
defer cleanupFn()
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/delete/a", "b83d6e391c22777fca1ed3012fce84f633d7fed0")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/also-delete/b", "1b12f15a11fc6e62177bef08f47bc7b5ce50b141")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/keep/c", "498214de67004b1da3d820901307bed2a68a8ef6")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/also-keep/d", "b83d6e391c22777fca1ed3012fce84f633d7fed0")
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/delete/a", "b83d6e391c22777fca1ed3012fce84f633d7fed0")
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/also-delete/b", "1b12f15a11fc6e62177bef08f47bc7b5ce50b141")
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/keep/c", "498214de67004b1da3d820901307bed2a68a8ef6")
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/also-keep/d", "b83d6e391c22777fca1ed3012fce84f633d7fed0")
ctx, cancel := testhelper.Context()
defer cancel()
@@ -61,7 +61,7 @@ func TestSuccessfulDeleteRefs(t *testing.T) {
require.NoError(t, err)
// Ensure that the internal refs are gone, but the others still exist
- refs, err := localrepo.New(git.NewExecCommandFactory(cfg), repo, cfg).GetReferences(ctx, "refs/")
+ refs, err := localrepo.NewTestRepo(t, cfg, repo).GetReferences(ctx, "refs/")
require.NoError(t, err)
refNames := make([]string, len(refs))
@@ -85,14 +85,20 @@ func TestDeleteRefs_transaction(t *testing.T) {
var votes int
txManager := &transaction.MockManager{
- VoteFn: func(context.Context, metadata.Transaction, metadata.PraefectServer, transaction.Vote) error {
+ VoteFn: func(context.Context, txinfo.Transaction, txinfo.PraefectServer, voting.Vote) error {
votes++
return nil
},
}
addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRefServiceServer(srv, NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetTxManager()))
+ gitalypb.RegisterRefServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
}, testserver.WithTransactionManager(txManager))
@@ -102,9 +108,9 @@ func TestDeleteRefs_transaction(t *testing.T) {
ctx, cancel := testhelper.Context()
t.Cleanup(cancel)
- ctx, err := metadata.InjectTransaction(ctx, 1, "node", true)
+ ctx, err := txinfo.InjectTransaction(ctx, 1, "node", true)
require.NoError(t, err)
- ctx, err = (&metadata.PraefectServer{SocketPath: "i-dont-care"}).Inject(ctx)
+ ctx, err = (&txinfo.PraefectServer{SocketPath: "i-dont-care"}).Inject(ctx)
require.NoError(t, err)
ctx = helper.IncomingToOutgoing(ctx)
@@ -131,7 +137,7 @@ func TestDeleteRefs_transaction(t *testing.T) {
t.Run(tc.desc, func(t *testing.T) {
votes = 0
- repo, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], tc.desc)
+ repo, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], tc.desc)
t.Cleanup(cleanup)
tc.request.Repository = repo
diff --git a/internal/gitaly/service/ref/list_new_blobs.go b/internal/gitaly/service/ref/list_new_blobs.go
index b55f498cb..4f2f76c9a 100644
--- a/internal/gitaly/service/ref/list_new_blobs.go
+++ b/internal/gitaly/service/ref/list_new_blobs.go
@@ -6,7 +6,6 @@ import (
"strings"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -26,19 +25,20 @@ func (s *server) ListNewBlobs(in *gitalypb.ListNewBlobsRequest, stream gitalypb.
func (s *server) listNewBlobs(in *gitalypb.ListNewBlobsRequest, stream gitalypb.RefService_ListNewBlobsServer, oid string) error {
ctx := stream.Context()
- cmdFlags := []git.Option{git.Flag{Name: "--objects"}, git.Flag{Name: "--not"}, git.Flag{Name: "--all"}}
+ repo := s.localrepo(in.GetRepository())
+ cmdFlags := []git.Option{git.Flag{Name: "--objects"}, git.Flag{Name: "--not"}, git.Flag{Name: "--all"}}
if in.GetLimit() > 0 {
cmdFlags = append(cmdFlags, git.ValueFlag{Name: "--max-count", Value: fmt.Sprint(in.GetLimit())})
}
// the added ^ is to negate the oid since there is a --not option that comes earlier in the arg list
- revList, err := s.gitCmdFactory.New(ctx, in.GetRepository(), git.SubCmd{Name: "rev-list", Flags: cmdFlags, Args: []string{"^" + oid}})
+ revList, err := repo.Exec(ctx, git.SubCmd{Name: "rev-list", Flags: cmdFlags, Args: []string{"^" + oid}})
if err != nil {
return err
}
- batch, err := catfile.New(ctx, s.gitCmdFactory, in.GetRepository())
+ batch, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
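This hunk shows the pattern the wider patch converges on: resolve the request's repository into a localrepo handle once, execute git through that handle, and obtain the cat-file batch from the injected catfileCache instead of creating one per call with catfile.New. The sketch below uses simplified stand-in types rather than Gitaly's real signatures and is only meant to illustrate that shape.

// Minimal sketch with stand-in types (not Gitaly's real interfaces).
package main

import (
	"context"
	"fmt"
)

type repoHandle struct{ path string }

// Exec stands in for running a git subcommand through the resolved repository.
func (r repoHandle) Exec(ctx context.Context, args ...string) error {
	fmt.Println("git -C", r.path, args)
	return nil
}

type catfileCache struct{}

// BatchProcess stands in for the cached, reusable cat-file batch lookup that
// replaces spawning a fresh catfile process for every request.
func (catfileCache) BatchProcess(ctx context.Context, r repoHandle) (string, error) {
	return "batch process bound to " + r.path, nil
}

func main() {
	ctx := context.Background()
	repo := repoHandle{path: "/repositories/project.git"}

	// rev-list through the repository handle, as in the rewritten RPC.
	_ = repo.Exec(ctx, "rev-list", "--objects", "--not", "--all", "^ab2c9622")

	// cat-file batch through the injected cache.
	batch, _ := (catfileCache{}).BatchProcess(ctx, repo)
	fmt.Println(batch)
}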
diff --git a/internal/gitaly/service/ref/list_new_blobs_test.go b/internal/gitaly/service/ref/list_new_blobs_test.go
index 8093ee9da..3dc6d7c27 100644
--- a/internal/gitaly/service/ref/list_new_blobs_test.go
+++ b/internal/gitaly/service/ref/list_new_blobs_test.go
@@ -5,6 +5,7 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
@@ -15,10 +16,10 @@ func TestListNewBlobs(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- _, repo, repoPath, client := setupRefService(t)
+ cfg, repo, repoPath, client := setupRefService(t)
oid := "ab2c9622c02288a2bbaaf35d96088cfdff31d9d9"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-D", "gitaly-diff-stuff")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "-D", "gitaly-diff-stuff")
testCases := []struct {
revision string
diff --git a/internal/gitaly/service/ref/list_new_commits.go b/internal/gitaly/service/ref/list_new_commits.go
index 7abeca35c..9f051ba40 100644
--- a/internal/gitaly/service/ref/list_new_commits.go
+++ b/internal/gitaly/service/ref/list_new_commits.go
@@ -5,7 +5,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -26,7 +25,9 @@ func (s *server) ListNewCommits(in *gitalypb.ListNewCommitsRequest, stream gital
func (s *server) listNewCommits(in *gitalypb.ListNewCommitsRequest, stream gitalypb.RefService_ListNewCommitsServer, oid string) error {
ctx := stream.Context()
- revList, err := s.gitCmdFactory.New(ctx, in.GetRepository(), git.SubCmd{
+ repo := s.localrepo(in.GetRepository())
+
+ revList, err := repo.Exec(ctx, git.SubCmd{
Name: "rev-list",
Flags: []git.Option{git.Flag{Name: "--not"}, git.Flag{Name: "--all"}},
Args: []string{"^" + oid}, // the added ^ is to negate the oid since there is a --not option that comes earlier in the arg list
@@ -35,7 +36,7 @@ func (s *server) listNewCommits(in *gitalypb.ListNewCommitsRequest, stream gital
return err
}
- batch, err := catfile.New(ctx, s.gitCmdFactory, in.GetRepository())
+ batch, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
@@ -45,7 +46,7 @@ func (s *server) listNewCommits(in *gitalypb.ListNewCommitsRequest, stream gital
for scanner.Scan() {
line := scanner.Text()
- commit, err := log.GetCommitCatfile(ctx, batch, git.Revision(line))
+ commit, err := catfile.GetCommit(ctx, batch, git.Revision(line))
if err != nil {
return err
}
diff --git a/internal/gitaly/service/ref/list_new_commits_test.go b/internal/gitaly/service/ref/list_new_commits_test.go
index ccddf4afc..09aedfa0a 100644
--- a/internal/gitaly/service/ref/list_new_commits_test.go
+++ b/internal/gitaly/service/ref/list_new_commits_test.go
@@ -5,6 +5,7 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
@@ -15,10 +16,10 @@ func TestListNewCommits(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- _, repo, repoPath, client := setupRefService(t)
+ cfg, repo, repoPath, client := setupRefService(t)
oid := "0031876facac3f2b2702a0e53a26e89939a42209"
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-D", "few-commits")
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "-D", "few-commits")
testCases := []struct {
revision string
diff --git a/internal/gitaly/service/ref/pack_refs_test.go b/internal/gitaly/service/ref/pack_refs_test.go
index 5ed920405..f45ab5000 100644
--- a/internal/gitaly/service/ref/pack_refs_test.go
+++ b/internal/gitaly/service/ref/pack_refs_test.go
@@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -25,7 +26,7 @@ func TestPackRefsSuccessfulRequest(t *testing.T) {
packedRefs := linesInPackfile(t, repoPath)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
// creates some new heads
newBranches := 10
@@ -44,7 +45,7 @@ func TestPackRefsSuccessfulRequest(t *testing.T) {
// ensure all refs are reachable
for i := 0; i < newBranches; i++ {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show-ref", fmt.Sprintf("refs/heads/new-ref-%d", i))
+ gittest.Exec(t, cfg, "-C", repoPath, "show-ref", fmt.Sprintf("refs/heads/new-ref-%d", i))
}
}
diff --git a/internal/gitaly/service/ref/refs.go b/internal/gitaly/service/ref/refs.go
index f545da2a8..3685582b7 100644
--- a/internal/gitaly/service/ref/refs.go
+++ b/internal/gitaly/service/ref/refs.go
@@ -12,7 +12,6 @@ import (
"github.com/golang/protobuf/proto"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- gitlog "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/helper/chunk"
@@ -37,7 +36,7 @@ type findRefsOpts struct {
lines.SenderOpts
}
-func (s *server) findRefs(ctx context.Context, writer lines.Sender, repo *gitalypb.Repository, patterns []string, opts *findRefsOpts) error {
+func (s *server) findRefs(ctx context.Context, writer lines.Sender, repo git.RepositoryExecutor, patterns []string, opts *findRefsOpts) error {
var options []git.Option
if len(opts.cmdArgs) == 0 {
@@ -46,7 +45,7 @@ func (s *server) findRefs(ctx context.Context, writer lines.Sender, repo *gitaly
options = append(options, opts.cmdArgs...)
}
- cmd, err := s.gitCmdFactory.New(ctx, repo, git.SubCmd{
+ cmd, err := repo.Exec(ctx, git.SubCmd{
Name: "for-each-ref",
Flags: options,
Args: patterns,
@@ -85,8 +84,8 @@ func (t *tagSender) Send() error {
})
}
-func (s *server) parseAndReturnTags(ctx context.Context, repo *gitalypb.Repository, stream gitalypb.RefService_FindAllTagsServer) error {
- tagsCmd, err := s.gitCmdFactory.New(ctx, repo, git.SubCmd{
+func (s *server) parseAndReturnTags(ctx context.Context, repo git.RepositoryExecutor, stream gitalypb.RefService_FindAllTagsServer) error {
+ tagsCmd, err := repo.Exec(ctx, git.SubCmd{
Name: "for-each-ref",
Flags: []git.Option{
git.ValueFlag{"--format", tagFormat},
@@ -97,7 +96,7 @@ func (s *server) parseAndReturnTags(ctx context.Context, repo *gitalypb.Reposito
return fmt.Errorf("for-each-ref error: %v", err)
}
- c, err := catfile.New(ctx, s.gitCmdFactory, repo)
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return fmt.Errorf("error creating catfile: %v", err)
}
@@ -134,7 +133,9 @@ func (s *server) FindAllTags(in *gitalypb.FindAllTagsRequest, stream gitalypb.Re
return helper.ErrInvalidArgument(err)
}
- if err := s.parseAndReturnTags(ctx, in.GetRepository(), stream); err != nil {
+ repo := s.localrepo(in.GetRepository())
+
+ if err := s.parseAndReturnTags(ctx, repo, stream); err != nil {
return helper.ErrInternal(err)
}
return nil
@@ -152,10 +153,10 @@ func (s *server) validateFindAllTagsRequest(request *gitalypb.FindAllTagsRequest
return nil
}
-func _findBranchNames(ctx context.Context, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository) ([][]byte, error) {
+func _findBranchNames(ctx context.Context, repo git.RepositoryExecutor) ([][]byte, error) {
var names [][]byte
- cmd, err := gitCmdFactory.New(ctx, repo, git.SubCmd{
+ cmd, err := repo.Exec(ctx, git.SubCmd{
Name: "for-each-ref",
Flags: []git.Option{git.Flag{Name: "--format=%(refname)"}},
Args: []string{"refs/heads"}},
@@ -179,10 +180,10 @@ func _findBranchNames(ctx context.Context, gitCmdFactory git.CommandFactory, rep
return names, nil
}
-func _headReference(ctx context.Context, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository) ([]byte, error) {
+func _headReference(ctx context.Context, repo git.RepositoryExecutor) ([]byte, error) {
var headRef []byte
- cmd, err := gitCmdFactory.New(ctx, repo, git.SubCmd{
+ cmd, err := repo.Exec(ctx, git.SubCmd{
Name: "rev-parse",
Flags: []git.Option{git.Flag{Name: "--symbolic-full-name"}},
Args: []string{"HEAD"},
@@ -224,8 +225,8 @@ func SetDefaultBranchRef(ctx context.Context, gitCmdFactory git.CommandFactory,
}
// DefaultBranchName looks up the name of the default branch given a repoPath
-func DefaultBranchName(ctx context.Context, gitCmdFactory git.CommandFactory, repo *gitalypb.Repository) ([]byte, error) {
- branches, err := FindBranchNames(ctx, gitCmdFactory, repo)
+func DefaultBranchName(ctx context.Context, repo git.RepositoryExecutor) ([]byte, error) {
+ branches, err := FindBranchNames(ctx, repo)
if err != nil {
return nil, err
@@ -242,7 +243,7 @@ func DefaultBranchName(ctx context.Context, gitCmdFactory git.CommandFactory, re
}
hasDefaultRef := false
- headRef, err := headReference(ctx, gitCmdFactory, repo)
+ headRef, err := headReference(ctx, repo)
if err != nil {
return nil, err
}
@@ -269,7 +270,9 @@ func DefaultBranchName(ctx context.Context, gitCmdFactory git.CommandFactory, re
// FindDefaultBranchName returns the default branch name for the given repository
func (s *server) FindDefaultBranchName(ctx context.Context, in *gitalypb.FindDefaultBranchNameRequest) (*gitalypb.FindDefaultBranchNameResponse, error) {
- defaultBranchName, err := DefaultBranchName(ctx, s.gitCmdFactory, in.Repository)
+ repo := s.localrepo(in.GetRepository())
+
+ defaultBranchName, err := DefaultBranchName(ctx, repo)
if err != nil {
return nil, helper.ErrInternal(err)
}
@@ -301,7 +304,9 @@ func (s *server) FindLocalBranches(in *gitalypb.FindLocalBranchesRequest, stream
func (s *server) findLocalBranches(in *gitalypb.FindLocalBranchesRequest, stream gitalypb.RefService_FindLocalBranchesServer) error {
ctx := stream.Context()
- c, err := catfile.New(ctx, s.gitCmdFactory, in.Repository)
+ repo := s.localrepo(in.GetRepository())
+
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
@@ -314,7 +319,7 @@ func (s *server) findLocalBranches(in *gitalypb.FindLocalBranchesRequest, stream
git.Flag{Name: "--sort=" + parseSortKey(in.GetSortBy())},
}
- return s.findRefs(ctx, writer, in.Repository, []string{"refs/heads"}, opts)
+ return s.findRefs(ctx, writer, repo, []string{"refs/heads"}, opts)
}
func (s *server) FindAllBranches(in *gitalypb.FindAllBranchesRequest, stream gitalypb.RefService_FindAllBranchesServer) error {
@@ -326,6 +331,8 @@ func (s *server) FindAllBranches(in *gitalypb.FindAllBranchesRequest, stream git
}
func (s *server) findAllBranches(in *gitalypb.FindAllBranchesRequest, stream gitalypb.RefService_FindAllBranchesServer) error {
+ repo := s.localrepo(in.GetRepository())
+
args := []git.Option{
// %00 inserts the null character into the output (see for-each-ref docs)
git.Flag{Name: "--format=" + strings.Join(localBranchFormatFields, "%00")},
@@ -334,7 +341,7 @@ func (s *server) findAllBranches(in *gitalypb.FindAllBranchesRequest, stream git
patterns := []string{"refs/heads", "refs/remotes"}
if in.MergedOnly {
- defaultBranchName, err := DefaultBranchName(stream.Context(), s.gitCmdFactory, in.Repository)
+ defaultBranchName, err := DefaultBranchName(stream.Context(), repo)
if err != nil {
return err
}
@@ -351,7 +358,7 @@ func (s *server) findAllBranches(in *gitalypb.FindAllBranchesRequest, stream git
}
ctx := stream.Context()
- c, err := catfile.New(ctx, s.gitCmdFactory, in.Repository)
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
@@ -361,18 +368,18 @@ func (s *server) findAllBranches(in *gitalypb.FindAllBranchesRequest, stream git
writer := newFindAllBranchesWriter(stream, c)
- return s.findRefs(ctx, writer, in.Repository, patterns, opts)
+ return s.findRefs(ctx, writer, repo, patterns, opts)
}
func (s *server) FindTag(ctx context.Context, in *gitalypb.FindTagRequest) (*gitalypb.FindTagResponse, error) {
- var err error
- if err = s.validateFindTagRequest(in); err != nil {
+ if err := s.validateFindTagRequest(in); err != nil {
return nil, helper.ErrInvalidArgument(err)
}
- var tag *gitalypb.Tag
+ repo := s.localrepo(in.GetRepository())
- if tag, err = s.findTag(ctx, in.GetRepository(), in.GetTagName()); err != nil {
+ tag, err := s.findTag(ctx, repo, in.GetTagName())
+ if err != nil {
return nil, helper.ErrInternal(err)
}
@@ -396,13 +403,13 @@ func parseTagLine(ctx context.Context, c catfile.Batch, tagLine string) (*gitaly
switch refType {
// annotated tag
case "tag":
- tag, err := gitlog.GetTagCatfile(ctx, c, git.Revision(tagID), refName, true, true)
+ tag, err := catfile.GetTag(ctx, c, git.Revision(tagID), refName, true, true)
if err != nil {
return nil, fmt.Errorf("getting annotated tag: %v", err)
}
return tag, nil
case "commit":
- commit, err := gitlog.GetCommitCatfile(ctx, c, git.Revision(tagID))
+ commit, err := catfile.GetCommit(ctx, c, git.Revision(tagID))
if err != nil {
return nil, fmt.Errorf("getting commit catfile: %v", err)
}
@@ -413,8 +420,8 @@ func parseTagLine(ctx context.Context, c catfile.Batch, tagLine string) (*gitaly
}
}
-func (s *server) findTag(ctx context.Context, repository *gitalypb.Repository, tagName []byte) (*gitalypb.Tag, error) {
- tagCmd, err := s.gitCmdFactory.New(ctx, repository,
+func (s *server) findTag(ctx context.Context, repo git.RepositoryExecutor, tagName []byte) (*gitalypb.Tag, error) {
+ tagCmd, err := repo.Exec(ctx,
git.SubCmd{
Name: "tag",
Flags: []git.Option{
@@ -422,13 +429,13 @@ func (s *server) findTag(ctx context.Context, repository *gitalypb.Repository, t
},
Args: []string{string(tagName)},
},
- git.WithRefTxHook(ctx, repository, s.cfg),
+ git.WithRefTxHook(ctx, repo, s.cfg),
)
if err != nil {
return nil, fmt.Errorf("for-each-ref error: %v", err)
}
- c, err := catfile.New(ctx, s.gitCmdFactory, repository)
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return nil, err
}
diff --git a/internal/gitaly/service/ref/refs_test.go b/internal/gitaly/service/ref/refs_test.go
index cb2d9df0a..c226021b1 100644
--- a/internal/gitaly/service/ref/refs_test.go
+++ b/internal/gitaly/service/ref/refs_test.go
@@ -17,7 +17,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/git/updateref"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -56,21 +55,20 @@ func TestSuccessfulFindAllBranchNames(t *testing.T) {
names = append(names, r.GetNames()...)
}
- expectedBranches, err := ioutil.ReadFile("testdata/branches.txt")
- require.NoError(t, err)
-
+ expectedBranches := testhelper.MustReadFile(t, "testdata/branches.txt")
for _, branch := range bytes.Split(bytes.TrimSpace(expectedBranches), []byte("\n")) {
require.Contains(t, names, branch)
}
}
func TestFindAllBranchNamesVeryLargeResponse(t *testing.T) {
- cfg, repo, _, client := setupRefService(t)
+ cfg, repoProto, _, client := setupRefService(t)
ctx, cancel := testhelper.Context()
defer cancel()
- updater, err := updateref.New(ctx, cfg, git.NewExecCommandFactory(cfg), repo)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+ updater, err := updateref.New(ctx, cfg, repo)
require.NoError(t, err)
// We want to create enough refs to overflow the default bufio.Scanner
@@ -92,7 +90,7 @@ func TestFindAllBranchNamesVeryLargeResponse(t *testing.T) {
require.NoError(t, updater.Wait())
- rpcRequest := &gitalypb.FindAllBranchNamesRequest{Repository: repo}
+ rpcRequest := &gitalypb.FindAllBranchNamesRequest{Repository: repoProto}
c, err := client.FindAllBranchNames(ctx, rpcRequest)
require.NoError(t, err)
@@ -228,7 +226,7 @@ func TestHeadReference(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- headRef, err := headReference(ctx, git.NewExecCommandFactory(cfg), repo)
+ headRef, err := headReference(ctx, localrepo.NewTestRepo(t, cfg, repo))
require.NoError(t, err)
require.Equal(t, git.DefaultRef, headRef)
@@ -246,7 +244,7 @@ func TestHeadReferenceWithNonExistingHead(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- headRef, err := headReference(ctx, git.NewExecCommandFactory(cfg), repo)
+ headRef, err := headReference(ctx, localrepo.NewTestRepo(t, cfg, repo))
require.NoError(t, err)
if headRef != nil {
t.Fatal("Expected HEAD reference to be nil, got '", string(headRef), "'")
@@ -254,7 +252,8 @@ func TestHeadReferenceWithNonExistingHead(t *testing.T) {
}
func TestSetDefaultBranchRef(t *testing.T) {
- cfg, repo, _ := testcfg.BuildWithRepo(t)
+ cfg, repoProto, _ := testcfg.BuildWithRepo(t)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
testCases := []struct {
desc string
@@ -279,10 +278,10 @@ func TestSetDefaultBranchRef(t *testing.T) {
defer cancel()
gitCmdFactory := git.NewExecCommandFactory(cfg)
- err := SetDefaultBranchRef(ctx, gitCmdFactory, repo, tc.ref, cfg)
+ err := SetDefaultBranchRef(ctx, gitCmdFactory, repoProto, tc.ref, cfg)
require.NoError(t, err)
- newRef, err := DefaultBranchName(ctx, gitCmdFactory, repo)
+ newRef, err := DefaultBranchName(ctx, repo)
require.NoError(t, err)
require.Equal(t, tc.expectedRef, string(newRef))
@@ -297,55 +296,56 @@ func TestDefaultBranchName(t *testing.T) {
headReference = _headReference
}()
- cfg, repo, _ := testcfg.BuildWithRepo(t)
+ cfg, repoProto, _ := testcfg.BuildWithRepo(t)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
testCases := []struct {
desc string
- findBranchNames func(context.Context, git.CommandFactory, *gitalypb.Repository) ([][]byte, error)
- headReference func(context.Context, git.CommandFactory, *gitalypb.Repository) ([]byte, error)
+ findBranchNames func(context.Context, git.RepositoryExecutor) ([][]byte, error)
+ headReference func(context.Context, git.RepositoryExecutor) ([]byte, error)
expected []byte
}{
{
desc: "Get first branch when only one branch exists",
expected: []byte("refs/heads/foo"),
- findBranchNames: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([][]byte, error) {
+ findBranchNames: func(context.Context, git.RepositoryExecutor) ([][]byte, error) {
return [][]byte{[]byte("refs/heads/foo")}, nil
},
- headReference: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([]byte, error) { return nil, nil },
+ headReference: func(context.Context, git.RepositoryExecutor) ([]byte, error) { return nil, nil },
},
{
desc: "Get empy ref if no branches exists",
expected: nil,
- findBranchNames: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([][]byte, error) {
+ findBranchNames: func(context.Context, git.RepositoryExecutor) ([][]byte, error) {
return [][]byte{}, nil
},
- headReference: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([]byte, error) { return nil, nil },
+ headReference: func(context.Context, git.RepositoryExecutor) ([]byte, error) { return nil, nil },
},
{
desc: "Get the name of the head reference when more than one branch exists",
expected: []byte("refs/heads/bar"),
- findBranchNames: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([][]byte, error) {
+ findBranchNames: func(context.Context, git.RepositoryExecutor) ([][]byte, error) {
return [][]byte{[]byte("refs/heads/foo"), []byte("refs/heads/bar")}, nil
},
- headReference: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([]byte, error) {
+ headReference: func(context.Context, git.RepositoryExecutor) ([]byte, error) {
return []byte("refs/heads/bar"), nil
},
},
{
desc: "Get `ref/heads/master` when several branches exist",
expected: git.DefaultRef,
- findBranchNames: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([][]byte, error) {
+ findBranchNames: func(context.Context, git.RepositoryExecutor) ([][]byte, error) {
return [][]byte{[]byte("refs/heads/foo"), []byte("refs/heads/master"), []byte("refs/heads/bar")}, nil
},
- headReference: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([]byte, error) { return nil, nil },
+ headReference: func(context.Context, git.RepositoryExecutor) ([]byte, error) { return nil, nil },
},
{
desc: "Get the name of the first branch when several branches exists and no other conditions are met",
expected: []byte("refs/heads/foo"),
- findBranchNames: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([][]byte, error) {
+ findBranchNames: func(context.Context, git.RepositoryExecutor) ([][]byte, error) {
return [][]byte{[]byte("refs/heads/foo"), []byte("refs/heads/bar"), []byte("refs/heads/baz")}, nil
},
- headReference: func(context.Context, git.CommandFactory, *gitalypb.Repository) ([]byte, error) { return nil, nil },
+ headReference: func(context.Context, git.RepositoryExecutor) ([]byte, error) { return nil, nil },
},
}
@@ -355,7 +355,7 @@ func TestDefaultBranchName(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- defaultBranch, err := DefaultBranchName(ctx, git.NewExecCommandFactory(cfg), repo)
+ defaultBranch, err := DefaultBranchName(ctx, repo)
require.NoError(t, err)
if !bytes.Equal(defaultBranch, testCase.expected) {
t.Fatalf("%s: expected %s, got %s instead", testCase.desc, testCase.expected, defaultBranch)
@@ -405,18 +405,17 @@ func TestInvalidRepoFindDefaultBranchNameRequest(t *testing.T) {
func TestSuccessfulFindAllTagsRequest(t *testing.T) {
cfg, client := setupRefServiceWithoutRepo(t)
- repoProto, repoPath, cleanupFn := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repoProto, repoPath, cleanupFn := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer cleanupFn()
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
// reconstruct the v1.1.2 tag from patches to test truncated tag message
// with partial PGP block
- truncatedPGPTagMsg, err := ioutil.ReadFile("testdata/truncated_pgp_msg.patch")
- require.NoError(t, err)
+ truncatedPGPTagMsg := testhelper.MustReadFile(t, "testdata/truncated_pgp_msg.patch")
- truncatedPGPTagID := string(testhelper.MustRunCommand(t, bytes.NewBuffer(truncatedPGPTagMsg), "git", "-C", repoPath, "mktag"))
+ truncatedPGPTagID := string(gittest.ExecStream(t, cfg, bytes.NewBuffer(truncatedPGPTagMsg), "-C", repoPath, "mktag"))
truncatedPGPTagID = strings.TrimSpace(truncatedPGPTagID) // remove trailing newline
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/tags/pgp-long-tag-message", truncatedPGPTagID)
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/tags/pgp-long-tag-message", truncatedPGPTagID)
blobID := "faaf198af3a36dbf41961466703cc1d47c61d051"
commitID := "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
@@ -426,33 +425,34 @@ func TestSuccessfulFindAllTagsRequest(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- bigCommitID := gittest.CreateCommit(t, cfg, repoPath, "local-big-commits", &gittest.CreateCommitOpts{
- Message: "An empty commit with REALLY BIG message\n\n" + strings.Repeat("a", helper.MaxCommitOrTagMessageSize+1),
- ParentID: "60ecb67744cb56576c30214ff52294f8ce2def98",
- })
+ bigCommitID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithBranch("local-big-commits"),
+ gittest.WithMessage("An empty commit with REALLY BIG message\n\n"+strings.Repeat("a", helper.MaxCommitOrTagMessageSize+1)),
+ gittest.WithParents("60ecb67744cb56576c30214ff52294f8ce2def98"),
+ )
bigCommit, err := repo.ReadCommit(ctx, git.Revision(bigCommitID))
require.NoError(t, err)
- annotatedTagID := testhelper.CreateTag(t, repoPath, "v1.2.0", blobID, &testhelper.CreateTagOpts{Message: "Blob tag"})
+ annotatedTagID := gittest.CreateTag(t, cfg, repoPath, "v1.2.0", blobID, &gittest.CreateTagOpts{Message: "Blob tag"})
- testhelper.CreateTag(t, repoPath, "v1.3.0", commitID, nil)
- testhelper.CreateTag(t, repoPath, "v1.4.0", blobID, nil)
+ gittest.CreateTag(t, cfg, repoPath, "v1.3.0", commitID, nil)
+ gittest.CreateTag(t, cfg, repoPath, "v1.4.0", blobID, nil)
// To test recursive resolving to a commit
- testhelper.CreateTag(t, repoPath, "v1.5.0", "v1.3.0", nil)
+ gittest.CreateTag(t, cfg, repoPath, "v1.5.0", "v1.3.0", nil)
// A tag to a commit with a big message
- testhelper.CreateTag(t, repoPath, "v1.6.0", bigCommitID, nil)
+ gittest.CreateTag(t, cfg, repoPath, "v1.6.0", bigCommitID.String(), nil)
// A tag with a big message
bigMessage := strings.Repeat("a", 11*1024)
- bigMessageTag1ID := testhelper.CreateTag(t, repoPath, "v1.7.0", commitID, &testhelper.CreateTagOpts{Message: bigMessage})
+ bigMessageTag1ID := gittest.CreateTag(t, cfg, repoPath, "v1.7.0", commitID, &gittest.CreateTagOpts{Message: bigMessage})
// A tag with a commit id as its name
- commitTagID := testhelper.CreateTag(t, repoPath, commitID, commitID, &testhelper.CreateTagOpts{Message: "commit tag with a commit sha as the name"})
+ commitTagID := gittest.CreateTag(t, cfg, repoPath, commitID, commitID, &gittest.CreateTagOpts{Message: "commit tag with a commit sha as the name"})
// a tag of a tag
- tagOfTagID := testhelper.CreateTag(t, repoPath, "tag-of-tag", commitTagID, &testhelper.CreateTagOpts{Message: "tag of a tag"})
+ tagOfTagID := gittest.CreateTag(t, cfg, repoPath, "tag-of-tag", commitTagID, &gittest.CreateTagOpts{Message: "tag of a tag"})
rpcRequest := &gitalypb.FindAllTagsRequest{Repository: repoProto}
@@ -578,7 +578,7 @@ func TestSuccessfulFindAllTagsRequest(t *testing.T) {
},
{
Name: []byte("v1.6.0"),
- Id: bigCommitID,
+ Id: bigCommitID.String(),
TargetCommit: bigCommit,
},
{
@@ -603,8 +603,9 @@ func TestSuccessfulFindAllTagsRequest(t *testing.T) {
func TestFindAllTagNestedTags(t *testing.T) {
cfg, client := setupRefServiceWithoutRepo(t)
- testRepoCopy, testRepoCopyPath, cleanupFn := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repoProto, repoPath, cleanupFn := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer cleanupFn()
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
blobID := "faaf198af3a36dbf41961466703cc1d47c61d051"
commitID := "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
@@ -641,10 +642,11 @@ func TestFindAllTagNestedTags(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.description, func(t *testing.T) {
- tags := bytes.NewReader(testhelper.MustRunCommand(t, nil, "git", "-C", testRepoCopyPath, "tag"))
- testhelper.MustRunCommand(t, tags, "xargs", cfg.Git.BinPath, "-C", testRepoCopyPath, "tag", "-d")
+ tags := bytes.NewReader(gittest.Exec(t, cfg, "-C", repoPath, "tag"))
+ testhelper.MustRunCommand(t, tags, "xargs", cfg.Git.BinPath, "-C", repoPath, "tag", "-d")
- batch, err := catfile.New(ctx, git.NewExecCommandFactory(cfg), testRepoCopy)
+ catfileCache := catfile.NewCache(cfg)
+ batch, err := catfileCache.BatchProcess(ctx, repo)
require.NoError(t, err)
info, err := batch.Info(ctx, git.Revision(tc.originalOid))
@@ -656,7 +658,7 @@ func TestFindAllTagNestedTags(t *testing.T) {
for depth := 0; depth < tc.depth; depth++ {
tagName := fmt.Sprintf("tag-depth-%d", depth)
tagMessage := fmt.Sprintf("a commit %d deep", depth)
- tagID = testhelper.CreateTag(t, testRepoCopyPath, tagName, tagID, &testhelper.CreateTagOpts{Message: tagMessage})
+ tagID = gittest.CreateTag(t, cfg, repoPath, tagName, tagID, &gittest.CreateTagOpts{Message: tagMessage})
expectedTag := &gitalypb.Tag{
Name: []byte(tagName),
@@ -672,8 +674,8 @@ func TestFindAllTagNestedTags(t *testing.T) {
}
// only expect the TargetCommit to be populated if it is a commit and if it's less than 10 tags deep
- if info.Type == "commit" && depth < log.MaxTagReferenceDepth {
- commit, err := log.GetCommitCatfile(ctx, batch, git.Revision(tc.originalOid))
+ if info.Type == "commit" && depth < catfile.MaxTagReferenceDepth {
+ commit, err := catfile.GetCommit(ctx, batch, git.Revision(tc.originalOid))
require.NoError(t, err)
expectedTag.TargetCommit = commit
}
@@ -681,7 +683,7 @@ func TestFindAllTagNestedTags(t *testing.T) {
expectedTags[string(expectedTag.Name)] = expectedTag
}
- rpcRequest := &gitalypb.FindAllTagsRequest{Repository: testRepoCopy}
+ rpcRequest := &gitalypb.FindAllTagsRequest{Repository: repoProto}
c, err := client.FindAllTags(ctx, rpcRequest)
require.NoError(t, err)
@@ -983,18 +985,18 @@ func TestSuccessfulFindAllBranchesRequest(t *testing.T) {
func TestSuccessfulFindAllBranchesRequestWithMergedBranches(t *testing.T) {
cfg, repoProto, repoPath, client := setupRefService(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
- localRefs := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--format=%(refname:strip=2)", "refs/heads")
+ localRefs := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--format=%(refname:strip=2)", "refs/heads")
for _, ref := range strings.Split(string(localRefs), "\n") {
ref = strings.TrimSpace(ref)
if _, ok := localBranches["refs/heads/"+ref]; ok || ref == "master" || ref == "" {
continue
}
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-D", ref)
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "-D", ref)
}
expectedRefs := []string{"refs/heads/100%branch", "refs/heads/improve/awesome", "refs/heads/'test'"}
@@ -1281,7 +1283,7 @@ func TestListBranchNamesContainingCommit(t *testing.T) {
func TestSuccessfulFindTagRequest(t *testing.T) {
cfg, repoProto, repoPath, client := setupRefService(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
blobID := "faaf198af3a36dbf41961466703cc1d47c61d051"
commitID := "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
@@ -1291,33 +1293,34 @@ func TestSuccessfulFindTagRequest(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- bigCommitID := gittest.CreateCommit(t, cfg, repoPath, "local-big-commits", &gittest.CreateCommitOpts{
- Message: "An empty commit with REALLY BIG message\n\n" + strings.Repeat("a", helper.MaxCommitOrTagMessageSize+1),
- ParentID: "60ecb67744cb56576c30214ff52294f8ce2def98",
- })
+ bigCommitID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithBranch("local-big-commits"),
+ gittest.WithMessage("An empty commit with REALLY BIG message\n\n"+strings.Repeat("a", helper.MaxCommitOrTagMessageSize+1)),
+ gittest.WithParents("60ecb67744cb56576c30214ff52294f8ce2def98"),
+ )
bigCommit, err := repo.ReadCommit(ctx, git.Revision(bigCommitID))
require.NoError(t, err)
- annotatedTagID := testhelper.CreateTag(t, repoPath, "v1.2.0", blobID, &testhelper.CreateTagOpts{Message: "Blob tag"})
+ annotatedTagID := gittest.CreateTag(t, cfg, repoPath, "v1.2.0", blobID, &gittest.CreateTagOpts{Message: "Blob tag"})
- testhelper.CreateTag(t, repoPath, "v1.3.0", commitID, nil)
- testhelper.CreateTag(t, repoPath, "v1.4.0", blobID, nil)
+ gittest.CreateTag(t, cfg, repoPath, "v1.3.0", commitID, nil)
+ gittest.CreateTag(t, cfg, repoPath, "v1.4.0", blobID, nil)
// To test recursive resolving to a commit
- testhelper.CreateTag(t, repoPath, "v1.5.0", "v1.3.0", nil)
+ gittest.CreateTag(t, cfg, repoPath, "v1.5.0", "v1.3.0", nil)
// A tag to a commit with a big message
- testhelper.CreateTag(t, repoPath, "v1.6.0", bigCommitID, nil)
+ gittest.CreateTag(t, cfg, repoPath, "v1.6.0", bigCommitID.String(), nil)
// A tag with a big message
bigMessage := strings.Repeat("a", 11*1024)
- bigMessageTag1ID := testhelper.CreateTag(t, repoPath, "v1.7.0", commitID, &testhelper.CreateTagOpts{Message: bigMessage})
+ bigMessageTag1ID := gittest.CreateTag(t, cfg, repoPath, "v1.7.0", commitID, &gittest.CreateTagOpts{Message: bigMessage})
// A tag with a commit id as its name
- commitTagID := testhelper.CreateTag(t, repoPath, commitID, commitID, &testhelper.CreateTagOpts{Message: "commit tag with a commit sha as the name"})
+ commitTagID := gittest.CreateTag(t, cfg, repoPath, commitID, commitID, &gittest.CreateTagOpts{Message: "commit tag with a commit sha as the name"})
// a tag of a tag
- tagOfTagID := testhelper.CreateTag(t, repoPath, "tag-of-tag", commitTagID, &testhelper.CreateTagOpts{Message: "tag of a tag"})
+ tagOfTagID := gittest.CreateTag(t, cfg, repoPath, "tag-of-tag", commitTagID, &gittest.CreateTagOpts{Message: "tag of a tag"})
expectedTags := []*gitalypb.Tag{
{
@@ -1415,7 +1418,7 @@ func TestSuccessfulFindTagRequest(t *testing.T) {
},
{
Name: []byte("v1.6.0"),
- Id: bigCommitID,
+ Id: bigCommitID.String(),
TargetCommit: bigCommit,
},
{
@@ -1446,8 +1449,9 @@ func TestSuccessfulFindTagRequest(t *testing.T) {
func TestFindTagNestedTag(t *testing.T) {
cfg, client := setupRefServiceWithoutRepo(t)
- repo, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repoProto, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
blobID := "faaf198af3a36dbf41961466703cc1d47c61d051"
commitID := "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
@@ -1484,10 +1488,11 @@ func TestFindTagNestedTag(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.description, func(t *testing.T) {
- tags := bytes.NewReader(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag"))
+ tags := bytes.NewReader(gittest.Exec(t, cfg, "-C", repoPath, "tag"))
testhelper.MustRunCommand(t, tags, "xargs", cfg.Git.BinPath, "-C", repoPath, "tag", "-d")
- batch, err := catfile.New(ctx, git.NewExecCommandFactory(cfg), repo)
+ catfileCache := catfile.NewCache(cfg)
+ batch, err := catfileCache.BatchProcess(ctx, repo)
require.NoError(t, err)
info, err := batch.Info(ctx, git.Revision(tc.originalOid))
@@ -1499,7 +1504,7 @@ func TestFindTagNestedTag(t *testing.T) {
for depth := 0; depth < tc.depth; depth++ {
tagName = fmt.Sprintf("tag-depth-%d", depth)
tagMessage = fmt.Sprintf("a commit %d deep", depth)
- tagID = testhelper.CreateTag(t, repoPath, tagName, tagID, &testhelper.CreateTagOpts{Message: tagMessage})
+ tagID = gittest.CreateTag(t, cfg, repoPath, tagName, tagID, &gittest.CreateTagOpts{Message: tagMessage})
}
expectedTag := &gitalypb.Tag{
Name: []byte(tagName),
@@ -1514,12 +1519,12 @@ func TestFindTagNestedTag(t *testing.T) {
},
}
// only expect the TargetCommit to be populated if it is a commit and if it's less than 10 tags deep
- if info.Type == "commit" && tc.depth < log.MaxTagReferenceDepth {
- commit, err := log.GetCommitCatfile(ctx, batch, git.Revision(tc.originalOid))
+ if info.Type == "commit" && tc.depth < catfile.MaxTagReferenceDepth {
+ commit, err := catfile.GetCommit(ctx, batch, git.Revision(tc.originalOid))
require.NoError(t, err)
expectedTag.TargetCommit = commit
}
- rpcRequest := &gitalypb.FindTagRequest{Repository: repo, TagName: []byte(tagName)}
+ rpcRequest := &gitalypb.FindTagRequest{Repository: repoProto, TagName: []byte(tagName)}
resp, err := client.FindTag(ctx, rpcRequest)
require.NoError(t, err)
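Editorial note, not part of the patch: the test hunks above all follow the same migration. A compact sketch, assuming the helpers shown in the diff (not a complete test):

// Editorial sketch of the recurring test setup in the hunks above.
repo := localrepo.NewTestRepo(t, cfg, repoProto)   // was: localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
catfileCache := catfile.NewCache(cfg)              // tests now construct their own catfile cache
batch, err := catfileCache.BatchProcess(ctx, repo) // was: catfile.New(ctx, git.NewExecCommandFactory(cfg), repoProto)
require.NoError(t, err)
_ = batch
gittest.Exec(t, cfg, "-C", repoPath, "tag")        // was: testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag")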
diff --git a/internal/gitaly/service/ref/remote_branches.go b/internal/gitaly/service/ref/remote_branches.go
index f753c4c9a..14db57b87 100644
--- a/internal/gitaly/service/ref/remote_branches.go
+++ b/internal/gitaly/service/ref/remote_branches.go
@@ -5,7 +5,6 @@ import (
"strings"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -23,6 +22,8 @@ func (s *server) FindAllRemoteBranches(req *gitalypb.FindAllRemoteBranchesReques
}
func (s *server) findAllRemoteBranches(req *gitalypb.FindAllRemoteBranchesRequest, stream gitalypb.RefService_FindAllRemoteBranchesServer) error {
+ repo := s.localrepo(req.GetRepository())
+
args := []git.Option{
git.Flag{Name: "--format=" + strings.Join(localBranchFormatFields, "%00")},
}
@@ -30,7 +31,7 @@ func (s *server) findAllRemoteBranches(req *gitalypb.FindAllRemoteBranchesReques
patterns := []string{"refs/remotes/" + req.GetRemoteName()}
ctx := stream.Context()
- c, err := catfile.New(ctx, s.gitCmdFactory, req.GetRepository())
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
@@ -39,7 +40,7 @@ func (s *server) findAllRemoteBranches(req *gitalypb.FindAllRemoteBranchesReques
opts.cmdArgs = args
writer := newFindAllRemoteBranchesWriter(stream, c)
- return s.findRefs(ctx, writer, req.GetRepository(), patterns, opts)
+ return s.findRefs(ctx, writer, repo, patterns, opts)
}
func validateFindAllRemoteBranchesRequest(req *gitalypb.FindAllRemoteBranchesRequest) error {
diff --git a/internal/gitaly/service/ref/remote_branches_test.go b/internal/gitaly/service/ref/remote_branches_test.go
index ac732396c..a1676da1a 100644
--- a/internal/gitaly/service/ref/remote_branches_test.go
+++ b/internal/gitaly/service/ref/remote_branches_test.go
@@ -19,7 +19,7 @@ func TestSuccessfulFindAllRemoteBranchesRequest(t *testing.T) {
cfg, repoProto, repoPath, client := setupRefService(t)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
remoteName := "my-remote"
expectedBranches := map[string]string{
diff --git a/internal/gitaly/service/ref/server.go b/internal/gitaly/service/ref/server.go
index 7c0da1771..0dfb457d4 100644
--- a/internal/gitaly/service/ref/server.go
+++ b/internal/gitaly/service/ref/server.go
@@ -2,6 +2,9 @@ package ref
import (
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/storage"
@@ -13,14 +16,26 @@ type server struct {
txManager transaction.Manager
locator storage.Locator
gitCmdFactory git.CommandFactory
+ catfileCache catfile.Cache
}
// NewServer creates a new instance of a grpc RefServer
-func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, txManager transaction.Manager) gitalypb.RefServiceServer {
+func NewServer(
+ cfg config.Cfg,
+ locator storage.Locator,
+ gitCmdFactory git.CommandFactory,
+ txManager transaction.Manager,
+ catfileCache catfile.Cache,
+) gitalypb.RefServiceServer {
return &server{
cfg: cfg,
txManager: txManager,
locator: locator,
gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
}
}
+
+func (s *server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
+}
diff --git a/internal/gitaly/service/ref/tag_messages.go b/internal/gitaly/service/ref/tag_messages.go
index b5c536926..7fa5e3a1c 100644
--- a/internal/gitaly/service/ref/tag_messages.go
+++ b/internal/gitaly/service/ref/tag_messages.go
@@ -7,7 +7,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
@@ -36,14 +35,15 @@ func validateGetTagMessagesRequest(request *gitalypb.GetTagMessagesRequest) erro
func (s *server) getAndStreamTagMessages(request *gitalypb.GetTagMessagesRequest, stream gitalypb.RefService_GetTagMessagesServer) error {
ctx := stream.Context()
+ repo := s.localrepo(request.GetRepository())
- c, err := catfile.New(ctx, s.gitCmdFactory, request.GetRepository())
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
for _, tagID := range request.GetTagIds() {
- tag, err := log.GetTagCatfile(ctx, c, git.Revision(tagID), "", false, false)
+ tag, err := catfile.GetTag(ctx, c, git.Revision(tagID), "", false, false)
if err != nil {
return fmt.Errorf("failed to get tag: %v", err)
}
diff --git a/internal/gitaly/service/ref/tag_messages_test.go b/internal/gitaly/service/ref/tag_messages_test.go
index 81c0b4a9c..2a4017c74 100644
--- a/internal/gitaly/service/ref/tag_messages_test.go
+++ b/internal/gitaly/service/ref/tag_messages_test.go
@@ -6,6 +6,7 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -13,7 +14,7 @@ import (
)
func TestSuccessfulGetTagMessagesRequest(t *testing.T) {
- _, repo, repoPath, client := setupRefService(t)
+ cfg, repo, repoPath, client := setupRefService(t)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -21,8 +22,8 @@ func TestSuccessfulGetTagMessagesRequest(t *testing.T) {
message1 := strings.Repeat("a", helper.MaxCommitOrTagMessageSize*2)
message2 := strings.Repeat("b", helper.MaxCommitOrTagMessageSize)
- tag1ID := testhelper.CreateTag(t, repoPath, "big-tag-1", "master", &testhelper.CreateTagOpts{Message: message1})
- tag2ID := testhelper.CreateTag(t, repoPath, "big-tag-2", "master~", &testhelper.CreateTagOpts{Message: message2})
+ tag1ID := gittest.CreateTag(t, cfg, repoPath, "big-tag-1", "master", &gittest.CreateTagOpts{Message: message1})
+ tag2ID := gittest.CreateTag(t, cfg, repoPath, "big-tag-2", "master~", &gittest.CreateTagOpts{Message: message2})
request := &gitalypb.GetTagMessagesRequest{
Repository: repo,
diff --git a/internal/gitaly/service/ref/testhelper_test.go b/internal/gitaly/service/ref/testhelper_test.go
index 0d54f3b64..90e0af56d 100644
--- a/internal/gitaly/service/ref/testhelper_test.go
+++ b/internal/gitaly/service/ref/testhelper_test.go
@@ -46,7 +46,7 @@ func testMain(m *testing.M) int {
func setupRefService(t testing.TB) (config.Cfg, *gitalypb.Repository, string, gitalypb.RefServiceClient) {
cfg, client := setupRefServiceWithoutRepo(t)
- repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
testhelper.ConfigureGitalyHooksBin(t, cfg)
@@ -70,7 +70,13 @@ func setupRefServiceWithoutRepo(t testing.TB) (config.Cfg, gitalypb.RefServiceCl
func runRefServiceServer(t testing.TB, cfg config.Cfg) string {
return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRefServiceServer(srv, NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetTxManager()))
+ gitalypb.RegisterRefServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
})
}
diff --git a/internal/gitaly/service/ref/util.go b/internal/gitaly/service/ref/util.go
index 87285d49f..645c51d99 100644
--- a/internal/gitaly/service/ref/util.go
+++ b/internal/gitaly/service/ref/util.go
@@ -7,7 +7,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/git/log"
"gitlab.com/gitlab-org/gitaly/internal/helper/lines"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -60,7 +59,7 @@ func buildLocalBranch(name []byte, target *gitalypb.GitCommit) *gitalypb.FindLoc
}
func buildAllBranchesBranch(ctx context.Context, c catfile.Batch, elements [][]byte) (*gitalypb.FindAllBranchesResponse_Branch, error) {
- target, err := log.GetCommitCatfile(ctx, c, git.Revision(elements[1]))
+ target, err := catfile.GetCommit(ctx, c, git.Revision(elements[1]))
if err != nil {
return nil, err
}
@@ -72,7 +71,7 @@ func buildAllBranchesBranch(ctx context.Context, c catfile.Batch, elements [][]b
}
func buildBranch(ctx context.Context, c catfile.Batch, elements [][]byte) (*gitalypb.Branch, error) {
- target, err := log.GetCommitCatfile(ctx, c, git.Revision(elements[1]))
+ target, err := catfile.GetCommit(ctx, c, git.Revision(elements[1]))
if err != nil {
return nil, err
}
@@ -94,7 +93,7 @@ func newFindLocalBranchesWriter(stream gitalypb.RefService_FindLocalBranchesServ
return err
}
- target, err := log.GetCommitCatfile(ctx, c, git.Revision(elements[1]))
+ target, err := catfile.GetCommit(ctx, c, git.Revision(elements[1]))
if err != nil {
return err
}
diff --git a/internal/gitaly/service/remote/fetch_internal_remote.go b/internal/gitaly/service/remote/fetch_internal_remote.go
index d5050a52f..c8dcd89b9 100644
--- a/internal/gitaly/service/remote/fetch_internal_remote.go
+++ b/internal/gitaly/service/remote/fetch_internal_remote.go
@@ -27,6 +27,8 @@ func (s *server) FetchInternalRemote(ctx context.Context, req *gitalypb.FetchInt
return nil, status.Errorf(codes.InvalidArgument, "FetchInternalRemote: %v", err)
}
+ repo := s.localrepo(req.GetRepository())
+
env, err := gitalyssh.UploadPackEnv(ctx, s.cfg, &gitalypb.SSHUploadPackRequest{Repository: req.RemoteRepository})
if err != nil {
return nil, fmt.Errorf("upload pack environment: %w", err)
@@ -41,10 +43,10 @@ func (s *server) FetchInternalRemote(ctx context.Context, req *gitalypb.FetchInt
options := []git.CmdOpt{
git.WithEnv(env...),
git.WithStderr(stderr),
- git.WithRefTxHook(ctx, req.Repository, s.cfg),
+ git.WithRefTxHook(ctx, repo, s.cfg),
}
- cmd, err := s.gitCmdFactory.New(ctx, req.Repository,
+ cmd, err := repo.Exec(ctx,
git.SubCmd{
Name: "fetch",
Flags: flags,
@@ -75,13 +77,13 @@ func (s *server) FetchInternalRemote(ctx context.Context, req *gitalypb.FetchInt
return nil, status.Errorf(codes.Internal, "FetchInternalRemote: remote default branch: %v", err)
}
- defaultBranch, err := ref.DefaultBranchName(ctx, s.gitCmdFactory, req.Repository)
+ defaultBranch, err := ref.DefaultBranchName(ctx, repo)
if err != nil {
return nil, status.Errorf(codes.Internal, "FetchInternalRemote: default branch: %v", err)
}
if !bytes.Equal(defaultBranch, remoteDefaultBranch) {
- if err := ref.SetDefaultBranchRef(ctx, s.gitCmdFactory, req.Repository, string(remoteDefaultBranch), s.cfg); err != nil {
+ if err := ref.SetDefaultBranchRef(ctx, s.gitCmdFactory, req.GetRepository(), string(remoteDefaultBranch), s.cfg); err != nil {
return nil, status.Errorf(codes.Internal, "FetchInternalRemote: set default branch: %v", err)
}
}
diff --git a/internal/gitaly/service/remote/fetch_internal_remote_test.go b/internal/gitaly/service/remote/fetch_internal_remote_test.go
index d6e70dbbf..94ee16f6c 100644
--- a/internal/gitaly/service/remote/fetch_internal_remote_test.go
+++ b/internal/gitaly/service/remote/fetch_internal_remote_test.go
@@ -151,12 +151,23 @@ func TestSuccessfulFetchInternalRemote(t *testing.T) {
testhelper.ConfigureGitalyHooksBin(t, remoteCfg)
remoteAddr := testserver.RunGitalyServer(t, remoteCfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
- gitalypb.RegisterRefServiceServer(srv, ref.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetTxManager()))
+ gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ ))
+ gitalypb.RegisterRefServiceServer(srv, ref.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
}, testserver.WithDisablePraefect())
- gittest.CreateCommit(t, remoteCfg, remoteRepoPath, "master", nil)
+ gittest.WriteCommit(t, remoteCfg, remoteRepoPath, gittest.WithBranch("master"))
localCfgBuilder := testcfg.NewGitalyCfgBuilder(testcfg.WithStorages("gitaly-1"))
@@ -170,12 +181,19 @@ func TestSuccessfulFetchInternalRemote(t *testing.T) {
hookManager := &mockHookManager{}
localAddr := testserver.RunGitalyServer(t, localCfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRemoteServiceServer(srv, NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterRemoteServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetTxManager(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
}, testserver.WithHookManager(hookManager), testserver.WithDisablePraefect())
localRepoPath := filepath.Join(localCfg.Storages[0].Path, localRepo.GetRelativePath())
- testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "symbolic-ref", "HEAD", "refs/heads/feature")
+ gittest.Exec(t, remoteCfg, "-C", localRepoPath, "symbolic-ref", "HEAD", "refs/heads/feature")
client, conn := newRemoteClient(t, localAddr)
t.Cleanup(func() { conn.Close() })
@@ -194,8 +212,8 @@ func TestSuccessfulFetchInternalRemote(t *testing.T) {
require.True(t, c.GetResult())
require.Equal(t,
- string(testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "show-ref", "--head")),
- string(testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "show-ref", "--head")),
+ string(gittest.Exec(t, remoteCfg, "-C", remoteRepoPath, "show-ref", "--head")),
+ string(gittest.Exec(t, remoteCfg, "-C", localRepoPath, "show-ref", "--head")),
)
gitalySSHInvocationParams := getGitalySSHInvocationParams()
diff --git a/internal/gitaly/service/remote/find_remote_root_ref.go b/internal/gitaly/service/remote/find_remote_root_ref.go
index 374c483d9..7474751d4 100644
--- a/internal/gitaly/service/remote/find_remote_root_ref.go
+++ b/internal/gitaly/service/remote/find_remote_root_ref.go
@@ -3,9 +3,11 @@ package remote
import (
"bufio"
"context"
+ "fmt"
"strings"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -13,14 +15,32 @@ import (
const headPrefix = "HEAD branch: "
-func (s *server) findRemoteRootRef(ctx context.Context, repo *gitalypb.Repository, remote string) (string, error) {
- cmd, err := s.gitCmdFactory.New(ctx, repo,
+func (s *server) findRemoteRootRef(ctx context.Context, request *gitalypb.FindRemoteRootRefRequest) (string, error) {
+ remoteName := request.Remote
+ var config []git.ConfigPair
+
+ if request.RemoteUrl != "" {
+ remoteName = "inmemory"
+ config = []git.ConfigPair{
+ {Key: "remote.inmemory.url", Value: request.RemoteUrl},
+ }
+
+ if authHeader := request.GetHttpAuthorizationHeader(); authHeader != "" {
+ config = append(config, git.ConfigPair{
+ Key: fmt.Sprintf("http.%s.extraHeader", request.RemoteUrl),
+ Value: "Authorization: " + authHeader,
+ })
+ }
+ }
+
+ cmd, err := s.gitCmdFactory.New(ctx, request.Repository,
git.SubSubCmd{
Name: "remote",
Action: "show",
- Args: []string{remote},
+ Args: []string{remoteName},
},
- git.WithRefTxHook(ctx, repo, s.cfg),
+ git.WithRefTxHook(ctx, request.Repository, s.cfg),
+ git.WithConfigEnv(config...),
)
if err != nil {
return "", err
@@ -31,7 +51,11 @@ func (s *server) findRemoteRootRef(ctx context.Context, repo *gitalypb.Repositor
line := strings.TrimSpace(scanner.Text())
if strings.HasPrefix(line, headPrefix) {
- return strings.TrimPrefix(line, headPrefix), nil
+ rootRef := strings.TrimPrefix(line, headPrefix)
+ if rootRef == "(unknown)" {
+ return "", status.Error(codes.NotFound, "no remote HEAD found")
+ }
+ return rootRef, nil
}
}
@@ -48,18 +72,21 @@ func (s *server) findRemoteRootRef(ctx context.Context, repo *gitalypb.Repositor
// FindRemoteRootRef queries the remote to determine its HEAD
func (s *server) FindRemoteRootRef(ctx context.Context, in *gitalypb.FindRemoteRootRefRequest) (*gitalypb.FindRemoteRootRefResponse, error) {
- remote := in.GetRemote()
- if remote == "" {
- return nil, status.Error(codes.InvalidArgument, "empty remote can't be queried")
+ //nolint:staticcheck // GetRemote() is deprecated
+ if in.GetRemote() == "" && in.GetRemoteUrl() == "" {
+ return nil, status.Error(codes.InvalidArgument, "got neither remote name nor URL")
+ }
+ //nolint:staticcheck // GetRemote() is deprecated
+ if in.GetRemote() != "" && in.GetRemoteUrl() != "" {
+ return nil, status.Error(codes.InvalidArgument, "got remote name and URL")
+ }
+ if in.Repository == nil {
+ return nil, status.Error(codes.InvalidArgument, "missing repository")
}
- ref, err := s.findRemoteRootRef(ctx, in.GetRepository(), remote)
+ ref, err := s.findRemoteRootRef(ctx, in)
if err != nil {
- if _, ok := status.FromError(err); ok {
- return nil, err
- }
-
- return nil, status.Errorf(codes.Internal, err.Error())
+ return nil, helper.ErrInternal(err)
}
return &gitalypb.FindRemoteRootRefResponse{Ref: ref}, nil
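Editorial note, not part of the patch: the URL-based variant added above configures an ephemeral "inmemory" remote via `remote.inmemory.url`, optionally adding the authorization header as `http.<url>.extraHeader`. A hedged client-side sketch mirroring the tests that follow (URL and header value are placeholders):

// Hypothetical client-side call exercising the new RemoteUrl field; values are placeholders.
resp, err := client.FindRemoteRootRef(ctx, &gitalypb.FindRemoteRootRefRequest{
	Repository:              repo,
	RemoteUrl:               "https://example.com/repo.git",
	HttpAuthorizationHeader: "mysecret", // appended after "Authorization: " as an extraHeader
})
if err != nil {
	return err
}
// resp.Ref holds the remote HEAD (e.g. "master"); an unborn remote HEAD is reported
// as codes.NotFound ("no remote HEAD found") by the handler above.
_ = resp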
diff --git a/internal/gitaly/service/remote/find_remote_root_ref_test.go b/internal/gitaly/service/remote/find_remote_root_ref_test.go
index 47161233b..0ffe0a09a 100644
--- a/internal/gitaly/service/remote/find_remote_root_ref_test.go
+++ b/internal/gitaly/service/remote/find_remote_root_ref_test.go
@@ -4,21 +4,75 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
+ "google.golang.org/grpc/status"
)
func TestFindRemoteRootRefSuccess(t *testing.T) {
- _, repo, _, client := setupRemoteService(t)
+ cfg, repo, repoPath, client := setupRemoteService(t)
+
+ originURL := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "remote", "get-url", "origin"))
+
+ for _, tc := range []struct {
+ desc string
+ request *gitalypb.FindRemoteRootRefRequest
+ }{
+ {
+ desc: "with remote name",
+ request: &gitalypb.FindRemoteRootRefRequest{Repository: repo, Remote: "origin"},
+ },
+ {
+ desc: "with remote URL",
+ request: &gitalypb.FindRemoteRootRefRequest{Repository: repo, RemoteUrl: originURL},
+ },
+ {
+ // Unfortunately, we do not really have a nice way to verify we actually got
+ // the auth header. So this test case only really verifies that it doesn't
+ // break the world to set up one.
+ desc: "with remote URL and auth header",
+ request: &gitalypb.FindRemoteRootRefRequest{
+ Repository: repo,
+ RemoteUrl: originURL,
+ HttpAuthorizationHeader: "mysecret",
+ },
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
- request := &gitalypb.FindRemoteRootRefRequest{Repository: repo, Remote: "origin"}
- testCtx, cancelCtx := testhelper.Context()
- defer cancelCtx()
+ response, err := client.FindRemoteRootRef(ctx, tc.request)
+ require.NoError(t, err)
+ require.Equal(t, "master", response.Ref)
+ })
+ }
+}
+
+func TestFindRemoteRootRefWithUnbornRemoteHead(t *testing.T) {
+ cfg, remoteRepo, remoteRepoPath, client := setupRemoteService(t)
- response, err := client.FindRemoteRootRef(testCtx, request)
- require.NoError(t, err)
- require.Equal(t, "master", response.Ref)
+ // We're creating an empty repository. Empty repositories do have a HEAD set up, but they
+ // point to an unborn branch because the default branch hasn't yet been created.
+ _, clientRepoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
+ defer cleanup()
+ gittest.Exec(t, cfg, "-C", remoteRepoPath, "remote", "add",
+ "foo", "file://"+clientRepoPath)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ for _, request := range []*gitalypb.FindRemoteRootRefRequest{
+ &gitalypb.FindRemoteRootRefRequest{Repository: remoteRepo, Remote: "foo"},
+ &gitalypb.FindRemoteRootRefRequest{Repository: remoteRepo, RemoteUrl: "file://" + clientRepoPath},
+ } {
+ response, err := client.FindRemoteRootRef(ctx, request)
+ require.Equal(t, status.Error(codes.NotFound, "no remote HEAD found"), err)
+ require.Nil(t, response)
+ }
}
func TestFindRemoteRootRefFailedDueToValidation(t *testing.T) {
@@ -27,48 +81,92 @@ func TestFindRemoteRootRefFailedDueToValidation(t *testing.T) {
invalidRepo := &gitalypb.Repository{StorageName: "fake", RelativePath: "path"}
testCases := []struct {
- desc string
- request *gitalypb.FindRemoteRootRefRequest
- code codes.Code
+ desc string
+ request *gitalypb.FindRemoteRootRefRequest
+ expectedErr []error
}{
{
- desc: "Invalid repository",
- request: &gitalypb.FindRemoteRootRefRequest{Repository: invalidRepo},
- code: codes.InvalidArgument,
+ desc: "Invalid repository",
+ request: &gitalypb.FindRemoteRootRefRequest{
+ Repository: invalidRepo,
+ Remote: "remote-name",
+ },
+ expectedErr: []error{
+ status.Error(codes.InvalidArgument, "GetStorageByName: no such storage: \"fake\""),
+ status.Error(codes.InvalidArgument, "repo scoped: invalid Repository"),
+ },
},
{
- desc: "Repository is nil",
- request: &gitalypb.FindRemoteRootRefRequest{},
- code: codes.InvalidArgument,
+ desc: "Repository is nil",
+ request: &gitalypb.FindRemoteRootRefRequest{
+ Remote: "remote-name",
+ },
+ expectedErr: []error{
+ status.Error(codes.InvalidArgument, "missing repository"),
+ status.Error(codes.InvalidArgument, "repo scoped: empty Repository"),
+ },
},
{
- desc: "Remote is nil",
- request: &gitalypb.FindRemoteRootRefRequest{Repository: repo},
- code: codes.InvalidArgument,
+ desc: "Remote name and URL is empty",
+ request: &gitalypb.FindRemoteRootRefRequest{
+ Repository: repo,
+ },
+ expectedErr: []error{
+ status.Error(codes.InvalidArgument, "got neither remote name nor URL"),
+ },
},
{
- desc: "Remote is empty",
- request: &gitalypb.FindRemoteRootRefRequest{Repository: repo, Remote: ""},
- code: codes.InvalidArgument,
+ desc: "Remote name and URL is set",
+ request: &gitalypb.FindRemoteRootRefRequest{
+ Repository: repo,
+ Remote: "remote-name",
+ RemoteUrl: "remote-url",
+ },
+ expectedErr: []error{
+ status.Error(codes.InvalidArgument, "got remote name and URL"),
+ },
},
}
for _, testCase := range testCases {
- testCtx, cancelCtx := testhelper.Context()
- defer cancelCtx()
+ t.Run(testCase.desc, func(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
- _, err := client.FindRemoteRootRef(testCtx, testCase.request)
- testhelper.RequireGrpcError(t, err, testCase.code)
+ _, err := client.FindRemoteRootRef(ctx, testCase.request)
+ // We cannot test for equality given that some errors depend on whether we
+ // proxy via Praefect or not. We thus simply assert that the actual error is
+ // one of the possible errors, which is the same as equality for all the
+ // other tests.
+ require.Contains(t, testCase.expectedErr, err)
+ })
}
}
func TestFindRemoteRootRefFailedDueToInvalidRemote(t *testing.T) {
_, repo, _, client := setupRemoteService(t)
- request := &gitalypb.FindRemoteRootRefRequest{Repository: repo, Remote: "invalid"}
- testCtx, cancelCtx := testhelper.Context()
- defer cancelCtx()
+ t.Run("invalid remote name", func(t *testing.T) {
+ request := &gitalypb.FindRemoteRootRefRequest{Repository: repo, Remote: "invalid"}
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ _, err := client.FindRemoteRootRef(ctx, request)
+ testhelper.RequireGrpcError(t, err, codes.Internal)
+ })
+
+ t.Run("invalid remote URL", func(t *testing.T) {
+ fakeRepoDir := testhelper.TempDir(t)
+
+ // We're using a nonexistent filepath remote URL so we avoid hitting the internet.
+ request := &gitalypb.FindRemoteRootRefRequest{
+ Repository: repo, RemoteUrl: "file://" + fakeRepoDir,
+ }
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
- _, err := client.FindRemoteRootRef(testCtx, request)
- testhelper.RequireGrpcError(t, err, codes.Internal)
+ _, err := client.FindRemoteRootRef(ctx, request)
+ testhelper.RequireGrpcError(t, err, codes.Internal)
+ })
}
diff --git a/internal/gitaly/service/remote/remotes.go b/internal/gitaly/service/remote/remotes.go
index d020112a8..95f078e6e 100644
--- a/internal/gitaly/service/remote/remotes.go
+++ b/internal/gitaly/service/remote/remotes.go
@@ -4,12 +4,17 @@ import (
"bytes"
"context"
"fmt"
+ "io"
"io/ioutil"
"strings"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -31,7 +36,20 @@ func (s *server) AddRemote(ctx context.Context, req *gitalypb.AddRemoteRequest)
return nil, err
}
- return client.AddRemote(clientCtx, req)
+ if err := s.voteOnRemote(ctx, req.GetRepository(), req.GetName()); err != nil {
+ return nil, helper.ErrInternalf("preimage vote on remote: %v", err)
+ }
+
+ response, err := client.AddRemote(clientCtx, req)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := s.voteOnRemote(ctx, req.GetRepository(), req.GetName()); err != nil {
+ return nil, helper.ErrInternalf("postimage vote on remote: %v", err)
+ }
+
+ return response, nil
}
func validateAddRemoteRequest(req *gitalypb.AddRemoteRequest) error {
@@ -51,7 +69,7 @@ func (s *server) RemoveRemote(ctx context.Context, req *gitalypb.RemoveRemoteReq
return nil, status.Errorf(codes.InvalidArgument, "RemoveRemote: %v", err)
}
- remote := localrepo.New(s.gitCmdFactory, req.GetRepository(), s.cfg).Remote()
+ remote := s.localrepo(req.GetRepository()).Remote()
hasRemote, err := remote.Exists(ctx, req.Name)
if err != nil {
@@ -61,10 +79,18 @@ func (s *server) RemoveRemote(ctx context.Context, req *gitalypb.RemoveRemoteReq
return &gitalypb.RemoveRemoteResponse{Result: false}, nil
}
+ if err := s.voteOnRemote(ctx, req.GetRepository(), req.GetName()); err != nil {
+ return nil, helper.ErrInternalf("preimage vote on remote: %v", err)
+ }
+
if err := remote.Remove(ctx, req.Name); err != nil {
return nil, err
}
+ if err := s.voteOnRemote(ctx, req.GetRepository(), req.GetName()); err != nil {
+ return nil, helper.ErrInternalf("postimage vote on remote: %v", err)
+ }
+
return &gitalypb.RemoveRemoteResponse{Result: true}, nil
}
@@ -111,3 +137,37 @@ func validateRemoveRemoteRequest(req *gitalypb.RemoveRemoteRequest) error {
return nil
}
+
+func (s *server) voteOnRemote(ctx context.Context, repo *gitalypb.Repository, remoteName string) error {
+ if featureflag.IsDisabled(ctx, featureflag.TxRemote) {
+ return nil
+ }
+
+ return transaction.RunOnContext(ctx, func(tx txinfo.Transaction, server txinfo.PraefectServer) error {
+ localrepo := s.localrepo(repo)
+
+ configEntries, err := localrepo.Config().GetRegexp(ctx, "remote\\."+remoteName+"\\.", git.ConfigGetRegexpOpts{})
+ if err != nil {
+ return fmt.Errorf("get remote configuration: %w", err)
+ }
+
+ hash := voting.NewVoteHash()
+ for _, configEntry := range configEntries {
+ config := fmt.Sprintf("%s\t%s\n", configEntry.Key, configEntry.Value)
+ if _, err := io.WriteString(hash, config); err != nil {
+ return fmt.Errorf("hash remote config entry: %w", err)
+ }
+ }
+
+ vote, err := hash.Vote()
+ if err != nil {
+ return fmt.Errorf("compute remote config vote: %w", err)
+ }
+
+ if err := s.txManager.Vote(ctx, tx, server, vote); err != nil {
+ return fmt.Errorf("vote: %w", err)
+ }
+
+ return nil
+ })
+}
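Editorial note, not part of the patch: voteOnRemote serializes every matching remote.<name>.* config entry as one "key\tvalue\n" line and votes on the concatenation. A hypothetical example for a remote named "origin" whose only entry is its URL, matching the expectations in the transactional tests added below:

// Hypothetical voting payload for remote "origin" with URL "foo/bar".
data := fmt.Sprintf("%s\t%s\n", "remote.origin.url", "foo/bar")
vote := voting.VoteFromData([]byte(data))
_ = vote
// With no matching entries left (e.g. after RemoveRemote), the vote is taken over empty data.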
diff --git a/internal/gitaly/service/remote/remotes_test.go b/internal/gitaly/service/remote/remotes_test.go
index 189d494d4..3ba800e5e 100644
--- a/internal/gitaly/service/remote/remotes_test.go
+++ b/internal/gitaly/service/remote/remotes_test.go
@@ -2,6 +2,7 @@ package remote
import (
"bytes"
+ "context"
"fmt"
"io"
"net/http"
@@ -9,9 +10,17 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/helper/text"
+ "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
@@ -73,13 +82,13 @@ func testSuccessfulAddRemote(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.S
_, err := client.AddRemote(ctx, request)
require.NoError(t, err)
- remotes := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote", "-v")
+ remotes := gittest.Exec(t, cfg, "-C", repoPath, "remote", "-v")
require.Contains(t, string(remotes), fmt.Sprintf("%s\t%s (fetch)", tc.remoteName, tc.url))
require.Contains(t, string(remotes), fmt.Sprintf("%s\t%s (push)", tc.remoteName, tc.url))
mirrorConfigRegexp := fmt.Sprintf("remote.%s", tc.remoteName)
- mirrorConfig := string(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "--get-regexp", mirrorConfigRegexp))
+ mirrorConfig := string(gittest.Exec(t, cfg, "-C", repoPath, "config", "--get-regexp", mirrorConfigRegexp))
if len(tc.resolvedMirrorRefmaps) > 0 {
for _, resolvedMirrorRefmap := range tc.resolvedMirrorRefmaps {
require.Contains(t, mirrorConfig, resolvedMirrorRefmap)
@@ -93,6 +102,47 @@ func testSuccessfulAddRemote(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.S
}
}
+func testAddRemoteTransactional(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
+ testhelper.NewFeatureSets([]featureflag.FeatureFlag{
+ featureflag.TxRemote,
+ }).Run(t, func(t *testing.T, ctx context.Context) {
+ var votes []voting.Vote
+ txManager := transaction.MockManager{
+ VoteFn: func(_ context.Context, _ txinfo.Transaction, _ txinfo.PraefectServer, vote voting.Vote) error {
+ votes = append(votes, vote)
+ return nil
+ },
+ }
+
+ _, repo, repoPath, client := setupRemoteServiceWithRuby(t, cfg, rubySrv, testserver.WithTransactionManager(&txManager))
+
+ ctx, err := (&txinfo.PraefectServer{SocketPath: "i-dont-care"}).Inject(ctx)
+ require.NoError(t, err)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", true)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ preimageURL := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "remote", "get-url", "origin"))
+
+ _, err = client.AddRemote(ctx, &gitalypb.AddRemoteRequest{
+ Repository: repo,
+ Name: "origin",
+ Url: "foo/bar",
+ })
+ require.NoError(t, err)
+
+ if featureflag.IsEnabled(ctx, featureflag.TxRemote) {
+ preimageVote := fmt.Sprintf("remote.origin.url\t%s\n", preimageURL)
+ require.Equal(t, []voting.Vote{
+ voting.VoteFromData([]byte(preimageVote)),
+ voting.VoteFromData([]byte("remote.origin.url\tfoo/bar\n")),
+ }, votes)
+ } else {
+ require.Len(t, votes, 0)
+ }
+ })
+}
+
func TestFailedAddRemoteDueToValidation(t *testing.T) {
_, repo, _, client := setupRemoteService(t)
@@ -134,12 +184,12 @@ func TestFailedAddRemoteDueToValidation(t *testing.T) {
}
func TestSuccessfulRemoveRemote(t *testing.T) {
- _, repo, repoPath, client := setupRemoteService(t)
+ cfg, repo, repoPath, client := setupRemoteService(t)
ctx, cancel := testhelper.Context()
defer cancel()
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote", "add", "my-remote", "http://my-repo.git")
+ gittest.Exec(t, cfg, "-C", repoPath, "remote", "add", "my-remote", "http://my-repo.git")
testCases := []struct {
description string
@@ -169,7 +219,7 @@ func TestSuccessfulRemoveRemote(t *testing.T) {
require.NoError(t, err)
require.Equal(t, tc.result, r.GetResult())
- remotes := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "remote")
+ remotes := gittest.Exec(t, cfg, "-C", repoPath, "remote")
require.NotContains(t, string(remotes), tc.remoteName)
})
@@ -188,6 +238,46 @@ func TestFailedRemoveRemoteDueToValidation(t *testing.T) {
testhelper.RequireGrpcError(t, err, codes.InvalidArgument)
}
+func TestRemoveRemoteTransactional(t *testing.T) {
+ testhelper.NewFeatureSets([]featureflag.FeatureFlag{
+ featureflag.TxRemote,
+ }).Run(t, func(t *testing.T, ctx context.Context) {
+ var votes []voting.Vote
+ txManager := transaction.MockManager{
+ VoteFn: func(_ context.Context, _ txinfo.Transaction, _ txinfo.PraefectServer, vote voting.Vote) error {
+ votes = append(votes, vote)
+ return nil
+ },
+ }
+
+ cfg, repo, repoPath, client := setupRemoteService(t, testserver.WithTransactionManager(&txManager))
+
+ ctx, err := (&txinfo.PraefectServer{SocketPath: "i-dont-care"}).Inject(ctx)
+ require.NoError(t, err)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", true)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ preimageURL := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "remote", "get-url", "origin"))
+
+ _, err = client.RemoveRemote(ctx, &gitalypb.RemoveRemoteRequest{
+ Repository: repo,
+ Name: "origin",
+ })
+ require.NoError(t, err)
+
+ if featureflag.IsEnabled(ctx, featureflag.TxRemote) {
+ preimageVote := fmt.Sprintf("remote.origin.url\t%s\n", preimageURL)
+ require.Equal(t, []voting.Vote{
+ voting.VoteFromData([]byte(preimageVote)),
+ voting.VoteFromData([]byte{}),
+ }, votes)
+ } else {
+ require.Len(t, votes, 0)
+ }
+ })
+}
+
func TestFindRemoteRepository(t *testing.T) {
_, repo, _, client := setupRemoteService(t)
diff --git a/internal/gitaly/service/remote/server.go b/internal/gitaly/service/remote/server.go
index 05ddd7122..6ae0fb04b 100644
--- a/internal/gitaly/service/remote/server.go
+++ b/internal/gitaly/service/remote/server.go
@@ -3,8 +3,12 @@ package remote
import (
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -14,20 +18,35 @@ type server struct {
ruby *rubyserver.Server
locator storage.Locator
gitCmdFactory git.CommandFactory
+ catfileCache catfile.Cache
+ txManager transaction.Manager
conns *client.Pool
}
// NewServer creates a new instance of a grpc RemoteServiceServer
-func NewServer(cfg config.Cfg, rs *rubyserver.Server, locator storage.Locator, gitCmdFactory git.CommandFactory) gitalypb.RemoteServiceServer {
+func NewServer(
+ cfg config.Cfg,
+ rs *rubyserver.Server,
+ locator storage.Locator,
+ gitCmdFactory git.CommandFactory,
+ catfileCache catfile.Cache,
+ txManager transaction.Manager,
+) gitalypb.RemoteServiceServer {
return &server{
cfg: cfg,
ruby: rs,
locator: locator,
gitCmdFactory: gitCmdFactory,
+ catfileCache: catfileCache,
+ txManager: txManager,
conns: client.NewPoolWithOptions(
client.WithDialer(client.HealthCheckDialer(client.DialContext)),
client.WithDialOptions(client.FailOnNonTempDialError()...),
),
}
}
+
+func (s *server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
+}
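Because NewServer gains two dependencies here, every registration site now has to pass the catfile cache and the transaction manager. A condensed sketch of that wiring, modeled on the test-helper registration later in this diff (variable names are illustrative):

gitalypb.RegisterRemoteServiceServer(srv, remote.NewServer(
	cfg,           // config.Cfg
	rubyServer,    // *rubyserver.Server
	locator,       // storage.Locator
	gitCmdFactory, // git.CommandFactory
	catfileCache,  // catfile.Cache (new)
	txManager,     // transaction.Manager (new)
))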
diff --git a/internal/gitaly/service/remote/testhelper_test.go b/internal/gitaly/service/remote/testhelper_test.go
index cf317c7e3..942a18b23 100644
--- a/internal/gitaly/service/remote/testhelper_test.go
+++ b/internal/gitaly/service/remote/testhelper_test.go
@@ -44,6 +44,7 @@ func TestWithRubySidecar(t *testing.T) {
testSuccessfulUpdateRemoteMirrorRequestWithKeepDivergentRefs,
testFailedUpdateRemoteMirrorRequestDueToValidation,
testSuccessfulAddRemote,
+ testAddRemoteTransactional,
testUpdateRemoteMirror,
}
@@ -54,15 +55,22 @@ func TestWithRubySidecar(t *testing.T) {
}
}
-func setupRemoteServiceWithRuby(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) (config.Cfg, *gitalypb.Repository, string, gitalypb.RemoteServiceClient) {
+func setupRemoteServiceWithRuby(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server, opts ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, gitalypb.RemoteServiceClient) {
t.Helper()
- repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
addr := testserver.RunGitalyServer(t, cfg, rubySrv, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRemoteServiceServer(srv, NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetGitCmdFactory()))
- })
+ gitalypb.RegisterRemoteServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetTxManager(),
+ ))
+ }, opts...)
cfg.SocketPath = addr
client, conn := newRemoteClient(t, addr)
@@ -71,11 +79,11 @@ func setupRemoteServiceWithRuby(t *testing.T, cfg config.Cfg, rubySrv *rubyserve
return cfg, repo, repoPath, client
}
-func setupRemoteService(t *testing.T) (config.Cfg, *gitalypb.Repository, string, gitalypb.RemoteServiceClient) {
+func setupRemoteService(t *testing.T, opts ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, gitalypb.RemoteServiceClient) {
t.Helper()
cfg := testcfg.Build(t)
- return setupRemoteServiceWithRuby(t, cfg, nil)
+ return setupRemoteServiceWithRuby(t, cfg, nil, opts...)
}
func newRemoteClient(t *testing.T, serverSocketPath string) (gitalypb.RemoteServiceClient, *grpc.ClientConn) {
diff --git a/internal/gitaly/service/remote/update_remote_mirror.go b/internal/gitaly/service/remote/update_remote_mirror.go
index 4aac6fe0c..05e5a782c 100644
--- a/internal/gitaly/service/remote/update_remote_mirror.go
+++ b/internal/gitaly/service/remote/update_remote_mirror.go
@@ -113,7 +113,7 @@ func (s *server) goUpdateRemoteMirror(stream gitalypb.RemoteService_UpdateRemote
return fmt.Errorf("create reference matcher: %w", err)
}
- repo := localrepo.New(s.gitCmdFactory, firstRequest.GetRepository(), s.cfg)
+ repo := s.localrepo(firstRequest.GetRepository())
remoteRefsSlice, err := repo.GetRemoteReferences(ctx, firstRequest.GetRefName(), "refs/heads/*", "refs/tags/*")
if err != nil {
return fmt.Errorf("get remote references: %w", err)
@@ -160,7 +160,12 @@ func (s *server) goUpdateRemoteMirror(stream gitalypb.RemoteService_UpdateRemote
if !isAncestor {
// The mirror's reference has diverged from the local ref, or the mirror contains a commit
// which is not present in the local repository.
- divergentRefs = append(divergentRefs, []byte(localRef.Name))
+ if referenceMatcher.MatchString(localRef.Name.String()) {
+ // diverged branches on the mirror are only included in the response if they match
+ // one of the branches in the selector
+ divergentRefs = append(divergentRefs, []byte(localRef.Name))
+ }
+
delete(remoteRefs, localRef.Name)
continue
}
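A compact restatement of the filtering rule introduced above, assuming referenceMatcher is a *regexp.Regexp as the MatchString call suggests: only diverged refs that match the branch selector end up in DivergentRefs.

func collectDivergentRefs(referenceMatcher *regexp.Regexp, divergedRefNames []string) [][]byte {
	var divergentRefs [][]byte
	for _, name := range divergedRefNames {
		// Diverged refs outside the selector are left out of the response.
		if !referenceMatcher.MatchString(name) {
			continue
		}
		divergentRefs = append(divergentRefs, []byte(name))
	}
	return divergentRefs
}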
diff --git a/internal/gitaly/service/remote/update_remote_mirror_test.go b/internal/gitaly/service/remote/update_remote_mirror_test.go
index 9056e11c3..705ea747f 100644
--- a/internal/gitaly/service/remote/update_remote_mirror_test.go
+++ b/internal/gitaly/service/remote/update_remote_mirror_test.go
@@ -137,6 +137,24 @@ func testUpdateRemoteMirrorFeatured(t *testing.T, ctx context.Context, cfg confi
},
},
{
+ desc: "ignores diverged branches not matched by the branch selector",
+ sourceRefs: refs{
+ "refs/heads/matched": {"commit 1"},
+ "refs/heads/diverged": {"commit 1"},
+ },
+ onlyBranchesMatching: []string{"matched"},
+ keepDivergentRefs: true,
+ mirrorRefs: refs{
+ "refs/heads/matched": {"commit 1"},
+ "refs/heads/diverged": {"commit 2"},
+ },
+ response: &gitalypb.UpdateRemoteMirrorResponse{},
+ expectedMirrorRefs: map[string]string{
+ "refs/heads/matched": "commit 1",
+ "refs/heads/diverged": "commit 2",
+ },
+ },
+ {
desc: "does not delete refs with KeepDivergentRefs",
sourceRefs: refs{
"refs/heads/master": {"commit 1"},
@@ -350,14 +368,14 @@ func testUpdateRemoteMirrorFeatured(t *testing.T, ctx context.Context, cfg confi
},
} {
t.Run(tc.desc, func(t *testing.T) {
- _, mirrorRepoPath, cleanMirrorRepo := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ _, mirrorRepoPath, cleanMirrorRepo := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer cleanMirrorRepo()
- sourceRepoPb, sourceRepoPath, cleanSourceRepo := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ sourceRepoPb, sourceRepoPath, cleanSourceRepo := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
defer cleanSourceRepo()
// configure the mirror repository as a remote in the source
- testhelper.MustRunCommand(t, nil, "git", "-C", sourceRepoPath, "remote", "add", "mirror", mirrorRepoPath)
+ gittest.Exec(t, cfg, "-C", sourceRepoPath, "remote", "add", "mirror", mirrorRepoPath)
// create identical commits in both repositories so we can use them for
// the references
@@ -383,7 +401,7 @@ func testUpdateRemoteMirrorFeatured(t *testing.T, ctx context.Context, cfg confi
require.NoError(t, err)
}
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", reference, commitOID.String())
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", reference, commitOID.String())
}
}
for repoPath, symRefs := range map[string]map[string]string{
@@ -391,7 +409,7 @@ func testUpdateRemoteMirrorFeatured(t *testing.T, ctx context.Context, cfg confi
mirrorRepoPath: tc.mirrorSymRefs,
} {
for symRef, targetRef := range symRefs {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "symbolic-ref", symRef, targetRef)
+ gittest.Exec(t, cfg, "-C", repoPath, "symbolic-ref", symRef, targetRef)
}
}
@@ -425,7 +443,7 @@ func testUpdateRemoteMirrorFeatured(t *testing.T, ctx context.Context, cfg confi
// the same.
actualMirrorRefs := map[string]string{}
- refLines := strings.Split(text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", mirrorRepoPath, "for-each-ref", "--format=%(refname)%00%(contents:subject)")), "\n")
+ refLines := strings.Split(text.ChompBytes(gittest.Exec(t, cfg, "-C", mirrorRepoPath, "for-each-ref", "--format=%(refname)%00%(contents:subject)")), "\n")
for _, line := range refLines {
if line == "" {
continue
@@ -450,27 +468,34 @@ func testSuccessfulUpdateRemoteMirrorRequest(t *testing.T, cfg config.Cfg, rubyS
func testSuccessfulUpdateRemoteMirrorRequestFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
serverSocketPath := testserver.RunGitalyServer(t, cfg, rubySrv, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRemoteServiceServer(srv, NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterRemoteServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetTxManager(),
+ ))
})
client, conn := newRemoteClient(t, serverSocketPath)
defer conn.Close()
- testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "source")
+ testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "source")
defer cleanupFn()
- _, mirrorPath, mirrorCleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "mirror")
+ _, mirrorPath, mirrorCleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "mirror")
defer mirrorCleanupFn()
remoteName := "remote_mirror_1"
- testhelper.CreateTag(t, mirrorPath, "v0.0.1", "master", nil) // I needed another tag for the tests
- testhelper.CreateTag(t, testRepoPath, "new-tag", "60ecb67744cb56576c30214ff52294f8ce2def98", nil)
- testhelper.CreateTag(t, testRepoPath, "v1.0.0", "0b4bc9a49b562e85de7cc9e834518ea6828729b9", &testhelper.CreateTagOpts{
+ gittest.CreateTag(t, cfg, mirrorPath, "v0.0.1", "master", nil) // I needed another tag for the tests
+ gittest.CreateTag(t, cfg, testRepoPath, "new-tag", "60ecb67744cb56576c30214ff52294f8ce2def98", nil)
+ gittest.CreateTag(t, cfg, testRepoPath, "v1.0.0", "0b4bc9a49b562e85de7cc9e834518ea6828729b9", &gittest.CreateTagOpts{
Message: "Overriding tag", Force: true})
// Create a commit that only exists in the mirror
- mirrorOnlyCommitOid := gittest.CreateCommit(t, cfg, mirrorPath, "master", nil)
+ mirrorOnlyCommitOid := gittest.WriteCommit(t, cfg, mirrorPath, gittest.WithBranch("master"))
require.NotEmpty(t, mirrorOnlyCommitOid)
setupCommands := [][]string{
@@ -493,10 +518,10 @@ func testSuccessfulUpdateRemoteMirrorRequestFeatured(t *testing.T, ctx context.C
for _, args := range setupCommands {
gitArgs := []string{"-C", testRepoPath}
gitArgs = append(gitArgs, args...)
- testhelper.MustRunCommand(t, nil, "git", gitArgs...)
+ gittest.Exec(t, cfg, gitArgs...)
}
- newTagOid := string(testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "rev-parse", "v1.0.0"))
+ newTagOid := string(gittest.Exec(t, cfg, "-C", testRepoPath, "rev-parse", "v1.0.0"))
newTagOid = strings.TrimSpace(newTagOid)
require.NotEqual(t, newTagOid, "f4e6814c3e4e7a0de82a9e7cd20c626cc963a2f8") // Sanity check that the tag did in fact change
@@ -523,10 +548,10 @@ func testSuccessfulUpdateRemoteMirrorRequestFeatured(t *testing.T, ctx context.C
require.Empty(t, response.DivergentRefs)
// Ensure the local repository still has no reference to the mirror-only commit
- localRefs := string(testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "for-each-ref"))
+ localRefs := string(gittest.Exec(t, cfg, "-C", testRepoPath, "for-each-ref"))
require.NotContains(t, localRefs, mirrorOnlyCommitOid)
- mirrorRefs := string(testhelper.MustRunCommand(t, nil, "git", "-C", mirrorPath, "for-each-ref"))
+ mirrorRefs := string(gittest.Exec(t, cfg, "-C", mirrorPath, "for-each-ref"))
require.Contains(t, mirrorRefs, mirrorOnlyCommitOid)
require.Contains(t, mirrorRefs, "60ecb67744cb56576c30214ff52294f8ce2def98 commit\trefs/heads/new-branch")
@@ -550,16 +575,23 @@ func testSuccessfulUpdateRemoteMirrorRequestWithWildcards(t *testing.T, cfg conf
func testSuccessfulUpdateRemoteMirrorRequestWithWildcardsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
serverSocketPath := testserver.RunGitalyServer(t, cfg, rubySrv, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRemoteServiceServer(srv, NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterRemoteServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetTxManager(),
+ ))
})
client, conn := newRemoteClient(t, serverSocketPath)
defer conn.Close()
- testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "source")
+ testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "source")
defer cleanupFn()
- _, mirrorPath, mirrorCleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "mirror")
+ _, mirrorPath, mirrorCleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "mirror")
defer mirrorCleanupFn()
remoteName := "remote_mirror_2"
@@ -579,21 +611,21 @@ func testSuccessfulUpdateRemoteMirrorRequestWithWildcardsFeatured(t *testing.T,
{"tag", "--delete", "v1.1.0"}, // v1.1.0 is ambiguous, maps to a branch and a tag in gitlab-test repository
}
- testhelper.CreateTag(t, testRepoPath, "new-tag", "60ecb67744cb56576c30214ff52294f8ce2def98", nil) // Add tag
- testhelper.CreateTag(t, testRepoPath, "v1.0.0", "0b4bc9a49b562e85de7cc9e834518ea6828729b9",
- &testhelper.CreateTagOpts{Message: "Overriding tag", Force: true}) // Update tag
+ gittest.CreateTag(t, cfg, testRepoPath, "new-tag", "60ecb67744cb56576c30214ff52294f8ce2def98", nil) // Add tag
+ gittest.CreateTag(t, cfg, testRepoPath, "v1.0.0", "0b4bc9a49b562e85de7cc9e834518ea6828729b9",
+ &gittest.CreateTagOpts{Message: "Overriding tag", Force: true}) // Update tag
for _, args := range setupCommands {
gitArgs := []string{"-C", testRepoPath}
gitArgs = append(gitArgs, args...)
- testhelper.MustRunCommand(t, nil, "git", gitArgs...)
+ gittest.Exec(t, cfg, gitArgs...)
}
// Workaround for https://gitlab.com/gitlab-org/gitaly/issues/1439
// Create a tag on the remote to ensure it gets deleted later
- testhelper.CreateTag(t, mirrorPath, "v1.2.0", "master", nil)
+ gittest.CreateTag(t, cfg, mirrorPath, "v1.2.0", "master", nil)
- newTagOid := string(testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "rev-parse", "v1.0.0"))
+ newTagOid := string(gittest.Exec(t, cfg, "-C", testRepoPath, "rev-parse", "v1.0.0"))
newTagOid = strings.TrimSpace(newTagOid)
require.NotEqual(t, newTagOid, "f4e6814c3e4e7a0de82a9e7cd20c626cc963a2f8") // Sanity check that the tag did in fact change
firstRequest := &gitalypb.UpdateRemoteMirrorRequest{
@@ -610,7 +642,7 @@ func testSuccessfulUpdateRemoteMirrorRequestWithWildcardsFeatured(t *testing.T,
require.NoError(t, err)
require.Empty(t, response.DivergentRefs)
- mirrorRefs := string(testhelper.MustRunCommand(t, nil, "git", "-C", mirrorPath, "for-each-ref"))
+ mirrorRefs := string(gittest.Exec(t, cfg, "-C", mirrorPath, "for-each-ref"))
require.Contains(t, mirrorRefs, "60ecb67744cb56576c30214ff52294f8ce2def98 commit\trefs/heads/11-0-stable")
require.Contains(t, mirrorRefs, "60ecb67744cb56576c30214ff52294f8ce2def98 commit\trefs/heads/11-1-stable")
require.Contains(t, mirrorRefs, "0b4bc9a49b562e85de7cc9e834518ea6828729b9 commit\trefs/heads/feature")
@@ -634,21 +666,28 @@ func testSuccessfulUpdateRemoteMirrorRequestWithKeepDivergentRefs(t *testing.T,
func testSuccessfulUpdateRemoteMirrorRequestWithKeepDivergentRefsFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
serverSocketPath := testserver.RunGitalyServer(t, cfg, rubySrv, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRemoteServiceServer(srv, NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterRemoteServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetTxManager(),
+ ))
})
client, conn := newRemoteClient(t, serverSocketPath)
defer conn.Close()
- testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "source")
+ testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "source")
defer cleanupFn()
- _, mirrorPath, mirrorCleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "mirror")
+ _, mirrorPath, mirrorCleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "mirror")
defer mirrorCleanupFn()
remoteName := "remote_mirror_1"
- testhelper.CreateTag(t, mirrorPath, "v2.0.0", "master", nil)
+ gittest.CreateTag(t, cfg, mirrorPath, "v2.0.0", "master", nil)
setupCommands := [][]string{
// Preconditions
@@ -666,7 +705,7 @@ func testSuccessfulUpdateRemoteMirrorRequestWithKeepDivergentRefsFeatured(t *tes
for _, args := range setupCommands {
gitArgs := []string{"-C", testRepoPath}
gitArgs = append(gitArgs, args...)
- testhelper.MustRunCommand(t, nil, "git", gitArgs...)
+ gittest.Exec(t, cfg, gitArgs...)
}
firstRequest := &gitalypb.UpdateRemoteMirrorRequest{
Repository: testRepo,
@@ -682,7 +721,7 @@ func testSuccessfulUpdateRemoteMirrorRequestWithKeepDivergentRefsFeatured(t *tes
require.NoError(t, err)
require.ElementsMatch(t, response.DivergentRefs, [][]byte{[]byte("refs/heads/master")})
- mirrorRefs := string(testhelper.MustRunCommand(t, nil, "git", "-C", mirrorPath, "for-each-ref"))
+ mirrorRefs := string(gittest.Exec(t, cfg, "-C", mirrorPath, "for-each-ref"))
// Verify `master` didn't get updated, since its HEAD is no longer an ancestor of remote's version
require.Contains(t, mirrorRefs, "1e292f8fedd741b75372e19097c76d327140c312 commit\trefs/heads/master")
@@ -701,7 +740,7 @@ func testSuccessfulUpdateRemoteMirrorRequestWithKeepDivergentRefsFeatured(t *tes
_, err = stream.CloseAndRecv()
require.NoError(t, err)
- mirrorRefs = string(testhelper.MustRunCommand(t, nil, "git", "-C", mirrorPath, "for-each-ref"))
+ mirrorRefs = string(gittest.Exec(t, cfg, "-C", mirrorPath, "for-each-ref"))
// Verify `master` gets overwritten with the value from the source
require.Contains(t, mirrorRefs, "ba3faa7dbecdb555c748b36e8bc0f427e69de5e7 commit\trefs/heads/master")
@@ -720,13 +759,20 @@ func testFailedUpdateRemoteMirrorRequestDueToValidation(t *testing.T, cfg config
func testFailedUpdateRemoteMirrorRequestDueToValidationFeatured(t *testing.T, ctx context.Context, cfg config.Cfg, rubySrv *rubyserver.Server) {
serverSocketPath := testserver.RunGitalyServer(t, cfg, rubySrv, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRemoteServiceServer(srv, NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterRemoteServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetTxManager(),
+ ))
})
client, conn := newRemoteClient(t, serverSocketPath)
defer conn.Close()
- testRepo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ testRepo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
defer cleanupFn()
testCases := []struct {
diff --git a/internal/gitaly/service/repository/apply_gitattributes.go b/internal/gitaly/service/repository/apply_gitattributes.go
index 7a876fb81..a2df4aa01 100644
--- a/internal/gitaly/service/repository/apply_gitattributes.go
+++ b/internal/gitaly/service/repository/apply_gitattributes.go
@@ -11,8 +11,8 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -93,12 +93,12 @@ func (s *server) applyGitattributes(ctx context.Context, c catfile.Batch, repoPa
}
func (s *server) vote(ctx context.Context, oid git.ObjectID) error {
- tx, err := metadata.TransactionFromContext(ctx)
- if errors.Is(err, metadata.ErrTransactionNotFound) {
+ tx, err := txinfo.TransactionFromContext(ctx)
+ if errors.Is(err, txinfo.ErrTransactionNotFound) {
return nil
}
- praefect, err := metadata.PraefectFromContext(ctx)
+ praefect, err := txinfo.PraefectFromContext(ctx)
if err != nil {
return fmt.Errorf("vote has invalid Praefect info: %w", err)
}
@@ -108,7 +108,7 @@ func (s *server) vote(ctx context.Context, oid git.ObjectID) error {
return fmt.Errorf("vote with invalid object ID: %w", err)
}
- vote, err := transaction.VoteFromHash(hash)
+ vote, err := voting.VoteFromHash(hash)
if err != nil {
return fmt.Errorf("cannot convert OID to vote: %w", err)
}
@@ -121,7 +121,7 @@ func (s *server) vote(ctx context.Context, oid git.ObjectID) error {
}
func (s *server) ApplyGitattributes(ctx context.Context, in *gitalypb.ApplyGitattributesRequest) (*gitalypb.ApplyGitattributesResponse, error) {
- repo := in.GetRepository()
+ repo := s.localrepo(in.GetRepository())
repoPath, err := s.locator.GetRepoPath(repo)
if err != nil {
return nil, err
@@ -131,7 +131,7 @@ func (s *server) ApplyGitattributes(ctx context.Context, in *gitalypb.ApplyGitat
return nil, status.Errorf(codes.InvalidArgument, "ApplyGitAttributes: revision: %v", err)
}
- c, err := catfile.New(ctx, s.gitCmdFactory, repo)
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return nil, err
}
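The vote above is derived directly from the gitattributes blob OID. A small sketch of that conversion, with git.ObjectID.Bytes and voting.VoteFromHash assumed to behave as the surrounding code implies (imports omitted):

func voteFromObjectID(oid git.ObjectID) (voting.Vote, error) {
	// Decode the hex object ID into its raw hash bytes.
	hash, err := oid.Bytes()
	if err != nil {
		return voting.Vote{}, fmt.Errorf("vote with invalid object ID: %w", err)
	}
	// The raw hash doubles as the vote, so all nodes writing the same
	// gitattributes content agree on the same vote.
	return voting.VoteFromHash(hash)
}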
diff --git a/internal/gitaly/service/repository/apply_gitattributes_test.go b/internal/gitaly/service/repository/apply_gitattributes_test.go
index 49b08beee..07169f013 100644
--- a/internal/gitaly/service/repository/apply_gitattributes_test.go
+++ b/internal/gitaly/service/repository/apply_gitattributes_test.go
@@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"io/ioutil"
- "net"
"os"
"path/filepath"
"testing"
@@ -15,13 +14,12 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
@@ -86,136 +84,126 @@ func (s *testTransactionServer) VoteTransaction(ctx context.Context, in *gitalyp
}
func TestApplyGitattributesWithTransaction(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.BackchannelVoting,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- cfg, repo, repoPath := testcfg.BuildWithRepo(t)
- registry := backchannel.NewRegistry()
- txManager := transaction.NewManager(cfg, registry)
- locator := config.NewLocator(cfg)
- transactionServer := &testTransactionServer{}
-
- logger := testhelper.DiscardTestEntry(t)
- srv := testhelper.NewServerWithAuth(t, nil, nil, cfg.Auth.Token, registry)
- if featureflag.IsDisabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv.GrpcServer(), transactionServer)
- }
-
- gitalypb.RegisterRepositoryServiceServer(srv.GrpcServer(), NewServer(cfg, nil, locator, txManager, git.NewExecCommandFactory(cfg)))
-
- srv.Start(t)
- defer srv.Stop()
-
- // We're creating a secondary listener in order to route around
- // Praefect in our tests. Otherwise Praefect would replace our
- // carefully crafted transaction and server information.
- transactionServerListener, err := net.Listen("unix", testhelper.GetTemporaryGitalySocketFileName(t))
- require.NoError(t, err)
- go func() { require.NoError(t, srv.GrpcServer().Serve(transactionServerListener)) }()
+ cfg, repo, repoPath := testcfg.BuildWithRepo(t)
+
+ transactionServer := &testTransactionServer{}
+ testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterRepositoryServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ })
- client := newMuxedRepositoryClient(t, ctx, cfg, "unix://"+transactionServerListener.Addr().String(),
- backchannel.NewClientHandshaker(logger, func() backchannel.Server {
- srv := grpc.NewServer()
+ // We're using the internal listener in order to route around
+ // Praefect in our tests. Otherwise Praefect would replace our
+ // carefully crafted transaction and server information.
+ logger := testhelper.DiscardTestEntry(t)
- if featureflag.IsEnabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, transactionServer)
- }
+ ctx, cancel := testhelper.Context()
+ defer cancel()
- return srv
- }),
- )
+ client := newMuxedRepositoryClient(t, ctx, cfg, "unix://"+cfg.GitalyInternalSocketPath(),
+ backchannel.NewClientHandshaker(logger, func() backchannel.Server {
+ srv := grpc.NewServer()
+ gitalypb.RegisterRefTransactionServer(srv, transactionServer)
+ return srv
+ }),
+ )
+
+ praefect := txinfo.PraefectServer{
+ SocketPath: "unix://" + cfg.GitalyInternalSocketPath(),
+ Token: cfg.Auth.Token,
+ }
- praefect := metadata.PraefectServer{
- SocketPath: "unix://" + transactionServerListener.Addr().String(),
- Token: cfg.Auth.Token,
- }
+ for _, tc := range []struct {
+ desc string
+ revision []byte
+ voteFn func(*testing.T, *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error)
+ shouldExist bool
+ expectedErr error
+ }{
+ {
+ desc: "successful vote writes gitattributes",
+ revision: []byte("e63f41fe459e62e1228fcef60d7189127aeba95a"),
+ voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ oid, err := git.NewObjectIDFromHex("36814a3da051159a1683479e7a1487120309db8f")
+ require.NoError(t, err)
+ hash, err := oid.Bytes()
+ require.NoError(t, err)
- for _, tc := range []struct {
- desc string
- revision []byte
- voteFn func(*testing.T, *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error)
- shouldExist bool
- expectedErr error
- }{
- {
- desc: "successful vote writes gitattributes",
- revision: []byte("e63f41fe459e62e1228fcef60d7189127aeba95a"),
- voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- oid, err := git.NewObjectIDFromHex("36814a3da051159a1683479e7a1487120309db8f")
- require.NoError(t, err)
- hash, err := oid.Bytes()
- require.NoError(t, err)
-
- require.Equal(t, hash, request.ReferenceUpdatesHash)
- return &gitalypb.VoteTransactionResponse{
- State: gitalypb.VoteTransactionResponse_COMMIT,
- }, nil
- },
- shouldExist: true,
+ require.Equal(t, hash, request.ReferenceUpdatesHash)
+ return &gitalypb.VoteTransactionResponse{
+ State: gitalypb.VoteTransactionResponse_COMMIT,
+ }, nil
},
- {
- desc: "aborted vote does not write gitattributes",
- revision: []byte("e63f41fe459e62e1228fcef60d7189127aeba95a"),
- voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- return &gitalypb.VoteTransactionResponse{
- State: gitalypb.VoteTransactionResponse_ABORT,
- }, nil
- },
- shouldExist: false,
- expectedErr: status.Error(codes.Unknown, "could not commit gitattributes: vote failed: transaction was aborted"),
+ shouldExist: true,
+ },
+ {
+ desc: "aborted vote does not write gitattributes",
+ revision: []byte("e63f41fe459e62e1228fcef60d7189127aeba95a"),
+ voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ return &gitalypb.VoteTransactionResponse{
+ State: gitalypb.VoteTransactionResponse_ABORT,
+ }, nil
},
- {
- desc: "failing vote does not write gitattributes",
- revision: []byte("e63f41fe459e62e1228fcef60d7189127aeba95a"),
- voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- return nil, errors.New("foobar")
- },
- shouldExist: false,
- expectedErr: status.Error(codes.Unknown, "could not commit gitattributes: vote failed: rpc error: code = Unknown desc = foobar"),
+ shouldExist: false,
+ expectedErr: status.Error(codes.Unknown, "could not commit gitattributes: vote failed: transaction was aborted"),
+ },
+ {
+ desc: "failing vote does not write gitattributes",
+ revision: []byte("e63f41fe459e62e1228fcef60d7189127aeba95a"),
+ voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ return nil, errors.New("foobar")
},
- {
- desc: "commit without gitattributes performs vote",
- revision: []byte("7efb185dd22fd5c51ef044795d62b7847900c341"),
- voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- require.Equal(t, bytes.Repeat([]byte{0x00}, 20), request.ReferenceUpdatesHash)
- return &gitalypb.VoteTransactionResponse{
- State: gitalypb.VoteTransactionResponse_COMMIT,
- }, nil
- },
- shouldExist: false,
+ shouldExist: false,
+ expectedErr: status.Error(codes.Unknown, "could not commit gitattributes: vote failed: rpc error: code = Unknown desc = foobar"),
+ },
+ {
+ desc: "commit without gitattributes performs vote",
+ revision: []byte("7efb185dd22fd5c51ef044795d62b7847900c341"),
+ voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ require.Equal(t, bytes.Repeat([]byte{0x00}, 20), request.ReferenceUpdatesHash)
+ return &gitalypb.VoteTransactionResponse{
+ State: gitalypb.VoteTransactionResponse_COMMIT,
+ }, nil
},
- } {
- t.Run(tc.desc, func(t *testing.T) {
- infoPath := filepath.Join(repoPath, "info")
- require.NoError(t, os.RemoveAll(infoPath))
+ shouldExist: false,
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ infoPath := filepath.Join(repoPath, "info")
+ require.NoError(t, os.RemoveAll(infoPath))
+
+ ctx, err := txinfo.InjectTransaction(ctx, 1, "primary", true)
+ require.NoError(t, err)
+ ctx, err = praefect.Inject(ctx)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ transactionServer.vote = func(request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ return tc.voteFn(t, request)
+ }
- ctx, err := metadata.InjectTransaction(ctx, 1, "primary", true)
- require.NoError(t, err)
- ctx, err = praefect.Inject(ctx)
- require.NoError(t, err)
- ctx = helper.IncomingToOutgoing(ctx)
-
- transactionServer.vote = func(request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- return tc.voteFn(t, request)
- }
-
- _, err = client.ApplyGitattributes(ctx, &gitalypb.ApplyGitattributesRequest{
- Repository: repo,
- Revision: tc.revision,
- })
- require.Equal(t, tc.expectedErr, err)
-
- path := filepath.Join(infoPath, "attributes")
- if tc.shouldExist {
- require.FileExists(t, path)
- contents := testhelper.MustReadFile(t, path)
- require.Equal(t, []byte("/custom-highlighting/*.gitlab-custom gitlab-language=ruby\n"), contents)
- } else {
- require.NoFileExists(t, path)
- }
+ _, err = client.ApplyGitattributes(ctx, &gitalypb.ApplyGitattributesRequest{
+ Repository: repo,
+ Revision: tc.revision,
})
- }
- })
+ require.Equal(t, tc.expectedErr, err)
+
+ path := filepath.Join(infoPath, "attributes")
+ if tc.shouldExist {
+ require.FileExists(t, path)
+ contents := testhelper.MustReadFile(t, path)
+ require.Equal(t, []byte("/custom-highlighting/*.gitlab-custom gitlab-language=ruby\n"), contents)
+ } else {
+ require.NoFileExists(t, path)
+ }
+ })
+ }
}
func TestApplyGitattributesFailure(t *testing.T) {
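The rewritten test repeats the same context plumbing before every RPC. A condensed sketch of those three steps, using the helpers exactly as they appear above (the wrapper function itself is illustrative):

func transactionalContext(ctx context.Context, praefect txinfo.PraefectServer) (context.Context, error) {
	// 1. Mark the context as carrying transaction ID 1, voted on by node "primary".
	ctx, err := txinfo.InjectTransaction(ctx, 1, "primary", true)
	if err != nil {
		return nil, err
	}
	// 2. Attach the Praefect server information so the service knows where to vote.
	ctx, err = praefect.Inject(ctx)
	if err != nil {
		return nil, err
	}
	// 3. Convert the incoming metadata into outgoing metadata for the client call.
	return helper.IncomingToOutgoing(ctx), nil
}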
diff --git a/internal/gitaly/service/repository/archive.go b/internal/gitaly/service/repository/archive.go
index 027f43cbb..f765f8d9e 100644
--- a/internal/gitaly/service/repository/archive.go
+++ b/internal/gitaly/service/repository/archive.go
@@ -12,7 +12,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/commit"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/log"
@@ -39,8 +38,9 @@ type archiveParams struct {
func (s *server) GetArchive(in *gitalypb.GetArchiveRequest, stream gitalypb.RepositoryService_GetArchiveServer) error {
ctx := stream.Context()
compressCmd, format := parseArchiveFormat(in.GetFormat())
+ repo := s.localrepo(in.GetRepository())
- repoRoot, err := s.locator.GetRepoPath(in.GetRepository())
+ repoRoot, err := repo.Path()
if err != nil {
return err
}
@@ -62,7 +62,7 @@ func (s *server) GetArchive(in *gitalypb.GetArchiveRequest, stream gitalypb.Repo
return err
}
- if err := s.validateGetArchivePrecondition(ctx, in, path, exclude); err != nil {
+ if err := s.validateGetArchivePrecondition(ctx, repo, in.GetCommitId(), path, exclude); err != nil {
return err
}
@@ -134,15 +134,21 @@ func validateGetArchiveRequest(in *gitalypb.GetArchiveRequest, format string, pa
return nil
}
-func (s *server) validateGetArchivePrecondition(ctx context.Context, in *gitalypb.GetArchiveRequest, path string, exclude []string) error {
- c, err := catfile.New(ctx, s.gitCmdFactory, in.GetRepository())
+func (s *server) validateGetArchivePrecondition(
+ ctx context.Context,
+ repo git.RepositoryExecutor,
+ commitID string,
+ path string,
+ exclude []string,
+) error {
+ c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
f := commit.NewTreeEntryFinder(c)
if path != "." {
- if ok, err := findGetArchivePath(ctx, f, in.GetCommitId(), path); err != nil {
+ if ok, err := findGetArchivePath(ctx, f, commitID, path); err != nil {
return err
} else if !ok {
return helper.ErrPreconditionFailedf("path doesn't exist")
@@ -150,7 +156,7 @@ func (s *server) validateGetArchivePrecondition(ctx context.Context, in *gitalyp
}
for i, exclude := range exclude {
- if ok, err := findGetArchivePath(ctx, f, in.GetCommitId(), exclude); err != nil {
+ if ok, err := findGetArchivePath(ctx, f, commitID, exclude); err != nil {
return err
} else if !ok {
return helper.ErrPreconditionFailedf("exclude[%d] doesn't exist", i)

diff --git a/internal/gitaly/service/repository/archive_test.go b/internal/gitaly/service/repository/archive_test.go
index aec83ffec..044dccd75 100644
--- a/internal/gitaly/service/repository/archive_test.go
+++ b/internal/gitaly/service/repository/archive_test.go
@@ -188,10 +188,10 @@ func TestGetArchiveWithLfsSuccess(t *testing.T) {
serverSocketPath := runRepositoryServerWithConfig(t, cfg, nil)
client := newRepositoryClient(t, cfg, serverSocketPath)
- repo, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
- testhelper.ConfigureGitalyLfsSmudge(cfg.BinDir)
+ testhelper.ConfigureGitalyLfsSmudge(t, cfg.BinDir)
// lfs-moar branch SHA
sha := "46abbb087fcc0fd02c340f0f2f052bd2c7708da3"
@@ -405,7 +405,7 @@ func TestGetArchiveFailure(t *testing.T) {
}
func TestGetArchivePathInjection(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryServiceWithWorktree(t)
+ cfg, repo, repoPath, client := setupRepositoryServiceWithWorktree(t)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -436,9 +436,9 @@ func TestGetArchivePathInjection(t *testing.T) {
require.NoError(t, f.Close())
// Add the directory to the repository
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "add", ".")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit", "-m", "adding fake key file")
- commitID := strings.TrimRight(string(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "HEAD")), "\n")
+ gittest.Exec(t, cfg, "-C", repoPath, "add", ".")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "adding fake key file")
+ commitID := strings.TrimRight(string(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "HEAD")), "\n")
injectionPath := fmt.Sprintf("--output=%s", authorizedKeysPath)
@@ -482,16 +482,22 @@ env | grep -E "^GL_|CORRELATION|GITALY_"`))
require.NoError(t, err)
require.NoError(t, tmpFile.Close())
- cfg := testcfg.Build(t, testcfg.WithBase(config.Cfg{Git: config.Git{BinPath: tmpFile.Name()}}))
+ cfg := testcfg.Build(t)
testhelper.ConfigureGitalyHooksBin(t, cfg)
- serverSocketPath := runRepositoryServerWithConfig(t, cfg, nil)
+ // We re-define the path to the git executable to capture the parameters used to call it.
+ // This replacement only needs to be done for the configuration used to invoke git commands.
+ // Other operations should use the actual path to the git binary to work properly.
+ spyGitCfg := cfg
+ spyGitCfg.Git.BinPath = tmpFile.Name()
+
+ serverSocketPath := runRepositoryServerWithConfig(t, spyGitCfg, nil)
cfg.SocketPath = serverSocketPath
client := newRepositoryClient(t, cfg, serverSocketPath)
- repo, _, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repo, _, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
commitID := "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"
diff --git a/internal/gitaly/service/repository/calculate_checksum_test.go b/internal/gitaly/service/repository/calculate_checksum_test.go
index e4d445e9c..f5a49a320 100644
--- a/internal/gitaly/service/repository/calculate_checksum_test.go
+++ b/internal/gitaly/service/repository/calculate_checksum_test.go
@@ -67,7 +67,7 @@ func TestRefWhitelist(t *testing.T) {
func TestEmptyRepositoryCalculateChecksum(t *testing.T) {
cfg, client := setupRepositoryServiceWithoutRepo(t)
- repo, _, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repo, _, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
request := &gitalypb.CalculateChecksumRequest{Repository: repo}
@@ -82,7 +82,7 @@ func TestEmptyRepositoryCalculateChecksum(t *testing.T) {
func TestBrokenRepositoryCalculateChecksum(t *testing.T) {
cfg, client := setupRepositoryServiceWithoutRepo(t)
- repo, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ repo, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
// Force an empty HEAD file
diff --git a/internal/gitaly/service/repository/cleanup.go b/internal/gitaly/service/repository/cleanup.go
index 907c1de71..92f153498 100644
--- a/internal/gitaly/service/repository/cleanup.go
+++ b/internal/gitaly/service/repository/cleanup.go
@@ -8,7 +8,6 @@ import (
"time"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/housekeeping"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
@@ -16,7 +15,7 @@ import (
)
func (s *server) Cleanup(ctx context.Context, in *gitalypb.CleanupRequest) (*gitalypb.CleanupResponse, error) {
- repo := localrepo.New(s.gitCmdFactory, in.GetRepository(), s.cfg)
+ repo := s.localrepo(in.GetRepository())
if err := s.cleanupRepo(ctx, repo); err != nil {
return nil, err
@@ -39,10 +38,6 @@ func (s *server) cleanupRepo(ctx context.Context, repo *localrepo.Repo) error {
return status.Errorf(codes.Internal, "Cleanup: cleanDisconnectedWorktrees: %v", err)
}
- if err := housekeeping.Perform(ctx, repo); err != nil {
- return status.Errorf(codes.Internal, "Cleanup: houskeeping: %v", err)
- }
-
return nil
}
diff --git a/internal/gitaly/service/repository/cleanup_test.go b/internal/gitaly/service/repository/cleanup_test.go
index 24b762c3d..e281a75be 100644
--- a/internal/gitaly/service/repository/cleanup_test.go
+++ b/internal/gitaly/service/repository/cleanup_test.go
@@ -14,105 +14,6 @@ import (
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
-func TestCleanupDeletesRefsLocks(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- req := &gitalypb.CleanupRequest{Repository: repo}
- refsPath := filepath.Join(repoPath, "refs")
-
- keepRefPath := filepath.Join(refsPath, "heads", "keepthis")
- mustCreateFileWithTimes(t, keepRefPath, freshTime)
- keepOldRefPath := filepath.Join(refsPath, "heads", "keepthisalso")
- mustCreateFileWithTimes(t, keepOldRefPath, oldTime)
- keepDeceitfulRef := filepath.Join(refsPath, "heads", " .lock.not-actually-a-lock.lock ")
- mustCreateFileWithTimes(t, keepDeceitfulRef, oldTime)
-
- keepLockPath := filepath.Join(refsPath, "heads", "keepthis.lock")
- mustCreateFileWithTimes(t, keepLockPath, freshTime)
-
- deleteLockPath := filepath.Join(refsPath, "heads", "deletethis.lock")
- mustCreateFileWithTimes(t, deleteLockPath, oldTime)
-
- c, err := client.Cleanup(ctx, req)
- assert.NoError(t, err)
- assert.NotNil(t, c)
-
- // Sanity checks
- assert.FileExists(t, keepRefPath)
- assert.FileExists(t, keepOldRefPath)
- assert.FileExists(t, keepDeceitfulRef)
-
- assert.FileExists(t, keepLockPath)
-
- testhelper.AssertPathNotExists(t, deleteLockPath)
-}
-
-func TestCleanupDeletesPackedRefsLock(t *testing.T) {
- cfg, client := setupRepositoryServiceWithoutRepo(t)
-
- testCases := []struct {
- desc string
- lockTime *time.Time
- shouldExist bool
- }{
- {
- desc: "with a recent lock",
- lockTime: &freshTime,
- shouldExist: true,
- },
- {
- desc: "with an old lock",
- lockTime: &oldTime,
- shouldExist: false,
- },
- {
- desc: "with a non-existing lock",
- lockTime: nil,
- shouldExist: false,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.desc, func(t *testing.T) {
- repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
- t.Cleanup(cleanupFn)
-
- // Force the packed-refs file to have an old time to test that even
- // in that case it doesn't get deleted
- packedRefsPath := filepath.Join(repoPath, "packed-refs")
- require.NoError(t, os.Chtimes(packedRefsPath, oldTime, oldTime))
-
- req := &gitalypb.CleanupRequest{Repository: repo}
- lockPath := filepath.Join(repoPath, "packed-refs.lock")
-
- if tc.lockTime != nil {
- mustCreateFileWithTimes(t, lockPath, *tc.lockTime)
- }
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- c, err := client.Cleanup(ctx, req)
-
- // Sanity checks
- assert.FileExists(t, filepath.Join(repoPath, "HEAD")) // For good measure
- assert.FileExists(t, packedRefsPath)
-
- if tc.shouldExist {
- assert.FileExists(t, lockPath)
- } else {
- assert.NoError(t, err)
- assert.NotNil(t, c)
-
- testhelper.AssertPathNotExists(t, lockPath)
- }
- })
- }
-}
-
// TODO: replace emulated rebase RPC with actual
// https://gitlab.com/gitlab-org/gitaly/issues/1750
func TestCleanupDeletesStaleWorktrees(t *testing.T) {
@@ -142,13 +43,13 @@ func TestCleanupDeletesStaleWorktrees(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanupFn)
req := &gitalypb.CleanupRequest{Repository: repo}
worktreeCheckoutPath := filepath.Join(repoPath, worktreePrefix, "test-worktree")
- gittest.AddWorktree(t, repoPath, worktreeCheckoutPath)
+ gittest.AddWorktree(t, cfg, repoPath, worktreeCheckoutPath)
basePath := filepath.Join(repoPath, "worktrees")
worktreePath := filepath.Join(basePath, "test-worktree")
@@ -169,8 +70,8 @@ func TestCleanupDeletesStaleWorktrees(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, c)
- testhelper.AssertPathNotExists(t, worktreeCheckoutPath)
- testhelper.AssertPathNotExists(t, worktreePath)
+ require.NoFileExists(t, worktreeCheckoutPath)
+ require.NoFileExists(t, worktreePath)
}
})
}
@@ -193,7 +94,7 @@ func TestCleanupDisconnectedWorktrees(t *testing.T) {
req := &gitalypb.CleanupRequest{Repository: repo}
- gittest.AddWorktree(t, repoPath, worktreePath)
+ gittest.AddWorktree(t, cfg, repoPath, worktreePath)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -212,93 +113,9 @@ func TestCleanupDisconnectedWorktrees(t *testing.T) {
// cleanup should prune the disconnected worktree administrative files
_, err = client.Cleanup(ctx, req)
require.NoError(t, err)
- testhelper.AssertPathNotExists(t, worktreeAdminPath)
+ require.NoFileExists(t, worktreeAdminPath)
// if the worktree administrative files are pruned, then we should be able
// to checkout another worktree at the same path
- gittest.AddWorktree(t, repoPath, worktreePath)
-}
-
-func TestCleanupFileLocks(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- req := &gitalypb.CleanupRequest{Repository: repo}
-
- for _, fileName := range []string{
- "config.lock",
- "HEAD.lock",
- "objects/info/commit-graphs/commit-graph-chain.lock",
- } {
- lockPath := filepath.Join(repoPath, fileName)
- // No file on the lock path
- _, err := client.Cleanup(ctx, req)
- assert.NoError(t, err)
-
- // Fresh lock should remain
- mustCreateFileWithTimes(t, lockPath, freshTime)
- _, err = client.Cleanup(ctx, req)
- assert.NoError(t, err)
- assert.FileExists(t, lockPath)
-
- // Old lock should be removed
- mustCreateFileWithTimes(t, lockPath, oldTime)
- _, err = client.Cleanup(ctx, req)
- assert.NoError(t, err)
- testhelper.AssertPathNotExists(t, lockPath)
- }
-}
-
-func TestCleanupDeletesPackedRefsNew(t *testing.T) {
- cfg, client := setupRepositoryServiceWithoutRepo(t)
-
- testCases := []struct {
- desc string
- lockTime *time.Time
- shouldExist bool
- }{
- {
- desc: "created recently",
- lockTime: &freshTime,
- shouldExist: true,
- },
- {
- desc: "exists for too long",
- lockTime: &oldTime,
- shouldExist: false,
- },
- {
- desc: "nothing to clean up",
- shouldExist: false,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.desc, func(t *testing.T) {
- repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
- t.Cleanup(cleanupFn)
-
- req := &gitalypb.CleanupRequest{Repository: repo}
- packedRefsNewPath := filepath.Join(repoPath, "packed-refs.new")
-
- if tc.lockTime != nil {
- mustCreateFileWithTimes(t, packedRefsNewPath, *tc.lockTime)
- }
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- c, err := client.Cleanup(ctx, req)
- require.NotNil(t, c)
- require.NoError(t, err)
-
- if tc.shouldExist {
- require.FileExists(t, packedRefsNewPath)
- } else {
- testhelper.AssertPathNotExists(t, packedRefsNewPath)
- }
- })
- }
+ gittest.AddWorktree(t, cfg, repoPath, worktreePath)
}
diff --git a/internal/gitaly/service/repository/clone_from_pool.go b/internal/gitaly/service/repository/clone_from_pool.go
index 8dacfc4a7..f2281a530 100644
--- a/internal/gitaly/service/repository/clone_from_pool.go
+++ b/internal/gitaly/service/repository/clone_from_pool.go
@@ -32,7 +32,7 @@ func (s *server) CloneFromPool(ctx context.Context, req *gitalypb.CloneFromPoolR
return nil, helper.ErrInternalf("fetch http remote: %v", err)
}
- objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, req.GetPool())
+ objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, s.catfileCache, req.GetPool())
if err != nil {
return nil, helper.ErrInternalf("get object pool from request: %v", err)
}
@@ -54,7 +54,7 @@ func (s *server) validateCloneFromPoolRequestRepositoryState(req *gitalypb.Clone
return errors.New("target reopsitory already exists")
}
- objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, req.GetPool())
+ objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, s.catfileCache, req.GetPool())
if err != nil {
return fmt.Errorf("getting object pool from repository: %v", err)
}
diff --git a/internal/gitaly/service/repository/clone_from_pool_internal.go b/internal/gitaly/service/repository/clone_from_pool_internal.go
index cf1ee1cd1..ccacc52e8 100644
--- a/internal/gitaly/service/repository/clone_from_pool_internal.go
+++ b/internal/gitaly/service/repository/clone_from_pool_internal.go
@@ -46,7 +46,7 @@ func (s *server) CloneFromPoolInternal(ctx context.Context, req *gitalypb.CloneF
return nil, helper.ErrInternalf("fetch internal remote failed")
}
- objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, req.GetPool())
+ objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, s.catfileCache, req.GetPool())
if err != nil {
return nil, helper.ErrInternalf("get object pool from request: %v", err)
}
@@ -68,7 +68,7 @@ func (s *server) validateCloneFromPoolInternalRequestRepositoryState(req *gitaly
return errors.New("target reopsitory already exists")
}
- objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, req.GetPool())
+ objectPool, err := objectpool.FromProto(s.cfg, s.locator, s.gitCmdFactory, s.catfileCache, req.GetPool())
if err != nil {
return fmt.Errorf("getting object pool from repository: %v", err)
}
diff --git a/internal/gitaly/service/repository/clone_from_pool_internal_test.go b/internal/gitaly/service/repository/clone_from_pool_internal_test.go
index 08b12ef7a..3616dad9a 100644
--- a/internal/gitaly/service/repository/clone_from_pool_internal_test.go
+++ b/internal/gitaly/service/repository/clone_from_pool_internal_test.go
@@ -1,7 +1,6 @@
package repository
import (
- "fmt"
"os"
"path/filepath"
"testing"
@@ -9,6 +8,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/objectpool"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
@@ -23,7 +23,16 @@ func newTestObjectPool(t *testing.T, cfg config.Cfg) (*objectpool.ObjectPool, *g
relativePath := gittest.NewObjectPoolName(t)
repo := gittest.InitRepoDir(t, cfg.Storages[0].Path, relativePath)
- pool, err := objectpool.NewObjectPool(cfg, config.NewLocator(cfg), git.NewExecCommandFactory(cfg), repo.GetStorageName(), relativePath)
+ gitCmdFactory := git.NewExecCommandFactory(cfg)
+
+ pool, err := objectpool.NewObjectPool(
+ cfg,
+ config.NewLocator(cfg),
+ gitCmdFactory,
+ catfile.NewCache(cfg),
+ repo.GetStorageName(),
+ relativePath,
+ )
require.NoError(t, err)
return pool, repo
@@ -57,9 +66,9 @@ func TestCloneFromPoolInternal(t *testing.T) {
require.NoError(t, pool.Create(ctx, repo))
require.NoError(t, pool.Link(ctx, repo))
- fullRepack(t, repoPath)
+ fullRepack(t, cfg, repoPath)
- _, newBranch := gittest.CreateCommitOnNewBranch(t, cfg, repoPath)
+ gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch("branch"))
forkedRepo, forkRepoPath, forkRepoCleanup := getForkDestination(t, cfg.Storages[0])
defer forkRepoCleanup()
@@ -83,11 +92,11 @@ func TestCloneFromPoolInternal(t *testing.T) {
// feature is a branch known to exist in the source repository. By looking it up in the target
// we establish that the target has branches, even though (as we saw above) it has no objects.
- testhelper.MustRunCommand(t, nil, "git", "-C", forkRepoPath, "show-ref", "--verify", "refs/heads/feature")
- testhelper.MustRunCommand(t, nil, "git", "-C", forkRepoPath, "show-ref", "--verify", fmt.Sprintf("refs/heads/%s", newBranch))
+ gittest.Exec(t, cfg, "-C", forkRepoPath, "show-ref", "--verify", "refs/heads/feature")
+ gittest.Exec(t, cfg, "-C", forkRepoPath, "show-ref", "--verify", "refs/heads/branch")
}
// fullRepack does a full repack on the repository, which means if it has a pool repository linked, it will get rid of redundant objects that are reachable in the pool
-func fullRepack(t *testing.T, repoPath string) {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "repack", "-A", "-l", "-d")
+func fullRepack(t *testing.T, cfg config.Cfg, repoPath string) {
+ gittest.Exec(t, cfg, "-C", repoPath, "repack", "-A", "-l", "-d")
}
diff --git a/internal/gitaly/service/repository/clone_from_pool_test.go b/internal/gitaly/service/repository/clone_from_pool_test.go
index 8807ffc0d..e49c8f8f5 100644
--- a/internal/gitaly/service/repository/clone_from_pool_test.go
+++ b/internal/gitaly/service/repository/clone_from_pool_test.go
@@ -1,7 +1,6 @@
package repository
import (
- "fmt"
"testing"
"github.com/stretchr/testify/assert"
@@ -31,9 +30,9 @@ func testCloneFromPoolHTTP(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Ser
require.NoError(t, pool.Create(ctx, repo))
require.NoError(t, pool.Link(ctx, repo))
- fullRepack(t, repoPath)
+ fullRepack(t, cfg, repoPath)
- _, newBranch := gittest.CreateCommitOnNewBranch(t, cfg, repoPath)
+ gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch("branch"))
forkedRepo, forkRepoPath, forkRepoCleanup := getForkDestination(t, cfg.Storages[0])
defer forkRepoCleanup()
@@ -64,6 +63,6 @@ func testCloneFromPoolHTTP(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Ser
// feature is a branch known to exist in the source repository. By looking it up in the target
// we establish that the target has branches, even though (as we saw above) it has no objects.
- testhelper.MustRunCommand(t, nil, "git", "-C", forkRepoPath, "show-ref", "--verify", "refs/heads/feature")
- testhelper.MustRunCommand(t, nil, "git", "-C", forkRepoPath, "show-ref", "--verify", fmt.Sprintf("refs/heads/%s", newBranch))
+ gittest.Exec(t, cfg, "-C", forkRepoPath, "show-ref", "--verify", "refs/heads/feature")
+ gittest.Exec(t, cfg, "-C", forkRepoPath, "show-ref", "--verify", "refs/heads/branch")
}
diff --git a/internal/gitaly/service/repository/commit_graph.go b/internal/gitaly/service/repository/commit_graph.go
index 4fe9ba38e..a7b5c9cf8 100644
--- a/internal/gitaly/service/repository/commit_graph.go
+++ b/internal/gitaly/service/repository/commit_graph.go
@@ -2,42 +2,63 @@ package repository
import (
"context"
- "fmt"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
const (
- CommitGraphRelPath = "objects/info/commit-graph"
+ CommitGraphRelPath = "objects/info/commit-graph"
+ CommitGraphsRelPath = "objects/info/commit-graphs"
+ CommitGraphChainRelPath = CommitGraphsRelPath + "/commit-graph-chain"
)
// WriteCommitGraph writes or updates the commit-graph file in a repository
-func (s *server) WriteCommitGraph(ctx context.Context, in *gitalypb.WriteCommitGraphRequest) (*gitalypb.WriteCommitGraphResponse, error) {
- if err := s.writeCommitGraph(ctx, in); err != nil {
- return nil, helper.ErrInternal(fmt.Errorf("WriteCommitGraph: gitCommand: %v", err))
+func (s *server) WriteCommitGraph(
+ ctx context.Context,
+ in *gitalypb.WriteCommitGraphRequest,
+) (*gitalypb.WriteCommitGraphResponse, error) {
+ if err := s.writeCommitGraph(ctx, in.GetRepository(), in.GetSplitStrategy()); err != nil {
+ return nil, err
}
return &gitalypb.WriteCommitGraphResponse{}, nil
}
-func (s *server) writeCommitGraph(ctx context.Context, in *gitalypb.WriteCommitGraphRequest) error {
- cmd, err := s.gitCmdFactory.New(ctx, in.GetRepository(),
+func (s *server) writeCommitGraph(
+ ctx context.Context,
+ repo repository.GitRepo,
+ splitStrategy gitalypb.WriteCommitGraphRequest_SplitStrategy,
+) error {
+ flags := []git.Option{
+ git.Flag{Name: "--reachable"},
+ }
+
+ switch splitStrategy {
+ case gitalypb.WriteCommitGraphRequest_SizeMultiple:
+ flags = append(flags,
+ git.Flag{Name: "--split"},
+ git.ValueFlag{Name: "--size-multiple", Value: "4"},
+ )
+ default:
+ return helper.ErrInvalidArgumentf("unsupported split strategy: %v", splitStrategy)
+ }
+
+ cmd, err := s.gitCmdFactory.New(ctx, repo,
git.SubSubCmd{
Name: "commit-graph",
Action: "write",
- Flags: []git.Option{
- git.Flag{Name: "--reachable"},
- },
+ Flags: flags,
},
)
if err != nil {
- return err
+ return helper.ErrInternal(err)
}
if err := cmd.Wait(); err != nil {
- return err
+ return helper.ErrInternal(err)
}
return nil
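Client-side, the split strategy is now mandatory; a hypothetical call mirroring the tests below (any unsupported strategy value yields an InvalidArgument error):

resp, err := client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{
	Repository:    repo,
	SplitStrategy: gitalypb.WriteCommitGraphRequest_SizeMultiple,
})
if err != nil {
	return fmt.Errorf("write commit-graph: %w", err)
}
// The response carries no payload; success means the commit-graph chain under
// objects/info/commit-graphs/ has been written or updated.
_ = resp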
diff --git a/internal/gitaly/service/repository/commit_graph_test.go b/internal/gitaly/service/repository/commit_graph_test.go
index 83be54e32..76239f3b5 100644
--- a/internal/gitaly/service/repository/commit_graph_test.go
+++ b/internal/gitaly/service/repository/commit_graph_test.go
@@ -6,81 +6,148 @@ import (
"testing"
"time"
- "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "google.golang.org/grpc/codes"
+ "google.golang.org/grpc/status"
)
-func TestWriteCommitGraph(t *testing.T) {
+func TestWriteCommitGraph_withExistingCommitGraphCreatedWithDefaults(t *testing.T) {
cfg, repo, repoPath, client := setupRepositoryService(t)
- ctx, cancel := testhelper.Context()
- defer cancel()
-
commitGraphPath := filepath.Join(repoPath, CommitGraphRelPath)
+ require.NoFileExists(t, commitGraphPath, "sanity check no commit graph")
- _, err := os.Stat(commitGraphPath)
- assert.True(t, os.IsNotExist(err))
+ chainPath := filepath.Join(repoPath, CommitGraphChainRelPath)
+ require.NoFileExists(t, chainPath, "sanity check no commit graph chain exists")
- gittest.CreateCommit(
+ // write commit graph using an old approach
+ gittest.Exec(t, cfg, "-C", repoPath, "commit-graph", "write", "--reachable")
+ require.FileExists(t, commitGraphPath)
+
+ treeEntry := gittest.TreeEntry{Mode: "100644", Path: "file.txt", Content: "something"}
+ gittest.WriteCommit(
t,
cfg,
repoPath,
- t.Name(),
- &gittest.CreateCommitOpts{Message: t.Name()},
+ gittest.WithBranch(t.Name()),
+ gittest.WithTreeEntries(treeEntry),
)
- res, err := client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{Repository: repo})
- assert.NoError(t, err)
- assert.NotNil(t, res)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ res, err := client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{
+ Repository: repo,
+ SplitStrategy: gitalypb.WriteCommitGraphRequest_SizeMultiple,
+ })
+ require.NoError(t, err)
+ require.NotNil(t, res)
- assert.FileExists(t, commitGraphPath)
+ require.FileExists(t, chainPath, "commit graph chain should be created")
+ require.NoFileExists(t, commitGraphPath, "commit-graph file should be replaced with commit graph chain")
}
-func TestUpdateCommitGraph(t *testing.T) {
- cfg, repo, repoPath, client := setupRepositoryService(t)
+func TestWriteCommitGraph(t *testing.T) {
+ _, repo, repoPath, client := setupRepositoryService(t)
ctx, cancel := testhelper.Context()
defer cancel()
- gittest.CreateCommit(
- t,
- cfg,
- repoPath,
- t.Name(),
- &gittest.CreateCommitOpts{Message: t.Name()},
- )
+ chainPath := filepath.Join(repoPath, CommitGraphChainRelPath)
- commitGraphPath := filepath.Join(repoPath, CommitGraphRelPath)
+ require.NoFileExists(t, chainPath)
+
+ res, err := client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{
+ Repository: repo,
+ SplitStrategy: gitalypb.WriteCommitGraphRequest_SizeMultiple,
+ })
+ require.NoError(t, err)
+ require.NotNil(t, res)
+
+ require.FileExists(t, chainPath)
+}
+
+func TestWriteCommitGraph_validationChecks(t *testing.T) {
+ _, repo, _, client := setupRepositoryService(t, testserver.WithDisablePraefect())
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ for _, tc := range []struct {
+ desc string
+ req *gitalypb.WriteCommitGraphRequest
+ expErr error
+ }{
+ {
+ desc: "invalid split strategy",
+ req: &gitalypb.WriteCommitGraphRequest{
+ Repository: repo,
+ SplitStrategy: gitalypb.WriteCommitGraphRequest_SplitStrategy(42),
+ },
+ expErr: status.Error(codes.InvalidArgument, "unsupported split strategy: 42"),
+ },
+ {
+ desc: "no repository",
+ req: &gitalypb.WriteCommitGraphRequest{},
+ expErr: status.Error(codes.InvalidArgument, `GetStorageByName: no such storage: ""`),
+ },
+ {
+ desc: "invalid storage",
+ req: &gitalypb.WriteCommitGraphRequest{Repository: &gitalypb.Repository{StorageName: "invalid"}},
+ expErr: status.Error(codes.InvalidArgument, `GetStorageByName: no such storage: "invalid"`),
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ _, err := client.WriteCommitGraph(ctx, tc.req)
+ require.Equal(t, tc.expErr, err)
+ })
+ }
+}
+
+func TestUpdateCommitGraph(t *testing.T) {
+ cfg, repo, repoPath, client := setupRepositoryService(t)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
- _, err := os.Stat(commitGraphPath)
- assert.True(t, os.IsNotExist(err))
+ chainPath := filepath.Join(repoPath, CommitGraphChainRelPath)
+ require.NoFileExists(t, chainPath)
- res, err := client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{Repository: repo})
- assert.NoError(t, err)
- assert.NotNil(t, res)
- assert.FileExists(t, commitGraphPath)
+ res, err := client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{
+ Repository: repo,
+ SplitStrategy: gitalypb.WriteCommitGraphRequest_SizeMultiple,
+ })
+ require.NoError(t, err)
+ require.NotNil(t, res)
+ require.FileExists(t, chainPath)
- // Reset the mtime of commit-graph file to use
+ // Reset the mtime of the commit-graph-chain file to use
 // as a basis for detecting changes.
- assert.NoError(t, os.Chtimes(commitGraphPath, time.Time{}, time.Time{}))
- info, err := os.Stat(commitGraphPath)
- assert.NoError(t, err)
+ require.NoError(t, os.Chtimes(chainPath, time.Time{}, time.Time{}))
+ info, err := os.Stat(chainPath)
+ require.NoError(t, err)
mt := info.ModTime()
- gittest.CreateCommit(
+ treeEntry := gittest.TreeEntry{Mode: "100644", Path: "file.txt", Content: "something"}
+ gittest.WriteCommit(
t,
cfg,
repoPath,
- t.Name(),
- &gittest.CreateCommitOpts{Message: t.Name()},
+ gittest.WithBranch(t.Name()),
+ gittest.WithTreeEntries(treeEntry),
)
- res, err = client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{Repository: repo})
- assert.NoError(t, err)
- assert.NotNil(t, res)
- assert.FileExists(t, commitGraphPath)
+ res, err = client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{
+ Repository: repo,
+ SplitStrategy: gitalypb.WriteCommitGraphRequest_SizeMultiple,
+ })
+ require.NoError(t, err)
+ require.NotNil(t, res)
+ require.FileExists(t, chainPath)
- assertModTimeAfter(t, mt, commitGraphPath)
+ assertModTimeAfter(t, mt, chainPath)
}
diff --git a/internal/gitaly/service/repository/config.go b/internal/gitaly/service/repository/config.go
index 98f31a154..f757f5e04 100644
--- a/internal/gitaly/service/repository/config.go
+++ b/internal/gitaly/service/repository/config.go
@@ -2,16 +2,75 @@ package repository
import (
"context"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "gitlab.com/gitlab-org/gitaly/streamio"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
+// GetConfig reads the repository's gitconfig file and returns its contents.
+func (s *server) GetConfig(
+ request *gitalypb.GetConfigRequest,
+ stream gitalypb.RepositoryService_GetConfigServer,
+) error {
+ repoPath, err := s.locator.GetPath(request.GetRepository())
+ if err != nil {
+ return err
+ }
+
+ configPath := filepath.Join(repoPath, "config")
+
+ gitconfig, err := os.Open(configPath)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return status.Errorf(codes.NotFound, "opening gitconfig: %v", err)
+ }
+ return helper.ErrInternalf("opening gitconfig: %v", err)
+ }
+
+ writer := streamio.NewWriter(func(p []byte) error {
+ return stream.Send(&gitalypb.GetConfigResponse{
+ Data: p,
+ })
+ })
+
+ if _, err := io.Copy(writer, gitconfig); err != nil {
+ return helper.ErrInternalf("sending config: %v", err)
+ }
+
+ return nil
+}
+
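A consumer-side sketch of the new GetConfig RPC (not part of this patch), assuming an established RepositoryServiceClient: the config arrives in chunked GetConfigResponse messages, so callers reassemble it via streamio, just as the new test does.

// fetchConfig is an illustrative helper that collects the streamed gitconfig
// into a single string.
func fetchConfig(ctx context.Context, client gitalypb.RepositoryServiceClient, repo *gitalypb.Repository) (string, error) {
	stream, err := client.GetConfig(ctx, &gitalypb.GetConfigRequest{Repository: repo})
	if err != nil {
		return "", err
	}

	reader := streamio.NewReader(func() ([]byte, error) {
		resp, err := stream.Recv()
		// GetData is nil-safe, so a terminating error still yields an empty chunk.
		return resp.GetData(), err
	})

	contents, err := ioutil.ReadAll(reader)
	return string(contents), err
}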
func (s *server) DeleteConfig(ctx context.Context, req *gitalypb.DeleteConfigRequest) (*gitalypb.DeleteConfigResponse, error) {
+ /*
+ * We need to vote both before and after the change because we don't have proper commit
+ * semantics: it's not easily feasible to lock the config manually, vote on it and only
+ * commit the change if the vote was successful. Git automatically does this for us for ref
+ * updates via the reference-transaction hook, but here we'll need to use an approximation.
+ *
+ * As an approximation, we thus vote both before and after the change. Praefect requires the
+ * vote up front because if an RPC failed and no vote exists, it assumes no change was
+ * performed, and that's bad for us if we fail _after_ the modification but _before_ the
+ * vote on changed data. And the second vote is required such that we can assert that all
+ * Gitaly nodes actually did perform the same change.
+ */
+ if err := s.voteOnConfig(ctx, req.GetRepository()); err != nil {
+ return nil, helper.ErrInternal(fmt.Errorf("preimage vote on config: %w", err))
+ }
+
for _, k := range req.Keys {
// We assume k does not contain any secrets; it is leaked via 'ps'.
cmd, err := s.gitCmdFactory.New(ctx, req.Repository, git.SubCmd{
@@ -32,6 +91,10 @@ func (s *server) DeleteConfig(ctx context.Context, req *gitalypb.DeleteConfigReq
}
}
+ if err := s.voteOnConfig(ctx, req.GetRepository()); err != nil {
+ return nil, helper.ErrInternal(fmt.Errorf("postimage vote on config: %w", err))
+ }
+
return &gitalypb.DeleteConfigResponse{}, nil
}
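The comment in DeleteConfig above describes a two-phase vote; SetConfig below reuses the same pattern. A hedged sketch of that pattern as a standalone helper — the function and parameter names are hypothetical, the real code inlines the calls:

// votedConfigChange votes on the current config (preimage), applies the change,
// then votes again on the result (postimage) so Praefect can assert that every
// Gitaly node performed the same modification.
func votedConfigChange(ctx context.Context, vote func(context.Context) error, change func() error) error {
	if err := vote(ctx); err != nil {
		return fmt.Errorf("preimage vote on config: %w", err)
	}

	if err := change(); err != nil {
		return err
	}

	if err := vote(ctx); err != nil {
		return fmt.Errorf("postimage vote on config: %w", err)
	}

	return nil
}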
@@ -49,5 +112,56 @@ func (s *server) SetConfig(ctx context.Context, req *gitalypb.SetConfigRequest)
return nil, err
}
- return client.SetConfig(clientCtx, req)
+ /*
+ * We're voting twice, once on the preimage and once on the postimage. Please refer to the
+ * comment in DeleteConfig() for the reason.
+ */
+ if err := s.voteOnConfig(ctx, req.GetRepository()); err != nil {
+ return nil, helper.ErrInternalf("preimage vote on config: %v", err)
+ }
+
+ response, err := client.SetConfig(clientCtx, req)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := s.voteOnConfig(ctx, req.GetRepository()); err != nil {
+ return nil, helper.ErrInternalf("postimage vote on config: %v", err)
+ }
+
+ return response, nil
+}
+
+func (s *server) voteOnConfig(ctx context.Context, repo *gitalypb.Repository) error {
+ if featureflag.IsDisabled(ctx, featureflag.TxConfig) {
+ return nil
+ }
+
+ return transaction.RunOnContext(ctx, func(tx txinfo.Transaction, praefect txinfo.PraefectServer) error {
+ repoPath, err := s.locator.GetPath(repo)
+ if err != nil {
+ return fmt.Errorf("get repo path: %w", err)
+ }
+
+ config, err := os.Open(filepath.Join(repoPath, "config"))
+ if err != nil {
+ return fmt.Errorf("open repo config: %w", err)
+ }
+
+ hash := voting.NewVoteHash()
+ if _, err := io.Copy(hash, config); err != nil {
+ return fmt.Errorf("seeding vote: %w", err)
+ }
+
+ vote, err := hash.Vote()
+ if err != nil {
+ return fmt.Errorf("computing vote: %w", err)
+ }
+
+ if err := s.txManager.Vote(ctx, tx, praefect, vote); err != nil {
+ return fmt.Errorf("casting vote: %w", err)
+ }
+
+ return nil
+ })
}
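One property the new transactional tests below rely on, as a short sketch (not part of this patch): streaming the config file through voting.NewVoteHash, as voteOnConfig does, yields the same vote as hashing the complete contents in one go with voting.VoteFromData.

// expectedConfigVote is an illustrative helper that computes the vote the
// simple way, by reading the whole config file and hashing it at once.
func expectedConfigVote(repoPath string) (voting.Vote, error) {
	contents, err := ioutil.ReadFile(filepath.Join(repoPath, "config"))
	if err != nil {
		return voting.Vote{}, err
	}
	return voting.VoteFromData(contents), nil
}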
diff --git a/internal/gitaly/service/repository/config_test.go b/internal/gitaly/service/repository/config_test.go
index c8f108075..00a365b7f 100644
--- a/internal/gitaly/service/repository/config_test.go
+++ b/internal/gitaly/service/repository/config_test.go
@@ -3,6 +3,10 @@ package repository
import (
"bufio"
"bytes"
+ "context"
+ "io/ioutil"
+ "os"
+ "path/filepath"
"strings"
"testing"
@@ -10,12 +14,70 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "gitlab.com/gitlab-org/gitaly/streamio"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
+func TestGetConfig(t *testing.T) {
+ cfg, client := setupRepositoryServiceWithoutRepo(t)
+
+ getConfig := func(
+ t *testing.T,
+ client gitalypb.RepositoryServiceClient,
+ repo *gitalypb.Repository,
+ ) (string, error) {
+ ctx, cleanup := testhelper.Context()
+ defer cleanup()
+
+ stream, err := client.GetConfig(ctx, &gitalypb.GetConfigRequest{
+ Repository: repo,
+ })
+ require.NoError(t, err)
+
+ reader := streamio.NewReader(func() ([]byte, error) {
+ response, err := stream.Recv()
+ var bytes []byte
+ if response != nil {
+ bytes = response.Data
+ }
+ return bytes, err
+ })
+
+ contents, err := ioutil.ReadAll(reader)
+ return string(contents), err
+ }
+
+ t.Run("normal repo", func(t *testing.T) {
+ repo, _, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
+ defer cleanup()
+
+ config, err := getConfig(t, client, repo)
+ require.NoError(t, err)
+ require.Equal(t, "[core]\n\trepositoryformatversion = 0\n\tfilemode = true\n\tbare = true\n", config)
+ })
+
+ t.Run("missing config", func(t *testing.T) {
+ repo, repoPath, cleanup := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
+ defer cleanup()
+
+ configPath := filepath.Join(repoPath, "config")
+ require.NoError(t, os.Remove(configPath))
+
+ config, err := getConfig(t, client, repo)
+ require.Equal(t, status.Errorf(codes.NotFound, "opening gitconfig: open %s: no such file or directory", configPath), err)
+ require.Equal(t, "", config)
+ })
+}
+
func TestDeleteConfig(t *testing.T) {
cfg, client := setupRepositoryServiceWithoutRepo(t)
@@ -44,11 +106,11 @@ func TestDeleteConfig(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanupFn)
for _, k := range tc.addKeys {
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", k, "blabla")
+ gittest.Exec(t, cfg, "-C", repoPath, "config", k, "blabla")
}
_, err := client.DeleteConfig(ctx, &gitalypb.DeleteConfigRequest{Repository: repo, Keys: tc.reqKeys})
@@ -59,7 +121,7 @@ func TestDeleteConfig(t *testing.T) {
return
}
- actualConfig := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "-l")
+ actualConfig := gittest.Exec(t, cfg, "-C", repoPath, "config", "-l")
scanner := bufio.NewScanner(bytes.NewReader(actualConfig))
for scanner.Scan() {
for _, k := range tc.reqKeys {
@@ -72,6 +134,47 @@ func TestDeleteConfig(t *testing.T) {
}
}
+func TestDeleteConfigTransactional(t *testing.T) {
+ testhelper.NewFeatureSets([]featureflag.FeatureFlag{
+ featureflag.TxConfig,
+ }).Run(t, func(t *testing.T, ctx context.Context) {
+ var votes []voting.Vote
+ txManager := transaction.MockManager{
+ VoteFn: func(_ context.Context, _ txinfo.Transaction, _ txinfo.PraefectServer, vote voting.Vote) error {
+ votes = append(votes, vote)
+ return nil
+ },
+ }
+
+ cfg, repo, repoPath, client := setupRepositoryService(t, testserver.WithTransactionManager(&txManager))
+
+ ctx, err := (&txinfo.PraefectServer{SocketPath: "i-dont-care"}).Inject(ctx)
+ require.NoError(t, err)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", true)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ unmodifiedContents := testhelper.MustReadFile(t, filepath.Join(repoPath, "config"))
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "delete.me", "now")
+ modifiedContents := testhelper.MustReadFile(t, filepath.Join(repoPath, "config"))
+
+ _, err = client.DeleteConfig(ctx, &gitalypb.DeleteConfigRequest{
+ Repository: repo,
+ Keys: []string{"delete.me"},
+ })
+ require.NoError(t, err)
+
+ if featureflag.IsEnabled(ctx, featureflag.TxConfig) {
+ require.Equal(t, []voting.Vote{
+ voting.VoteFromData(modifiedContents),
+ voting.VoteFromData(unmodifiedContents),
+ }, votes)
+ } else {
+ require.Len(t, votes, 0)
+ }
+ })
+}
+
func testSetConfig(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
cfg, _, _, client := setupRepositoryServiceWithRuby(t, cfg, rubySrv)
@@ -104,7 +207,7 @@ func testSetConfig(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
ctx, cancel := testhelper.Context()
defer cancel()
- testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ testRepo, testRepoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
defer cleanupFn()
_, err := client.SetConfig(ctx, &gitalypb.SetConfigRequest{Repository: testRepo, Entries: tc.entries})
@@ -115,7 +218,7 @@ func testSetConfig(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
return
}
- actualConfigBytes := testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "config", "--local", "-l")
+ actualConfigBytes := gittest.Exec(t, cfg, "-C", testRepoPath, "config", "--local", "-l")
scanner := bufio.NewScanner(bytes.NewReader(actualConfigBytes))
var actualConfig []string
@@ -130,3 +233,51 @@ func testSetConfig(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
})
}
}
+
+func testSetConfigTransactional(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
+ testhelper.NewFeatureSets([]featureflag.FeatureFlag{
+ featureflag.TxConfig,
+ }).Run(t, func(t *testing.T, ctx context.Context) {
+ var votes []voting.Vote
+
+ txManager := transaction.MockManager{
+ VoteFn: func(_ context.Context, _ txinfo.Transaction, _ txinfo.PraefectServer, vote voting.Vote) error {
+ votes = append(votes, vote)
+ return nil
+ },
+ }
+
+ _, repo, repoPath, client := setupRepositoryServiceWithRuby(t, cfg, rubySrv, testserver.WithTransactionManager(&txManager))
+
+ ctx, err := (&txinfo.PraefectServer{SocketPath: "i-dont-care"}).Inject(ctx)
+ require.NoError(t, err)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", true)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ unmodifiedContents := testhelper.MustReadFile(t, filepath.Join(repoPath, "config"))
+
+ _, err = client.SetConfig(ctx, &gitalypb.SetConfigRequest{
+ Repository: repo,
+ Entries: []*gitalypb.SetConfigRequest_Entry{
+ &gitalypb.SetConfigRequest_Entry{
+ Key: "set.me",
+ Value: &gitalypb.SetConfigRequest_Entry_ValueStr{
+ "something",
+ },
+ },
+ },
+ })
+ require.NoError(t, err)
+
+ if featureflag.IsEnabled(ctx, featureflag.TxConfig) {
+ modifiedContents := string(unmodifiedContents) + "[set]\n\tme = something\n"
+ require.Equal(t, []voting.Vote{
+ voting.VoteFromData(unmodifiedContents),
+ voting.VoteFromData([]byte(modifiedContents)),
+ }, votes)
+ } else {
+ require.Len(t, votes, 0)
+ }
+ })
+}
diff --git a/internal/gitaly/service/repository/create.go b/internal/gitaly/service/repository/create.go
index 59ff6aabf..118c95526 100644
--- a/internal/gitaly/service/repository/create.go
+++ b/internal/gitaly/service/repository/create.go
@@ -3,11 +3,17 @@ package repository
import (
"bytes"
"context"
+ "fmt"
"os"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "google.golang.org/grpc/codes"
+ "google.golang.org/grpc/status"
)
func (s *server) CreateRepository(ctx context.Context, req *gitalypb.CreateRepositoryRequest) (*gitalypb.CreateRepositoryResponse, error) {
@@ -40,5 +46,39 @@ func (s *server) CreateRepository(ctx context.Context, req *gitalypb.CreateRepos
return nil, helper.ErrInternalf("git init stderr: %q, err: %w", stderr, err)
}
+ // Given that git-init(1) does not create any refs, we never cast a vote on it. We thus do
+ // manual voting here by hashing all references of the repository. While this would in the
+ // general case hash nothing given that no refs exist yet, due to the idempotency of this
+ // RPC it may be that we already do have some preexisting refs (e.g. CreateRepository is
+ // called for a repo which already exists and has refs). In that case, voting ensures that
+ // all replicas have the same set of preexisting refs.
+ if err := transaction.RunOnContext(ctx, func(tx txinfo.Transaction, server txinfo.PraefectServer) error {
+ hash := voting.NewVoteHash()
+
+ cmd, err := s.gitCmdFactory.New(ctx, req.GetRepository(), git.SubCmd{
+ Name: "for-each-ref",
+ }, git.WithStdout(hash))
+ if err != nil {
+ return fmt.Errorf("for-each-ref: %v", err)
+ }
+
+ if err := cmd.Wait(); err != nil {
+ return fmt.Errorf("waiting for for-each-ref: %v", err)
+ }
+
+ vote, err := hash.Vote()
+ if err != nil {
+ return err
+ }
+
+ if err := s.txManager.Vote(ctx, tx, server, vote); err != nil {
+ return fmt.Errorf("casting vote: %w", err)
+ }
+
+ return nil
+ }); err != nil {
+ return nil, status.Errorf(codes.Aborted, "vote failed after initializing repo: %v", err)
+ }
+
return &gitalypb.CreateRepositoryResponse{}, nil
}
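The vote cast by CreateRepository above is just a hash over the repository's references; a sketch of how to reproduce it (not part of this patch), mirroring what TestCreateRepositoryTransactional checks below:

// expectedRefsVote is an illustrative test helper: the vote equals
// voting.VoteFromData over the raw `git for-each-ref` output, which is empty
// for a freshly initialised repository and non-empty when the RPC is retried
// against a repository that already has refs.
func expectedRefsVote(t *testing.T, cfg config.Cfg, repoPath string) voting.Vote {
	refs := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref")
	return voting.VoteFromData(refs)
}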
diff --git a/internal/gitaly/service/repository/create_bundle_test.go b/internal/gitaly/service/repository/create_bundle_test.go
index dea2a1d9d..d5a77a33a 100644
--- a/internal/gitaly/service/repository/create_bundle_test.go
+++ b/internal/gitaly/service/repository/create_bundle_test.go
@@ -24,17 +24,17 @@ func TestSuccessfulCreateBundleRequest(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- // create a work tree with a HEAD pointing to a commit that is missing.
- // CreateBundle should clean this up before creating the bundle
- sha, branchName := gittest.CreateCommitOnNewBranch(t, cfg, repoPath)
+ // Create a work tree with a HEAD pointing to a commit that is missing. CreateBundle should
+ // clean this up before creating the bundle.
+ sha := gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch("branch"))
require.NoError(t, os.MkdirAll(filepath.Join(repoPath, "gitlab-worktree"), 0755))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "worktree", "add", "gitlab-worktree/worktree1", sha)
+ gittest.Exec(t, cfg, "-C", repoPath, "worktree", "add", "gitlab-worktree/worktree1", sha.String())
require.NoError(t, os.Chtimes(filepath.Join(repoPath, "gitlab-worktree", "worktree1"), time.Now().Add(-7*time.Hour), time.Now().Add(-7*time.Hour)))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "-D", branchName)
- require.NoError(t, os.Remove(filepath.Join(repoPath, "objects", sha[0:2], sha[2:])))
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "-D", "branch")
+ require.NoError(t, os.Remove(filepath.Join(repoPath, "objects", sha.String()[0:2], sha.String()[2:])))
request := &gitalypb.CreateBundleRequest{Repository: repo}
@@ -56,7 +56,7 @@ func TestSuccessfulCreateBundleRequest(t *testing.T) {
_, err = io.Copy(dstFile, reader)
require.NoError(t, err)
- output := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "bundle", "verify", dstFile.Name())
+ output := gittest.Exec(t, cfg, "-C", repoPath, "bundle", "verify", dstFile.Name())
// Extra sanity; running verify should fail on bad bundles
require.Contains(t, string(output), "The bundle records a complete history")
}
diff --git a/internal/gitaly/service/repository/create_from_bundle_test.go b/internal/gitaly/service/repository/create_from_bundle_test.go
index de7890895..24cf8a0e9 100644
--- a/internal/gitaly/service/repository/create_from_bundle_test.go
+++ b/internal/gitaly/service/repository/create_from_bundle_test.go
@@ -9,7 +9,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/tempdir"
@@ -17,6 +17,7 @@ import (
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
"google.golang.org/grpc/codes"
+ "google.golang.org/grpc/status"
)
func TestServer_CreateRepositoryFromBundle_successful(t *testing.T) {
@@ -30,9 +31,9 @@ func TestServer_CreateRepositoryFromBundle_successful(t *testing.T) {
require.NoError(t, err)
bundlePath := filepath.Join(tmpdir, "original.bundle")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", "refs/custom-refs/ref1", "HEAD")
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "refs/custom-refs/ref1", "HEAD")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "bundle", "create", bundlePath, "--all")
+ gittest.Exec(t, cfg, "-C", repoPath, "bundle", "create", bundlePath, "--all")
defer os.RemoveAll(bundlePath)
stream, err := client.CreateRepositoryFromBundle(ctx)
@@ -42,7 +43,7 @@ func TestServer_CreateRepositoryFromBundle_successful(t *testing.T) {
StorageName: repo.GetStorageName(),
RelativePath: "a-repo-from-bundle",
}
- importedRepo := localrepo.New(git.NewExecCommandFactory(cfg), importedRepoProto, cfg)
+ importedRepo := localrepo.NewTestRepo(t, cfg, importedRepoProto)
importedRepoPath, err := locator.GetPath(importedRepoProto)
require.NoError(t, err)
defer os.RemoveAll(importedRepoPath)
@@ -70,7 +71,7 @@ func TestServer_CreateRepositoryFromBundle_successful(t *testing.T) {
_, err = stream.CloseAndRecv()
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", importedRepoPath, "fsck")
+ gittest.Exec(t, cfg, "-C", importedRepoPath, "fsck")
info, err := os.Lstat(filepath.Join(importedRepoPath, "hooks"))
require.NoError(t, err)
@@ -147,8 +148,7 @@ func TestServer_CreateRepositoryFromBundle_failed_existing_directory(t *testing.
}))
_, err = stream.CloseAndRecv()
- testhelper.RequireGrpcError(t, err, codes.FailedPrecondition)
- testhelper.GrpcErrorHasMessage(t, err, "CreateRepositoryFromBundle: target directory is non-empty")
+ require.Equal(t, status.Error(codes.FailedPrecondition, "CreateRepositoryFromBundle: target directory is non-empty"), err)
}
func TestSanitizedError(t *testing.T) {
diff --git a/internal/gitaly/service/repository/create_from_snapshot_test.go b/internal/gitaly/service/repository/create_from_snapshot_test.go
index 54d579f15..219c2e61f 100644
--- a/internal/gitaly/service/repository/create_from_snapshot_test.go
+++ b/internal/gitaly/service/repository/create_from_snapshot_test.go
@@ -71,7 +71,7 @@ func createFromSnapshot(t *testing.T, req *gitalypb.CreateRepositoryFromSnapshot
func TestCreateRepositoryFromSnapshotSuccess(t *testing.T) {
cfg := testcfg.Build(t)
- _, sourceRepoPath, cleanTestRepo := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ _, sourceRepoPath, cleanTestRepo := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanTestRepo)
// Ensure these won't be in the archive
@@ -116,7 +116,7 @@ func TestCreateRepositoryFromSnapshotSuccess(t *testing.T) {
func TestCreateRepositoryFromSnapshotFailsIfRepositoryExists(t *testing.T) {
cfg := testcfg.Build(t)
- repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanupFn)
req := &gitalypb.CreateRepositoryFromSnapshotRequest{Repository: repo}
@@ -128,7 +128,7 @@ func TestCreateRepositoryFromSnapshotFailsIfRepositoryExists(t *testing.T) {
func TestCreateRepositoryFromSnapshotFailsIfBadURL(t *testing.T) {
cfg := testcfg.Build(t)
- repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
cleanupFn() // free up the destination dir for use
req := &gitalypb.CreateRepositoryFromSnapshotRequest{
@@ -144,7 +144,7 @@ func TestCreateRepositoryFromSnapshotFailsIfBadURL(t *testing.T) {
func TestCreateRepositoryFromSnapshotBadRequests(t *testing.T) {
cfg := testcfg.Build(t)
- repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
cleanupFn() // free up the destination dir for use
testCases := []struct {
@@ -199,7 +199,7 @@ func TestCreateRepositoryFromSnapshotBadRequests(t *testing.T) {
func TestCreateRepositoryFromSnapshotHandlesMalformedResponse(t *testing.T) {
cfg := testcfg.Build(t)
- repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanupFn)
require.NoError(t, os.Remove(filepath.Join(repoPath, "config")))
@@ -227,5 +227,5 @@ func TestCreateRepositoryFromSnapshotHandlesMalformedResponse(t *testing.T) {
require.Nil(t, rsp)
// Ensure that a partial result is not left in place
- testhelper.AssertPathNotExists(t, repoPath)
+ require.NoFileExists(t, repoPath)
}
diff --git a/internal/gitaly/service/repository/create_from_url_test.go b/internal/gitaly/service/repository/create_from_url_test.go
index e543d01f7..58e7fba91 100644
--- a/internal/gitaly/service/repository/create_from_url_test.go
+++ b/internal/gitaly/service/repository/create_from_url_test.go
@@ -48,9 +48,9 @@ func TestSuccessfulCreateRepositoryFromURLRequest(t *testing.T) {
_, err := client.CreateRepositoryFromURL(ctx, req)
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", importedRepoPath, "fsck")
+ gittest.Exec(t, cfg, "-C", importedRepoPath, "fsck")
- remotes := testhelper.MustRunCommand(t, nil, "git", "-C", importedRepoPath, "remote")
+ remotes := gittest.Exec(t, cfg, "-C", importedRepoPath, "remote")
require.NotContains(t, string(remotes), "origin")
info, err := os.Lstat(filepath.Join(importedRepoPath, "hooks"))
diff --git a/internal/gitaly/service/repository/create_test.go b/internal/gitaly/service/repository/create_test.go
index ff6156b54..2c916bea0 100644
--- a/internal/gitaly/service/repository/create_test.go
+++ b/internal/gitaly/service/repository/create_test.go
@@ -1,8 +1,8 @@
package repository
import (
+ "context"
"fmt"
- "io/ioutil"
"os"
"path"
"path/filepath"
@@ -12,11 +12,18 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config/auth"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "golang.org/x/sys/unix"
"google.golang.org/grpc/codes"
)
@@ -40,30 +47,29 @@ func TestCreateRepositorySuccess(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- storageDir := cfg.Storages[0].Path
relativePath := "create-repository-test.git"
- repoDir := filepath.Join(storageDir, relativePath)
- require.NoError(t, os.RemoveAll(repoDir))
+ repoDir := filepath.Join(cfg.Storages[0].Path, relativePath)
repo := &gitalypb.Repository{StorageName: cfg.Storages[0].Name, RelativePath: relativePath}
req := &gitalypb.CreateRepositoryRequest{Repository: repo}
_, err := client.CreateRepository(ctx, req)
require.NoError(t, err)
- defer func() { require.NoError(t, os.RemoveAll(repoDir)) }()
- fi, err := os.Stat(repoDir)
- require.NoError(t, err)
- require.Equal(t, "drwxr-x---", fi.Mode().String())
+ require.NoError(t, unix.Access(repoDir, unix.R_OK))
+ require.NoError(t, unix.Access(repoDir, unix.W_OK))
+ require.NoError(t, unix.Access(repoDir, unix.X_OK))
for _, dir := range []string{repoDir, filepath.Join(repoDir, "refs")} {
fi, err := os.Stat(dir)
require.NoError(t, err)
require.True(t, fi.IsDir(), "%q must be a directory", fi.Name())
- }
- symRef, err := ioutil.ReadFile(path.Join(repoDir, "HEAD"))
- require.NoError(t, err)
+ require.NoError(t, unix.Access(dir, unix.R_OK))
+ require.NoError(t, unix.Access(dir, unix.W_OK))
+ require.NoError(t, unix.Access(dir, unix.X_OK))
+ }
+ symRef := testhelper.MustReadFile(t, path.Join(repoDir, "HEAD"))
require.Equal(t, symRef, []byte(fmt.Sprintf("ref: %s\n", git.DefaultRef)))
}
@@ -78,7 +84,6 @@ func TestCreateRepositoryFailure(t *testing.T) {
_, err := os.Create(fullPath)
require.NoError(t, err)
- defer os.RemoveAll(fullPath)
_, err = client.CreateRepository(ctx, &gitalypb.CreateRepositoryRequest{
Repository: &gitalypb.Repository{StorageName: cfg.Storages[0].Name, RelativePath: "foo.git"},
@@ -113,19 +118,79 @@ func TestCreateRepositoryFailureInvalidArgs(t *testing.T) {
}
}
+func TestCreateRepositoryTransactional(t *testing.T) {
+ var actualVote voting.Vote
+ var called int
+
+ mockTxManager := transaction.MockManager{
+ VoteFn: func(ctx context.Context, tx txinfo.Transaction, server txinfo.PraefectServer, v voting.Vote) error {
+ actualVote = v
+ called++
+ return nil
+ },
+ }
+
+ cfg, client := setupRepositoryServiceWithoutRepo(t, testserver.WithTransactionManager(&mockTxManager))
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ ctx, err := (&txinfo.PraefectServer{SocketPath: "something"}).Inject(ctx)
+ require.NoError(t, err)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", true)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ t.Run("initial creation without refs", func(t *testing.T) {
+ called = 0
+ actualVote = voting.Vote{}
+
+ _, err = client.CreateRepository(ctx, &gitalypb.CreateRepositoryRequest{
+ Repository: &gitalypb.Repository{
+ StorageName: cfg.Storages[0].Name,
+ RelativePath: "repo.git",
+ },
+ })
+ require.NoError(t, err)
+
+ require.DirExists(t, filepath.Join(cfg.Storages[0].Path, "repo.git"))
+ require.Equal(t, 1, called, "expected transactional vote")
+ require.Equal(t, voting.VoteFromData([]byte{}), actualVote)
+ })
+
+ t.Run("idempotent creation with preexisting refs", func(t *testing.T) {
+ called = 0
+ actualVote = voting.Vote{}
+
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "clone.git")
+ defer cleanup()
+
+ _, err = client.CreateRepository(ctx, &gitalypb.CreateRepositoryRequest{
+ Repository: repo,
+ })
+ require.NoError(t, err)
+
+ refs := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref")
+ require.NotEmpty(t, refs)
+
+ require.Equal(t, 1, called, "expected transactional vote")
+ require.Equal(t, voting.VoteFromData(refs), actualVote)
+ })
+}
+
func TestCreateRepositoryIdempotent(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
+ cfg, repo, repoPath, client := setupRepositoryService(t)
ctx, cancel := testhelper.Context()
defer cancel()
- refsBefore := strings.Split(string(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref")), "\n")
+ refsBefore := strings.Split(string(gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref")), "\n")
req := &gitalypb.CreateRepositoryRequest{Repository: repo}
_, err := client.CreateRepository(ctx, req)
require.NoError(t, err)
- refsAfter := strings.Split(string(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref")), "\n")
+ refsAfter := strings.Split(string(gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref")), "\n")
assert.Equal(t, refsBefore, refsAfter)
}
diff --git a/internal/gitaly/service/repository/fetch.go b/internal/gitaly/service/repository/fetch.go
index 39d4ecf2f..4e3109e38 100644
--- a/internal/gitaly/service/repository/fetch.go
+++ b/internal/gitaly/service/repository/fetch.go
@@ -6,7 +6,6 @@ import (
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/remoterepo"
"gitlab.com/gitlab-org/gitaly/internal/gitalyssh"
"gitlab.com/gitlab-org/gitaly/internal/helper"
@@ -22,7 +21,7 @@ func (s *server) FetchSourceBranch(ctx context.Context, req *gitalypb.FetchSourc
return nil, helper.ErrInvalidArgument(err)
}
- targetRepo := localrepo.New(s.gitCmdFactory, req.GetRepository(), s.cfg)
+ targetRepo := s.localrepo(req.GetRepository())
sourceRepo, err := remoterepo.New(ctx, req.GetSourceRepository(), s.conns)
if err != nil {
diff --git a/internal/gitaly/service/repository/fetch_remote.go b/internal/gitaly/service/repository/fetch_remote.go
index 8dd428a13..1aa6f5a9d 100644
--- a/internal/gitaly/service/repository/fetch_remote.go
+++ b/internal/gitaly/service/repository/fetch_remote.go
@@ -14,7 +14,8 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -38,7 +39,7 @@ func (s *server) FetchRemote(ctx context.Context, req *gitalypb.FetchRemoteReque
opts.Tags = localrepo.FetchOptsTagsNone
}
- repo := localrepo.New(s.gitCmdFactory, req.GetRepository(), s.cfg)
+ repo := s.localrepo(req.GetRepository())
remoteName := req.GetRemote()
if params := req.GetRemoteParams(); params != nil {
@@ -110,8 +111,8 @@ func (s *server) FetchRemote(ctx context.Context, req *gitalypb.FetchRemoteReque
// is of course racy and may conflict with other mutators, causing the vote to fail. But it
// is arguably preferable to accept races in favour of always replicating. If we lose the race,
// we'd fail this RPC and schedule a replication job afterwards.
- if err := transaction.RunOnContext(ctx, func(tx metadata.Transaction, praefect metadata.PraefectServer) error {
- hash := transaction.NewVoteHash()
+ if err := transaction.RunOnContext(ctx, func(tx txinfo.Transaction, praefect txinfo.PraefectServer) error {
+ hash := voting.NewVoteHash()
if err := repo.ExecAndWait(ctx, git.SubCmd{
Name: "for-each-ref",
diff --git a/internal/gitaly/service/repository/fetch_remote_test.go b/internal/gitaly/service/repository/fetch_remote_test.go
index 5bcaa07e4..3c6f2f202 100644
--- a/internal/gitaly/service/repository/fetch_remote_test.go
+++ b/internal/gitaly/service/repository/fetch_remote_test.go
@@ -3,7 +3,6 @@ package repository
import (
"context"
"fmt"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -14,49 +13,52 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
-func copyRepoWithNewRemote(t *testing.T, repo *gitalypb.Repository, repoPath string, remote string) (*gitalypb.Repository, string) {
+func copyRepoWithNewRemote(t *testing.T, cfg config.Cfg, repo *gitalypb.Repository, repoPath string, remote string) (*gitalypb.Repository, string) {
cloneRepo := &gitalypb.Repository{StorageName: repo.GetStorageName(), RelativePath: "fetch-remote-clone.git"}
clonePath := filepath.Join(filepath.Dir(repoPath), "fetch-remote-clone.git")
require.NoError(t, os.RemoveAll(clonePath))
- testhelper.MustRunCommand(t, nil, "git", "clone", "--bare", repoPath, clonePath)
+ gittest.Exec(t, cfg, "clone", "--bare", repoPath, clonePath)
- testhelper.MustRunCommand(t, nil, "git", "-C", clonePath, "remote", "add", remote, repoPath)
+ gittest.Exec(t, cfg, "-C", clonePath, "remote", "add", remote, repoPath)
return cloneRepo, clonePath
}
func TestFetchRemoteSuccess(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
+ cfg, repo, repoPath, client := setupRepositoryService(t)
ctx, cancel := testhelper.Context()
defer cancel()
- cloneRepo, cloneRepoPath := copyRepoWithNewRemote(t, repo, repoPath, "my-remote")
+ cloneRepo, cloneRepoPath := copyRepoWithNewRemote(t, cfg, repo, repoPath, "my-remote")
defer func() {
require.NoError(t, os.RemoveAll(cloneRepoPath))
}()
// Ensure there's a new tag to fetch
- testhelper.CreateTag(t, repoPath, "testtag", "master", nil)
+ gittest.CreateTag(t, cfg, repoPath, "testtag", "master", nil)
req := &gitalypb.FetchRemoteRequest{Repository: cloneRepo, Remote: "my-remote", Timeout: 120, CheckTagsChanged: true}
resp, err := client.FetchRemote(ctx, req)
@@ -93,12 +95,15 @@ func TestFetchRemote_sshCommand(t *testing.T) {
exit 7`, outputPath)
testhelper.WriteExecutable(t, gitPath, []byte(script))
- cfg, repo, _ := testcfg.BuildWithRepo(t, testcfg.WithBase(config.Cfg{
- Git: config.Git{BinPath: gitPath},
- }))
+ cfg, repo, _ := testcfg.BuildWithRepo(t)
- client, serverSocketPath := runRepositoryService(t, cfg, nil)
- cfg.SocketPath = serverSocketPath
+ // We re-define path to the git executable to catch parameters used to call it.
+ // This replacement only needs to be done for the configuration used to invoke git commands.
+ // Other operations should use actual path to the git binary to work properly.
+ spyGitCfg := cfg
+ spyGitCfg.Git.BinPath = gitPath
+
+ client, _ := runRepositoryService(t, spyGitCfg, nil)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -163,15 +168,13 @@ func TestFetchRemote_sshCommand(t *testing.T) {
func TestFetchRemote_withDefaultRefmaps(t *testing.T) {
cfg, sourceRepoProto, sourceRepoPath, client := setupRepositoryService(t)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
-
- sourceRepo := localrepo.New(gitCmdFactory, sourceRepoProto, cfg)
+ sourceRepo := localrepo.NewTestRepo(t, cfg, sourceRepoProto)
- targetRepoProto, targetRepoPath := copyRepoWithNewRemote(t, sourceRepoProto, sourceRepoPath, "my-remote")
+ targetRepoProto, targetRepoPath := copyRepoWithNewRemote(t, cfg, sourceRepoProto, sourceRepoPath, "my-remote")
defer func() {
require.NoError(t, os.RemoveAll(targetRepoPath))
}()
- targetRepo := localrepo.New(gitCmdFactory, targetRepoProto, cfg)
+ targetRepo := localrepo.NewTestRepo(t, cfg, targetRepoProto)
port, stopGitServer := gittest.GitServer(t, cfg, sourceRepoPath, nil)
defer func() { require.NoError(t, stopGitServer()) }()
@@ -205,7 +208,7 @@ type mockTxManager struct {
votes int
}
-func (m *mockTxManager) Vote(context.Context, metadata.Transaction, metadata.PraefectServer, transaction.Vote) error {
+func (m *mockTxManager) Vote(context.Context, txinfo.Transaction, txinfo.PraefectServer, voting.Vote) error {
m.votes++
return nil
}
@@ -213,16 +216,19 @@ func (m *mockTxManager) Vote(context.Context, metadata.Transaction, metadata.Pra
func TestFetchRemote_transaction(t *testing.T) {
sourceCfg, _, sourceRepoPath := testcfg.BuildWithRepo(t)
- locator := config.NewLocator(sourceCfg)
txManager := &mockTxManager{}
- gitCmdFactory := git.NewExecCommandFactory(sourceCfg)
-
- srv := testhelper.NewServerWithAuth(t, nil, nil, sourceCfg.Auth.Token, backchannel.NewRegistry(), testhelper.WithInternalSocket(sourceCfg))
- gitalypb.RegisterRepositoryServiceServer(srv.GrpcServer(), NewServer(sourceCfg, nil, locator, txManager, gitCmdFactory))
- srv.Start(t)
- defer srv.Stop()
-
- client := newRepositoryClient(t, sourceCfg, "unix://"+srv.Socket())
+ addr := testserver.RunGitalyServer(t, sourceCfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterRepositoryServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ }, testserver.WithTransactionManager(txManager))
+
+ client := newRepositoryClient(t, sourceCfg, addr)
targetCfg, targetRepoProto, targetRepoPath := testcfg.BuildWithRepo(t)
port, stopGitServer := gittest.GitServer(t, targetCfg, targetRepoPath, nil)
@@ -230,9 +236,9 @@ func TestFetchRemote_transaction(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- ctx, err := metadata.InjectTransaction(ctx, 1, "node", true)
+ ctx, err := txinfo.InjectTransaction(ctx, 1, "node", true)
require.NoError(t, err)
- ctx, err = (&metadata.PraefectServer{SocketPath: "i-dont-care"}).Inject(ctx)
+ ctx, err = (&txinfo.PraefectServer{SocketPath: "i-dont-care"}).Inject(ctx)
require.NoError(t, err)
ctx = helper.IncomingToOutgoing(ctx)
@@ -328,11 +334,11 @@ func TestFetchRemote_prune(t *testing.T) {
},
} {
t.Run(tc.desc, func(t *testing.T) {
- targetRepoProto, targetRepoPath := copyRepoWithNewRemote(t, sourceRepo, sourceRepoPath, "my-remote")
+ targetRepoProto, targetRepoPath := copyRepoWithNewRemote(t, cfg, sourceRepo, sourceRepoPath, "my-remote")
defer func() {
require.NoError(t, os.RemoveAll(targetRepoPath))
}()
- targetRepo := localrepo.New(git.NewExecCommandFactory(cfg), targetRepoProto, cfg)
+ targetRepo := localrepo.NewTestRepo(t, cfg, targetRepoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -356,9 +362,8 @@ func TestFetchRemote_force(t *testing.T) {
defer cancel()
cfg, sourceRepoProto, sourceRepoPath, client := setupRepositoryService(t)
- gitCommandFactory := git.NewExecCommandFactory(cfg)
- sourceRepo := localrepo.New(gitCommandFactory, sourceRepoProto, cfg)
+ sourceRepo := localrepo.NewTestRepo(t, cfg, sourceRepoProto)
branchOID, err := sourceRepo.ResolveRevision(ctx, "refs/heads/master")
require.NoError(t, err)
@@ -366,8 +371,8 @@ func TestFetchRemote_force(t *testing.T) {
tagOID, err := sourceRepo.ResolveRevision(ctx, "refs/tags/v1.0.0")
require.NoError(t, err)
- divergingBranchOID, _ := gittest.CreateCommitOnNewBranch(t, cfg, sourceRepoPath)
- divergingTagOID, _ := gittest.CreateCommitOnNewBranch(t, cfg, sourceRepoPath)
+ divergingBranchOID := gittest.WriteCommit(t, cfg, sourceRepoPath, gittest.WithBranch("b1"))
+ divergingTagOID := gittest.WriteCommit(t, cfg, sourceRepoPath, gittest.WithBranch("b2"))
port, stopGitServer := gittest.GitServer(t, cfg, sourceRepoPath, nil)
defer func() { require.NoError(t, stopGitServer()) }()
@@ -402,7 +407,7 @@ func TestFetchRemote_force(t *testing.T) {
// branches would get updated.
expectedRefs: map[git.ReferenceName]git.ObjectID{
"refs/heads/master": branchOID,
- "refs/tags/v1.0.0": git.ObjectID(divergingTagOID),
+ "refs/tags/v1.0.0": divergingTagOID,
},
},
{
@@ -427,8 +432,8 @@ func TestFetchRemote_force(t *testing.T) {
Force: true,
},
expectedRefs: map[git.ReferenceName]git.ObjectID{
- "refs/heads/master": git.ObjectID(divergingBranchOID),
- "refs/tags/v1.0.0": git.ObjectID(divergingTagOID),
+ "refs/heads/master": divergingBranchOID,
+ "refs/tags/v1.0.0": divergingTagOID,
},
},
{
@@ -446,7 +451,7 @@ func TestFetchRemote_force(t *testing.T) {
// diverge.
expectedErr: status.Error(codes.Unknown, "fetch remote: exit status 1"),
expectedRefs: map[git.ReferenceName]git.ObjectID{
- "refs/heads/master": git.ObjectID(divergingBranchOID),
+ "refs/heads/master": divergingBranchOID,
"refs/tags/v1.0.0": tagOID,
},
},
@@ -462,8 +467,8 @@ func TestFetchRemote_force(t *testing.T) {
Force: true,
},
expectedRefs: map[git.ReferenceName]git.ObjectID{
- "refs/heads/master": git.ObjectID(divergingBranchOID),
- "refs/tags/v1.0.0": git.ObjectID(divergingTagOID),
+ "refs/heads/master": divergingBranchOID,
+ "refs/tags/v1.0.0": divergingTagOID,
},
},
{
@@ -478,31 +483,31 @@ func TestFetchRemote_force(t *testing.T) {
NoTags: true,
},
expectedRefs: map[git.ReferenceName]git.ObjectID{
- "refs/heads/master": git.ObjectID(divergingBranchOID),
+ "refs/heads/master": divergingBranchOID,
"refs/tags/v1.0.0": tagOID,
},
},
} {
t.Run(tc.desc, func(t *testing.T) {
- targetRepoProto, targetRepoPath := copyRepoWithNewRemote(t, sourceRepoProto, sourceRepoPath, "my-remote")
+ targetRepoProto, targetRepoPath := copyRepoWithNewRemote(t, cfg, sourceRepoProto, sourceRepoPath, "my-remote")
defer func() {
require.NoError(t, os.RemoveAll(targetRepoPath))
}()
- targetRepo := localrepo.New(gitCommandFactory, targetRepoProto, cfg)
+ targetRepo := localrepo.NewTestRepo(t, cfg, targetRepoProto)
// We're force-updating a branch and a tag in the source repository to point
// to a diverging object ID in order to verify that the `force` parameter
// takes effect.
- require.NoError(t, sourceRepo.UpdateRef(ctx, "refs/heads/master", git.ObjectID(divergingBranchOID), branchOID))
- require.NoError(t, sourceRepo.UpdateRef(ctx, "refs/tags/v1.0.0", git.ObjectID(divergingTagOID), tagOID))
+ require.NoError(t, sourceRepo.UpdateRef(ctx, "refs/heads/master", divergingBranchOID, branchOID))
+ require.NoError(t, sourceRepo.UpdateRef(ctx, "refs/tags/v1.0.0", divergingTagOID, tagOID))
defer func() {
// Restore references after the current testcase. Moving
// source repository setup into the testcases is not easily possible
// because hosting the gitserver requires the repo path, and we need
// the URL for our testcases.
- require.NoError(t, sourceRepo.UpdateRef(ctx, "refs/heads/master", branchOID, git.ObjectID(divergingBranchOID)))
- require.NoError(t, sourceRepo.UpdateRef(ctx, "refs/tags/v1.0.0", tagOID, git.ObjectID(divergingTagOID)))
+ require.NoError(t, sourceRepo.UpdateRef(ctx, "refs/heads/master", branchOID, divergingBranchOID))
+ require.NoError(t, sourceRepo.UpdateRef(ctx, "refs/tags/v1.0.0", tagOID, divergingTagOID))
}()
tc.request.Repository = targetRepoProto
@@ -630,8 +635,7 @@ const (
)
func remoteHTTPServer(t *testing.T, repoName, httpToken string) (*httptest.Server, string) {
- b, err := ioutil.ReadFile("testdata/advertise.txt")
- require.NoError(t, err)
+ b := testhelper.MustReadFile(t, "testdata/advertise.txt")
s := httptest.NewServer(
// https://github.com/git/git/blob/master/Documentation/technical/http-protocol.txt
@@ -647,7 +651,7 @@ func remoteHTTPServer(t *testing.T, repoName, httpToken string) (*httptest.Serve
}
w.Header().Set("Content-Type", "application/x-git-upload-pack-advertisement")
- _, err = w.Write(b)
+ _, err := w.Write(b)
assert.NoError(t, err)
}),
)
@@ -655,8 +659,8 @@ func remoteHTTPServer(t *testing.T, repoName, httpToken string) (*httptest.Serve
return s, fmt.Sprintf("%s/%s.git", s.URL, repoName)
}
-func getRefnames(t *testing.T, repoPath string) []string {
- result := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "for-each-ref", "--format", "%(refname:lstrip=2)")
+func getRefnames(t *testing.T, cfg config.Cfg, repoPath string) []string {
+ result := gittest.Exec(t, cfg, "-C", repoPath, "for-each-ref", "--format", "%(refname:lstrip=2)")
return strings.Split(text.ChompBytes(result), "\n")
}
@@ -683,7 +687,7 @@ func testFetchRemoteOverHTTP(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.S
for _, tc := range testCases {
t.Run(tc.description, func(t *testing.T) {
- forkedRepo, forkedRepoPath, forkedRepoCleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ forkedRepo, forkedRepoPath, forkedRepoCleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
defer forkedRepoCleanup()
s, remoteURL := remoteHTTPServer(t, "my-repo", tc.httpToken)
@@ -702,13 +706,13 @@ func testFetchRemoteOverHTTP(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.S
req.RemoteParams.Url = s.URL + tc.remoteURL
}
- refs := getRefnames(t, forkedRepoPath)
+ refs := getRefnames(t, cfg, forkedRepoPath)
require.True(t, len(refs) > 1, "the advertisement.txt should have deleted all refs except for master")
_, err := client.FetchRemote(ctx, req)
require.NoError(t, err)
- refs = getRefnames(t, forkedRepoPath)
+ refs = getRefnames(t, cfg, forkedRepoPath)
require.Len(t, refs, 1)
assert.Equal(t, "master", refs[0])
diff --git a/internal/gitaly/service/repository/fetch_test.go b/internal/gitaly/service/repository/fetch_test.go
index 6e58a4a43..a8bf02c3e 100644
--- a/internal/gitaly/service/repository/fetch_test.go
+++ b/internal/gitaly/service/repository/fetch_test.go
@@ -21,12 +21,12 @@ func TestFetchSourceBranchSourceRepositorySuccess(t *testing.T) {
md := testhelper.GitalyServersMetadataFromCfg(t, cfg)
ctx = testhelper.MergeOutgoingMetadata(ctx, md)
- targetRepoProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "fetch-source-target.git")
+ targetRepoProto, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "fetch-source-target.git")
defer cleanup()
- targetRepo := localrepo.New(git.NewExecCommandFactory(cfg), targetRepoProto, cfg)
+ targetRepo := localrepo.NewTestRepo(t, cfg, targetRepoProto)
sourceBranch := "fetch-source-branch-test-branch"
- newCommitID := gittest.CreateCommit(t, cfg, sourcePath, sourceBranch, nil)
+ newCommitID := gittest.WriteCommit(t, cfg, sourcePath, gittest.WithBranch(sourceBranch))
targetRef := "refs/tmp/fetch-source-branch-test"
req := &gitalypb.FetchSourceBranchRequest{
@@ -42,7 +42,7 @@ func TestFetchSourceBranchSourceRepositorySuccess(t *testing.T) {
fetchedCommit, err := targetRepo.ReadCommit(ctx, git.Revision(targetRef))
require.NoError(t, err)
- require.Equal(t, newCommitID, fetchedCommit.GetId())
+ require.Equal(t, newCommitID.String(), fetchedCommit.GetId())
}
func TestFetchSourceBranchSameRepositorySuccess(t *testing.T) {
@@ -54,10 +54,10 @@ func TestFetchSourceBranchSameRepositorySuccess(t *testing.T) {
md := testhelper.GitalyServersMetadataFromCfg(t, cfg)
ctx = testhelper.MergeOutgoingMetadata(ctx, md)
- repo := localrepo.New(git.NewExecCommandFactory(cfg), repoProto, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
sourceBranch := "fetch-source-branch-test-branch"
- newCommitID := gittest.CreateCommit(t, cfg, repoPath, sourceBranch, nil)
+ newCommitID := gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch(sourceBranch))
targetRef := "refs/tmp/fetch-source-branch-test"
req := &gitalypb.FetchSourceBranchRequest{
@@ -73,7 +73,7 @@ func TestFetchSourceBranchSameRepositorySuccess(t *testing.T) {
fetchedCommit, err := repo.ReadCommit(ctx, git.Revision(targetRef))
require.NoError(t, err)
- require.Equal(t, newCommitID, fetchedCommit.GetId())
+ require.Equal(t, newCommitID.String(), fetchedCommit.GetId())
}
func TestFetchSourceBranchBranchNotFound(t *testing.T) {
@@ -85,7 +85,7 @@ func TestFetchSourceBranchBranchNotFound(t *testing.T) {
md := testhelper.GitalyServersMetadataFromCfg(t, cfg)
ctx = testhelper.MergeOutgoingMetadata(ctx, md)
- sourceRepo, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "fetch-source-source.git")
+ sourceRepo, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "fetch-source-source.git")
t.Cleanup(cleanup)
sourceBranch := "does-not-exist"
@@ -133,11 +133,11 @@ func TestFetchSourceBranchWrongRef(t *testing.T) {
md := testhelper.GitalyServersMetadataFromCfg(t, cfg)
ctx = testhelper.MergeOutgoingMetadata(ctx, md)
- sourceRepo, sourceRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "fetch-source-source.git")
+ sourceRepo, sourceRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "fetch-source-source.git")
defer cleanup()
sourceBranch := "fetch-source-branch-testmas-branch"
- gittest.CreateCommit(t, cfg, sourceRepoPath, sourceBranch, nil)
+ gittest.WriteCommit(t, cfg, sourceRepoPath, gittest.WithBranch(sourceBranch))
targetRef := "refs/tmp/fetch-source-branch-test"
diff --git a/internal/gitaly/service/repository/fork_test.go b/internal/gitaly/service/repository/fork_test.go
index c7ea4c15b..76eca1e42 100644
--- a/internal/gitaly/service/repository/fork_test.go
+++ b/internal/gitaly/service/repository/fork_test.go
@@ -12,7 +12,10 @@ import (
gitalyauth "gitlab.com/gitlab-org/gitaly/auth"
gclient "gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
+ "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/hook"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
@@ -24,6 +27,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/remote"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/ssh"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
@@ -102,9 +106,9 @@ func TestSuccessfulCreateForkRequest(t *testing.T) {
require.NoError(t, err)
defer func() { require.NoError(t, os.RemoveAll(forkedRepoPath)) }()
- testhelper.MustRunCommand(t, nil, "git", "-C", forkedRepoPath, "fsck")
+ gittest.Exec(t, cfg, "-C", forkedRepoPath, "fsck")
- remotes := testhelper.MustRunCommand(t, nil, "git", "-C", forkedRepoPath, "remote")
+ remotes := gittest.Exec(t, cfg, "-C", forkedRepoPath, "remote")
require.NotContains(t, string(remotes), "origin")
info, err := os.Lstat(filepath.Join(forkedRepoPath, "hooks"))
@@ -198,8 +202,7 @@ func injectCustomCATestCerts(t *testing.T, cfg *config.Cfg) *x509.CertPool {
revertEnv := testhelper.ModifyEnvironment(t, gitaly_x509.SSLCertFile, certFile)
t.Cleanup(revertEnv)
- caPEMBytes, err := ioutil.ReadFile(certFile)
- require.NoError(t, err)
+ caPEMBytes := testhelper.MustReadFile(t, certFile)
pool := x509.NewCertPool()
require.True(t, pool.AppendCertsFromPEM(caPEMBytes))
@@ -210,21 +213,23 @@ func runSecureServer(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) s
t.Helper()
registry := backchannel.NewRegistry()
- server, err := gserver.New(true, cfg, testhelper.DiscardTestEntry(t), registry)
+ locator := config.NewLocator(cfg)
+ cache := cache.New(cfg, locator)
+ server, err := gserver.New(true, cfg, testhelper.DiscardTestEntry(t), registry, cache)
require.NoError(t, err)
listener, addr := testhelper.GetLocalhostListener(t)
- locator := config.NewLocator(cfg)
txManager := transaction.NewManager(cfg, registry)
- hookManager := hook.NewManager(locator, txManager, hook.GitlabAPIStub, cfg)
+ hookManager := hook.NewManager(locator, txManager, gitlab.NewMockClient(), cfg)
gitCmdFactory := git.NewExecCommandFactory(cfg)
+ catfileCache := catfile.NewCache(cfg)
- gitalypb.RegisterRepositoryServiceServer(server, NewServer(cfg, rubySrv, locator, txManager, gitCmdFactory))
+ gitalypb.RegisterRepositoryServiceServer(server, NewServer(cfg, rubySrv, locator, txManager, gitCmdFactory, catfileCache))
gitalypb.RegisterHookServiceServer(server, hookservice.NewServer(cfg, hookManager, gitCmdFactory))
- gitalypb.RegisterRemoteServiceServer(server, remote.NewServer(cfg, rubySrv, locator, gitCmdFactory))
- gitalypb.RegisterSSHServiceServer(server, ssh.NewServer(cfg, locator, gitCmdFactory))
- gitalypb.RegisterRefServiceServer(server, ref.NewServer(cfg, locator, gitCmdFactory, txManager))
- gitalypb.RegisterCommitServiceServer(server, commit.NewServer(cfg, locator, gitCmdFactory, nil))
+ gitalypb.RegisterRemoteServiceServer(server, remote.NewServer(cfg, rubySrv, locator, gitCmdFactory, catfileCache, txManager))
+ gitalypb.RegisterSSHServiceServer(server, ssh.NewServer(cfg, locator, gitCmdFactory, txManager))
+ gitalypb.RegisterRefServiceServer(server, ref.NewServer(cfg, locator, gitCmdFactory, txManager, catfileCache))
+ gitalypb.RegisterCommitServiceServer(server, commit.NewServer(cfg, locator, gitCmdFactory, nil, catfileCache))
errQ := make(chan error, 1)
// This creates a secondary GRPC server which isn't "secure". Reusing
diff --git a/internal/gitaly/service/repository/gc.go b/internal/gitaly/service/repository/gc.go
index 69e87eebf..de8523d83 100644
--- a/internal/gitaly/service/repository/gc.go
+++ b/internal/gitaly/service/repository/gc.go
@@ -26,7 +26,7 @@ func (s *server) GarbageCollect(ctx context.Context, in *gitalypb.GarbageCollect
"WriteBitmaps": in.GetCreateBitmap(),
}).Debug("GarbageCollect")
- repo := localrepo.New(s.gitCmdFactory, in.GetRepository(), s.cfg)
+ repo := s.localrepo(in.GetRepository())
if err := s.cleanupRepo(ctx, repo); err != nil {
return nil, err
@@ -36,32 +36,26 @@ func (s *server) GarbageCollect(ctx context.Context, in *gitalypb.GarbageCollect
return nil, err
}
- if err := s.gc(ctx, in); err != nil {
- return nil, err
+ // Perform housekeeping to clean up stale lockfiles that may block GC
+ if err := housekeeping.Perform(ctx, repo); err != nil {
+ ctxlogger.WithError(err).Warn("Pre gc housekeeping failed")
}
- if err := s.configureCommitGraph(ctx, in); err != nil {
+ if err := s.gc(ctx, in); err != nil {
return nil, err
}
- if err := s.writeCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{
- Repository: in.GetRepository(),
- }); err != nil {
+ if err := s.writeCommitGraph(ctx, repo, gitalypb.WriteCommitGraphRequest_SizeMultiple); err != nil {
return nil, err
}
- // Perform housekeeping post GC
- if err := housekeeping.Perform(ctx, repo); err != nil {
- ctxlogger.WithError(err).Warn("Post gc housekeeping failed")
- }
-
stats.LogObjectsInfo(ctx, s.gitCmdFactory, repo)
return &gitalypb.GarbageCollectResponse{}, nil
}
func (s *server) gc(ctx context.Context, in *gitalypb.GarbageCollectRequest) error {
- config := repackConfig(ctx, in.CreateBitmap)
+ config := append(repackConfig(ctx, in.CreateBitmap), git.ConfigPair{Key: "gc.writeCommitGraph", Value: "false"})
var flags []git.Option
if in.Prune {
@@ -91,35 +85,13 @@ func (s *server) gc(ctx context.Context, in *gitalypb.GarbageCollectRequest) err
return nil
}
-func (s *server) configureCommitGraph(ctx context.Context, in *gitalypb.GarbageCollectRequest) error {
- cmd, err := s.gitCmdFactory.New(ctx, in.GetRepository(), git.SubCmd{
- Name: "config",
- Flags: []git.Option{
- git.ConfigPair{Key: "core.commitGraph", Value: "true"},
- },
- })
- if err != nil {
- if _, ok := status.FromError(err); ok {
- return err
- }
-
- return helper.ErrInternal(fmt.Errorf("GarbageCollect: config gitCommand: %v", err))
- }
-
- if err := cmd.Wait(); err != nil {
- return helper.ErrInternal(fmt.Errorf("GarbageCollect: config cmd wait: %v", err))
- }
-
- return nil
-}
-
func (s *server) cleanupKeepArounds(ctx context.Context, repo *localrepo.Repo) error {
repoPath, err := repo.Path()
if err != nil {
return nil
}
- batch, err := catfile.New(ctx, s.gitCmdFactory, repo)
+ batch, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return nil
}
diff --git a/internal/gitaly/service/repository/gc_test.go b/internal/gitaly/service/repository/gc_test.go
index fc554dd04..944825ec0 100644
--- a/internal/gitaly/service/repository/gc_test.go
+++ b/internal/gitaly/service/repository/gc_test.go
@@ -14,7 +14,6 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
@@ -29,7 +28,7 @@ var (
)
func TestGarbageCollectCommitGraph(t *testing.T) {
- cfg, repo, repoPath, client := setupRepositoryService(t)
+ _, repo, repoPath, client := setupRepositoryService(t)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -38,20 +37,8 @@ func TestGarbageCollectCommitGraph(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, c)
- assert.FileExistsf(t,
- filepath.Join(repoPath, "objects/info/commit-graph"),
- "pre-computed commit-graph should exist after running garbage collect",
- )
-
- repoCfgPath := filepath.Join(repoPath, "config")
-
- cfgF, err := os.Open(repoCfgPath)
- require.NoError(t, err)
- defer cfgF.Close()
-
- cfgCmd, err := localrepo.New(git.NewExecCommandFactory(cfg), repo, cfg).Config().GetRegexp(ctx, "core.commitgraph", git.ConfigGetRegexpOpts{})
- require.NoError(t, err)
- require.Equal(t, []git.ConfigPair{{Key: "core.commitgraph", Value: "true"}}, cfgCmd)
+ chainPath := filepath.Join(repoPath, CommitGraphChainRelPath)
+ require.FileExists(t, chainPath, "pre-computed commit-graph should exist after running garbage collect")
}
func TestGarbageCollectSuccess(t *testing.T) {
@@ -111,13 +98,17 @@ func TestGarbageCollectWithPrune(t *testing.T) {
cfg, repo, repoPath, client := setupRepositoryService(t)
- blobHashes := gittest.WriteBlobs(t, repoPath, 3)
+ blobHashes := gittest.WriteBlobs(t, cfg, repoPath, 3)
oldDanglingObjFile := filepath.Join(repoPath, "objects", blobHashes[0][:2], blobHashes[0][2:])
newDanglingObjFile := filepath.Join(repoPath, "objects", blobHashes[1][:2], blobHashes[1][2:])
oldReferencedObjFile := filepath.Join(repoPath, "objects", blobHashes[2][:2], blobHashes[2][2:])
// create a reference to the blob, so it should not be removed by gc
- gittest.CommitBlobWithName(t, cfg, repoPath, blobHashes[2], t.Name(), t.Name())
+ gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ OID: git.ObjectID(blobHashes[2]), Path: t.Name(), Mode: "100644",
+ }),
+ )
// change modification time of the blobs to make them attractive for the gc
aBitMoreThan30MinutesAgo := time.Now().Add(-30*time.Minute - time.Second)
@@ -137,8 +128,7 @@ func TestGarbageCollectWithPrune(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, c)
- _, err = os.Stat(oldDanglingObjFile)
- require.True(t, os.IsNotExist(err), "blob should be removed from object storage as it is too old and there are no references to it")
+ require.NoFileExists(t, oldDanglingObjFile, "blob should be removed from object storage as it is too old and there are no references to it")
require.FileExists(t, newDanglingObjFile, "blob should not be removed from object storage as it is fresh enough even though there are no references to it")
require.FileExists(t, oldReferencedObjFile, "blob should not be removed from object storage as it is referenced by something even though it is too old")
}
@@ -196,7 +186,163 @@ func TestGarbageCollectDeletesRefsLocks(t *testing.T) {
assert.FileExists(t, keepLockPath)
- testhelper.AssertPathNotExists(t, deleteLockPath)
+ require.NoFileExists(t, deleteLockPath)
+}
+
+func TestGarbageCollectDeletesPackedRefsLock(t *testing.T) {
+ cfg, client := setupRepositoryServiceWithoutRepo(t)
+
+ testCases := []struct {
+ desc string
+ lockTime *time.Time
+ shouldExist bool
+ }{
+ {
+ desc: "with a recent lock",
+ lockTime: &freshTime,
+ shouldExist: true,
+ },
+ {
+ desc: "with an old lock",
+ lockTime: &oldTime,
+ shouldExist: false,
+ },
+ {
+ desc: "with a non-existing lock",
+ lockTime: nil,
+ shouldExist: false,
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
+ t.Cleanup(cleanupFn)
+
+ // Force the packed-refs file to have an old time to test that even
+ // in that case it doesn't get deleted
+ packedRefsPath := filepath.Join(repoPath, "packed-refs")
+ require.NoError(t, os.Chtimes(packedRefsPath, oldTime, oldTime))
+
+ req := &gitalypb.GarbageCollectRequest{Repository: repo}
+ lockPath := filepath.Join(repoPath, "packed-refs.lock")
+
+ if tc.lockTime != nil {
+ mustCreateFileWithTimes(t, lockPath, *tc.lockTime)
+ }
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ c, err := client.GarbageCollect(ctx, req)
+
+ // Sanity checks
+ assert.FileExists(t, filepath.Join(repoPath, "HEAD")) // For good measure
+ assert.FileExists(t, packedRefsPath)
+
+ if tc.shouldExist {
+ assert.Error(t, err)
+ testhelper.RequireGrpcError(t, err, codes.Internal)
+
+ require.FileExists(t, lockPath)
+ } else {
+ assert.NoError(t, err)
+ assert.NotNil(t, c)
+
+ require.NoFileExists(t, lockPath)
+ }
+ })
+ }
+}
+
+func TestGarbageCollectDeletesFileLocks(t *testing.T) {
+ _, repo, repoPath, client := setupRepositoryService(t)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ req := &gitalypb.GarbageCollectRequest{Repository: repo}
+
+ for _, tc := range []string{
+ "config.lock",
+ "HEAD.lock",
+ "objects/info/commit-graphs/commit-graph-chain.lock",
+ } {
+ lockPath := filepath.Join(repoPath, tc)
+ // No file on the lock path
+ _, err := client.GarbageCollect(ctx, req)
+ assert.NoError(t, err)
+
+ // Fresh lock should remain
+ mustCreateFileWithTimes(t, lockPath, freshTime)
+ _, err = client.GarbageCollect(ctx, req)
+
+ assert.NoError(t, err)
+
+ assert.FileExists(t, lockPath)
+
+ // Old lock should be removed
+ mustCreateFileWithTimes(t, lockPath, oldTime)
+ _, err = client.GarbageCollect(ctx, req)
+ assert.NoError(t, err)
+ require.NoFileExists(t, lockPath)
+ }
+}
+
+func TestGarbageCollectDeletesPackedRefsNew(t *testing.T) {
+ cfg, client := setupRepositoryServiceWithoutRepo(t)
+
+ testCases := []struct {
+ desc string
+ lockTime *time.Time
+ shouldExist bool
+ }{
+ {
+ desc: "created recently",
+ lockTime: &freshTime,
+ shouldExist: true,
+ },
+ {
+ desc: "exists for too long",
+ lockTime: &oldTime,
+ shouldExist: false,
+ },
+ {
+ desc: "nothing to clean up",
+ shouldExist: false,
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
+ t.Cleanup(cleanupFn)
+
+ req := &gitalypb.GarbageCollectRequest{Repository: repo}
+ packedRefsNewPath := filepath.Join(repoPath, "packed-refs.new")
+
+ if tc.lockTime != nil {
+ mustCreateFileWithTimes(t, packedRefsNewPath, *tc.lockTime)
+ }
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ c, err := client.GarbageCollect(ctx, req)
+
+ if tc.shouldExist {
+ require.Error(t, err)
+ testhelper.RequireGrpcError(t, err, codes.Internal)
+
+ require.FileExists(t, packedRefsNewPath)
+ } else {
+ require.NotNil(t, c)
+ require.NoError(t, err)
+
+ require.NoFileExists(t, packedRefsNewPath)
+ }
+ })
+ }
}
func TestGarbageCollectFailure(t *testing.T) {
@@ -223,7 +369,7 @@ func TestGarbageCollectFailure(t *testing.T) {
}
func TestCleanupInvalidKeepAroundRefs(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
+ cfg, repo, repoPath, client := setupRepositoryService(t)
// Make the directory, so we can create random reflike things in it
require.NoError(t, os.MkdirAll(filepath.Join(repoPath, "refs", "keep-around"), 0755))
@@ -271,7 +417,7 @@ func TestCleanupInvalidKeepAroundRefs(t *testing.T) {
// Create a proper keep-around loose ref
existingSha := "1e292f8fedd741b75372e19097c76d327140c312"
existingRefName := fmt.Sprintf("refs/keep-around/%s", existingSha)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", existingRefName, existingSha)
+ gittest.Exec(t, cfg, "-C", repoPath, "update-ref", existingRefName, existingSha)
// Create an invalid ref that should be removed with the testcase
bogusSha := "b3f5e4adf6277b571b7943a4f0405a6dd7ee7e15"
@@ -288,20 +434,18 @@ func TestCleanupInvalidKeepAroundRefs(t *testing.T) {
require.NoError(t, err)
// The existing keeparound still exists
- commitSha := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", existingRefName)
+ commitSha := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", existingRefName)
require.Equal(t, existingSha, text.ChompBytes(commitSha))
// The invalid one was removed
- _, err = os.Stat(bogusPath)
- require.True(t, os.IsNotExist(err), "expected 'does not exist' error, got %v", err)
+ require.NoFileExists(t, bogusPath)
if testcase.shouldExist {
keepAroundName := fmt.Sprintf("refs/keep-around/%s", testcase.refName)
- commitSha := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", keepAroundName)
+ commitSha := gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", keepAroundName)
require.Equal(t, testcase.refName, text.ChompBytes(commitSha))
} else {
- _, err := os.Stat(refPath)
- require.True(t, os.IsNotExist(err), "expected 'does not exist' error, got %v", err)
+ require.NoFileExists(t, refPath)
}
})
}
@@ -316,12 +460,12 @@ func mustCreateFileWithTimes(t testing.TB, path string, mTime time.Time) {
}
func TestGarbageCollectDeltaIslands(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
+ cfg, repo, repoPath, client := setupRepositoryService(t)
ctx, cancel := testhelper.Context()
defer cancel()
- gittest.TestDeltaIslands(t, repoPath, func() error {
+ gittest.TestDeltaIslands(t, cfg, repoPath, func() error {
_, err := client.GarbageCollect(ctx, &gitalypb.GarbageCollectRequest{Repository: repo})
return err
})
diff --git a/internal/gitaly/service/repository/license.go b/internal/gitaly/service/repository/license.go
index ee28b3d8d..fe2b528de 100644
--- a/internal/gitaly/service/repository/license.go
+++ b/internal/gitaly/service/repository/license.go
@@ -25,7 +25,7 @@ func (s *server) FindLicense(ctx context.Context, req *gitalypb.FindLicenseReque
if req.GetRepository() == nil {
return &gitalypb.FindLicenseResponse{}, nil
}
- repo := localrepo.New(s.gitCmdFactory, req.GetRepository(), s.cfg)
+ repo := localrepo.New(s.gitCmdFactory, s.catfileCache, req.GetRepository(), s.cfg)
hasHeadRevision, err := repo.HasRevision(ctx, "HEAD")
if err != nil {
diff --git a/internal/gitaly/service/repository/midx_test.go b/internal/gitaly/service/repository/midx_test.go
index 2cf9a5768..4abcef8da 100644
--- a/internal/gitaly/service/repository/midx_test.go
+++ b/internal/gitaly/service/repository/midx_test.go
@@ -40,7 +40,7 @@ func TestMidxWrite(t *testing.T) {
require.NoError(t, err)
defer cfgF.Close()
- cfgCmd, err := localrepo.New(git.NewExecCommandFactory(cfg), repo, cfg).Config().GetRegexp(ctx, "core.multipackindex", git.ConfigGetRegexpOpts{})
+ cfgCmd, err := localrepo.NewTestRepo(t, cfg, repo).Config().GetRegexp(ctx, "core.multipackindex", git.ConfigGetRegexpOpts{})
require.NoError(t, err)
require.Equal(t, []git.ConfigPair{{Key: "core.multipackindex", Value: "true"}}, cfgCmd)
}
@@ -117,7 +117,7 @@ func TestMidxRepackExpire(t *testing.T) {
for _, packsAdded := range []int{3, 5, 11, 20} {
t.Run(fmt.Sprintf("Test repack expire with %d added packs", packsAdded),
func(t *testing.T) {
- repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanupFn)
ctx, cancel := testhelper.Context()
@@ -216,7 +216,8 @@ func addPackFiles(
// create some pack files with different sizes
for i := 0; i < packCount; i++ {
for y := packCount + 1 - i; y > 0; y-- {
- gittest.CreateCommitOnNewBranch(t, cfg, repoPath)
+ branch := fmt.Sprintf("branch-%d-%d", i, y)
+ gittest.WriteCommit(t, cfg, repoPath, gittest.WithMessage(branch), gittest.WithBranch(branch))
}
_, err = client.RepackIncremental(ctx, &gitalypb.RepackIncrementalRequest{Repository: repo})
diff --git a/internal/gitaly/service/repository/optimize.go b/internal/gitaly/service/repository/optimize.go
index 86d88540f..351bfb015 100644
--- a/internal/gitaly/service/repository/optimize.go
+++ b/internal/gitaly/service/repository/optimize.go
@@ -6,7 +6,6 @@ import (
"os"
"gitlab.com/gitlab-org/gitaly/internal/git/housekeeping"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/stats"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -54,7 +53,7 @@ func (s *server) optimizeRepository(ctx context.Context, repository *gitalypb.Re
return fmt.Errorf("could not repack: %w", err)
}
- repo := localrepo.New(s.gitCmdFactory, repository, s.cfg)
+ repo := s.localrepo(repository)
if err := housekeeping.Perform(ctx, repo); err != nil {
return fmt.Errorf("could not execute houskeeping: %w", err)
diff --git a/internal/gitaly/service/repository/optimize_test.go b/internal/gitaly/service/repository/optimize_test.go
index 780ce0b97..8ea2429a2 100644
--- a/internal/gitaly/service/repository/optimize_test.go
+++ b/internal/gitaly/service/repository/optimize_test.go
@@ -2,7 +2,6 @@ package repository
import (
"bytes"
- "io/ioutil"
"os"
"path/filepath"
"testing"
@@ -12,7 +11,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/stats"
- "gitlab.com/gitlab-org/gitaly/internal/git/updateref"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
@@ -41,9 +39,9 @@ func getNewestPackfileModtime(t *testing.T, repoPath string) time.Time {
}
func TestOptimizeRepository(t *testing.T) {
- cfg, repo, repoPath, client := setupRepositoryService(t)
+ cfg, repoProto, repoPath, client := setupRepositoryService(t)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "repack", "-A", "-b")
+ gittest.Exec(t, cfg, "-C", repoPath, "repack", "-A", "-b")
ctx, cancel := testhelper.Context()
defer cancel()
@@ -55,17 +53,16 @@ func TestOptimizeRepository(t *testing.T) {
// get timestamp of latest packfile
newestsPackfileTime := getNewestPackfileModtime(t, repoPath)
- gittest.CreateCommit(t, cfg, repoPath, "master", nil)
+ gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch("master"))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "http.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c1.git.extraHeader", "Authorization: Basic secret-password")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "http.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c2.git.extraHeader", "Authorization: Basic secret-password")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "randomStart-http.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c3.git.extraHeader", "Authorization: Basic secret-password")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "http.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c4.git.extraHeader-randomEnd", "Authorization: Basic secret-password")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "hTTp.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c5.git.ExtrAheaDeR", "Authorization: Basic secret-password")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "http.http://extraHeader/extraheader/EXTRAHEADER.git.extraHeader", "Authorization: Basic secret-password")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "config", "https.https://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c5.git.extraHeader", "Authorization: Basic secret-password")
- confFileData, err := ioutil.ReadFile(filepath.Join(repoPath, "config"))
- require.NoError(t, err)
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "http.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c1.git.extraHeader", "Authorization: Basic secret-password")
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "http.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c2.git.extraHeader", "Authorization: Basic secret-password")
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "randomStart-http.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c3.git.extraHeader", "Authorization: Basic secret-password")
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "http.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c4.git.extraHeader-randomEnd", "Authorization: Basic secret-password")
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "hTTp.http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c5.git.ExtrAheaDeR", "Authorization: Basic secret-password")
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "http.http://extraHeader/extraheader/EXTRAHEADER.git.extraHeader", "Authorization: Basic secret-password")
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "https.https://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c5.git.extraHeader", "Authorization: Basic secret-password")
+ confFileData := testhelper.MustReadFile(t, filepath.Join(repoPath, "config"))
require.True(t, bytes.Contains(confFileData, []byte("http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c1.git")))
require.True(t, bytes.Contains(confFileData, []byte("http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c2.git")))
require.True(t, bytes.Contains(confFileData, []byte("http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c3")))
@@ -74,11 +71,10 @@ func TestOptimizeRepository(t *testing.T) {
require.True(t, bytes.Contains(confFileData, []byte("http://extraHeader/extraheader/EXTRAHEADER.git")))
require.True(t, bytes.Contains(confFileData, []byte("https://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c5.git")))
- _, err = client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{Repository: repo})
+ _, err = client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{Repository: repoProto})
require.NoError(t, err)
- confFileData, err = ioutil.ReadFile(filepath.Join(repoPath, "config"))
- require.NoError(t, err)
+ confFileData = testhelper.MustReadFile(t, filepath.Join(repoPath, "config"))
require.False(t, bytes.Contains(confFileData, []byte("http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c1.git")))
require.False(t, bytes.Contains(confFileData, []byte("http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c2.git")))
require.True(t, bytes.Contains(confFileData, []byte("http://localhost:51744/60631c8695bf041a808759a05de53e36a73316aacb502824fabbb0c6055637c3")))
@@ -89,22 +85,22 @@ func TestOptimizeRepository(t *testing.T) {
require.Equal(t, getNewestPackfileModtime(t, repoPath), newestsPackfileTime, "there should not have been a new packfile created")
- testRepo, testRepoPath, cleanupBare := gittest.InitBareRepoAt(t, cfg.Storages[0])
+ testRepoProto, testRepoPath, cleanupBare := gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
t.Cleanup(cleanupBare)
blobs := 10
- blobIDs := gittest.WriteBlobs(t, testRepoPath, blobs)
-
- updater, err := updateref.New(ctx, cfg, git.NewExecCommandFactory(cfg), testRepo)
- require.NoError(t, err)
+ blobIDs := gittest.WriteBlobs(t, cfg, testRepoPath, blobs)
for _, blobID := range blobIDs {
- commitID := gittest.CommitBlobWithName(t, cfg, testRepoPath, blobID, blobID, "adding another blob....")
- require.NoError(t, updater.Create(git.ReferenceName("refs/heads/"+blobID), commitID))
+ gittest.WriteCommit(t, cfg, testRepoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ OID: git.ObjectID(blobID), Mode: "100644", Path: "blob",
+ }),
+ gittest.WithBranch(blobID),
+ gittest.WithParents(),
+ )
}
- require.NoError(t, updater.Wait())
-
bitmaps, err := filepath.Glob(filepath.Join(testRepoPath, "objects", "pack", "*.bitmap"))
require.NoError(t, err)
require.Empty(t, bitmaps)
@@ -115,7 +111,7 @@ func TestOptimizeRepository(t *testing.T) {
require.DirExists(t, emptyRef, "sanity check for empty ref dir existence")
// optimizing a repository without a bitmap should call a full repack
- _, err = client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{Repository: testRepo})
+ _, err = client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{Repository: testRepoProto})
require.NoError(t, err)
bitmaps, err = filepath.Glob(filepath.Join(testRepoPath, "objects", "pack", "*.bitmap"))
@@ -136,11 +132,11 @@ func TestOptimizeRepository(t *testing.T) {
require.NoError(t, os.Chtimes(emptyRef, oneDayAgo, oneDayAgo))
require.NoError(t, os.Chtimes(mrRefs, oneDayAgo, oneDayAgo))
- _, err = client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{Repository: testRepo})
+ _, err = client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{Repository: testRepoProto})
require.NoError(t, err)
- testhelper.AssertPathNotExists(t, emptyRef)
- testhelper.AssertPathNotExists(t, mrRefs)
+ require.NoFileExists(t, emptyRef)
+ require.NoFileExists(t, mrRefs)
}
func TestOptimizeRepositoryValidation(t *testing.T) {
diff --git a/internal/gitaly/service/repository/raw_changes.go b/internal/gitaly/service/repository/raw_changes.go
index f8744072a..377728549 100644
--- a/internal/gitaly/service/repository/raw_changes.go
+++ b/internal/gitaly/service/repository/raw_changes.go
@@ -19,9 +19,9 @@ import (
func (s *server) GetRawChanges(req *gitalypb.GetRawChangesRequest, stream gitalypb.RepositoryService_GetRawChangesServer) error {
ctx := stream.Context()
+ repo := s.localrepo(req.GetRepository())
- repo := req.Repository
- batch, err := catfile.New(stream.Context(), s.gitCmdFactory, repo)
+ batch, err := s.catfileCache.BatchProcess(stream.Context(), repo)
if err != nil {
return helper.ErrInternal(err)
}
@@ -53,7 +53,7 @@ func validateRawChangesRequest(ctx context.Context, req *gitalypb.GetRawChangesR
return nil
}
-func (s *server) getRawChanges(stream gitalypb.RepositoryService_GetRawChangesServer, repo *gitalypb.Repository, batch catfile.Batch, from, to string) error {
+func (s *server) getRawChanges(stream gitalypb.RepositoryService_GetRawChangesServer, repo git.RepositoryExecutor, batch catfile.Batch, from, to string) error {
if git.ObjectID(to).IsZeroOID() {
return nil
}
@@ -64,7 +64,7 @@ func (s *server) getRawChanges(stream gitalypb.RepositoryService_GetRawChangesSe
ctx := stream.Context()
- diffCmd, err := s.gitCmdFactory.New(ctx, repo, git.SubCmd{
+ diffCmd, err := repo.Exec(ctx, git.SubCmd{
Name: "diff",
Flags: []git.Option{git.Flag{Name: "--raw"}, git.Flag{Name: "-z"}},
Args: []string{from, to},
diff --git a/internal/gitaly/service/repository/raw_changes_test.go b/internal/gitaly/service/repository/raw_changes_test.go
index ac673e1c7..d9a7ed6b5 100644
--- a/internal/gitaly/service/repository/raw_changes_test.go
+++ b/internal/gitaly/service/repository/raw_changes_test.go
@@ -296,21 +296,15 @@ func TestGetRawChangesInvalidUTF8Paths(t *testing.T) {
)
require.False(t, utf8.ValidString(nonUTF8Filename)) // sanity check
- fromCommitID := gittest.CommitBlobWithName(
- t,
- cfg,
- repoPath,
- blobID1,
- nonUTF8Filename,
- "killer AI might use non-UTF filenames",
+ fromCommitID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ OID: blobID1, Path: nonUTF8Filename, Mode: "100644",
+ }),
)
- toCommitID := gittest.CommitBlobWithName(
- t,
- cfg,
- repoPath,
- blobID2,
- nonUTF8Filename,
- "hostile extraterrestrials won't use UTF",
+ toCommitID := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithTreeEntries(gittest.TreeEntry{
+ OID: blobID2, Path: nonUTF8Filename, Mode: "100644",
+ }),
)
ctx, cancel := testhelper.Context()
@@ -318,8 +312,8 @@ func TestGetRawChangesInvalidUTF8Paths(t *testing.T) {
req := &gitalypb.GetRawChangesRequest{
Repository: repo,
- FromRevision: fromCommitID,
- ToRevision: toCommitID,
+ FromRevision: fromCommitID.String(),
+ ToRevision: toCommitID.String(),
}
c, err := client.GetRawChanges(ctx, req)
diff --git a/internal/gitaly/service/repository/rebase_in_progress_test.go b/internal/gitaly/service/repository/rebase_in_progress_test.go
index d168de7f5..bbdae6163 100644
--- a/internal/gitaly/service/repository/rebase_in_progress_test.go
+++ b/internal/gitaly/service/repository/rebase_in_progress_test.go
@@ -17,18 +17,18 @@ import (
func TestSuccessfulIsRebaseInProgressRequest(t *testing.T) {
cfg, repo1, repoPath1, client := setupRepositoryService(t)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath1, "worktree", "add", "--detach", filepath.Join(repoPath1, worktreePrefix, fmt.Sprintf("%s-1", rebaseWorktreePrefix)), "master")
+ gittest.Exec(t, cfg, "-C", repoPath1, "worktree", "add", "--detach", filepath.Join(repoPath1, worktreePrefix, fmt.Sprintf("%s-1", rebaseWorktreePrefix)), "master")
brokenPath := filepath.Join(repoPath1, worktreePrefix, fmt.Sprintf("%s-2", rebaseWorktreePrefix))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath1, "worktree", "add", "--detach", brokenPath, "master")
+ gittest.Exec(t, cfg, "-C", repoPath1, "worktree", "add", "--detach", brokenPath, "master")
require.NoError(t, os.Chmod(brokenPath, 0))
require.NoError(t, os.Chtimes(brokenPath, time.Now(), time.Now().Add(-16*time.Minute)))
oldPath := filepath.Join(repoPath1, worktreePrefix, fmt.Sprintf("%s-3", rebaseWorktreePrefix))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath1, "worktree", "add", "--detach", oldPath, "master")
+ gittest.Exec(t, cfg, "-C", repoPath1, "worktree", "add", "--detach", oldPath, "master")
require.NoError(t, os.Chtimes(oldPath, time.Now(), time.Now().Add(-16*time.Minute)))
- repo2, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "second")
+ repo2, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "second")
t.Cleanup(cleanupFn)
testCases := []struct {
diff --git a/internal/gitaly/service/repository/remove_test.go b/internal/gitaly/service/repository/remove_test.go
index 29d70a33f..4894a9c70 100644
--- a/internal/gitaly/service/repository/remove_test.go
+++ b/internal/gitaly/service/repository/remove_test.go
@@ -17,7 +17,7 @@ func TestRemoveRepository(t *testing.T) {
_, err := client.RemoveRepository(ctx, &gitalypb.RemoveRepositoryRequest{Repository: repo})
require.NoError(t, err)
- testhelper.AssertPathNotExists(t, repoPath)
+ require.NoFileExists(t, repoPath)
}
func TestRemoveRepositoryDoesNotExist(t *testing.T) {
diff --git a/internal/gitaly/service/repository/rename_test.go b/internal/gitaly/service/repository/rename_test.go
index 89b6fbb49..d920afa18 100644
--- a/internal/gitaly/service/repository/rename_test.go
+++ b/internal/gitaly/service/repository/rename_test.go
@@ -37,10 +37,10 @@ func TestRenameRepositorySuccess(t *testing.T) {
func TestRenameRepositoryDestinationExists(t *testing.T) {
cfg, repo, _, client := setupRepositoryService(t)
- destinationRepo, destinationRepoPath, cleanupDestinationRepo := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "dst")
+ destinationRepo, destinationRepoPath, cleanupDestinationRepo := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "dst")
t.Cleanup(cleanupDestinationRepo)
- _, sha := gittest.CreateCommitOnNewBranch(t, cfg, destinationRepoPath)
+ sha := gittest.WriteCommit(t, cfg, destinationRepoPath)
req := &gitalypb.RenameRepositoryRequest{Repository: repo, RelativePath: destinationRepo.GetRelativePath()}
@@ -51,7 +51,7 @@ func TestRenameRepositoryDestinationExists(t *testing.T) {
testhelper.RequireGrpcError(t, err, codes.FailedPrecondition)
// ensure the git directory that already existed didn't get overwritten
- gittest.GitObjectMustExist(t, cfg.Git.BinPath, destinationRepoPath, sha)
+ gittest.GitObjectMustExist(t, cfg.Git.BinPath, destinationRepoPath, sha.String())
}
func TestRenameRepositoryInvalidRequest(t *testing.T) {
diff --git a/internal/gitaly/service/repository/repack.go b/internal/gitaly/service/repository/repack.go
index dc6776c63..fea3910d7 100644
--- a/internal/gitaly/service/repository/repack.go
+++ b/internal/gitaly/service/repository/repack.go
@@ -76,6 +76,10 @@ func (s *server) repackCommand(ctx context.Context, repo repository.GitRepo, bit
return status.Errorf(codes.Internal, err.Error())
}
+ if err = s.writeCommitGraph(ctx, repo, gitalypb.WriteCommitGraphRequest_SizeMultiple); err != nil {
+ return err
+ }
+
stats.LogObjectsInfo(ctx, s.gitCmdFactory, repo)
return nil
diff --git a/internal/gitaly/service/repository/repack_test.go b/internal/gitaly/service/repository/repack_test.go
index e49ee8d2b..a98cd1447 100644
--- a/internal/gitaly/service/repository/repack_test.go
+++ b/internal/gitaly/service/repository/repack_test.go
@@ -37,6 +37,11 @@ func TestRepackIncrementalSuccess(t *testing.T) {
// Entire `path`-folder gets updated so this is fine :D
assertModTimeAfter(t, testTime, packPath)
+
+ assert.FileExistsf(t,
+ filepath.Join(repoPath, CommitGraphChainRelPath),
+ "pre-computed commit-graph should exist after running incremental repack",
+ )
}
func TestRepackIncrementalCollectLogStatistics(t *testing.T) {
@@ -63,7 +68,7 @@ func TestRepackLocal(t *testing.T) {
altObjectsDir := "./alt-objects"
altDirsCommit := gittest.CreateCommitInAlternateObjectDirectory(t, cfg.Git.BinPath, repoPath, altObjectsDir, cmd)
- repoCommit := gittest.CreateCommit(t, cfg, repoPath, t.Name(), &gittest.CreateCommitOpts{Message: t.Name()})
+ repoCommit := gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch(t.Name()))
ctx, cancelFn := testhelper.Context()
defer cancelFn()
@@ -81,7 +86,7 @@ func TestRepackLocal(t *testing.T) {
require.NoError(t, err)
require.Len(t, packFiles, 1)
- packContents := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "verify-pack", "-v", packFiles[0])
+ packContents := gittest.Exec(t, cfg, "-C", repoPath, "verify-pack", "-v", packFiles[0])
require.NotContains(t, string(packContents), string(altDirsCommit))
require.Contains(t, string(packContents), repoCommit)
}
@@ -111,22 +116,23 @@ func TestRepackIncrementalFailure(t *testing.T) {
}
func TestRepackFullSuccess(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
+ cfg, client := setupRepositoryServiceWithoutRepo(t)
tests := []struct {
req *gitalypb.RepackFullRequest
desc string
}{
- {req: &gitalypb.RepackFullRequest{Repository: repo, CreateBitmap: true}, desc: "with bitmap"},
- {req: &gitalypb.RepackFullRequest{Repository: repo, CreateBitmap: false}, desc: "without bitmap"},
+ {req: &gitalypb.RepackFullRequest{CreateBitmap: true}, desc: "with bitmap"},
+ {req: &gitalypb.RepackFullRequest{CreateBitmap: false}, desc: "without bitmap"},
}
- packPath := filepath.Join(repoPath, "objects", "pack")
-
for _, test := range tests {
t.Run(test.desc, func(t *testing.T) {
+ var repoPath string
+ test.req.Repository, repoPath, _ = gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
// Reset mtime to a long while ago since some filesystems don't have sub-second
// precision on `mtime`.
+ packPath := filepath.Join(repoPath, "objects", "pack")
testhelper.MustRunCommand(t, nil, "touch", "-t", testTimeString, packPath)
testTime := time.Date(2006, 01, 02, 15, 04, 05, 0, time.UTC)
ctx, cancel := testhelper.Context()
@@ -152,6 +158,11 @@ func TestRepackFullSuccess(t *testing.T) {
t.Errorf("Bitmap found: %v", bmPath)
}
}
+
+ assert.FileExistsf(t,
+ filepath.Join(repoPath, CommitGraphChainRelPath),
+ "pre-computed commit-graph should exist after running full repack",
+ )
})
}
}
@@ -224,12 +235,12 @@ func TestRepackFullFailure(t *testing.T) {
}
func TestRepackFullDeltaIslands(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
+ cfg, repo, repoPath, client := setupRepositoryService(t)
ctx, cancel := testhelper.Context()
defer cancel()
- gittest.TestDeltaIslands(t, repoPath, func() error {
+ gittest.TestDeltaIslands(t, cfg, repoPath, func() error {
_, err := client.RepackFull(ctx, &gitalypb.RepackFullRequest{Repository: repo})
return err
})
diff --git a/internal/gitaly/service/repository/replicate.go b/internal/gitaly/service/repository/replicate.go
index 69b00de09..6d7354c5a 100644
--- a/internal/gitaly/service/repository/replicate.go
+++ b/internal/gitaly/service/repository/replicate.go
@@ -57,6 +57,7 @@ func (s *server) ReplicateRepository(ctx context.Context, in *gitalypb.Replicate
outgoingCtx := helper.IncomingToOutgoing(ctx)
syncFuncs := []func(context.Context, *gitalypb.ReplicateRepositoryRequest) error{
+ s.syncGitconfig,
s.syncInfoAttributes,
s.syncRepository,
}
@@ -209,7 +210,7 @@ func (s *server) syncRepository(ctx context.Context, in *gitalypb.ReplicateRepos
return nil
}
-func (s *server) syncInfoAttributes(ctx context.Context, in *gitalypb.ReplicateRepositoryRequest) error {
+func (s *server) syncGitconfig(ctx context.Context, in *gitalypb.ReplicateRepositoryRequest) error {
repoClient, err := s.newRepoClient(ctx, in.GetSource().GetStorageName())
if err != nil {
return err
@@ -220,18 +221,46 @@ func (s *server) syncInfoAttributes(ctx context.Context, in *gitalypb.ReplicateR
return err
}
- infoPath := filepath.Join(repoPath, "info")
- attributesPath := filepath.Join(infoPath, "attributes")
+ // At the point of implementing this, the `GetConfig` RPC hasn't been deployed yet and is
+ // thus not available for general use. In theory, we'd have to wait for this release cycle
+ // to finish, and only afterwards would we be able to implement replication of the
+ // gitconfig. In order to allow us to iterate fast, we just try to call `GetConfig()`, but
+ // ignore any errors for the case where the target Gitaly node doesn't support the RPC yet.
+ // TODO: Remove this hack and properly return the error in the next release cycle.
+ if err := func() error {
+ stream, err := repoClient.GetConfig(ctx, &gitalypb.GetConfigRequest{
+ Repository: in.GetSource(),
+ })
+ if err != nil {
+ return err
+ }
+
+ configPath := filepath.Join(repoPath, "config")
+ if err := writeFile(configPath, 0644, streamio.NewReader(func() ([]byte, error) {
+ resp, err := stream.Recv()
+ return resp.GetData(), err
+ })); err != nil {
+ return err
+ }
- if err := os.MkdirAll(infoPath, 0755); err != nil {
+ return nil
+ }(); err != nil {
+ ctxlogrus.Extract(ctx).WithError(err).Warn("synchronizing gitconfig failed")
+ }
+
+ return nil
+}
+
+func (s *server) syncInfoAttributes(ctx context.Context, in *gitalypb.ReplicateRepositoryRequest) error {
+ repoClient, err := s.newRepoClient(ctx, in.GetSource().GetStorageName())
+ if err != nil {
return err
}
- fw, err := safe.CreateFileWriter(attributesPath)
+ repoPath, err := s.locator.GetRepoPath(in.GetRepository())
if err != nil {
return err
}
- defer fw.Close()
stream, err := repoClient.GetInfoAttributes(ctx, &gitalypb.GetInfoAttributesRequest{
Repository: in.GetSource(),
@@ -240,22 +269,42 @@ func (s *server) syncInfoAttributes(ctx context.Context, in *gitalypb.ReplicateR
return err
}
- if _, err := io.Copy(fw, streamio.NewReader(func() ([]byte, error) {
+ attributesPath := filepath.Join(repoPath, "info", "attributes")
+ if err := writeFile(attributesPath, attributesFileMode, streamio.NewReader(func() ([]byte, error) {
resp, err := stream.Recv()
return resp.GetAttributes(), err
})); err != nil {
return err
}
+ return nil
+}
+
+func writeFile(path string, mode os.FileMode, reader io.Reader) error {
+ parentDir := filepath.Dir(path)
+ if err := os.MkdirAll(parentDir, 0755); err != nil {
+ return err
+ }
+
+ fw, err := safe.CreateFileWriter(path)
+ if err != nil {
+ return err
+ }
+ defer fw.Close()
+
+ if _, err := io.Copy(fw, reader); err != nil {
+ return err
+ }
+
if err = fw.Commit(); err != nil {
return err
}
- if err := os.Chmod(attributesPath, attributesFileMode); err != nil {
+ if err := os.Chmod(path, mode); err != nil {
return err
}
- return os.Rename(attributesPath, attributesPath)
+ return nil
}
// newRemoteClient creates a new RemoteClient that talks to the same gitaly server
diff --git a/internal/gitaly/service/repository/replicate_test.go b/internal/gitaly/service/repository/replicate_test.go
index 2c26c3fda..131392911 100644
--- a/internal/gitaly/service/repository/replicate_test.go
+++ b/internal/gitaly/service/repository/replicate_test.go
@@ -3,27 +3,23 @@ package repository
import (
"bytes"
"context"
- "fmt"
"io/ioutil"
- "net"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/backchannel"
- "gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/metadata"
- "google.golang.org/grpc/reflection"
)
func TestReplicateRepository(t *testing.T) {
@@ -38,19 +34,24 @@ func TestReplicateRepository(t *testing.T) {
client := newRepositoryClient(t, cfg, serverSocketPath)
- repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "source")
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "source")
t.Cleanup(cleanup)
// create a loose object to ensure snapshot replication is used
blobData, err := text.RandomHex(10)
require.NoError(t, err)
- blobID := text.ChompBytes(testhelper.MustRunCommand(t, bytes.NewBuffer([]byte(blobData)), "git", "-C", repoPath, "hash-object", "-w", "--stdin"))
+ blobID := text.ChompBytes(gittest.ExecStream(t, cfg, bytes.NewBuffer([]byte(blobData)), "-C", repoPath, "hash-object", "-w", "--stdin"))
// write info attributes
attrFilePath := filepath.Join(repoPath, "info", "attributes")
attrData := []byte("*.pbxproj binary\n")
require.NoError(t, ioutil.WriteFile(attrFilePath, attrData, 0644))
+ // Write a modified gitconfig
+ gittest.Exec(t, cfg, "-C", repoPath, "config", "please.replicate", "me")
+ configData := testhelper.MustReadFile(t, filepath.Join(repoPath, "config"))
+ require.Contains(t, string(configData), "[please]\n\treplicate = me\n")
+
targetRepo := *repo
targetRepo.StorageName = cfg.Storages[1].Name
@@ -66,27 +67,30 @@ func TestReplicateRepository(t *testing.T) {
require.NoError(t, err)
targetRepoPath := filepath.Join(cfg.Storages[1].Path, targetRepo.GetRelativePath())
- testhelper.MustRunCommand(t, nil, "git", "-C", targetRepoPath, "fsck")
+ gittest.Exec(t, cfg, "-C", targetRepoPath, "fsck")
replicatedAttrFilePath := filepath.Join(targetRepoPath, "info", "attributes")
- replicatedAttrData, err := ioutil.ReadFile(replicatedAttrFilePath)
- require.NoError(t, err)
+ replicatedAttrData := testhelper.MustReadFile(t, replicatedAttrFilePath)
require.Equal(t, string(attrData), string(replicatedAttrData), "info/attributes files must match")
+ replicatedConfigPath := filepath.Join(targetRepoPath, "config")
+ replicatedConfigData := testhelper.MustReadFile(t, replicatedConfigPath)
+ require.Equal(t, string(configData), string(replicatedConfigData), "config files must match")
+
// create another branch
- _, anotherNewBranch := gittest.CreateCommitOnNewBranch(t, cfg, repoPath)
+ gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch("branch"))
_, err = client.ReplicateRepository(injectedCtx, &gitalypb.ReplicateRepositoryRequest{
Repository: &targetRepo,
Source: repo,
})
require.NoError(t, err)
require.Equal(t,
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show-ref", "--hash", "--verify", fmt.Sprintf("refs/heads/%s", anotherNewBranch)),
- testhelper.MustRunCommand(t, nil, "git", "-C", targetRepoPath, "show-ref", "--hash", "--verify", fmt.Sprintf("refs/heads/%s", anotherNewBranch)),
+ gittest.Exec(t, cfg, "-C", repoPath, "show-ref", "--hash", "--verify", "refs/heads/branch"),
+ gittest.Exec(t, cfg, "-C", targetRepoPath, "show-ref", "--hash", "--verify", "refs/heads/branch"),
)
// if an unreachable object has been replicated, that means snapshot replication was used
- testhelper.MustRunCommand(t, nil, "git", "-C", targetRepoPath, "cat-file", "-p", blobID)
+ gittest.Exec(t, cfg, "-C", targetRepoPath, "cat-file", "-p", blobID)
}
func TestReplicateRepositoryInvalidArguments(t *testing.T) {
@@ -214,10 +218,10 @@ func TestReplicateRepository_BadRepository(t *testing.T) {
client := newRepositoryClient(t, cfg, serverSocketPath)
- sourceRepo, _, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "source")
+ sourceRepo, _, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "source")
t.Cleanup(cleanup)
- targetRepo, targetRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[1], sourceRepo.RelativePath)
+ targetRepo, targetRepoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[1], sourceRepo.RelativePath)
t.Cleanup(cleanup)
var invalidRepos []*gitalypb.Repository
@@ -255,7 +259,7 @@ func TestReplicateRepository_BadRepository(t *testing.T) {
}
require.NoError(t, err)
- testhelper.MustRunCommand(t, nil, "git", "-C", targetRepoPath, "fsck")
+ gittest.Exec(t, cfg, "-C", targetRepoPath, "fsck")
})
}
}
@@ -268,7 +272,7 @@ func TestReplicateRepository_FailedFetchInternalRemote(t *testing.T) {
locator := config.NewLocator(cfg)
- testRepo, _, cleanupRepo := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ testRepo, _, cleanupRepo := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanupRepo)
repoClient := newRepositoryClient(t, cfg, cfg.SocketPath)
@@ -296,23 +300,17 @@ func TestReplicateRepository_FailedFetchInternalRemote(t *testing.T) {
}
func runServerWithBadFetchInternalRemote(t *testing.T, cfg config.Cfg) string {
- server := testhelper.NewTestGrpcServer(t, nil, nil)
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
-
- listener, err := net.Listen("unix", serverSocketPath)
- require.NoError(t, err)
-
- internalListener, err := net.Listen("unix", cfg.GitalyInternalSocketPath())
- require.NoError(t, err)
-
- gitalypb.RegisterRepositoryServiceServer(server, NewServer(cfg, nil, config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), git.NewExecCommandFactory(cfg)))
- gitalypb.RegisterRemoteServiceServer(server, &mockRemoteServer{})
- reflection.Register(server)
-
- go server.Serve(listener)
- go server.Serve(internalListener)
- t.Cleanup(server.Stop)
- return "unix://" + serverSocketPath
+ return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterRepositoryServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterRemoteServiceServer(srv, &mockRemoteServer{})
+ })
}
type mockRemoteServer struct {
diff --git a/internal/gitaly/service/repository/repository.go b/internal/gitaly/service/repository/repository.go
index 6e0ac7b37..86daf08fb 100644
--- a/internal/gitaly/service/repository/repository.go
+++ b/internal/gitaly/service/repository/repository.go
@@ -3,7 +3,6 @@ package repository
import (
"context"
- "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -26,7 +25,7 @@ func (s *server) RepositoryExists(ctx context.Context, in *gitalypb.RepositoryEx
}
func (s *server) HasLocalBranches(ctx context.Context, in *gitalypb.HasLocalBranchesRequest) (*gitalypb.HasLocalBranchesResponse, error) {
- hasBranches, err := localrepo.New(s.gitCmdFactory, in.Repository, s.cfg).HasBranches(ctx)
+ hasBranches, err := s.localrepo(in.GetRepository()).HasBranches(ctx)
if err != nil {
return nil, helper.ErrInternal(err)
}
diff --git a/internal/gitaly/service/repository/repository_test.go b/internal/gitaly/service/repository/repository_test.go
index e68bda64b..003fc5576 100644
--- a/internal/gitaly/service/repository/repository_test.go
+++ b/internal/gitaly/service/repository/repository_test.go
@@ -23,7 +23,7 @@ func TestRepositoryExists(t *testing.T) {
serverSocketPath := runRepositoryServerWithConfig(t, cfg, nil, testserver.WithDisablePraefect())
- repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanupFn)
client := newRepositoryClient(t, cfg, serverSocketPath)
@@ -116,7 +116,7 @@ func TestSuccessfulHasLocalBranches(t *testing.T) {
emptyRepoName := "empty-repo.git"
emptyRepoPath := filepath.Join(cfg.Storages[0].Path, emptyRepoName)
- testhelper.MustRunCommand(t, nil, "git", "init", "--bare", emptyRepoPath)
+ gittest.Exec(t, cfg, "init", "--bare", emptyRepoPath)
defer os.RemoveAll(emptyRepoPath)
testCases := []struct {
diff --git a/internal/gitaly/service/repository/search_files_test.go b/internal/gitaly/service/repository/search_files_test.go
index 8a4e03986..7e04d9968 100644
--- a/internal/gitaly/service/repository/search_files_test.go
+++ b/internal/gitaly/service/repository/search_files_test.go
@@ -12,6 +12,8 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -150,7 +152,7 @@ func TestSearchFilesByContentLargeFile(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- _, repo, repoPath, client := setupRepositoryServiceWithWorktree(t)
+ cfg, repo, repoPath, client := setupRepositoryServiceWithWorktree(t)
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
@@ -178,8 +180,8 @@ func TestSearchFilesByContentLargeFile(t *testing.T) {
for _, largeFile := range largeFiles {
t.Run(largeFile.filename, func(t *testing.T) {
require.NoError(t, ioutil.WriteFile(filepath.Join(repoPath, largeFile.filename), bytes.Repeat([]byte(largeFile.line), largeFile.repeated), 0644))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "add", ".")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath,
+ gittest.Exec(t, cfg, "-C", repoPath, "add", ".")
+ gittest.Exec(t, cfg, "-C", repoPath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail), "commit", "-m", "large file commit", "--", largeFile.filename)
@@ -201,7 +203,16 @@ func TestSearchFilesByContentLargeFile(t *testing.T) {
func TestSearchFilesByContentFailure(t *testing.T) {
cfg, repo, _ := testcfg.BuildWithRepo(t)
- server := NewServer(cfg, nil, config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), git.NewExecCommandFactory(cfg))
+ gitCommandFactory := git.NewExecCommandFactory(cfg)
+
+ server := NewServer(
+ cfg,
+ nil,
+ config.NewLocator(cfg),
+ transaction.NewManager(cfg, backchannel.NewRegistry()),
+ gitCommandFactory,
+ catfile.NewCache(cfg),
+ )
testCases := []struct {
desc string
@@ -313,7 +324,16 @@ func TestSearchFilesByNameSuccessful(t *testing.T) {
func TestSearchFilesByNameFailure(t *testing.T) {
cfg := testcfg.Build(t)
- server := NewServer(cfg, nil, config.NewLocator(cfg), transaction.NewManager(cfg, backchannel.NewRegistry()), git.NewExecCommandFactory(cfg))
+ gitCommandFactory := git.NewExecCommandFactory(cfg)
+
+ server := NewServer(
+ cfg,
+ nil,
+ config.NewLocator(cfg),
+ transaction.NewManager(cfg, backchannel.NewRegistry()),
+ gitCommandFactory,
+ catfile.NewCache(cfg),
+ )
testCases := []struct {
desc string
diff --git a/internal/gitaly/service/repository/server.go b/internal/gitaly/service/repository/server.go
index b73e29047..5b93c6309 100644
--- a/internal/gitaly/service/repository/server.go
+++ b/internal/gitaly/service/repository/server.go
@@ -3,6 +3,9 @@ package repository
import (
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
@@ -19,6 +22,7 @@ type server struct {
cfg config.Cfg
binDir string
loggingCfg config.Logging
+ catfileCache catfile.Cache
}
// NewServer creates a new instance of a gRPC repo server
@@ -28,6 +32,7 @@ func NewServer(
locator storage.Locator,
txManager transaction.Manager,
gitCmdFactory git.CommandFactory,
+ catfileCache catfile.Cache,
) gitalypb.RepositoryServiceServer {
return &server{
ruby: rs,
@@ -38,8 +43,13 @@ func NewServer(
client.WithDialer(client.HealthCheckDialer(client.DialContext)),
client.WithDialOptions(client.FailOnNonTempDialError()...),
),
- cfg: cfg,
- binDir: cfg.BinDir,
- loggingCfg: cfg.Logging,
+ cfg: cfg,
+ binDir: cfg.BinDir,
+ loggingCfg: cfg.Logging,
+ catfileCache: catfileCache,
}
}
+
+func (s *server) localrepo(repo repository.GitRepo) *localrepo.Repo {
+ return localrepo.New(s.gitCmdFactory, s.catfileCache, repo, s.cfg)
+}
diff --git a/internal/gitaly/service/repository/snapshot_test.go b/internal/gitaly/service/repository/snapshot_test.go
index 6a4f8e372..6b5910f9a 100644
--- a/internal/gitaly/service/repository/snapshot_test.go
+++ b/internal/gitaly/service/repository/snapshot_test.go
@@ -17,6 +17,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/archive"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -54,8 +55,8 @@ func TestGetSnapshotSuccess(t *testing.T) {
cfg, repo, repoPath, client := setupRepositoryService(t)
// Ensure certain files exist in the test repo.
- // CreateCommit produces a loose object with the given sha
- sha := gittest.CreateCommit(t, cfg, repoPath, "master", nil)
+ // WriteCommit produces a loose object with the given sha
+ sha := gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch("master"))
zeroes := strings.Repeat("0", 40)
require.NoError(t, os.MkdirAll(filepath.Join(repoPath, "hooks"), 0755))
require.NoError(t, os.MkdirAll(filepath.Join(repoPath, "objects/pack"), 0755))
@@ -111,7 +112,8 @@ func TestGetSnapshotWithDedupe(t *testing.T) {
},
} {
t.Run(tc.desc, func(t *testing.T) {
- cfg, repo, repoPath, client := setupRepositoryServiceWithWorktree(t)
+ cfg, repoProto, repoPath, client := setupRepositoryServiceWithWorktree(t)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -128,16 +130,16 @@ func TestGetSnapshotWithDedupe(t *testing.T) {
originalAlternatesCommit := string(commitSha)
locator := config.NewLocator(cfg)
- gitCmdFactory := git.NewExecCommandFactory(cfg)
+ catfileCache := catfile.NewCache(cfg)
// ensure commit cannot be found in current repository
- c, err := catfile.New(ctx, gitCmdFactory, repo)
+ c, err := catfileCache.BatchProcess(ctx, repo)
require.NoError(t, err)
_, err = c.Info(ctx, git.Revision(originalAlternatesCommit))
require.True(t, catfile.IsNotFound(err))
// write alternates file to point to alt objects folder
- alternatesPath, err := locator.InfoAlternatesPath(repo)
+ alternatesPath, err := locator.InfoAlternatesPath(repoProto)
require.NoError(t, err)
require.NoError(t, ioutil.WriteFile(alternatesPath, []byte(filepath.Join(repoPath, ".git", fmt.Sprintf("%s\n", alternateObjDir))), 0644))
@@ -148,17 +150,17 @@ func TestGetSnapshotWithDedupe(t *testing.T) {
"commit", "--allow-empty", "-m", "Another empty commit")
commitSha = gittest.CreateCommitInAlternateObjectDirectory(t, cfg.Git.BinPath, repoPath, alternateObjDir, cmd)
- c, err = catfile.New(ctx, gitCmdFactory, repo)
+ c, err = catfileCache.BatchProcess(ctx, repo)
require.NoError(t, err)
_, err = c.Info(ctx, git.Revision(commitSha))
require.NoError(t, err)
- _, repoCopyPath, cleanupCopy := copyRepoUsingSnapshot(t, cfg, client, repo)
+ _, repoCopyPath, cleanupCopy := copyRepoUsingSnapshot(t, cfg, client, repoProto)
defer cleanupCopy()
// ensure the sha committed to the alternates directory can be accessed
- testhelper.MustRunCommand(t, nil, "git", "-C", repoCopyPath, "cat-file", "-p", originalAlternatesCommit)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoCopyPath, "fsck")
+ gittest.Exec(t, cfg, "-C", repoCopyPath, "cat-file", "-p", originalAlternatesCommit)
+ gittest.Exec(t, cfg, "-C", repoCopyPath, "fsck")
})
}
}
@@ -166,7 +168,7 @@ func TestGetSnapshotWithDedupe(t *testing.T) {
func TestGetSnapshotWithDedupeSoftFailures(t *testing.T) {
cfg, client := setupRepositoryServiceWithoutRepo(t)
- testRepo, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ testRepo, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer cleanup()
locator := config.NewLocator(cfg)
@@ -216,8 +218,8 @@ func TestGetSnapshotWithDedupeSoftFailures(t *testing.T) {
defer cleanupCopy()
// ensure the sha committed to the alternates directory can be accessed
- testhelper.MustRunCommand(t, nil, "git", "-C", repoCopyPath, "cat-file", "-p", originalAlternatesCommit)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoCopyPath, "fsck")
+ gittest.Exec(t, cfg, "-C", repoCopyPath, "cat-file", "-p", originalAlternatesCommit)
+ gittest.Exec(t, cfg, "-C", repoCopyPath, "fsck")
}
// copyRepoUsingSnapshot creates a tarball snapshot, then creates a new repository from that snapshot
@@ -232,7 +234,7 @@ func copyRepoUsingSnapshot(t *testing.T, cfg config.Cfg, client gitalypb.Reposit
srv := httptest.NewServer(&tarTesthandler{tarData: bytes.NewBuffer(data), secret: secret})
defer srv.Close()
- repoCopy, repoCopyPath, cleanupCopy := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ repoCopy, repoCopyPath, cleanupCopy := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
// Delete the repository so we can re-use the path
require.NoError(t, os.RemoveAll(repoCopyPath))
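The rewritten test above also shows the new catfile API: a batch process is no longer built directly with catfile.New(ctx, gitCmdFactory, repo) but is checked out of a process-wide cache. A short sketch under the same assumptions as the test, where cfg, ctx and a localrepo-style repo already exist and commitID is a placeholder object ID:

// Sketch: querying object metadata through a cached cat-file batch process.
catfileCache := catfile.NewCache(cfg)

batch, err := catfileCache.BatchProcess(ctx, repo)
require.NoError(t, err)

// When the object does not exist, the returned error satisfies catfile.IsNotFound.
_, err = batch.Info(ctx, git.Revision(commitID))
require.NoError(t, err)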
diff --git a/internal/gitaly/service/repository/squash_in_progress_test.go b/internal/gitaly/service/repository/squash_in_progress_test.go
index 28f3ed2ce..f7e10f49a 100644
--- a/internal/gitaly/service/repository/squash_in_progress_test.go
+++ b/internal/gitaly/service/repository/squash_in_progress_test.go
@@ -14,9 +14,9 @@ import (
func TestSuccessfulIsSquashInProgressRequest(t *testing.T) {
cfg, repo, repoPath, client := setupRepositoryService(t)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "worktree", "add", "--detach", filepath.Join(repoPath, worktreePrefix, "squash-1"), "master")
+ gittest.Exec(t, cfg, "-C", repoPath, "worktree", "add", "--detach", filepath.Join(repoPath, worktreePrefix, "squash-1"), "master")
- repoCopy, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "copy")
+ repoCopy, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "copy")
defer cleanupFn()
testCases := []struct {
diff --git a/internal/gitaly/service/repository/testhelper_test.go b/internal/gitaly/service/repository/testhelper_test.go
index eba69b705..3e02f6a71 100644
--- a/internal/gitaly/service/repository/testhelper_test.go
+++ b/internal/gitaly/service/repository/testhelper_test.go
@@ -59,6 +59,7 @@ func TestWithRubySidecar(t *testing.T) {
fs := []func(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server){
testCloneFromPoolHTTP,
testSetConfig,
+ testSetConfigTransactional,
testFetchRemoteFailure,
testFetchRemoteOverHTTP,
testSuccessfulFindLicenseRequest,
@@ -96,23 +97,24 @@ func setupRepositoryWithWorkingtreeServiceWithRuby(t testing.TB, cfg config.Cfg,
client, serverSocketPath := runRepositoryService(t, cfg, rubySrv)
cfg.SocketPath = serverSocketPath
- repo, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repo, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
return cfg, repo, repoPath, client
}
-func setupRepositoryServiceWithRuby(t testing.TB, cfg config.Cfg, rubySrv *rubyserver.Server) (config.Cfg, *gitalypb.Repository, string, gitalypb.RepositoryServiceClient) {
- client, serverSocketPath := runRepositoryService(t, cfg, rubySrv)
+func setupRepositoryServiceWithRuby(t testing.TB, cfg config.Cfg, rubySrv *rubyserver.Server, opts ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, gitalypb.RepositoryServiceClient) {
+ client, serverSocketPath := runRepositoryService(t, cfg, rubySrv, opts...)
cfg.SocketPath = serverSocketPath
- repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
return cfg, repo, repoPath, client
}
func assertModTimeAfter(t *testing.T, afterTime time.Time, paths ...string) bool {
+ t.Helper()
// NOTE: Since some filesystems don't have sub-second precision on `mtime`
// we're rounding the times to seconds
afterTime = afterTime.Round(time.Second)
@@ -129,12 +131,43 @@ func assertModTimeAfter(t *testing.T, afterTime time.Time, paths ...string) bool
func runRepositoryServerWithConfig(t testing.TB, cfg config.Cfg, rubySrv *rubyserver.Server, opts ...testserver.GitalyServerOpt) string {
return testserver.RunGitalyServer(t, cfg, rubySrv, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRepositoryServiceServer(srv, NewServer(cfg, deps.GetRubyServer(), deps.GetLocator(), deps.GetTxManager(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterRepositoryServiceServer(srv, NewServer(
+ cfg,
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(cfg, deps.GetHookManager(), deps.GetGitCmdFactory()))
- gitalypb.RegisterRemoteServiceServer(srv, remote.NewServer(cfg, rubySrv, deps.GetLocator(), deps.GetGitCmdFactory()))
- gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(cfg, deps.GetLocator(), deps.GetGitCmdFactory()))
- gitalypb.RegisterRefServiceServer(srv, ref.NewServer(cfg, deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetTxManager()))
- gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(cfg, deps.GetLocator(), deps.GetGitCmdFactory(), nil))
+ gitalypb.RegisterRemoteServiceServer(srv, remote.NewServer(
+ cfg,
+ rubySrv,
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetTxManager(),
+ ))
+ gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(
+ cfg,
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ ))
+ gitalypb.RegisterRefServiceServer(srv, ref.NewServer(
+ cfg,
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(
+ cfg,
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ nil,
+ deps.GetCatfileCache(),
+ ))
}, opts...)
}
@@ -147,7 +180,7 @@ func runRepositoryService(t testing.TB, cfg config.Cfg, rubySrv *rubyserver.Serv
func setupRepositoryService(t testing.TB, opts ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, gitalypb.RepositoryServiceClient) {
cfg, client := setupRepositoryServiceWithoutRepo(t, opts...)
- repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, cleanup := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
t.Cleanup(cleanup)
return cfg, repo, repoPath, client
}
@@ -167,7 +200,7 @@ func setupRepositoryServiceWithoutRepo(t testing.TB, opts ...testserver.GitalySe
func setupRepositoryServiceWithWorktree(t testing.TB) (config.Cfg, *gitalypb.Repository, string, gitalypb.RepositoryServiceClient) {
cfg, client := setupRepositoryServiceWithoutRepo(t)
- repo, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ repo, repoPath, cleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
t.Cleanup(cleanup)
return cfg, repo, repoPath, client
diff --git a/internal/gitaly/service/repository/write_ref.go b/internal/gitaly/service/repository/write_ref.go
index d765870a0..fb409d671 100644
--- a/internal/gitaly/service/repository/write_ref.go
+++ b/internal/gitaly/service/repository/write_ref.go
@@ -6,6 +6,7 @@ import (
"fmt"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/updateref"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/helper"
@@ -24,31 +25,28 @@ func (s *server) WriteRef(ctx context.Context, req *gitalypb.WriteRefRequest) (*
}
func (s *server) writeRef(ctx context.Context, req *gitalypb.WriteRefRequest) error {
+ repo := s.localrepo(req.GetRepository())
if string(req.Ref) == "HEAD" {
- return s.updateSymbolicRef(ctx, req)
+ return s.updateSymbolicRef(ctx, repo, req)
}
- return updateRef(ctx, s.cfg, s.gitCmdFactory, req)
+ return updateRef(ctx, s.cfg, repo, req)
}
-func (s *server) updateSymbolicRef(ctx context.Context, req *gitalypb.WriteRefRequest) error {
- cmd, err := s.gitCmdFactory.New(ctx, req.GetRepository(),
+func (s *server) updateSymbolicRef(ctx context.Context, repo *localrepo.Repo, req *gitalypb.WriteRefRequest) error {
+ if err := repo.ExecAndWait(ctx,
git.SubCmd{
Name: "symbolic-ref",
Args: []string{string(req.GetRef()), string(req.GetRevision())},
},
git.WithRefTxHook(ctx, req.GetRepository(), s.cfg),
- )
- if err != nil {
- return fmt.Errorf("error when creating symbolic-ref command: %v", err)
- }
- if err = cmd.Wait(); err != nil {
+ ); err != nil {
return fmt.Errorf("error when running symbolic-ref command: %v", err)
}
return nil
}
-func updateRef(ctx context.Context, cfg config.Cfg, gitCmdFactory git.CommandFactory, req *gitalypb.WriteRefRequest) error {
- u, err := updateref.New(ctx, cfg, gitCmdFactory, req.GetRepository())
+func updateRef(ctx context.Context, cfg config.Cfg, repo *localrepo.Repo, req *gitalypb.WriteRefRequest) error {
+ u, err := updateref.New(ctx, cfg, repo)
if err != nil {
return fmt.Errorf("error when creating new updater: %v", err)
}
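With the localrepo conversion above, one-shot git invocations in WriteRef go through repo.ExecAndWait instead of the create-then-Wait pair, and updateref.New receives the wrapped repository directly. A sketch of the symbolic-ref pattern, with placeholder ref and target values:

// Sketch: pointing HEAD at a branch via a localrepo.Repo.
if err := repo.ExecAndWait(ctx,
	git.SubCmd{
		Name: "symbolic-ref",
		Args: []string{"HEAD", "refs/heads/main"},
	},
); err != nil {
	return fmt.Errorf("error when running symbolic-ref command: %v", err)
}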
diff --git a/internal/gitaly/service/repository/write_ref_test.go b/internal/gitaly/service/repository/write_ref_test.go
index ddfbf0ac7..e538a29f0 100644
--- a/internal/gitaly/service/repository/write_ref_test.go
+++ b/internal/gitaly/service/repository/write_ref_test.go
@@ -6,13 +6,14 @@ import (
"testing"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
func TestWriteRefSuccessful(t *testing.T) {
- _, repo, repoPath, client := setupRepositoryService(t)
+ cfg, repo, repoPath, client := setupRepositoryService(t)
testCases := []struct {
desc string
@@ -61,7 +62,7 @@ func TestWriteRefSuccessful(t *testing.T) {
require.EqualValues(t, content, refRevision)
return
}
- rev := testhelper.MustRunCommand(t, nil, "git", "--git-dir", repoPath, "log", "--pretty=%H", "-1", string(tc.req.Ref))
+ rev := gittest.Exec(t, cfg, "--git-dir", repoPath, "log", "--pretty=%H", "-1", string(tc.req.Ref))
rev = bytes.Replace(rev, []byte("\n"), nil, 1)
diff --git a/internal/gitaly/service/server/disk_stats_test.go b/internal/gitaly/service/server/disk_stats_test.go
index 47c5167c7..a377636a8 100644
--- a/internal/gitaly/service/server/disk_stats_test.go
+++ b/internal/gitaly/service/server/disk_stats_test.go
@@ -17,11 +17,8 @@ func TestStorageDiskStatistics(t *testing.T) {
cfg.Storages = append(cfg.Storages, config.Storage{Name: "broken", Path: "/does/not/exist"})
- server, serverSocketPath := runServer(t, cfg)
- defer server.Stop()
-
- client, conn := newServerClient(t, serverSocketPath)
- defer conn.Close()
+ addr := runServer(t, cfg)
+ client := newServerClient(t, addr)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -53,7 +50,7 @@ func getSpaceStats(t *testing.T, path string) (available int64, used int64) {
require.NoError(t, err)
// Redundant conversions to handle differences between unix families
- available = int64(stats.Bavail) * int64(stats.Bsize) //nolint:unconvert
- used = (int64(stats.Blocks) - int64(stats.Bfree)) * int64(stats.Bsize) //nolint:unconvert
+ available = int64(stats.Bavail) * int64(stats.Bsize) //nolint:unconvert,nolintlint
+ used = (int64(stats.Blocks) - int64(stats.Bfree)) * int64(stats.Bsize) //nolint:unconvert,nolintlint
return
}
diff --git a/internal/gitaly/service/server/info_test.go b/internal/gitaly/service/server/info_test.go
index 8f8e84c64..4ca7293c0 100644
--- a/internal/gitaly/service/server/info_test.go
+++ b/internal/gitaly/service/server/info_test.go
@@ -1,23 +1,22 @@
package server
import (
- "net"
"testing"
"github.com/stretchr/testify/require"
gitalyauth "gitlab.com/gitlab-org/gitaly/auth"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- internalauth "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/auth"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/server/auth"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/auth"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/internal/version"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
- "google.golang.org/grpc/reflection"
)
func TestGitalyServerInfo(t *testing.T) {
@@ -25,11 +24,9 @@ func TestGitalyServerInfo(t *testing.T) {
cfg.Storages = append(cfg.Storages, config.Storage{Name: "broken", Path: "/does/not/exist"})
- server, serverSocketPath := runServer(t, cfg)
- defer server.Stop()
+ addr := runServer(t, cfg, testserver.WithDisablePraefect())
- client, conn := newServerClient(t, serverSocketPath)
- defer conn.Close()
+ client := newServerClient(t, addr)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -59,41 +56,20 @@ func TestGitalyServerInfo(t *testing.T) {
require.Equal(t, uint32(1), c.GetStorageStatuses()[1].ReplicationFactor)
}
-func runServer(t *testing.T, cfg config.Cfg) (*grpc.Server, string) {
- authConfig := internalauth.Config{Token: testhelper.RepositoryAuthToken}
- streamInt := []grpc.StreamServerInterceptor{auth.StreamServerInterceptor(authConfig)}
- unaryInt := []grpc.UnaryServerInterceptor{auth.UnaryServerInterceptor(authConfig)}
-
- server := testhelper.NewTestGrpcServer(t, streamInt, unaryInt)
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
-
- listener, err := net.Listen("unix", serverSocketPath)
- if err != nil {
- t.Fatal(err)
- }
- gitCmdFactory := git.NewExecCommandFactory(cfg)
- gitalypb.RegisterServerServiceServer(server, NewServer(gitCmdFactory, cfg.Storages))
- reflection.Register(server)
-
- go server.Serve(listener)
-
- return server, "unix://" + serverSocketPath
+func runServer(t *testing.T, cfg config.Cfg, opts ...testserver.GitalyServerOpt) string {
+ return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterServerServiceServer(srv, NewServer(deps.GetGitCmdFactory(), deps.GetCfg().Storages))
+ }, opts...)
}
func TestServerNoAuth(t *testing.T) {
- cfg := testcfg.Build(t)
-
- srv, path := runServer(t, cfg)
- defer srv.Stop()
+ cfg := testcfg.Build(t, testcfg.WithBase(config.Cfg{Auth: auth.Config{Token: "some"}}))
- connOpts := []grpc.DialOption{
- grpc.WithInsecure(),
- }
+ addr := runServer(t, cfg)
- conn, err := grpc.Dial(path, connOpts...)
- if err != nil {
- t.Fatal(err)
- }
+ conn, err := grpc.Dial(addr, grpc.WithInsecure())
+ require.NoError(t, err)
+ t.Cleanup(func() { testhelper.MustClose(t, conn) })
ctx, cancel := testhelper.Context()
defer cancel()
@@ -104,15 +80,14 @@ func TestServerNoAuth(t *testing.T) {
testhelper.RequireGrpcError(t, err, codes.Unauthenticated)
}
-func newServerClient(t *testing.T, serverSocketPath string) (gitalypb.ServerServiceClient, *grpc.ClientConn) {
+func newServerClient(t *testing.T, serverSocketPath string) gitalypb.ServerServiceClient {
connOpts := []grpc.DialOption{
grpc.WithInsecure(),
grpc.WithPerRPCCredentials(gitalyauth.RPCCredentialsV2(testhelper.RepositoryAuthToken)),
}
conn, err := grpc.Dial(serverSocketPath, connOpts...)
- if err != nil {
- t.Fatal(err)
- }
+ require.NoError(t, err)
+ t.Cleanup(func() { testhelper.MustClose(t, conn) })
- return gitalypb.NewServerServiceClient(conn), conn
+ return gitalypb.NewServerServiceClient(conn)
}
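After the rewrite above, a test only needs the address returned by runServer and the client from newServerClient; connection teardown is registered via t.Cleanup inside the helper. A sketch of the resulting flow, assuming the server service's standard ServerInfo RPC:

// Sketch: booting the test server and issuing a ServerInfo request.
addr := runServer(t, cfg)
client := newServerClient(t, addr)

ctx, cancel := testhelper.Context()
defer cancel()

resp, err := client.ServerInfo(ctx, &gitalypb.ServerInfoRequest{})
require.NoError(t, err)
require.NotEmpty(t, resp.GetServerVersion())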
diff --git a/internal/gitaly/service/server/storage_status_unix.go b/internal/gitaly/service/server/storage_status_unix.go
index ccce15e6e..da17ebcd8 100644
--- a/internal/gitaly/service/server/storage_status_unix.go
+++ b/internal/gitaly/service/server/storage_status_unix.go
@@ -16,8 +16,8 @@ func getStorageStatus(shard config.Storage) (*gitalypb.DiskStatisticsResponse_St
}
// Redundant conversions to handle differences between unix families
- available := int64(stats.Bavail) * int64(stats.Bsize) //nolint:unconvert
- used := (int64(stats.Blocks) - int64(stats.Bfree)) * int64(stats.Bsize) //nolint:unconvert
+ available := int64(stats.Bavail) * int64(stats.Bsize) //nolint:unconvert,nolintlint
+ used := (int64(stats.Blocks) - int64(stats.Bfree)) * int64(stats.Bsize) //nolint:unconvert,nolintlint
return &gitalypb.DiskStatisticsResponse_StorageStatus{
StorageName: shard.Name,
diff --git a/internal/gitaly/service/setup/register.go b/internal/gitaly/service/setup/register.go
index 2188bf97b..88adac4ff 100644
--- a/internal/gitaly/service/setup/register.go
+++ b/internal/gitaly/service/setup/register.go
@@ -53,31 +53,91 @@ var (
// RegisterAll will register all the known gRPC services on the provided gRPC service instance.
func RegisterAll(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterBlobServiceServer(srv, blob.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
- gitalypb.RegisterCleanupServiceServer(srv, cleanup.NewServer(deps.GetCfg(), deps.GetGitCmdFactory()))
- gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetLinguist()))
- gitalypb.RegisterDiffServiceServer(srv, diff.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterBlobServiceServer(srv, blob.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterCleanupServiceServer(srv, cleanup.NewServer(
+ deps.GetCfg(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetLinguist(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterDiffServiceServer(srv, diff.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterNamespaceServiceServer(srv, namespace.NewServer(deps.GetLocator()))
- gitalypb.RegisterOperationServiceServer(srv, operations.NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetHookManager(), deps.GetLocator(), deps.GetConnsPool(), deps.GetGitCmdFactory()))
- gitalypb.RegisterRefServiceServer(srv, ref.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetTxManager()))
- gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetTxManager(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterOperationServiceServer(srv, operations.NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetHookManager(),
+ deps.GetLocator(),
+ deps.GetConnsPool(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterRefServiceServer(srv, ref.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(
deps.GetCfg(),
deps.GetLocator(),
deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
ssh.WithPackfileNegotiationMetrics(sshPackfileNegotiationMetrics),
))
gitalypb.RegisterSmartHTTPServiceServer(srv, smarthttp.NewServer(
deps.GetCfg(),
deps.GetLocator(),
deps.GetGitCmdFactory(),
+ deps.GetDiskCache(),
smarthttp.WithPackfileNegotiationMetrics(smarthttpPackfileNegotiationMetrics),
))
gitalypb.RegisterWikiServiceServer(srv, wiki.NewServer(deps.GetRubyServer(), deps.GetLocator()))
- gitalypb.RegisterConflictsServiceServer(srv, conflicts.NewServer(deps.GetRubyServer(), deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
- gitalypb.RegisterRemoteServiceServer(srv, remote.NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterConflictsServiceServer(srv, conflicts.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
+ gitalypb.RegisterRemoteServiceServer(srv, remote.NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetTxManager(),
+ ))
gitalypb.RegisterServerServiceServer(srv, server.NewServer(deps.GetGitCmdFactory(), deps.GetCfg().Storages))
- gitalypb.RegisterObjectPoolServiceServer(srv, objectpool.NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterObjectPoolServiceServer(srv, objectpool.NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
gitalypb.RegisterInternalGitalyServer(srv, internalgitaly.NewServer(deps.GetCfg().Storages))
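The registrations above are the heart of this change: every service that reads objects now receives the process-wide catfile.Cache from the dependency container, and SmartHTTP additionally receives the disk cache. Outside the Dependencies wiring, the same constructors can be fed explicitly; a minimal sketch for the blob service, built only from calls that appear elsewhere in this diff:

// Sketch: wiring the blob service by hand instead of via service.Dependencies.
locator := config.NewLocator(cfg)
gitCmdFactory := git.NewExecCommandFactory(cfg)
catfileCache := catfile.NewCache(cfg)

gitalypb.RegisterBlobServiceServer(srv, blob.NewServer(cfg, locator, gitCmdFactory, catfileCache))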
diff --git a/internal/gitaly/service/smarthttp/inforefs_test.go b/internal/gitaly/service/smarthttp/inforefs_test.go
index da3d6e90f..336c94ac2 100644
--- a/internal/gitaly/service/smarthttp/inforefs_test.go
+++ b/internal/gitaly/service/smarthttp/inforefs_test.go
@@ -21,7 +21,6 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/objectpool"
"gitlab.com/gitlab-org/gitaly/internal/git/stats"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -179,7 +178,14 @@ func TestObjectPoolRefAdvertisementHiding(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- pool, err := objectpool.NewObjectPool(cfg, config.NewLocator(cfg), git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(
+ cfg,
+ config.NewLocator(cfg),
+ git.NewExecCommandFactory(cfg),
+ nil,
+ repo.GetStorageName(),
+ gittest.NewObjectPoolName(t),
+ )
require.NoError(t, err)
require.NoError(t, pool.Create(ctx, repo))
@@ -187,7 +193,7 @@ func TestObjectPoolRefAdvertisementHiding(t *testing.T) {
require.NoError(t, pool.Remove(ctx))
}()
- commitID := gittest.CreateCommit(t, cfg, pool.FullPath(), t.Name(), nil)
+ commitID := gittest.WriteCommit(t, cfg, pool.FullPath(), gittest.WithBranch(t.Name()))
require.NoError(t, pool.Link(ctx, repo))
@@ -284,7 +290,10 @@ func (ms mockStreamer) PutStream(ctx context.Context, repo *gitalypb.Repository,
func TestCacheInfoRefsUploadPack(t *testing.T) {
cfg, repo, _ := testcfg.BuildWithRepo(t)
- gitalyServer := startSmartHTTPServer(t, cfg)
+ locator := config.NewLocator(cfg)
+ cache := cache.New(cfg, locator)
+
+ gitalyServer := startSmartHTTPServer(t, cfg, withInfoRefCache(newInfoRefCache(cache)))
rpcRequest := &gitalypb.InfoRefsRequest{Repository: repo}
@@ -308,7 +317,7 @@ func TestCacheInfoRefsUploadPack(t *testing.T) {
}
assertNormalResponse(gitalyServer.Address())
- require.FileExists(t, pathToCachedResponse(t, ctx, config.NewLocator(cfg), rpcRequest))
+ require.FileExists(t, pathToCachedResponse(t, ctx, cache, rpcRequest))
replacedContents := []string{
"first line",
@@ -318,7 +327,7 @@ func TestCacheInfoRefsUploadPack(t *testing.T) {
}
// replace cached response file to prove the info-ref uses the cache
- replaceCachedResponse(t, ctx, cfg, rpcRequest, strings.Join(replacedContents, "\n"))
+ replaceCachedResponse(t, ctx, cache, rpcRequest, strings.Join(replacedContents, "\n"))
response, err := makeInfoRefsUploadPackRequest(ctx, t, gitalyServer.Address(), cfg.Auth.Token, rpcRequest)
require.NoError(t, err)
assertGitRefAdvertisement(t, "InfoRefsUploadPack", string(response),
@@ -326,7 +335,7 @@ func TestCacheInfoRefsUploadPack(t *testing.T) {
)
invalidateCacheForRepo := func() {
- ender, err := cache.NewLeaseKeyer(config.NewLocator(cfg)).StartLease(rpcRequest.Repository)
+ ender, err := cache.StartLease(rpcRequest.Repository)
require.NoError(t, err)
require.NoError(t, ender.EndLease(setInfoRefsUploadPackMethod(ctx)))
}
@@ -348,14 +357,14 @@ func TestCacheInfoRefsUploadPack(t *testing.T) {
_, err = makeInfoRefsUploadPackRequest(ctx, t, gitalyServer.Address(), cfg.Auth.Token, invalidReq)
testhelper.RequireGrpcError(t, err, codes.NotFound)
- testhelper.AssertPathNotExists(t, pathToCachedResponse(t, ctx, config.NewLocator(cfg), invalidReq))
+ require.NoFileExists(t, pathToCachedResponse(t, ctx, cache, invalidReq))
// if an error occurs while putting stream, it should not interrupt
// request from being served
happened := false
mockInfoRefCache := newInfoRefCache(mockStreamer{
- streamer: cache.NewStreamDB(cache.NewLeaseKeyer(config.NewLocator(cfg))),
+ streamer: cache,
putStream: func(context.Context, *gitalypb.Repository, proto.Message, io.Reader) error {
happened = true
return errors.New("oopsie")
@@ -383,8 +392,8 @@ func createInvalidRepo(t testing.TB, repoDir string) func() {
return func() { require.NoError(t, os.RemoveAll(repoDir)) }
}
-func replaceCachedResponse(t testing.TB, ctx context.Context, cfg config.Cfg, req *gitalypb.InfoRefsRequest, newContents string) {
- path := pathToCachedResponse(t, ctx, config.NewLocator(cfg), req)
+func replaceCachedResponse(t testing.TB, ctx context.Context, cache *cache.Cache, req *gitalypb.InfoRefsRequest, newContents string) {
+ path := pathToCachedResponse(t, ctx, cache, req)
require.NoError(t, ioutil.WriteFile(path, []byte(newContents), 0644))
}
@@ -392,9 +401,9 @@ func setInfoRefsUploadPackMethod(ctx context.Context) context.Context {
return testhelper.SetCtxGrpcMethod(ctx, "/gitaly.SmartHTTPService/InfoRefsUploadPack")
}
-func pathToCachedResponse(t testing.TB, ctx context.Context, locator storage.Locator, req *gitalypb.InfoRefsRequest) string {
+func pathToCachedResponse(t testing.TB, ctx context.Context, cache *cache.Cache, req *gitalypb.InfoRefsRequest) string {
ctx = setInfoRefsUploadPackMethod(ctx)
- path, err := cache.NewLeaseKeyer(locator).KeyPath(ctx, req.GetRepository(), req)
+ path, err := cache.KeyPath(ctx, req.GetRepository(), req)
require.NoError(t, err)
return path
}
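These tests exercise the consolidated cache type: cache.New(cfg, locator) returns a single *cache.Cache that serves both as the streamer handed to newInfoRefCache and as the keyer (KeyPath, StartLease), replacing the separate NewStreamDB and NewLeaseKeyer constructors. A sketch of invalidating a repository's cached responses, assuming ctx and a *gitalypb.Repository named repo are in scope:

// Sketch: bumping a repository's cache generation so stale entries are ignored.
diskCache := cache.New(cfg, config.NewLocator(cfg))

lease, err := diskCache.StartLease(repo)
require.NoError(t, err)
require.NoError(t, lease.EndLease(ctx))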
diff --git a/internal/gitaly/service/smarthttp/receive_pack_test.go b/internal/gitaly/service/smarthttp/receive_pack_test.go
index c0dd80465..68e7ae483 100644
--- a/internal/gitaly/service/smarthttp/receive_pack_test.go
+++ b/internal/gitaly/service/smarthttp/receive_pack_test.go
@@ -17,6 +17,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/hooks"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/hook"
@@ -24,10 +25,10 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
pconfig "gitlab.com/gitlab-org/gitaly/internal/praefect/config"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
"google.golang.org/grpc"
@@ -69,11 +70,9 @@ func TestSuccessfulReceivePackRequest(t *testing.T) {
require.Equal(t, expectedResponse, string(response), "Expected response to be %q, got %q", expectedResponse, response)
// The fact that this command succeeds means that we got the commit correctly; no further checks should be needed.
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show", push.newHead)
-
- envData, err := ioutil.ReadFile(hookOutputFile)
- require.NoError(t, err, "get git env data")
+ gittest.Exec(t, cfg, "-C", repoPath, "show", push.newHead)
+ envData := testhelper.MustReadFile(t, hookOutputFile)
payload, err := git.HooksPayloadFromEnv(strings.Split(string(envData), "\n"))
require.NoError(t, err)
@@ -103,6 +102,55 @@ func TestSuccessfulReceivePackRequest(t *testing.T) {
}, payload)
}
+func TestReceivePackHiddenRefs(t *testing.T) {
+ cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
+ repoProto.GlProjectPath = "project/path"
+
+ testhelper.ConfigureGitalyHooksBin(t, cfg)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+ oldHead, err := repo.ResolveRevision(ctx, "HEAD~")
+ require.NoError(t, err)
+ newHead, err := repo.ResolveRevision(ctx, "HEAD")
+ require.NoError(t, err)
+
+ serverSocketPath := runSmartHTTPServer(t, cfg)
+
+ client, conn := newSmartHTTPClient(t, serverSocketPath, cfg.Auth.Token)
+ defer conn.Close()
+
+ for _, ref := range []string{
+ "refs/environments/1",
+ "refs/merge-requests/1/head",
+ "refs/merge-requests/1/merge",
+ "refs/pipelines/1",
+ } {
+ t.Run(ref, func(t *testing.T) {
+ request := &bytes.Buffer{}
+ gittest.WritePktlineString(t, request, fmt.Sprintf("%s %s %s\x00 %s",
+ oldHead, newHead, ref, uploadPackCapabilities))
+ gittest.WritePktlineFlush(t, request)
+
+ // The options passed are the same ones used when doing an actual push.
+ revisions := strings.NewReader(fmt.Sprintf("^%s\n%s\n", oldHead, newHead))
+ pack := gittest.ExecStream(t, cfg, revisions, "-C", repoPath, "pack-objects", "--stdout", "--revs", "--thin", "--delta-base-offset", "-q")
+ request.Write(pack)
+
+ stream, err := client.PostReceivePack(ctx)
+ require.NoError(t, err)
+
+ response := doPush(t, stream, &gitalypb.PostReceivePackRequest{
+ Repository: repoProto, GlUsername: "user", GlId: "123", GlRepository: "project-456",
+ }, request)
+
+ require.Contains(t, string(response), fmt.Sprintf("%s deny updating a hidden ref", ref))
+ })
+ }
+}
+
func TestSuccessfulReceivePackRequestWithGitProtocol(t *testing.T) {
cfg, repo, repoPath := testcfg.BuildWithRepo(t)
@@ -129,7 +177,7 @@ func TestSuccessfulReceivePackRequestWithGitProtocol(t *testing.T) {
require.Equal(t, fmt.Sprintf("GIT_PROTOCOL=%s\n", git.ProtocolV2), envData)
// The fact that this command succeeds means that we got the commit correctly; no further checks should be needed.
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show", push.newHead)
+ gittest.Exec(t, cfg, "-C", repoPath, "show", push.newHead)
}
func TestFailedReceivePackRequestWithGitOpts(t *testing.T) {
@@ -216,10 +264,10 @@ type pushData struct {
}
func newTestPush(t *testing.T, cfg config.Cfg, fileContents []byte) *pushData {
- _, repoPath, localCleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ _, repoPath, localCleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer localCleanup()
- oldHead, newHead := createCommit(t, repoPath, fileContents)
+ oldHead, newHead := createCommit(t, cfg, repoPath, fileContents)
// ReceivePack request is a packet line followed by a packet flush, then the pack file of the objects we want to push.
// This is explained a bit in https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols#_uploading_data
@@ -241,7 +289,7 @@ func newTestPush(t *testing.T, cfg config.Cfg, fileContents []byte) *pushData {
stdin := strings.NewReader(fmt.Sprintf("^%s\n%s\n", oldHead, newHead))
// The options passed are the same ones used when doing an actual push.
- pack := testhelper.MustRunCommand(t, stdin, "git", "-C", repoPath, "pack-objects", "--stdout", "--revs", "--thin", "--delta-base-offset", "-q")
+ pack := gittest.ExecStream(t, cfg, stdin, "-C", repoPath, "pack-objects", "--stdout", "--revs", "--thin", "--delta-base-offset", "-q")
requestBuffer.Write(pack)
return &pushData{newHead: newHead, body: requestBuffer}
@@ -249,25 +297,25 @@ func newTestPush(t *testing.T, cfg config.Cfg, fileContents []byte) *pushData {
// createCommit creates a commit on HEAD with a file containing the
// specified contents.
-func createCommit(t *testing.T, repoPath string, fileContents []byte) (oldHead string, newHead string) {
+func createCommit(t *testing.T, cfg config.Cfg, repoPath string, fileContents []byte) (oldHead string, newHead string) {
commitMsg := fmt.Sprintf("Testing ReceivePack RPC around %d", time.Now().Unix())
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
// The latest commit ID on the remote repo
- oldHead = text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "master"))
+ oldHead = text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "master"))
changedFile := "README.md"
require.NoError(t, ioutil.WriteFile(filepath.Join(repoPath, changedFile), fileContents, 0644))
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "add", changedFile)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath,
+ gittest.Exec(t, cfg, "-C", repoPath, "add", changedFile)
+ gittest.Exec(t, cfg, "-C", repoPath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "-m", commitMsg)
// The commit ID we want to push to the remote repo
- newHead = text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "master"))
+ newHead = text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "master"))
return oldHead, newHead
}
@@ -306,11 +354,11 @@ func TestFailedReceivePackRequestDueToValidationError(t *testing.T) {
func TestInvalidTimezone(t *testing.T) {
cfg, repo, repoPath := testcfg.BuildWithRepo(t)
- _, localRepoPath, localCleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ _, localRepoPath, localCleanup := gittest.CloneRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer localCleanup()
- head := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "rev-parse", "HEAD"))
- tree := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "rev-parse", "HEAD^{tree}"))
+ head := text.ChompBytes(gittest.Exec(t, cfg, "-C", localRepoPath, "rev-parse", "HEAD"))
+ tree := text.ChompBytes(gittest.Exec(t, cfg, "-C", localRepoPath, "rev-parse", "HEAD^{tree}"))
buf := new(bytes.Buffer)
buf.WriteString("tree " + tree + "\n")
@@ -319,10 +367,10 @@ func TestInvalidTimezone(t *testing.T) {
buf.WriteString("committer Au Thor <author@example.com> 1313584730 +051800\n")
buf.WriteString("\n")
buf.WriteString("Commit message\n")
- commit := text.ChompBytes(testhelper.MustRunCommand(t, buf, "git", "-C", localRepoPath, "hash-object", "-t", "commit", "--stdin", "-w"))
+ commit := text.ChompBytes(gittest.ExecStream(t, cfg, buf, "-C", localRepoPath, "hash-object", "-t", "commit", "--stdin", "-w"))
stdin := strings.NewReader(fmt.Sprintf("^%s\n%s\n", head, commit))
- pack := testhelper.MustRunCommand(t, stdin, "git", "-C", localRepoPath, "pack-objects", "--stdout", "--revs", "--thin", "--delta-base-offset", "-q")
+ pack := gittest.ExecStream(t, cfg, stdin, "-C", localRepoPath, "pack-objects", "--stdout", "--revs", "--thin", "--delta-base-offset", "-q")
pkt := fmt.Sprintf("%s %s refs/heads/master\x00 %s", head, commit, "report-status side-band-64k agent=git/2.12.0")
body := &bytes.Buffer{}
@@ -344,16 +392,58 @@ func TestInvalidTimezone(t *testing.T) {
stream, err := client.PostReceivePack(ctx)
require.NoError(t, err)
firstRequest := &gitalypb.PostReceivePackRequest{
- Repository: repo,
- GlId: "user-123",
- GlRepository: "project-456",
- GitConfigOptions: []string{"receive.fsckObjects=true"},
+ Repository: repo,
+ GlId: "user-123",
+ GlRepository: "project-456",
}
response := doPush(t, stream, firstRequest, body)
expectedResponse := "0030\x01000eunpack ok\n0019ok refs/heads/master\n00000000"
require.Equal(t, expectedResponse, string(response), "Expected response to be %q, got %q", expectedResponse, response)
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "show", commit)
+ gittest.Exec(t, cfg, "-C", repoPath, "show", commit)
+}
+
+func TestReceivePackFsck(t *testing.T) {
+ cfg, repo, repoPath := testcfg.BuildWithRepo(t)
+
+ testhelper.ConfigureGitalyHooksBin(t, cfg)
+
+ head := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "HEAD"))
+
+ // We're creating a new commit which has a root tree with duplicate entries. git-mktree(1)
+ // allows us to create these trees just fine, but git-fsck(1) complains.
+ commit := gittest.WriteCommit(t, cfg, repoPath,
+ gittest.WithTreeEntries(
+ gittest.TreeEntry{OID: "4b825dc642cb6eb9a060e54bf8d69288fbee4904", Path: "dup", Mode: "040000"},
+ gittest.TreeEntry{OID: "4b825dc642cb6eb9a060e54bf8d69288fbee4904", Path: "dup", Mode: "040000"},
+ ),
+ )
+
+ stdin := strings.NewReader(fmt.Sprintf("^%s\n%s\n", head, commit))
+ pack := gittest.ExecStream(t, cfg, stdin, "-C", repoPath, "pack-objects", "--stdout", "--revs", "--thin", "--delta-base-offset", "-q")
+
+ var body bytes.Buffer
+ gittest.WritePktlineString(t, &body, fmt.Sprintf("%s %s refs/heads/master\x00 %s", head, commit, "report-status side-band-64k agent=git/2.12.0"))
+ gittest.WritePktlineFlush(t, &body)
+ _, err := body.Write(pack)
+ require.NoError(t, err)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ client, conn := newSmartHTTPClient(t, runSmartHTTPServer(t, cfg), cfg.Auth.Token)
+ defer conn.Close()
+
+ stream, err := client.PostReceivePack(ctx)
+ require.NoError(t, err)
+
+ response := doPush(t, stream, &gitalypb.PostReceivePackRequest{
+ Repository: repo,
+ GlId: "user-123",
+ GlRepository: "project-456",
+ }, &body)
+
+ require.Contains(t, string(response), "duplicateEntries: contains duplicate file entries")
}
func drainPostReceivePackResponse(stream gitalypb.SmartHTTPService_PostReceivePackClient) error {
@@ -383,7 +473,7 @@ func TestPostReceivePackToHooks(t *testing.T) {
push := newTestPush(t, cfg, nil)
testRepoPath := filepath.Join(cfg.Storages[0].Path, repo.RelativePath)
- oldHead := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "rev-parse", "HEAD"))
+ oldHead := text.ChompBytes(gittest.Exec(t, cfg, "-C", testRepoPath, "rev-parse", "HEAD"))
changes := fmt.Sprintf("%s %s refs/heads/master\n", oldHead, push.newHead)
@@ -492,12 +582,6 @@ func (t *testTransactionServer) VoteTransaction(ctx context.Context, in *gitalyp
}
func TestPostReceiveWithReferenceTransactionHook(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.BackchannelVoting,
- }).Run(t, testPostReceiveWithReferenceTransactionHook)
-}
-
-func testPostReceiveWithReferenceTransactionHook(t *testing.T, ctx context.Context) {
cfg := testcfg.Build(t)
testhelper.ConfigureGitalyHooksBin(t, cfg)
@@ -505,23 +589,28 @@ func testPostReceiveWithReferenceTransactionHook(t *testing.T, ctx context.Conte
refTransactionServer := &testTransactionServer{}
addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterSmartHTTPServiceServer(srv, NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterSmartHTTPServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetDiskCache(),
+ ))
gitalypb.RegisterHookServiceServer(srv, hook.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
- if featureflag.IsDisabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, refTransactionServer)
- }
}, testserver.WithDisablePraefect())
// Because we don't have a Praefect server set up, we instead hook up the
// RefTransaction server for Gitaly itself. As this is the only Praefect
// service required in this context, we can just pretend that
// Gitaly is the Praefect server and inject it.
- praefectServer, err := metadata.PraefectFromConfig(pconfig.Config{
+ praefectServer, err := txinfo.PraefectFromConfig(pconfig.Config{
SocketPath: addr,
})
require.NoError(t, err)
- ctx, err = metadata.InjectTransaction(ctx, 1234, "primary", true)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ ctx, err = txinfo.InjectTransaction(ctx, 1234, "primary", true)
require.NoError(t, err)
ctx, err = praefectServer.Inject(ctx)
require.NoError(t, err)
@@ -530,10 +619,7 @@ func testPostReceiveWithReferenceTransactionHook(t *testing.T, ctx context.Conte
client := newMuxedSmartHTTPClient(t, ctx, addr, cfg.Auth.Token, func() backchannel.Server {
srv := grpc.NewServer()
- if featureflag.IsEnabled(ctx, featureflag.BackchannelVoting) {
- gitalypb.RegisterRefTransactionServer(srv, refTransactionServer)
- }
-
+ gitalypb.RegisterRefTransactionServer(srv, refTransactionServer)
return srv
})
@@ -541,14 +627,14 @@ func testPostReceiveWithReferenceTransactionHook(t *testing.T, ctx context.Conte
stream, err := client.PostReceivePack(ctx)
require.NoError(t, err)
- repo, _, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, _, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
request := &gitalypb.PostReceivePackRequest{Repository: repo, GlId: "key-1234", GlRepository: "some_repo"}
response := doPush(t, stream, request, newTestPush(t, cfg, nil).body)
expectedResponse := "0049\x01000eunpack ok\n0019ok refs/heads/master\n0019ok refs/heads/branch\n00000000"
require.Equal(t, expectedResponse, string(response), "Expected response to be %q, got %q", expectedResponse, response)
- require.Equal(t, 2, refTransactionServer.called)
+ require.Equal(t, 4, refTransactionServer.called)
})
t.Run("delete", func(t *testing.T) {
@@ -557,15 +643,15 @@ func testPostReceiveWithReferenceTransactionHook(t *testing.T, ctx context.Conte
stream, err := client.PostReceivePack(ctx)
require.NoError(t, err)
- repo, repoPath, _ := gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ repo, repoPath, _ := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
// Create a new branch which we're about to delete. We also pack references because
// this used to generate two transactions: one for the packed-refs file and one for
// the loose ref. We only expect a single transaction though, given that the
// packed-refs transaction should get filtered out.
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "branch", "delete-me")
- testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "pack-refs", "--all")
- branchOID := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "refs/heads/delete-me"))
+ gittest.Exec(t, cfg, "-C", repoPath, "branch", "delete-me")
+ gittest.Exec(t, cfg, "-C", repoPath, "pack-refs", "--all")
+ branchOID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "refs/heads/delete-me"))
uploadPackData := &bytes.Buffer{}
gittest.WritePktlineString(t, uploadPackData, fmt.Sprintf("%s %s refs/heads/delete-me\x00 %s", branchOID, git.ZeroOID.String(), uploadPackCapabilities))
@@ -576,6 +662,6 @@ func testPostReceiveWithReferenceTransactionHook(t *testing.T, ctx context.Conte
expectedResponse := "0033\x01000eunpack ok\n001cok refs/heads/delete-me\n00000000"
require.Equal(t, expectedResponse, string(response), "Expected response to be %q, got %q", expectedResponse, response)
- require.Equal(t, 1, refTransactionServer.called)
+ require.Equal(t, 2, refTransactionServer.called)
})
}
diff --git a/internal/gitaly/service/smarthttp/server.go b/internal/gitaly/service/smarthttp/server.go
index 443dda253..3fdf05612 100644
--- a/internal/gitaly/service/smarthttp/server.go
+++ b/internal/gitaly/service/smarthttp/server.go
@@ -18,7 +18,8 @@ type server struct {
}
// NewServer creates a new instance of a grpc SmartHTTPServer
-func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, serverOpts ...ServerOpt) gitalypb.SmartHTTPServiceServer {
+func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory,
+ cache *cache.Cache, serverOpts ...ServerOpt) gitalypb.SmartHTTPServiceServer {
s := &server{
cfg: cfg,
locator: locator,
@@ -27,7 +28,7 @@ func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.Comman
prometheus.CounterOpts{},
[]string{"git_negotiation_feature"},
),
- infoRefCache: newInfoRefCache(cache.NewStreamDB(cache.NewLeaseKeyer(locator))),
+ infoRefCache: newInfoRefCache(cache),
}
for _, serverOpt := range serverOpts {
diff --git a/internal/gitaly/service/smarthttp/testhelper_test.go b/internal/gitaly/service/smarthttp/testhelper_test.go
index 624a661c5..e333999ce 100644
--- a/internal/gitaly/service/smarthttp/testhelper_test.go
+++ b/internal/gitaly/service/smarthttp/testhelper_test.go
@@ -37,7 +37,13 @@ func testMain(m *testing.M) int {
func startSmartHTTPServer(t *testing.T, cfg config.Cfg, serverOpts ...ServerOpt) testserver.GitalyServer {
return testserver.StartGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterSmartHTTPServiceServer(srv, NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), serverOpts...))
+ gitalypb.RegisterSmartHTTPServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetDiskCache(),
+ serverOpts...,
+ ))
gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
})
}
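Both helpers above now pass the disk cache into the SmartHTTP constructor, matching the NewServer signature changed in server.go earlier in this diff. A direct construction outside the test helpers would look roughly like this, assuming cfg, locator, gitCmdFactory and a grpc.Server named srv are already built:

// Sketch: constructing the SmartHTTP service with an explicit disk cache.
diskCache := cache.New(cfg, locator)
gitalypb.RegisterSmartHTTPServiceServer(srv, NewServer(cfg, locator, gitCmdFactory, diskCache))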
diff --git a/internal/gitaly/service/smarthttp/upload_pack.go b/internal/gitaly/service/smarthttp/upload_pack.go
index 1d77d5ca8..5b5ad5171 100644
--- a/internal/gitaly/service/smarthttp/upload_pack.go
+++ b/internal/gitaly/service/smarthttp/upload_pack.go
@@ -1,6 +1,7 @@
package smarthttp
import (
+ "context"
"crypto/sha1"
"fmt"
"io"
@@ -36,22 +37,8 @@ func (s *server) PostUploadPack(stream gitalypb.SmartHTTPService_PostUploadPackS
return resp.GetData(), err
}), h)
- pr, pw := io.Pipe()
- defer pw.Close()
- stdin := io.TeeReader(stdinReader, pw)
- statsCh := make(chan stats.PackfileNegotiation, 1)
- go func() {
- defer close(statsCh)
-
- stats, err := stats.ParsePackfileNegotiation(pr)
- if err != nil {
- ctxlogrus.Extract(stream.Context()).WithError(err).Debug("failed parsing packfile negotiation")
- return
- }
- stats.UpdateMetrics(s.packfileNegotiationMetrics)
-
- statsCh <- stats
- }()
+ stdin, collector := s.runStatsCollector(stream.Context(), stdinReader)
+ defer collector.finish()
var respBytes int64
@@ -96,8 +83,7 @@ func (s *server) PostUploadPack(stream gitalypb.SmartHTTPService_PostUploadPackS
}
if err := cmd.Wait(); err != nil {
- pw.Close() // ensure PackfileNegotiation parser returns
- stats := <-statsCh
+ stats := collector.finish()
if _, ok := command.ExitStatus(err); ok && stats.Deepen != "" {
// We have seen a 'deepen' message in the request. It is expected that
@@ -109,9 +95,6 @@ func (s *server) PostUploadPack(stream gitalypb.SmartHTTPService_PostUploadPackS
return status.Errorf(codes.Unavailable, "PostUploadPack: %v", err)
}
- pw.Close() // Ensure PackfileNegotiation parser returns
- <-statsCh // Wait for the packfile negotiation parser to finish.
-
ctxlogrus.Extract(ctx).WithField("request_sha", fmt.Sprintf("%x", h.Sum(nil))).WithField("response_bytes", respBytes).Info("request details")
return nil
@@ -124,3 +107,36 @@ func validateUploadPackRequest(req *gitalypb.PostUploadPackRequest) error {
return nil
}
+
+type statsCollector struct {
+ c io.Closer
+ statsCh chan stats.PackfileNegotiation
+}
+
+func (sc *statsCollector) finish() stats.PackfileNegotiation {
+ sc.c.Close()
+ return <-sc.statsCh
+}
+
+func (s *server) runStatsCollector(ctx context.Context, r io.Reader) (io.Reader, *statsCollector) {
+ pr, pw := io.Pipe()
+ sc := &statsCollector{
+ c: pw,
+ statsCh: make(chan stats.PackfileNegotiation, 1),
+ }
+
+ go func() {
+ defer close(sc.statsCh)
+
+ stats, err := stats.ParsePackfileNegotiation(pr)
+ if err != nil {
+ ctxlogrus.Extract(ctx).WithError(err).Debug("failed parsing packfile negotiation")
+ return
+ }
+ stats.UpdateMetrics(s.packfileNegotiationMetrics)
+
+ sc.statsCh <- stats
+ }()
+
+ return io.TeeReader(r, pw), sc
+}
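The extraction above replaces the inline pipe-and-goroutine with a small statsCollector: runStatsCollector tees everything read from the request into the negotiation parser, and finish() closes the write end of the pipe and waits for the parsed stats. The call shape inside the handler reduces to the sketch below, with command execution elided:

// Sketch: collecting packfile negotiation stats while streaming the request to git.
stdin, collector := s.runStatsCollector(stream.Context(), stdinReader)
defer collector.finish()

// ... run git-upload-pack(1), feeding it stdin ...

if err := cmd.Wait(); err != nil {
	// The collected stats reveal whether the client sent a "deepen" line,
	// in which case git closing the connection early is expected.
	stats := collector.finish()
	if _, ok := command.ExitStatus(err); ok && stats.Deepen != "" {
		return nil
	}
	return status.Errorf(codes.Unavailable, "PostUploadPack: %v", err)
}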
diff --git a/internal/gitaly/service/smarthttp/upload_pack_test.go b/internal/gitaly/service/smarthttp/upload_pack_test.go
index 21aca461d..b9c6d73ed 100644
--- a/internal/gitaly/service/smarthttp/upload_pack_test.go
+++ b/internal/gitaly/service/smarthttp/upload_pack_test.go
@@ -47,9 +47,9 @@ func TestSuccessfulUploadPackRequest(t *testing.T) {
localRepoPath := filepath.Join(storagePath, localRepoRelativePath)
testRepoPath := filepath.Join(storagePath, repo.RelativePath)
// Make a non-bare clone of the test repo to act as a remote one
- testhelper.MustRunCommand(t, nil, "git", "clone", testRepoPath, remoteRepoPath)
+ gittest.Exec(t, cfg, "clone", testRepoPath, remoteRepoPath)
// Make a bare clone of the test repo to act as a local one and to leave the original repo intact for other tests
- testhelper.MustRunCommand(t, nil, "git", "clone", "--bare", testRepoPath, localRepoPath)
+ gittest.Exec(t, cfg, "clone", "--bare", testRepoPath, localRepoPath)
defer os.RemoveAll(localRepoPath)
defer os.RemoveAll(remoteRepoPath)
@@ -58,15 +58,15 @@ func TestSuccessfulUploadPackRequest(t *testing.T) {
committerEmail := "scrooge@mcduck.com"
// The latest commit ID on the local repo
- oldHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "rev-parse", "master"))
+ oldHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", remoteRepoPath, "rev-parse", "master"))
- testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath,
+ gittest.Exec(t, cfg, "-C", remoteRepoPath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--allow-empty", "-m", commitMsg)
// The commit ID we want to pull from the remote repo
- newHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "rev-parse", "master"))
+ newHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", remoteRepoPath, "rev-parse", "master"))
// UploadPack request is a "want" packet line followed by a packet flush, then many "have" packets followed by a packet flush.
// This is explained a bit in https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols#_downloading_data
@@ -90,10 +90,10 @@ func TestSuccessfulUploadPackRequest(t *testing.T) {
pack, version, entries := extractPackDataFromResponse(t, responseBuffer)
require.NotNil(t, pack, "Expected to find a pack file in response, found none")
- testhelper.MustRunCommand(t, bytes.NewReader(pack), "git", "-C", localRepoPath, "unpack-objects", fmt.Sprintf("--pack_header=%d,%d", version, entries))
+ gittest.ExecStream(t, cfg, bytes.NewReader(pack), "-C", localRepoPath, "unpack-objects", fmt.Sprintf("--pack_header=%d,%d", version, entries))
// The fact that this command succeeds means that we got the commit correctly; no further checks should be needed.
- testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "show", string(newHead))
+ gittest.Exec(t, cfg, "-C", localRepoPath, "show", string(newHead))
metric, err := negotiationMetrics.GetMetricWithLabelValues("have")
require.NoError(t, err)
@@ -115,18 +115,18 @@ func TestUploadPackRequestWithGitConfigOptions(t *testing.T) {
testRepoPath := filepath.Join(storagePath, repo.RelativePath)
// Make a clone of the test repo to modify
- testhelper.MustRunCommand(t, nil, "git", "clone", "--bare", testRepoPath, ourRepoPath)
+ gittest.Exec(t, cfg, "clone", "--bare", testRepoPath, ourRepoPath)
defer os.RemoveAll(ourRepoPath)
// Remove remote-tracking branches that get in the way for this test
- testhelper.MustRunCommand(t, nil, "git", "-C", ourRepoPath, "remote", "remove", "origin")
+ gittest.Exec(t, cfg, "-C", ourRepoPath, "remote", "remove", "origin")
// Turn the csv branch into a hidden ref
- want := string(bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", ourRepoPath, "rev-parse", "refs/heads/csv")))
- testhelper.MustRunCommand(t, nil, "git", "-C", ourRepoPath, "update-ref", "refs/hidden/csv", want)
- testhelper.MustRunCommand(t, nil, "git", "-C", ourRepoPath, "update-ref", "-d", "refs/heads/csv")
+ want := string(bytes.TrimSpace(gittest.Exec(t, cfg, "-C", ourRepoPath, "rev-parse", "refs/heads/csv")))
+ gittest.Exec(t, cfg, "-C", ourRepoPath, "update-ref", "refs/hidden/csv", want)
+ gittest.Exec(t, cfg, "-C", ourRepoPath, "update-ref", "-d", "refs/heads/csv")
- have := string(bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", ourRepoPath, "rev-parse", want+"~1")))
+ have := string(bytes.TrimSpace(gittest.Exec(t, cfg, "-C", ourRepoPath, "rev-parse", want+"~1")))
requestBody := &bytes.Buffer{}
requestBodyCopy := &bytes.Buffer{}
@@ -244,8 +244,8 @@ func TestUploadPackWithPackObjectsHook(t *testing.T) {
serverSocketPath := runSmartHTTPServer(t, cfg)
- oldHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "master~"))
- newHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "master"))
+ oldHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "master~"))
+ newHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "master"))
requestBuffer := &bytes.Buffer{}
gittest.WritePktlineString(t, requestBuffer, fmt.Sprintf("want %s %s\n", newHead, clientCapabilities))
@@ -371,9 +371,9 @@ func TestUploadPackRequestForPartialCloneSuccess(t *testing.T) {
remoteRepoPath := filepath.Join(storagePath, remoteRepoRelativePath)
localRepoPath := filepath.Join(storagePath, localRepoRelativePath)
// Make a non-bare clone of the test repo to act as a remote one
- testhelper.MustRunCommand(t, nil, "git", "clone", repoPath, remoteRepoPath)
+ gittest.Exec(t, cfg, "clone", repoPath, remoteRepoPath)
// Make a bare clone of the test repo to act as a local one and to leave the original repo intact for other tests
- testhelper.MustRunCommand(t, nil, "git", "init", "--bare", localRepoPath)
+ gittest.Exec(t, cfg, "init", "--bare", localRepoPath)
defer os.RemoveAll(localRepoPath)
defer os.RemoveAll(remoteRepoPath)
@@ -382,13 +382,13 @@ func TestUploadPackRequestForPartialCloneSuccess(t *testing.T) {
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
- testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath,
+ gittest.Exec(t, cfg, "-C", remoteRepoPath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--allow-empty", "-m", commitMsg)
// The commit ID we want to pull from the remote repo
- newHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "rev-parse", "master"))
+ newHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", remoteRepoPath, "rev-parse", "master"))
// The commit ID we want to pull from the remote repo
// UploadPack request is a "want" packet line followed by a packet flush, then many "have" packets followed by a packet flush.
@@ -414,7 +414,7 @@ func TestUploadPackRequestForPartialCloneSuccess(t *testing.T) {
pack, version, entries := extractPackDataFromResponse(t, responseBuffer)
require.NotNil(t, pack, "Expected to find a pack file in response, found none")
- testhelper.MustRunCommand(t, bytes.NewReader(pack), "git", "-C", localRepoPath, "unpack-objects", fmt.Sprintf("--pack_header=%d,%d", version, entries))
+ gittest.ExecStream(t, cfg, bytes.NewReader(pack), "-C", localRepoPath, "unpack-objects", fmt.Sprintf("--pack_header=%d,%d", version, entries))
// a4a132b1b0d6720ca9254440a7ba8a6b9bbd69ec is README.md, which is a small file
blobLessThanLimit := "a4a132b1b0d6720ca9254440a7ba8a6b9bbd69ec"
@@ -427,12 +427,10 @@ func TestUploadPackRequestForPartialCloneSuccess(t *testing.T) {
gittest.GitObjectMustNotExist(t, cfg.Git.BinPath, localRepoPath, blobGreaterThanLimit)
newBranch := "new-branch"
- newHead = []byte(gittest.CreateCommit(t, cfg, remoteRepoPath, newBranch, &gittest.CreateCommitOpts{
- Message: commitMsg,
- }))
+ newHead = []byte(gittest.WriteCommit(t, cfg, remoteRepoPath, gittest.WithBranch(newBranch)))
// after we delete the branch, we have a dangling commit
- testhelper.MustRunCommand(t, nil, "git", "-C", remoteRepoPath, "branch", "-D", newBranch)
+ gittest.Exec(t, cfg, "-C", remoteRepoPath, "branch", "-D", newBranch)
requestBuffer.Reset()
gittest.WritePktlineString(t, &requestBuffer, fmt.Sprintf("want %s %s\n", string(newHead), clientCapabilities))
diff --git a/internal/gitaly/service/ssh/receive_pack.go b/internal/gitaly/service/ssh/receive_pack.go
index b4cc40ef0..c681e9392 100644
--- a/internal/gitaly/service/ssh/receive_pack.go
+++ b/internal/gitaly/service/ssh/receive_pack.go
@@ -9,9 +9,13 @@ import (
log "github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
+ "google.golang.org/grpc/codes"
+ "google.golang.org/grpc/status"
)
func (s *server) SSHReceivePack(stream gitalypb.SSHService_SSHReceivePackServer) error {
@@ -92,6 +96,15 @@ func (s *server) sshReceivePack(stream gitalypb.SSHService_SSHReceivePackServer,
return fmt.Errorf("cmd wait: %v", err)
}
+ // In cases where all reference updates are rejected by git-receive-pack(1), we would end up
+ // with no transactional votes at all. We thus cast a final vote which concludes this RPC to
+ // ensure there's always at least one vote. If git-receive-pack(1) behaved differently across
+ // voters and produced diverging outcomes, this final vote would fail because the sequence of
+ // votes would differ.
+ if err := transaction.VoteOnContext(ctx, s.txManager, voting.Vote{}); err != nil {
+ return status.Errorf(codes.Aborted, "final transactional vote: %v", err)
+ }
+
return nil
}
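The empty vote added above is only the terminating entry in this RPC's vote sequence; a vote is, in general, a hash derived from data (the transaction request field carrying it is named ReferenceUpdatesHash). As a purely illustrative fragment that assumes nothing beyond helpers appearing later in this patch (voting.VoteFromData and transaction.VoteOnContext), a content-derived vote would look roughly like this, where refUpdates is a hypothetical byte slice summarising the reference updates:

	vote := voting.VoteFromData(refUpdates) // refUpdates is a hypothetical summary of the ref updates
	if err := transaction.VoteOnContext(ctx, s.txManager, vote); err != nil {
		return status.Errorf(codes.Aborted, "transactional vote: %v", err)
	}

Per its doc comment further down, VoteOnContext only casts the vote when the context identifies a transaction, so pushes that do not go through Praefect are not expected to be affected by the extra call.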
diff --git a/internal/gitaly/service/ssh/receive_pack_test.go b/internal/gitaly/service/ssh/receive_pack_test.go
index c9e64a21a..fdfd48c4a 100644
--- a/internal/gitaly/service/ssh/receive_pack_test.go
+++ b/internal/gitaly/service/ssh/receive_pack_test.go
@@ -2,6 +2,7 @@ package ssh
import (
"bytes"
+ "context"
"fmt"
"io"
"io/ioutil"
@@ -17,10 +18,17 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/hooks"
+ "gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/git/objectpool"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
"google.golang.org/grpc/codes"
@@ -103,9 +111,7 @@ func TestReceivePackPushSuccess(t *testing.T) {
require.NoError(t, err)
require.Equal(t, lHead, rHead, "local and remote head not equal. push failed")
- envData, err := ioutil.ReadFile(hookOutputFile)
- require.NoError(t, err, "get git env data")
-
+ envData := testhelper.MustReadFile(t, hookOutputFile)
payload, err := git.HooksPayloadFromEnv(strings.Split(string(envData), "\n"))
require.NoError(t, err)
@@ -213,7 +219,14 @@ func TestObjectPoolRefAdvertisementHidingSSH(t *testing.T) {
stream, err := client.SSHReceivePack(ctx)
require.NoError(t, err)
- pool, err := objectpool.NewObjectPool(cfg, config.NewLocator(cfg), git.NewExecCommandFactory(cfg), repo.GetStorageName(), gittest.NewObjectPoolName(t))
+ pool, err := objectpool.NewObjectPool(
+ cfg,
+ config.NewLocator(cfg),
+ git.NewExecCommandFactory(cfg),
+ nil,
+ repo.GetStorageName(),
+ gittest.NewObjectPoolName(t),
+ )
require.NoError(t, err)
require.NoError(t, pool.Create(ctx, repo))
@@ -223,7 +236,7 @@ func TestObjectPoolRefAdvertisementHidingSSH(t *testing.T) {
require.NoError(t, pool.Link(ctx, repo))
- commitID := gittest.CreateCommit(t, cfg, pool.FullPath(), t.Name(), nil)
+ commitID := gittest.WriteCommit(t, cfg, pool.FullPath(), gittest.WithBranch(t.Name()))
// First request
require.NoError(t, stream.Send(&gitalypb.SSHReceivePackRequest{
@@ -244,6 +257,219 @@ func TestObjectPoolRefAdvertisementHidingSSH(t *testing.T) {
require.NotContains(t, b.String(), commitID+" .have")
}
+func TestReceivePackTransactional(t *testing.T) {
+ cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+
+ testhelper.ConfigureGitalyHooksBin(t, cfg)
+
+ var votes int
+ serverSocketPath := runSSHServer(t, cfg, testserver.WithTransactionManager(
+ &transaction.MockManager{
+ VoteFn: func(context.Context, txinfo.Transaction,
+ txinfo.PraefectServer, voting.Vote,
+ ) error {
+ votes++
+ return nil
+ },
+ },
+ ))
+
+ client, conn := newSSHClient(t, serverSocketPath)
+ defer conn.Close()
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+ ctx, err := (&txinfo.PraefectServer{SocketPath: "whatever"}).Inject(ctx)
+ require.NoError(t, err)
+ ctx, err = txinfo.InjectTransaction(ctx, 1, "node", true)
+ require.NoError(t, err)
+ ctx = helper.IncomingToOutgoing(ctx)
+
+ masterOID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath,
+ "rev-parse", "refs/heads/master"))
+ masterParentOID := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "refs/heads/master~"))
+
+ type command struct {
+ ref string
+ oldOID string
+ newOID string
+ }
+
+ for _, tc := range []struct {
+ desc string
+ writePackfile bool
+ commands []command
+ expectedRefs map[string]string
+ expectedVotes int
+ }{
+ {
+ desc: "noop",
+ writePackfile: true,
+ commands: []command{
+ {
+ ref: "refs/heads/master",
+ oldOID: masterOID,
+ newOID: masterOID,
+ },
+ },
+ expectedRefs: map[string]string{
+ "refs/heads/master": masterOID,
+ },
+ expectedVotes: 3,
+ },
+ {
+ desc: "update",
+ writePackfile: true,
+ commands: []command{
+ {
+ ref: "refs/heads/master",
+ oldOID: masterOID,
+ newOID: masterParentOID,
+ },
+ },
+ expectedRefs: map[string]string{
+ "refs/heads/master": masterParentOID,
+ },
+ expectedVotes: 3,
+ },
+ {
+ desc: "creation",
+ writePackfile: true,
+ commands: []command{
+ {
+ ref: "refs/heads/other",
+ oldOID: git.ZeroOID.String(),
+ newOID: masterOID,
+ },
+ },
+ expectedRefs: map[string]string{
+ "refs/heads/other": masterOID,
+ },
+ expectedVotes: 3,
+ },
+ {
+ desc: "deletion",
+ commands: []command{
+ {
+ ref: "refs/heads/other",
+ oldOID: masterOID,
+ newOID: git.ZeroOID.String(),
+ },
+ },
+ expectedRefs: map[string]string{
+ "refs/heads/other": git.ZeroOID.String(),
+ },
+ expectedVotes: 3,
+ },
+ {
+ desc: "multiple commands",
+ writePackfile: true,
+ commands: []command{
+ {
+ ref: "refs/heads/a",
+ oldOID: git.ZeroOID.String(),
+ newOID: masterOID,
+ },
+ {
+ ref: "refs/heads/b",
+ oldOID: git.ZeroOID.String(),
+ newOID: masterOID,
+ },
+ },
+ expectedRefs: map[string]string{
+ "refs/heads/a": masterOID,
+ "refs/heads/b": masterOID,
+ },
+ expectedVotes: 5,
+ },
+ {
+ desc: "refused recreation of branch",
+ writePackfile: true,
+ commands: []command{
+ {
+ ref: "refs/heads/a",
+ oldOID: git.ZeroOID.String(),
+ newOID: masterParentOID,
+ },
+ },
+ expectedRefs: map[string]string{
+ "refs/heads/a": masterOID,
+ },
+ expectedVotes: 1,
+ },
+ {
+ desc: "refused recreation and successful delete",
+ writePackfile: true,
+ commands: []command{
+ {
+ ref: "refs/heads/a",
+ oldOID: git.ZeroOID.String(),
+ newOID: masterParentOID,
+ },
+ {
+ ref: "refs/heads/b",
+ oldOID: masterOID,
+ newOID: git.ZeroOID.String(),
+ },
+ },
+ expectedRefs: map[string]string{
+ "refs/heads/a": masterOID,
+ },
+ expectedVotes: 3,
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ votes = 0
+
+ var request bytes.Buffer
+ for i, command := range tc.commands {
+ // Only the first pktline contains capabilities.
+ if i == 0 {
+ gittest.WritePktlineString(t, &request, fmt.Sprintf("%s %s %s\000 %s",
+ command.oldOID, command.newOID, command.ref,
+ "report-status side-band-64k agent=git/2.12.0"))
+ } else {
+ gittest.WritePktlineString(t, &request, fmt.Sprintf("%s %s %s",
+ command.oldOID, command.newOID, command.ref))
+ }
+ }
+ gittest.WritePktlineFlush(t, &request)
+
+ if tc.writePackfile {
+ // We're lazy and simply send over all objects to simplify test
+ // setup.
+ pack := gittest.Exec(t, cfg, "-C", repoPath, "pack-objects", "--stdout", "--revs", "--thin", "--delta-base-offset", "-q")
+ request.Write(pack)
+ }
+
+ stream, err := client.SSHReceivePack(ctx)
+ require.NoError(t, err)
+
+ require.NoError(t, stream.Send(&gitalypb.SSHReceivePackRequest{
+ Repository: repoProto, GlId: "user-123",
+ }))
+ require.NoError(t, stream.Send(&gitalypb.SSHReceivePackRequest{
+ Stdin: request.Bytes(),
+ }))
+ require.NoError(t, stream.CloseSend())
+ require.Equal(t, io.EOF, drainPostReceivePackResponse(stream))
+
+ for expectedRef, expectedOID := range tc.expectedRefs {
+ actualOID, err := repo.ResolveRevision(ctx, git.Revision(expectedRef))
+
+ if expectedOID == git.ZeroOID.String() {
+ require.Equal(t, git.ErrReferenceNotFound, err)
+ } else {
+ require.NoError(t, err)
+ require.Equal(t, expectedOID, actualOID.String())
+ }
+ }
+ require.Equal(t, tc.expectedVotes, votes)
+ })
+ }
+}
+
func TestSSHReceivePackToHooks(t *testing.T) {
cfg, repo, _ := testcfg.BuildWithRepo(t)
@@ -262,7 +488,7 @@ func TestSSHReceivePackToHooks(t *testing.T) {
cfg.GitlabShell.Dir = tempGitlabShellDir
- cloneDetails, cleanup := setupSSHClone(t, cfg.Storages[0].Path, repo)
+ cloneDetails, cleanup := setupSSHClone(t, cfg, cfg.Storages[0].Path, repo)
defer cleanup()
serverURL, cleanup := testhelper.NewGitlabTestServer(t, testhelper.GitlabTestServerOptions{
@@ -307,7 +533,7 @@ type SSHCloneDetails struct {
}
// setupSSHClone sets up a test clone
-func setupSSHClone(t *testing.T, storagePath string, testRepo *gitalypb.Repository) (SSHCloneDetails, func()) {
+func setupSSHClone(t *testing.T, cfg config.Cfg, storagePath string, testRepo *gitalypb.Repository) (SSHCloneDetails, func()) {
tempRepo := "gitlab-test-ssh-receive-pack.git"
testRepoPath := filepath.Join(storagePath, testRepo.GetRelativePath())
remoteRepoPath := filepath.Join(storagePath, tempRepo)
@@ -316,15 +542,15 @@ func setupSSHClone(t *testing.T, storagePath string, testRepo *gitalypb.Reposito
if err := os.RemoveAll(remoteRepoPath); err != nil && !os.IsNotExist(err) {
t.Fatal(err)
}
- testhelper.MustRunCommand(t, nil, "git", "clone", "--bare", testRepoPath, remoteRepoPath)
+ gittest.Exec(t, cfg, "clone", "--bare", testRepoPath, remoteRepoPath)
// Make a non-bare clone of the test repo to act as a local one
if err := os.RemoveAll(localRepoPath); err != nil && !os.IsNotExist(err) {
t.Fatal(err)
}
- testhelper.MustRunCommand(t, nil, "git", "clone", remoteRepoPath, localRepoPath)
+ gittest.Exec(t, cfg, "clone", remoteRepoPath, localRepoPath)
// We need git thinking we're pushing over SSH...
- oldHead, newHead, success := makeCommit(t, localRepoPath)
+ oldHead, newHead, success := makeCommit(t, cfg, localRepoPath)
require.True(t, success)
return SSHCloneDetails{
@@ -373,21 +599,21 @@ func sshPush(t *testing.T, cfg config.Cfg, cloneDetails SSHCloneDetails, serverS
return "", "", fmt.Errorf("failed to run `git push`: %q", out)
}
- localHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", cloneDetails.LocalRepoPath, "rev-parse", "master"))
- remoteHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", cloneDetails.RemoteRepoPath, "rev-parse", "master"))
+ localHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", cloneDetails.LocalRepoPath, "rev-parse", "master"))
+ remoteHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", cloneDetails.RemoteRepoPath, "rev-parse", "master"))
return string(localHead), string(remoteHead), nil
}
func testCloneAndPush(t *testing.T, cfg config.Cfg, storagePath, serverSocketPath string, testRepo *gitalypb.Repository, params pushParams) (string, string, error) {
- cloneDetails, cleanup := setupSSHClone(t, storagePath, testRepo)
+ cloneDetails, cleanup := setupSSHClone(t, cfg, storagePath, testRepo)
defer cleanup()
return sshPush(t, cfg, cloneDetails, serverSocketPath, params)
}
// makeCommit creates a new commit and returns oldHead, newHead, success
-func makeCommit(t *testing.T, localRepoPath string) ([]byte, []byte, bool) {
+func makeCommit(t *testing.T, cfg config.Cfg, localRepoPath string) ([]byte, []byte, bool) {
commitMsg := fmt.Sprintf("Testing ReceivePack RPC around %d", time.Now().Unix())
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
@@ -395,12 +621,12 @@ func makeCommit(t *testing.T, localRepoPath string) ([]byte, []byte, bool) {
// Create a tiny file and add it to the index
require.NoError(t, ioutil.WriteFile(newFilePath, []byte("foo bar"), 0644))
- testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "add", ".")
+ gittest.Exec(t, cfg, "-C", localRepoPath, "add", ".")
// The latest commit ID on the remote repo
- oldHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "rev-parse", "master"))
+ oldHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", localRepoPath, "rev-parse", "master"))
- testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath,
+ gittest.Exec(t, cfg, "-C", localRepoPath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "-m", commitMsg)
@@ -409,7 +635,7 @@ func makeCommit(t *testing.T, localRepoPath string) ([]byte, []byte, bool) {
}
// The commit ID we want to push to the remote repo
- newHead := bytes.TrimSpace(testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "rev-parse", "master"))
+ newHead := bytes.TrimSpace(gittest.Exec(t, cfg, "-C", localRepoPath, "rev-parse", "master"))
return oldHead, newHead, true
}
diff --git a/internal/gitaly/service/ssh/server.go b/internal/gitaly/service/ssh/server.go
index 9e1cf3306..7ca42e3a5 100644
--- a/internal/gitaly/service/ssh/server.go
+++ b/internal/gitaly/service/ssh/server.go
@@ -6,6 +6,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -19,17 +20,25 @@ type server struct {
cfg config.Cfg
locator storage.Locator
gitCmdFactory git.CommandFactory
+ txManager transaction.Manager
uploadPackRequestTimeout time.Duration
uploadArchiveRequestTimeout time.Duration
packfileNegotiationMetrics *prometheus.CounterVec
}
// NewServer creates a new instance of a grpc SSHServer
-func NewServer(cfg config.Cfg, locator storage.Locator, gitCmdFactory git.CommandFactory, serverOpts ...ServerOpt) gitalypb.SSHServiceServer {
+func NewServer(
+ cfg config.Cfg,
+ locator storage.Locator,
+ gitCmdFactory git.CommandFactory,
+ txManager transaction.Manager,
+ serverOpts ...ServerOpt,
+) gitalypb.SSHServiceServer {
s := &server{
cfg: cfg,
locator: locator,
gitCmdFactory: gitCmdFactory,
+ txManager: txManager,
uploadPackRequestTimeout: defaultUploadPackRequestTimeout,
uploadArchiveRequestTimeout: defaultUploadArchiveRequestTimeout,
packfileNegotiationMetrics: prometheus.NewCounterVec(
diff --git a/internal/gitaly/service/ssh/testhelper_test.go b/internal/gitaly/service/ssh/testhelper_test.go
index 27b5a116d..383b325f8 100644
--- a/internal/gitaly/service/ssh/testhelper_test.go
+++ b/internal/gitaly/service/ssh/testhelper_test.go
@@ -32,7 +32,12 @@ func runSSHServer(t *testing.T, cfg config.Cfg, serverOpts ...testserver.GitalyS
func runSSHServerWithOptions(t *testing.T, cfg config.Cfg, opts []ServerOpt, serverOpts ...testserver.GitalyServerOpt) string {
return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterSSHServiceServer(srv, NewServer(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), opts...))
+ gitalypb.RegisterSSHServiceServer(srv, NewServer(
+ deps.GetCfg(),
+ deps.GetLocator(),
+ deps.GetGitCmdFactory(),
+ deps.GetTxManager(),
+ opts...))
gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(deps.GetCfg(), deps.GetHookManager(), deps.GetGitCmdFactory()))
}, serverOpts...)
}
diff --git a/internal/gitaly/service/ssh/upload_pack_test.go b/internal/gitaly/service/ssh/upload_pack_test.go
index a9b2012a2..60d46a3d3 100644
--- a/internal/gitaly/service/ssh/upload_pack_test.go
+++ b/internal/gitaly/service/ssh/upload_pack_test.go
@@ -73,7 +73,7 @@ func (cmd cloneCommand) execute(t *testing.T) error {
return nil
}
-func (cmd cloneCommand) test(t *testing.T, repoPath string, localRepoPath string) (string, string, string, string) {
+func (cmd cloneCommand) test(t *testing.T, cfg config.Cfg, repoPath string, localRepoPath string) (string, string, string, string) {
t.Helper()
defer os.RemoveAll(localRepoPath)
@@ -81,11 +81,11 @@ func (cmd cloneCommand) test(t *testing.T, repoPath string, localRepoPath string
err := cmd.execute(t)
require.NoError(t, err)
- remoteHead := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "master"))
- localHead := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "rev-parse", "master"))
+ remoteHead := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "master"))
+ localHead := text.ChompBytes(gittest.Exec(t, cfg, "-C", localRepoPath, "rev-parse", "master"))
- remoteTags := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "tag"))
- localTags := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", localRepoPath, "tag"))
+ remoteTags := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "tag"))
+ localTags := text.ChompBytes(gittest.Exec(t, cfg, "-C", localRepoPath, "tag"))
return localHead, remoteHead, localTags, remoteTags
}
@@ -237,7 +237,7 @@ func TestUploadPackCloneSuccess(t *testing.T) {
server: serverSocketPath,
cfg: cfg,
}
- lHead, rHead, _, _ := cmd.test(t, repoPath, localRepoPath)
+ lHead, rHead, _, _ := cmd.test(t, cfg, repoPath, localRepoPath)
require.Equal(t, lHead, rHead, "local and remote head not equal")
metric, err := negotiationMetrics.GetMetricWithLabelValues("deepen")
@@ -424,7 +424,7 @@ func TestUploadPackCloneSuccessWithGitProtocol(t *testing.T) {
cfg: cfg,
}
- lHead, rHead, _, _ := cmd.test(t, repoPath, localRepoPath)
+ lHead, rHead, _, _ := cmd.test(t, cfg, repoPath, localRepoPath)
require.Equal(t, lHead, rHead, "local and remote head not equal")
envData := readProto()
@@ -453,7 +453,7 @@ func TestUploadPackCloneHideTags(t *testing.T) {
gitConfig: "transfer.hideRefs=refs/tags",
cfg: cfg,
}
- _, _, lTags, rTags := cloneCmd.test(t, repoPath, localRepoPath)
+ _, _, lTags, rTags := cloneCmd.test(t, cfg, repoPath, localRepoPath)
if lTags == rTags {
t.Fatalf("local and remote tags are equal. clone failed: %q != %q", lTags, rTags)
diff --git a/internal/gitaly/service/wiki/delete_page_test.go b/internal/gitaly/service/wiki/delete_page_test.go
index 0ed152cfb..3518e9690 100644
--- a/internal/gitaly/service/wiki/delete_page_test.go
+++ b/internal/gitaly/service/wiki/delete_page_test.go
@@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
@@ -17,7 +18,7 @@ func testSuccessfulWikiDeletePageRequest(t *testing.T, cfg config.Cfg, rubySrv *
wikiRepoProto, wikiRepoPath, cleanupFunc := setupWikiRepo(t, cfg)
defer cleanupFunc()
- wikiRepo := localrepo.New(git.NewExecCommandFactory(cfg), wikiRepoProto, cfg)
+ wikiRepo := localrepo.NewTestRepo(t, cfg, wikiRepoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -71,7 +72,7 @@ func testSuccessfulWikiDeletePageRequest(t *testing.T, cfg config.Cfg, rubySrv *
_, err := client.WikiDeletePage(ctx, tc.req)
require.NoError(t, err)
- headID := testhelper.MustRunCommand(t, nil, "git", "-C", wikiRepoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
+ headID := gittest.Exec(t, cfg, "-C", wikiRepoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
commit, err := wikiRepo.ReadCommit(ctx, git.Revision(headID))
require.NoError(t, err, "look up git commit after deleting a wiki page")
diff --git a/internal/gitaly/service/wiki/find_file.go b/internal/gitaly/service/wiki/find_file.go
deleted file mode 100644
index 3e9a28719..000000000
--- a/internal/gitaly/service/wiki/find_file.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package wiki
-
-import (
- "gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
- "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/status"
-)
-
-func (s *server) WikiFindFile(request *gitalypb.WikiFindFileRequest, stream gitalypb.WikiService_WikiFindFileServer) error {
- ctx := stream.Context()
-
- if err := git.ValidateRevisionAllowEmpty(request.Revision); err != nil {
- return status.Errorf(codes.InvalidArgument, "WikiFindFile: %s", err)
- }
-
- if len(request.GetName()) == 0 {
- return status.Errorf(codes.InvalidArgument, "WikiFindFile: Empty Name")
- }
-
- client, err := s.ruby.WikiServiceClient(ctx)
- if err != nil {
- return err
- }
-
- clientCtx, err := rubyserver.SetHeaders(ctx, s.locator, request.GetRepository())
- if err != nil {
- return err
- }
-
- rubyStream, err := client.WikiFindFile(clientCtx, request)
- if err != nil {
- return err
- }
-
- return rubyserver.Proxy(func() error {
- resp, err := rubyStream.Recv()
- if err != nil {
- md := rubyStream.Trailer()
- stream.SetTrailer(md)
- return err
- }
- return stream.Send(resp)
- })
-}
diff --git a/internal/gitaly/service/wiki/find_file_test.go b/internal/gitaly/service/wiki/find_file_test.go
deleted file mode 100644
index effa48ab4..000000000
--- a/internal/gitaly/service/wiki/find_file_test.go
+++ /dev/null
@@ -1,232 +0,0 @@
-package wiki
-
-import (
- "fmt"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "testing"
-
- "github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper"
- "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
- "google.golang.org/grpc/codes"
-)
-
-func testSuccessfulWikiFindFileRequest(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- _, wikiRepoPath, cleanupFunc := setupWikiRepo(t, cfg)
- defer cleanupFunc()
-
- client := setupWikiService(t, cfg, rubySrv)
-
- committerName := "Scrooge McDuck"
- committerEmail := "scrooge@mcduck.com"
- sandboxWikiPath := filepath.Join(cfg.Storages[0].Path, "find-file-sandbox")
-
- testhelper.MustRunCommand(t, nil, "git", "clone", wikiRepoPath, sandboxWikiPath)
- defer os.RemoveAll(sandboxWikiPath)
-
- sandboxWiki := &gitalypb.Repository{
- StorageName: "default",
- RelativePath: "find-file-sandbox/.git",
- }
-
- content, err := ioutil.ReadFile("testdata/clouds.png")
- require.NoError(t, err)
-
- err = ioutil.WriteFile(filepath.Join(sandboxWikiPath, "cloúds.png"), content, 0644)
- require.NoError(t, err)
-
- err = ioutil.WriteFile(filepath.Join(sandboxWikiPath, "no_content.png"), nil, 0644)
- require.NoError(t, err)
-
- // Sandbox wiki is empty, so we create a commit to be used later
- testhelper.MustRunCommand(t, nil, "git", "-C", sandboxWikiPath,
- "-c", fmt.Sprintf("user.name=%s", committerName),
- "-c", fmt.Sprintf("user.email=%s", committerEmail),
- "commit", "--allow-empty", "-m", "Adding an empty commit")
- oldHeadID := testhelper.MustRunCommand(t, nil, "git", "-C", sandboxWikiPath, "show", "--format=format:%H", "--no-patch", "HEAD")
-
- testhelper.MustRunCommand(t, nil, "git", "-C", sandboxWikiPath, "add", ".")
- testhelper.MustRunCommand(t, nil, "git", "-C", sandboxWikiPath,
- "-c", fmt.Sprintf("user.name=%s", committerName),
- "-c", fmt.Sprintf("user.email=%s", committerEmail),
- "commit", "-m", "Adding an image")
-
- newHeadID := testhelper.MustRunCommand(t, nil, "git", "-C", sandboxWikiPath, "show", "--format=format:%H", "--no-patch", "HEAD")
-
- response := &gitalypb.WikiFindFileResponse{
- Name: []byte("cloúds.png"),
- MimeType: "image/png",
- Path: []byte("cloúds.png"),
- }
-
- testCases := []struct {
- desc string
- request *gitalypb.WikiFindFileRequest
- response *gitalypb.WikiFindFileResponse
- expectedContent []byte
- }{
- {
- desc: "name only",
- request: &gitalypb.WikiFindFileRequest{
- Repository: sandboxWiki,
- Name: []byte("cloúds.png"),
- },
- response: response,
- expectedContent: content,
- },
- {
- desc: "name + revision that includes the file",
- request: &gitalypb.WikiFindFileRequest{
- Repository: sandboxWiki,
- Name: []byte("cloúds.png"),
- Revision: newHeadID,
- },
- response: response,
- expectedContent: content,
- },
- {
- desc: "name + revision that does not include the file",
- request: &gitalypb.WikiFindFileRequest{
- Repository: sandboxWiki,
- Name: []byte("cloúds.png"),
- Revision: oldHeadID,
- },
- response: &gitalypb.WikiFindFileResponse{},
- expectedContent: content,
- },
- {
- desc: "non-existent name",
- request: &gitalypb.WikiFindFileRequest{
- Repository: sandboxWiki,
- Name: []byte("moar-clouds.png"),
- },
- response: &gitalypb.WikiFindFileResponse{},
- expectedContent: content,
- },
- {
- desc: "file with no content",
- request: &gitalypb.WikiFindFileRequest{
- Repository: sandboxWiki,
- Name: []byte("no_content.png"),
- },
- response: &gitalypb.WikiFindFileResponse{
- Name: []byte("no_content.png"),
- MimeType: "image/png",
- Path: []byte("no_content.png"),
- },
- expectedContent: nil,
- },
- }
-
- for _, testCase := range testCases {
- t.Run(testCase.desc, func(t *testing.T) {
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- c, err := client.WikiFindFile(ctx, testCase.request)
- require.NoError(t, err)
-
- expectedResponse := testCase.response
- receivedResponse := readFullResponseFromWikiFindFileClient(t, c)
-
- // require.Equal doesn't display a proper diff when either expected/actual has a field
- // with large data (RawData in our case), so we compare file attributes and content separately.
- receivedContent := receivedResponse.GetRawData()
- if receivedResponse != nil {
- receivedResponse.RawData = nil
- }
-
- testhelper.ProtoEqual(t, expectedResponse, receivedResponse)
- if len(expectedResponse.Name) > 0 {
- require.Equal(t, testCase.expectedContent, receivedContent, "mismatched content")
- }
- })
- }
-}
-
-func testFailedWikiFindFileDueToValidation(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- wikiRepo, _, cleanupFunc := setupWikiRepo(t, cfg)
- defer cleanupFunc()
-
- client := setupWikiService(t, cfg, rubySrv)
-
- testCases := []struct {
- desc string
- name string
- revision string
- code codes.Code
- }{
- {
- desc: "empty file path",
- name: "",
- revision: "master",
- code: codes.InvalidArgument,
- },
- {
- desc: "invalid revision",
- name: "image.jpg",
- revision: "deadfacedeadfacedeadfacedeadfacedeadface",
- code: codes.Unknown,
- },
- {
- desc: "dangerously invalid revision",
- name: "image.jpg",
- revision: "--output=/meow",
- code: codes.InvalidArgument,
- },
- }
-
- for _, testCase := range testCases {
- t.Run(testCase.desc, func(t *testing.T) {
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- request := &gitalypb.WikiFindFileRequest{
- Repository: wikiRepo,
- Name: []byte(testCase.name),
- Revision: []byte(testCase.revision),
- }
-
- c, err := client.WikiFindFile(ctx, request)
- require.NoError(t, err)
-
- err = drainWikiFindFileResponse(c)
- testhelper.RequireGrpcError(t, err, testCase.code)
- })
- }
-}
-
-func drainWikiFindFileResponse(c gitalypb.WikiService_WikiFindFileClient) error {
- for {
- _, err := c.Recv()
- if err != nil {
- return err
- }
- }
-}
-
-func readFullResponseFromWikiFindFileClient(t *testing.T, c gitalypb.WikiService_WikiFindFileClient) (fullResponse *gitalypb.WikiFindFileResponse) {
- t.Helper()
-
- for {
- resp, err := c.Recv()
- if err == io.EOF {
- break
- } else if err != nil {
- t.Fatal(err)
- }
-
- if fullResponse == nil {
- fullResponse = resp
- } else {
- fullResponse.RawData = append(fullResponse.RawData, resp.GetRawData()...)
- }
- }
-
- return fullResponse
-}
diff --git a/internal/gitaly/service/wiki/find_page_test.go b/internal/gitaly/service/wiki/find_page_test.go
index 81f5e1c2a..efd089ef2 100644
--- a/internal/gitaly/service/wiki/find_page_test.go
+++ b/internal/gitaly/service/wiki/find_page_test.go
@@ -455,13 +455,13 @@ func TestInvalidWikiFindPageRequestRevision(t *testing.T) {
}
func testSuccessfulWikiFindPageRequestWithTrailers(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- wikiRepo, worktreePath, cleanupFn := gittest.InitRepoWithWorktreeAtStorage(t, cfg.Storages[0])
+ wikiRepo, worktreePath, cleanupFn := gittest.InitRepoWithWorktreeAtStorage(t, cfg, cfg.Storages[0])
defer cleanupFn()
committerName := "Scróoge McDuck" // Include UTF-8 to ensure encoding is handled
committerEmail := "scrooge@mcduck.com"
- testhelper.MustRunCommand(t, nil, "git", "-C", worktreePath,
+ gittest.Exec(t, cfg, "-C", worktreePath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--allow-empty", "-m", "master branch, empty commit")
@@ -471,7 +471,7 @@ func testSuccessfulWikiFindPageRequestWithTrailers(t *testing.T, cfg config.Cfg,
page1Name := "Home Pagé"
createTestWikiPage(t, cfg, client, wikiRepo, worktreePath, createWikiPageOpts{title: page1Name})
- testhelper.MustRunCommand(t, nil, "git", "-C", worktreePath,
+ gittest.Exec(t, cfg, "-C", worktreePath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "--amend", "-m", "Empty commit", "-s")
diff --git a/internal/gitaly/service/wiki/get_page_versions.go b/internal/gitaly/service/wiki/get_page_versions.go
deleted file mode 100644
index 4585814f0..000000000
--- a/internal/gitaly/service/wiki/get_page_versions.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package wiki
-
-import (
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
- "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/status"
-)
-
-func (s *server) WikiGetPageVersions(request *gitalypb.WikiGetPageVersionsRequest, stream gitalypb.WikiService_WikiGetPageVersionsServer) error {
- ctx := stream.Context()
-
- if len(request.GetPagePath()) == 0 {
- return status.Errorf(codes.InvalidArgument, "WikiGetPageVersions: Empty Path")
- }
-
- client, err := s.ruby.WikiServiceClient(ctx)
- if err != nil {
- return err
- }
-
- clientCtx, err := rubyserver.SetHeaders(ctx, s.locator, request.GetRepository())
- if err != nil {
- return err
- }
-
- rubyStream, err := client.WikiGetPageVersions(clientCtx, request)
- if err != nil {
- return err
- }
-
- return rubyserver.Proxy(func() error {
- resp, err := rubyStream.Recv()
- if err != nil {
- md := rubyStream.Trailer()
- stream.SetTrailer(md)
- return err
- }
- return stream.Send(resp)
- })
-}
diff --git a/internal/gitaly/service/wiki/get_page_versions_test.go b/internal/gitaly/service/wiki/get_page_versions_test.go
deleted file mode 100644
index 71c3bb317..000000000
--- a/internal/gitaly/service/wiki/get_page_versions_test.go
+++ /dev/null
@@ -1,185 +0,0 @@
-package wiki
-
-import (
- "bytes"
- "fmt"
- "io"
- "testing"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
- "gitlab.com/gitlab-org/gitaly/internal/helper/text"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper"
- "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
-)
-
-func testWikiGetPageVersionsRequest(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- wikiRepo, wikiRepoPath, cleanupFunc := setupWikiRepo(t, cfg)
- defer cleanupFunc()
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- client := setupWikiService(t, cfg, rubySrv)
-
- const pageTitle = "WikiGétPageVersions"
-
- content := bytes.Repeat([]byte("Mock wiki page content"), 10000)
- writeWikiPage(t, client, wikiRepo, createWikiPageOpts{title: pageTitle, content: content})
- v1cid := testhelper.MustRunCommand(t, nil, "git", "-C", wikiRepoPath, "log", "-1", "--format=%H")
- updateWikiPage(t, client, wikiRepo, pageTitle, []byte("New content"))
- v2cid := testhelper.MustRunCommand(t, nil, "git", "-C", wikiRepoPath, "log", "-1", "--format=%H")
-
- gitAuthor := &gitalypb.CommitAuthor{
- Name: []byte("Ahmad Sherif"),
- Email: []byte("ahmad@gitlab.com"),
- }
-
- testCases := []struct {
- desc string
- request *gitalypb.WikiGetPageVersionsRequest
- versions []*gitalypb.WikiPageVersion
- }{
- {
- desc: "No page found",
- request: &gitalypb.WikiGetPageVersionsRequest{
- Repository: wikiRepo,
- PagePath: []byte("not-found"),
- },
- versions: nil,
- },
- {
- desc: "2 version found",
- request: &gitalypb.WikiGetPageVersionsRequest{
- Repository: wikiRepo,
- PagePath: []byte(pageTitle),
- },
- versions: []*gitalypb.WikiPageVersion{
- {
- Commit: &gitalypb.GitCommit{
- Id: text.ChompBytes(v2cid),
- Body: []byte("Update WikiGétPageVersions"),
- Subject: []byte("Update WikiGétPageVersions"),
- Author: gitAuthor,
- Committer: gitAuthor,
- ParentIds: []string{text.ChompBytes(v1cid)},
- BodySize: 26,
- },
- Format: "markdown",
- },
- {
- Commit: &gitalypb.GitCommit{
- Id: text.ChompBytes(v1cid),
- Body: []byte("Add WikiGétPageVersions"),
- Subject: []byte("Add WikiGétPageVersions"),
- Author: gitAuthor,
- Committer: gitAuthor,
- BodySize: 23,
- },
- Format: "markdown",
- },
- },
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.desc, func(t *testing.T) {
- stream, err := client.WikiGetPageVersions(ctx, tc.request)
- require.NoError(t, err)
- require.NoError(t, stream.CloseSend())
-
- response, err := stream.Recv()
- require.NoError(t, err)
-
- require.Len(t, response.GetVersions(), len(tc.versions))
- for i, version := range response.GetVersions() {
- v2 := tc.versions[i]
-
- assert.Equal(t, version.GetFormat(), v2.GetFormat(),
- "expected wiki page format to be equal for version %d", i)
- assert.NoError(t, gittest.GitCommitEqual(version.GetCommit(), v2.GetCommit()),
- "expected wiki page commit to be equal for version %d", i)
- }
- })
- }
-}
-
-func testWikiGetPageVersionsPaginationParams(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
- wikiRepo, _, cleanupFunc := setupWikiRepo(t, cfg)
- defer cleanupFunc()
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- client := setupWikiService(t, cfg, rubySrv)
-
- const pageTitle = "WikiGetPageVersions"
-
- content := []byte("page content")
- writeWikiPage(t, client, wikiRepo, createWikiPageOpts{title: pageTitle, content: content})
-
- for i := 0; i < 25; i++ {
- updateWikiPage(t, client, wikiRepo, pageTitle, []byte(fmt.Sprintf("%d", i)))
- }
-
- testCases := []struct {
- desc string
- perPage int32
- page int32
- nrOfResults int
- }{
- {
- desc: "default to page 1 with 20 items",
- nrOfResults: 20,
- },
- {
- desc: "oversized perPage param",
- perPage: 100,
- nrOfResults: 26,
- },
- {
- desc: "allows later pages",
- page: 2,
- nrOfResults: 6,
- },
- {
- desc: "returns nothing of no versions can be found",
- page: 100,
- nrOfResults: 0,
- },
- {
- // https://github.com/gollum/gollum-lib/blob/be6409315f6af5a6d90eb012a1154b485579db67/lib/gollum-lib/pagination.rb#L34
- desc: "per page is minimal 20",
- perPage: 1,
- nrOfResults: 20,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.desc, func(t *testing.T) {
- stream, err := client.WikiGetPageVersions(ctx, &gitalypb.WikiGetPageVersionsRequest{
- Repository: wikiRepo,
- PagePath: []byte(pageTitle),
- PerPage: tc.perPage,
- Page: tc.page})
- require.NoError(t, err)
-
- nrFoundVersions := 0
- for {
- resp, err := stream.Recv()
- if err == io.EOF {
- break
- } else if err != nil {
- t.Fatal(err)
- }
-
- nrFoundVersions += len(resp.GetVersions())
- }
-
- require.Equal(t, tc.nrOfResults, nrFoundVersions)
- })
- }
-}
diff --git a/internal/gitaly/service/wiki/testhelper_test.go b/internal/gitaly/service/wiki/testhelper_test.go
index fce7bfdef..11ce4e964 100644
--- a/internal/gitaly/service/wiki/testhelper_test.go
+++ b/internal/gitaly/service/wiki/testhelper_test.go
@@ -65,16 +65,12 @@ func TestWithRubySidecar(t *testing.T) {
fs := []func(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server){
testSuccessfulWikiDeletePageRequest,
testFailedWikiDeletePageDueToValidations,
- testSuccessfulWikiFindFileRequest,
- testFailedWikiFindFileDueToValidation,
testSuccessfulWikiFindPageRequest,
testSuccessfulWikiFindPageSameTitleDifferentPathRequest,
testSuccessfulWikiFindPageRequestWithTrailers,
testSuccessfulWikiGetAllPagesRequest,
testWikiGetAllPagesSorting,
testFailedWikiGetAllPagesDueToValidation,
- testWikiGetPageVersionsRequest,
- testWikiGetPageVersionsPaginationParams,
testSuccessfulWikiListPagesRequest,
testWikiListPagesSorting,
testSuccessfulWikiUpdatePageRequest,
@@ -152,39 +148,8 @@ func writeWikiPage(t *testing.T, client gitalypb.WikiServiceClient, wikiRepo *gi
require.NoError(t, err)
}
-func updateWikiPage(t *testing.T, client gitalypb.WikiServiceClient, wikiRepo *gitalypb.Repository, name string, content []byte) {
- t.Helper()
-
- commitDetails := &gitalypb.WikiCommitDetails{
- Name: []byte("Ahmad Sherif"),
- Email: []byte("ahmad@gitlab.com"),
- Message: []byte("Update " + name),
- UserId: int32(1),
- UserName: []byte("ahmad"),
- }
-
- request := &gitalypb.WikiUpdatePageRequest{
- Repository: wikiRepo,
- PagePath: []byte(name),
- Title: []byte(name),
- Format: "markdown",
- CommitDetails: commitDetails,
- Content: content,
- }
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- stream, err := client.WikiUpdatePage(ctx)
- require.NoError(t, err)
- require.NoError(t, stream.Send(request))
-
- _, err = stream.CloseAndRecv()
- require.NoError(t, err)
-}
-
func setupWikiRepo(t *testing.T, cfg config.Cfg) (*gitalypb.Repository, string, func()) {
- return gittest.InitBareRepoAt(t, cfg.Storages[0])
+ return gittest.InitBareRepoAt(t, cfg, cfg.Storages[0])
}
func sendBytes(data []byte, chunkSize int, sender func([]byte) error) (int, error) {
@@ -211,9 +176,9 @@ func createTestWikiPage(t *testing.T, cfg config.Cfg, client gitalypb.WikiServic
defer cancel()
writeWikiPage(t, client, wikiRepoProto, opts)
- head1ID := testhelper.MustRunCommand(t, nil, "git", "-C", wikiRepoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
+ head1ID := gittest.Exec(t, cfg, "-C", wikiRepoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
- wikiRepo := localrepo.New(git.NewExecCommandFactory(cfg), wikiRepoProto, cfg)
+ wikiRepo := localrepo.NewTestRepo(t, cfg, wikiRepoProto)
pageCommit, err := wikiRepo.ReadCommit(ctx, git.Revision(head1ID))
require.NoError(t, err, "look up git commit after writing a wiki page")
diff --git a/internal/gitaly/service/wiki/update_page_test.go b/internal/gitaly/service/wiki/update_page_test.go
index 01ff5b232..60f04e78e 100644
--- a/internal/gitaly/service/wiki/update_page_test.go
+++ b/internal/gitaly/service/wiki/update_page_test.go
@@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
@@ -17,7 +18,7 @@ import (
func testSuccessfulWikiUpdatePageRequest(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
wikiRepoProto, wikiRepoPath, cleanupFunc := setupWikiRepo(t, cfg)
defer cleanupFunc()
- wikiRepo := localrepo.New(git.NewExecCommandFactory(cfg), wikiRepoProto, cfg)
+ wikiRepo := localrepo.NewTestRepo(t, cfg, wikiRepoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -96,7 +97,7 @@ func testSuccessfulWikiUpdatePageRequest(t *testing.T, cfg config.Cfg, rubySrv *
_, err = stream.CloseAndRecv()
require.NoError(t, err)
- headID := testhelper.MustRunCommand(t, nil, "git", "-C", wikiRepoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
+ headID := gittest.Exec(t, cfg, "-C", wikiRepoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
commit, err := wikiRepo.ReadCommit(ctx, git.Revision(headID))
require.NoError(t, err, "look up git commit before merge is applied")
@@ -104,7 +105,7 @@ func testSuccessfulWikiUpdatePageRequest(t *testing.T, cfg config.Cfg, rubySrv *
require.Equal(t, authorEmail, commit.Author.Email, "author email mismatched")
require.Equal(t, message, commit.Subject, "message mismatched")
- pageContent := testhelper.MustRunCommand(t, nil, "git", "-C", wikiRepoPath, "cat-file", "blob", "HEAD:Instálling-Gitaly.md")
+ pageContent := gittest.Exec(t, cfg, "-C", wikiRepoPath, "cat-file", "blob", "HEAD:Instálling-Gitaly.md")
require.Equal(t, tc.content, pageContent, "mismatched content")
})
}
diff --git a/internal/gitaly/service/wiki/write_page_test.go b/internal/gitaly/service/wiki/write_page_test.go
index 8a1464086..b4969e9f6 100644
--- a/internal/gitaly/service/wiki/write_page_test.go
+++ b/internal/gitaly/service/wiki/write_page_test.go
@@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/rubyserver"
@@ -18,7 +19,7 @@ import (
func testSuccessfulWikiWritePageRequest(t *testing.T, cfg config.Cfg, rubySrv *rubyserver.Server) {
wikiRepoProto, wikiRepoPath, cleanupFunc := setupWikiRepo(t, cfg)
defer cleanupFunc()
- wikiRepo := localrepo.New(git.NewExecCommandFactory(cfg), wikiRepoProto, cfg)
+ wikiRepo := localrepo.NewTestRepo(t, cfg, wikiRepoProto)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -100,7 +101,7 @@ func testSuccessfulWikiWritePageRequest(t *testing.T, cfg config.Cfg, rubySrv *r
require.Empty(t, resp.DuplicateError, "DuplicateError must be empty")
- headID := testhelper.MustRunCommand(t, nil, "git", "-C", wikiRepoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
+ headID := gittest.Exec(t, cfg, "-C", wikiRepoPath, "show", "--format=format:%H", "--no-patch", "HEAD")
commit, err := wikiRepo.ReadCommit(ctx, git.Revision(headID))
require.NoError(t, err, "look up git commit after writing a wiki page")
@@ -108,7 +109,7 @@ func testSuccessfulWikiWritePageRequest(t *testing.T, cfg config.Cfg, rubySrv *r
require.Equal(t, authorEmail, commit.Author.Email, "author email mismatched")
require.Equal(t, message, commit.Subject, "message mismatched")
- pageContent := testhelper.MustRunCommand(t, nil, "git", "-C", wikiRepoPath, "cat-file", "blob", "HEAD:"+tc.gollumPath)
+ pageContent := gittest.Exec(t, cfg, "-C", wikiRepoPath, "cat-file", "blob", "HEAD:"+tc.gollumPath)
require.Equal(t, tc.content, pageContent, "mismatched content")
})
}
diff --git a/internal/gitaly/transaction/manager.go b/internal/gitaly/transaction/manager.go
index 2e2e39852..ab4b950bf 100644
--- a/internal/gitaly/transaction/manager.go
+++ b/internal/gitaly/transaction/manager.go
@@ -2,7 +2,6 @@ package transaction
import (
"context"
- "encoding/hex"
"errors"
"fmt"
"time"
@@ -13,8 +12,8 @@ import (
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -48,11 +47,11 @@ var (
type Manager interface {
// Vote casts a vote on the given transaction which is hosted by the
// given Praefect server.
- Vote(context.Context, metadata.Transaction, metadata.PraefectServer, Vote) error
+ Vote(context.Context, txinfo.Transaction, txinfo.PraefectServer, voting.Vote) error
// Stop gracefully stops the given transaction which is hosted by the
// given Praefect server.
- Stop(context.Context, metadata.Transaction, metadata.PraefectServer) error
+ Stop(context.Context, txinfo.Transaction, txinfo.PraefectServer) error
}
// PoolManager is an implementation of the Manager interface using a pool to
@@ -88,8 +87,13 @@ func (m *PoolManager) Collect(metrics chan<- prometheus.Metric) {
m.votingDelayMetric.Collect(metrics)
}
-func (m *PoolManager) getTransactionClient(ctx context.Context, server metadata.PraefectServer) (gitalypb.RefTransactionClient, error) {
- if featureflag.IsEnabled(ctx, featureflag.BackchannelVoting) {
+func (m *PoolManager) getTransactionClient(ctx context.Context, server txinfo.PraefectServer) (gitalypb.RefTransactionClient, error) {
+ // Gitaly is upgraded prior to Praefect. Older Praefects may still be using non-multiplexed connections
+ // and send dialing information for voting. To prevent failing RPCs during the upgrade, Gitaly still
+ // needs to support the old voting approach. If a multiplexed connection is in use, the backchannel ID
+ // is greater than zero; in that case the mutator came from an upgraded Praefect that supports
+ // backchannel voting, and Gitaly defaults to the backchannel. The fallback code can be removed in 14.0.
+ if server.BackchannelID > 0 {
conn, err := m.backchannels.Backchannel(server.BackchannelID)
if err != nil {
return nil, fmt.Errorf("get backchannel: %w", err)
@@ -111,9 +115,8 @@ func (m *PoolManager) getTransactionClient(ctx context.Context, server metadata.
return gitalypb.NewRefTransactionClient(conn), nil
}
-// Vote connects to the given server and casts hash as a vote for the
-// transaction identified by tx.
-func (m *PoolManager) Vote(ctx context.Context, tx metadata.Transaction, server metadata.PraefectServer, hash Vote) error {
+// Vote connects to the given server and casts vote as a vote for the transaction identified by tx.
+func (m *PoolManager) Vote(ctx context.Context, tx txinfo.Transaction, server txinfo.PraefectServer, vote voting.Vote) error {
client, err := m.getTransactionClient(ctx, server)
if err != nil {
return err
@@ -122,7 +125,7 @@ func (m *PoolManager) Vote(ctx context.Context, tx metadata.Transaction, server
logger := m.log(ctx).WithFields(logrus.Fields{
"transaction.id": tx.ID,
"transaction.voter": tx.Node,
- "transaction.hash": hex.EncodeToString(hash.Bytes()),
+ "transaction.hash": vote.String(),
})
defer prometheus.NewTimer(m.votingDelayMetric).ObserveDuration()
@@ -133,7 +136,7 @@ func (m *PoolManager) Vote(ctx context.Context, tx metadata.Transaction, server
response, err := client.VoteTransaction(transactionCtx, &gitalypb.VoteTransactionRequest{
TransactionId: tx.ID,
Node: tx.Node,
- ReferenceUpdatesHash: hash.Bytes(),
+ ReferenceUpdatesHash: vote.Bytes(),
})
if err != nil {
// Add some additional context to cancellation errors so that
@@ -161,7 +164,7 @@ func (m *PoolManager) Vote(ctx context.Context, tx metadata.Transaction, server
}
// Stop connects to the given server and stops the transaction identified by tx.
-func (m *PoolManager) Stop(ctx context.Context, tx metadata.Transaction, server metadata.PraefectServer) error {
+func (m *PoolManager) Stop(ctx context.Context, tx txinfo.Transaction, server txinfo.PraefectServer) error {
client, err := m.getTransactionClient(ctx, server)
if err != nil {
return err
@@ -186,8 +189,8 @@ func (m *PoolManager) log(ctx context.Context) logrus.FieldLogger {
}
// RunOnContext runs the given function if the context identifies a transaction.
-func RunOnContext(ctx context.Context, fn func(metadata.Transaction, metadata.PraefectServer) error) error {
- transaction, praefect, err := metadata.TransactionMetadataFromContext(ctx)
+func RunOnContext(ctx context.Context, fn func(txinfo.Transaction, txinfo.PraefectServer) error) error {
+ transaction, praefect, err := txinfo.FromContext(ctx)
if err != nil {
return err
}
@@ -198,8 +201,8 @@ func RunOnContext(ctx context.Context, fn func(metadata.Transaction, metadata.Pr
}
// VoteOnContext casts the vote on a transaction identified by the context, if there is any.
-func VoteOnContext(ctx context.Context, m Manager, vote Vote) error {
- return RunOnContext(ctx, func(transaction metadata.Transaction, praefect metadata.PraefectServer) error {
+func VoteOnContext(ctx context.Context, m Manager, vote voting.Vote) error {
+ return RunOnContext(ctx, func(transaction txinfo.Transaction, praefect txinfo.PraefectServer) error {
return m.Vote(ctx, transaction, praefect, vote)
})
}
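To make the fallback in getTransactionClient concrete, here is a short sketch of the two shapes a txinfo.PraefectServer can take. It uses only APIs visible in this patch (backchannel.NewRegistry, Registry.RegisterBackchannel and the PraefectServer fields); the helper itself, its name and its parameter types are illustrative assumptions rather than part of the change:

	// praefectInfo is a hypothetical helper showing which PraefectServer shape selects
	// which voting path in getTransactionClient above.
	func praefectInfo(registry *backchannel.Registry, conn *grpc.ClientConn, addr, token string, multiplexed bool) txinfo.PraefectServer {
		if multiplexed {
			// An upgraded Praefect multiplexes its connection: registering it yields a
			// non-zero backchannel ID, so votes are routed over the backchannel.
			return txinfo.PraefectServer{BackchannelID: registry.RegisterBackchannel(conn)}
		}
		// An older Praefect only sends dialing information; Gitaly falls back to dialing
		// ListenAddr with the given token. This fallback can be removed in 14.0.
		return txinfo.PraefectServer{ListenAddr: addr, Token: token}
	}

The updated tests below always register a backchannel instead of toggling the old BackchannelVoting feature flag, so only the multiplexed path is exercised there.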
diff --git a/internal/gitaly/transaction/manager_test.go b/internal/gitaly/transaction/manager_test.go
index 4e4117ed0..0ed6773d0 100644
--- a/internal/gitaly/transaction/manager_test.go
+++ b/internal/gitaly/transaction/manager_test.go
@@ -1,4 +1,4 @@
-package transaction
+package transaction_test
import (
"context"
@@ -9,11 +9,15 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
@@ -38,156 +42,145 @@ func (s *testTransactionServer) StopTransaction(ctx context.Context, in *gitalyp
}
func TestPoolManager_Vote(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.BackchannelVoting,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- cfg := testcfg.Build(t)
-
- transactionServer, praefect, stop := runTransactionServer(t, cfg)
- defer stop()
-
- registry := backchannel.NewRegistry()
- if featureflag.IsEnabled(ctx, featureflag.BackchannelVoting) {
- backchannelConn, err := client.Dial(ctx, praefect.ListenAddr, nil, nil)
- require.NoError(t, err)
- defer backchannelConn.Close()
- praefect = metadata.PraefectServer{BackchannelID: registry.RegisterBackchannel(backchannelConn)}
- }
-
- manager := NewManager(cfg, registry)
-
- for _, tc := range []struct {
- desc string
- transaction metadata.Transaction
- vote Vote
- voteFn func(*testing.T, *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error)
- expectedErr error
- }{
- {
- desc: "successful vote",
- transaction: metadata.Transaction{
- ID: 1,
- Node: "node",
- },
- vote: VoteFromData([]byte("foobar")),
- voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- require.Equal(t, uint64(1), request.TransactionId)
- require.Equal(t, "node", request.Node)
- require.Equal(t, request.ReferenceUpdatesHash, VoteFromData([]byte("foobar")).Bytes())
-
- return &gitalypb.VoteTransactionResponse{
- State: gitalypb.VoteTransactionResponse_COMMIT,
- }, nil
- },
+ cfg := testcfg.Build(t)
+
+ transactionServer, praefect := runTransactionServer(t, cfg)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ registry := backchannel.NewRegistry()
+ backchannelConn, err := client.Dial(ctx, praefect.ListenAddr, nil, nil)
+ require.NoError(t, err)
+ defer backchannelConn.Close()
+ praefect = txinfo.PraefectServer{BackchannelID: registry.RegisterBackchannel(backchannelConn)}
+
+ manager := transaction.NewManager(cfg, registry)
+
+ for _, tc := range []struct {
+ desc string
+ transaction txinfo.Transaction
+ vote voting.Vote
+ voteFn func(*testing.T, *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error)
+ expectedErr error
+ }{
+ {
+ desc: "successful vote",
+ transaction: txinfo.Transaction{
+ ID: 1,
+ Node: "node",
},
- {
- desc: "aborted vote",
- voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- return &gitalypb.VoteTransactionResponse{
- State: gitalypb.VoteTransactionResponse_ABORT,
- }, nil
- },
- expectedErr: errors.New("transaction was aborted"),
+ vote: voting.VoteFromData([]byte("foobar")),
+ voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ require.Equal(t, uint64(1), request.TransactionId)
+ require.Equal(t, "node", request.Node)
+ require.Equal(t, request.ReferenceUpdatesHash, voting.VoteFromData([]byte("foobar")).Bytes())
+
+ return &gitalypb.VoteTransactionResponse{
+ State: gitalypb.VoteTransactionResponse_COMMIT,
+ }, nil
},
- {
- desc: "stopped vote",
- voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- return &gitalypb.VoteTransactionResponse{
- State: gitalypb.VoteTransactionResponse_STOP,
- }, nil
- },
- expectedErr: errors.New("transaction was stopped"),
+ },
+ {
+ desc: "aborted vote",
+ voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ return &gitalypb.VoteTransactionResponse{
+ State: gitalypb.VoteTransactionResponse_ABORT,
+ }, nil
},
- {
- desc: "erroneous vote",
- voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- return nil, status.Error(codes.Internal, "foobar")
- },
- expectedErr: status.Error(codes.Internal, "foobar"),
+ expectedErr: errors.New("transaction was aborted"),
+ },
+ {
+ desc: "stopped vote",
+ voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ return &gitalypb.VoteTransactionResponse{
+ State: gitalypb.VoteTransactionResponse_STOP,
+ }, nil
},
- } {
- t.Run(tc.desc, func(t *testing.T) {
- transactionServer.vote = func(request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- return tc.voteFn(t, request)
- }
-
- err := manager.Vote(ctx, tc.transaction, praefect, tc.vote)
- require.Equal(t, tc.expectedErr, err)
- })
- }
- })
+ expectedErr: errors.New("transaction was stopped"),
+ },
+ {
+ desc: "erroneous vote",
+ voteFn: func(t *testing.T, request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ return nil, status.Error(codes.Internal, "foobar")
+ },
+ expectedErr: status.Error(codes.Internal, "foobar"),
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ transactionServer.vote = func(request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
+ return tc.voteFn(t, request)
+ }
+
+ err := manager.Vote(ctx, tc.transaction, praefect, tc.vote)
+ require.Equal(t, tc.expectedErr, err)
+ })
+ }
}
func TestPoolManager_Stop(t *testing.T) {
- testhelper.NewFeatureSets([]featureflag.FeatureFlag{
- featureflag.BackchannelVoting,
- }).Run(t, func(t *testing.T, ctx context.Context) {
- cfg := testcfg.Build(t)
-
- transactionServer, praefect, stop := runTransactionServer(t, cfg)
- defer stop()
-
- registry := backchannel.NewRegistry()
- if featureflag.IsEnabled(ctx, featureflag.BackchannelVoting) {
- backchannelConn, err := client.Dial(ctx, praefect.ListenAddr, nil, nil)
- require.NoError(t, err)
- defer backchannelConn.Close()
- praefect = metadata.PraefectServer{BackchannelID: registry.RegisterBackchannel(backchannelConn)}
- }
-
- manager := NewManager(cfg, registry)
-
- for _, tc := range []struct {
- desc string
- transaction metadata.Transaction
- stopFn func(*testing.T, *gitalypb.StopTransactionRequest) (*gitalypb.StopTransactionResponse, error)
- expectedErr error
- }{
- {
- desc: "successful stop",
- transaction: metadata.Transaction{
- ID: 1,
- Node: "node",
- },
- stopFn: func(t *testing.T, request *gitalypb.StopTransactionRequest) (*gitalypb.StopTransactionResponse, error) {
- require.Equal(t, uint64(1), request.TransactionId)
- return &gitalypb.StopTransactionResponse{}, nil
- },
+ cfg := testcfg.Build(t)
+
+ transactionServer, praefect := runTransactionServer(t, cfg)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ registry := backchannel.NewRegistry()
+ backchannelConn, err := client.Dial(ctx, praefect.ListenAddr, nil, nil)
+ require.NoError(t, err)
+ defer backchannelConn.Close()
+ praefect = txinfo.PraefectServer{BackchannelID: registry.RegisterBackchannel(backchannelConn)}
+
+ manager := transaction.NewManager(cfg, registry)
+
+ for _, tc := range []struct {
+ desc string
+ transaction txinfo.Transaction
+ stopFn func(*testing.T, *gitalypb.StopTransactionRequest) (*gitalypb.StopTransactionResponse, error)
+ expectedErr error
+ }{
+ {
+ desc: "successful stop",
+ transaction: txinfo.Transaction{
+ ID: 1,
+ Node: "node",
+ },
+ stopFn: func(t *testing.T, request *gitalypb.StopTransactionRequest) (*gitalypb.StopTransactionResponse, error) {
+ require.Equal(t, uint64(1), request.TransactionId)
+ return &gitalypb.StopTransactionResponse{}, nil
},
- {
- desc: "erroneous stop",
- stopFn: func(t *testing.T, request *gitalypb.StopTransactionRequest) (*gitalypb.StopTransactionResponse, error) {
- return nil, status.Error(codes.Internal, "foobar")
- },
- expectedErr: status.Error(codes.Internal, "foobar"),
+ },
+ {
+ desc: "erroneous stop",
+ stopFn: func(t *testing.T, request *gitalypb.StopTransactionRequest) (*gitalypb.StopTransactionResponse, error) {
+ return nil, status.Error(codes.Internal, "foobar")
},
- } {
- t.Run(tc.desc, func(t *testing.T) {
- transactionServer.stop = func(request *gitalypb.StopTransactionRequest) (*gitalypb.StopTransactionResponse, error) {
- return tc.stopFn(t, request)
- }
-
- err := manager.Stop(ctx, tc.transaction, praefect)
- require.Equal(t, tc.expectedErr, err)
- })
- }
- })
+ expectedErr: status.Error(codes.Internal, "foobar"),
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ transactionServer.stop = func(request *gitalypb.StopTransactionRequest) (*gitalypb.StopTransactionResponse, error) {
+ return tc.stopFn(t, request)
+ }
+
+ err := manager.Stop(ctx, tc.transaction, praefect)
+ require.Equal(t, tc.expectedErr, err)
+ })
+ }
}
-func runTransactionServer(t *testing.T, cfg config.Cfg) (*testTransactionServer, metadata.PraefectServer, func()) {
+func runTransactionServer(t *testing.T, cfg config.Cfg) (*testTransactionServer, txinfo.PraefectServer) {
transactionServer := &testTransactionServer{}
+ cfg.ListenAddr = ":0" // make the gRPC server listen on an ephemeral TCP port
+ addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterRefTransactionServer(srv, transactionServer)
+ }, testserver.WithDisablePraefect())
- server := testhelper.NewServerWithAuth(t, nil, nil, cfg.Auth.Token, backchannel.NewRegistry(), testhelper.WithInternalSocket(cfg))
- gitalypb.RegisterRefTransactionServer(server.GrpcServer(), transactionServer)
- server.Start(t)
-
- listener, address := testhelper.GetLocalhostListener(t)
- go func() { require.NoError(t, server.GrpcServer().Serve(listener)) }()
-
- praefect := metadata.PraefectServer{
- ListenAddr: "tcp://" + address,
+ praefect := txinfo.PraefectServer{
+ ListenAddr: addr,
Token: cfg.Auth.Token,
}
- return transactionServer, praefect, server.Stop
+ return transactionServer, praefect
}
diff --git a/internal/gitaly/transaction/mock.go b/internal/gitaly/transaction/mock.go
index 1f617909d..a0c3ab894 100644
--- a/internal/gitaly/transaction/mock.go
+++ b/internal/gitaly/transaction/mock.go
@@ -4,17 +4,18 @@ import (
"context"
"errors"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)
// MockManager is a mock Manager for use in tests.
type MockManager struct {
- VoteFn func(context.Context, metadata.Transaction, metadata.PraefectServer, Vote) error
- StopFn func(context.Context, metadata.Transaction, metadata.PraefectServer) error
+ VoteFn func(context.Context, txinfo.Transaction, txinfo.PraefectServer, voting.Vote) error
+ StopFn func(context.Context, txinfo.Transaction, txinfo.PraefectServer) error
}
// Vote calls the MockManager's Vote function, if set. Otherwise, it returns an error.
-func (m *MockManager) Vote(ctx context.Context, tx metadata.Transaction, praefect metadata.PraefectServer, vote Vote) error {
+func (m *MockManager) Vote(ctx context.Context, tx txinfo.Transaction, praefect txinfo.PraefectServer, vote voting.Vote) error {
if m.VoteFn == nil {
return errors.New("mock does not implement Vote function")
}
@@ -22,7 +23,7 @@ func (m *MockManager) Vote(ctx context.Context, tx metadata.Transaction, praefec
}
// Stop calls the MockManager's Stop function, if set. Otherwise, it returns an error.
-func (m *MockManager) Stop(ctx context.Context, tx metadata.Transaction, praefect metadata.PraefectServer) error {
+func (m *MockManager) Stop(ctx context.Context, tx txinfo.Transaction, praefect txinfo.PraefectServer) error {
if m.StopFn == nil {
return errors.New("mock does not implement Stop function")
}
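
The new txinfo and voting types replace the old metadata.Transaction and transaction.Vote in the manager and its mock. Below is a minimal standalone sketch (not part of the patch) of wiring a MockManager with the updated signatures; the literal vote data is arbitrary and mirrors the "foobar" payload used in the test above.

package main

import (
	"context"
	"fmt"

	"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
	"gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
	"gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)

func main() {
	// Only VoteFn is set; calling Stop on this mock would return an error.
	manager := &transaction.MockManager{
		VoteFn: func(ctx context.Context, tx txinfo.Transaction, praefect txinfo.PraefectServer, vote voting.Vote) error {
			fmt.Printf("transaction %d on node %q voted %x\n", tx.ID, tx.Node, vote.Bytes())
			return nil
		},
	}

	// Votes are derived from the serialized reference updates, as in the test above.
	vote := voting.VoteFromData([]byte("foobar"))
	if err := manager.Vote(context.Background(), txinfo.Transaction{ID: 1, Node: "node"}, txinfo.PraefectServer{}, vote); err != nil {
		fmt.Println("vote failed:", err)
	}
}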
diff --git a/internal/gitalyssh/gitalyssh_test.go b/internal/gitalyssh/gitalyssh_test.go
index 8585e808a..7b5a2714c 100644
--- a/internal/gitalyssh/gitalyssh_test.go
+++ b/internal/gitalyssh/gitalyssh_test.go
@@ -7,17 +7,16 @@ import (
"github.com/golang/protobuf/jsonpb"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/labkit/correlation"
"google.golang.org/grpc/metadata"
)
func TestUploadPackEnv(t *testing.T) {
- testRepo, _, cleanupFn := gittest.CloneRepo(t)
- defer cleanupFn()
+ _, repo, _ := testcfg.BuildWithRepo(t)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -27,7 +26,7 @@ func TestUploadPackEnv(t *testing.T) {
ctx = correlation.ContextWithCorrelation(ctx, "correlation-id-1")
req := gitalypb.SSHUploadPackRequest{
- Repository: testRepo,
+ Repository: repo,
}
var pbMarshaler jsonpb.Marshaler
diff --git a/internal/gitlab/client.go b/internal/gitlab/client.go
new file mode 100644
index 000000000..d4e147451
--- /dev/null
+++ b/internal/gitlab/client.go
@@ -0,0 +1,55 @@
+package gitlab
+
+import (
+ "context"
+)
+
+// AllowedParams composes the set of parameters required to call the 'Client.Allowed' method.
+type AllowedParams struct {
+ // RepoPath is an absolute path to the repository.
+ RepoPath string
+ // GitObjectDirectory is a path to git object directory.
+ GitObjectDirectory string
+ // GitAlternateObjectDirectories are the paths to alternate object directories.
+ GitAlternateObjectDirectories []string
+ // GLRepository is a name of the repository.
+ GLRepository string
+ // GLID is an identifier of the user performing the change, e.g. "user-123" or "key-123".
+ GLID string
+ // GLProtocol is a protocol used for operation.
+ GLProtocol string
+ // Changes is a set of changes to be applied.
+ Changes string
+}
+
+// PostReceiveMessage encapsulates a message from the /post_receive endpoint that gets printed to stdout
+type PostReceiveMessage struct {
+ Message string `json:"message"`
+ Type string `json:"type"`
+}
+
+// CheckInfo represents the response of GitLab's `check` API endpoint
+type CheckInfo struct {
+ // Version of the GitLab Rails component
+ Version string `json:"gitlab_version"`
+ // Revision of the Git object of the running GitLab
+ Revision string `json:"gitlab_revision"`
+ // APIVersion of GitLab, expected to be v4
+ APIVersion string `json:"api_version"`
+ // RedisReachable shows if GitLab can reach Redis. This can be false
+ // while the check itself succeeds. Normal hook API calls will likely
+ // fail.
+ RedisReachable bool `json:"redis"`
+}
+
+// Client is an interface for accessing the GitLab internal API
+type Client interface {
+ // Allowed queries the gitlab internal api /allowed endpoint to determine if a ref change for a given repository and user is allowed
+ Allowed(ctx context.Context, params AllowedParams) (bool, string, error)
+ // Check verifies that GitLab can be reached, and authenticated to
+ Check(ctx context.Context) (*CheckInfo, error)
+ // PreReceive queries the gitlab internal api /pre_receive to increase the reference counter
+ PreReceive(ctx context.Context, glRepository string) (bool, error)
+ // PostReceive queries the gitlab internal api /post_receive to decrease the reference counter
+ PostReceive(ctx context.Context, glRepository, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error)
+}
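
A short sketch of how a caller might program against the new Client interface; the helper name verifyGitLabConnection is hypothetical, and the mock client defined later in this patch stands in for a real HTTP client.

package main

import (
	"context"
	"fmt"
	"log"

	"gitlab.com/gitlab-org/gitaly/internal/gitlab"
)

// verifyGitLabConnection pings the internal API and reports the Rails version.
func verifyGitLabConnection(ctx context.Context, client gitlab.Client) error {
	info, err := client.Check(ctx)
	if err != nil {
		return fmt.Errorf("checking GitLab connection: %w", err)
	}
	fmt.Printf("GitLab %s (API %s), Redis reachable: %v\n", info.Version, info.APIVersion, info.RedisReachable)
	return nil
}

func main() {
	if err := verifyGitLabConnection(context.Background(), gitlab.NewMockClient()); err != nil {
		log.Fatal(err)
	}
}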
diff --git a/internal/gitaly/hook/access.go b/internal/gitlab/http_client.go
index adb957028..225bf67bc 100644
--- a/internal/gitaly/hook/access.go
+++ b/internal/gitlab/http_client.go
@@ -1,4 +1,4 @@
-package hook
+package gitlab
import (
"context"
@@ -12,82 +12,28 @@ import (
"regexp"
"strings"
+ "github.com/prometheus/client_golang/prometheus"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ promcfg "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/prometheus"
+ "gitlab.com/gitlab-org/gitaly/internal/prometheus/metrics"
"gitlab.com/gitlab-org/gitaly/internal/version"
"gitlab.com/gitlab-org/gitlab-shell/client"
)
-// AllowedResponse is a response for the internal gitlab api's /allowed endpoint with a subset
-// of fields
-type AllowedResponse struct {
- Status bool `json:"status"`
- Message string `json:"message"`
-}
-
-// AllowedRequest is a request for the internal gitlab api /allowed endpoint
-type AllowedRequest struct {
- Action string `json:"action,omitempty"`
- GLRepository string `json:"gl_repository,omitempty"`
- Project string `json:"project,omitempty"`
- Changes string `json:"changes,omitempty"`
- Protocol string `json:"protocol,omitempty"`
- Env string `json:"env,omitempty"`
- Username string `json:"username,omitempty"`
- KeyID string `json:"key_id,omitempty"`
- UserID string `json:"user_id,omitempty"`
-}
-
-// marshallGitObjectDirs generates a json encoded string containing GIT_OBJECT_DIRECTORY_RELATIVE, and GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE
-func marshallGitObjectDirs(gitObjectDirRel string, gitAltObjectDirsRel []string) (string, error) {
- envString, err := json.Marshal(map[string]interface{}{
- "GIT_OBJECT_DIRECTORY_RELATIVE": gitObjectDirRel,
- "GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE": gitAltObjectDirsRel,
- })
-
- if err != nil {
- return "", err
- }
-
- return string(envString), nil
-}
-
-// AllowedParams compose set of parameters required to call 'GitlabAPI.Allowed' method.
-type AllowedParams struct {
- // RepoPath is an absolute path to the repository.
- RepoPath string
- // GitObjectDirectory is a path to git object directory.
- GitObjectDirectory string
- // GitAlternateObjectDirectories are the paths to alternate object directories.
- GitAlternateObjectDirectories []string
- // GLRepository is a name of the repository.
- GLRepository string
- // GLID is an identifier of the repository.
- GLID string
- // GLProtocol is a protocol used for operation.
- GLProtocol string
- // Changes is a set of changes to be applied.
- Changes string
-}
-
-// GitlabAPI is an interface for accessing the gitlab internal API
-type GitlabAPI interface {
- // Allowed queries the gitlab internal api /allowed endpoint to determine if a ref change for a given repository and user is allowed
- Allowed(ctx context.Context, params AllowedParams) (bool, string, error)
- // Check verifies that GitLab can be reached, and authenticated to
- Check(ctx context.Context) (*CheckInfo, error)
- // PreReceive queries the gitlab internal api /pre_receive to increase the reference counter
- PreReceive(ctx context.Context, glRepository string) (bool, error)
- // PostReceive queries the gitlab internal api /post_receive to decrease the reference counter
- PostReceive(ctx context.Context, glRepository, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error)
-}
+var glIDRegex = regexp.MustCompile(`\A[0-9]+\z`)
-// gitlabAPI is a wrapper around client.GitlabNetClient with API methods for gitlab git receive hooks
-type gitlabAPI struct {
- client *client.GitlabNetClient
+// HTTPClient is an HTTP client used to talk to the internal GitLab Rails API.
+type HTTPClient struct {
+ *client.GitlabNetClient
+ latencyMetric metrics.HistogramVec
}
-// NewGitlabNetClient creates an HTTP client to talk to the Rails internal API
-func NewGitlabNetClient(gitlabCfg config.Gitlab, tlsCfg config.TLS) (*client.GitlabNetClient, error) {
+// NewHTTPClient creates an HTTP client to talk to the Rails internal API
+func NewHTTPClient(
+ gitlabCfg config.Gitlab,
+ tlsCfg config.TLS,
+ promCfg promcfg.Config,
+) (*HTTPClient, error) {
url, err := url.PathUnescape(gitlabCfg.URL)
if err != nil {
return nil, err
@@ -127,27 +73,81 @@ func NewGitlabNetClient(gitlabCfg config.Gitlab, tlsCfg config.TLS) (*client.Git
gitlabnetClient.SetUserAgent("gitaly/" + version.GetVersion())
- return gitlabnetClient, nil
+ return &HTTPClient{
+ GitlabNetClient: gitlabnetClient,
+ latencyMetric: prometheus.NewHistogramVec(
+ prometheus.HistogramOpts{
+ Name: "gitaly_gitlab_api_latency_seconds",
+ Help: "Latency between posting to GitLab's `/internal/` APIs and receiving a response",
+ Buckets: promCfg.GRPCLatencyBuckets,
+ },
+ []string{"endpoint"},
+ ),
+ }, nil
}
-// NewGitlabAPI creates a GitLabAPI to talk to the Rails internal API
-func NewGitlabAPI(gitlabCfg config.Gitlab, tlsCfg config.TLS) (GitlabAPI, error) {
- client, err := NewGitlabNetClient(gitlabCfg, tlsCfg)
- if err != nil {
- return nil, err
+// Describe describes Prometheus metrics exposed by the HTTPClient.
+func (c *HTTPClient) Describe(descs chan<- *prometheus.Desc) {
+ prometheus.DescribeByCollect(c, descs)
+}
+
+// Collect collects Prometheus metrics exposed by the HTTPClient.
+func (c *HTTPClient) Collect(metrics chan<- prometheus.Metric) {
+ c.latencyMetric.Collect(metrics)
+}
+
+// allowedRequest is a request for the internal gitlab api /allowed endpoint
+type allowedRequest struct {
+ Action string `json:"action,omitempty"`
+ GLRepository string `json:"gl_repository,omitempty"`
+ Project string `json:"project,omitempty"`
+ Changes string `json:"changes,omitempty"`
+ Protocol string `json:"protocol,omitempty"`
+ Env string `json:"env,omitempty"`
+ Username string `json:"username,omitempty"`
+ KeyID string `json:"key_id,omitempty"`
+ UserID string `json:"user_id,omitempty"`
+}
+
+func (a *allowedRequest) parseAndSetGLID(glID string) error {
+ var value string
+
+ switch {
+ case strings.HasPrefix(glID, "username-"):
+ a.Username = strings.TrimPrefix(glID, "username-")
+ return nil
+ case strings.HasPrefix(glID, "key-"):
+ a.KeyID = strings.TrimPrefix(glID, "key-")
+ value = a.KeyID
+ case strings.HasPrefix(glID, "user-"):
+ a.UserID = strings.TrimPrefix(glID, "user-")
+ value = a.UserID
+ }
+
+ if !glIDRegex.MatchString(value) {
+ return fmt.Errorf("gl_id='%s' is invalid", glID)
}
- return &gitlabAPI{client: client}, nil
+ return nil
+}
+
+// allowedResponse is a response for the internal gitlab api's /allowed endpoint with a subset
+// of fields
+type allowedResponse struct {
+ Status bool `json:"status"`
+ Message string `json:"message"`
}
// Allowed checks if a ref change for a given repository is allowed through the gitlab internal api /allowed endpoint
-func (a *gitlabAPI) Allowed(ctx context.Context, params AllowedParams) (bool, string, error) {
+func (c *HTTPClient) Allowed(ctx context.Context, params AllowedParams) (bool, string, error) {
+ defer prometheus.NewTimer(c.latencyMetric.WithLabelValues("allowed")).ObserveDuration()
+
gitObjDirVars, err := marshallGitObjectDirs(params.GitObjectDirectory, params.GitAlternateObjectDirectories)
if err != nil {
return false, "", fmt.Errorf("when getting git object directories json encoded string: %w", err)
}
- req := AllowedRequest{
+ req := allowedRequest{
Action: "git-receive-pack",
GLRepository: params.GLRepository,
Changes: params.Changes,
@@ -160,7 +160,7 @@ func (a *gitlabAPI) Allowed(ctx context.Context, params AllowedParams) (bool, st
return false, "", fmt.Errorf("setting gl_id: %w", err)
}
- resp, err := a.client.Post(ctx, "/allowed", &req)
+ resp, err := c.Post(ctx, "/allowed", &req)
if err != nil {
return false, "", err
}
@@ -170,7 +170,7 @@ func (a *gitlabAPI) Allowed(ctx context.Context, params AllowedParams) (bool, st
resp.Body.Close()
}()
- var response AllowedResponse
+ var response allowedResponse
switch resp.StatusCode {
case http.StatusOK,
@@ -200,8 +200,10 @@ type preReceiveResponse struct {
}
// PreReceive increases the reference counter for a push for a given gl_repository through the gitlab internal API /pre_receive endpoint
-func (a *gitlabAPI) PreReceive(ctx context.Context, glRepository string) (bool, error) {
- resp, err := a.client.Post(ctx, "/pre_receive", map[string]string{"gl_repository": glRepository})
+func (c *HTTPClient) PreReceive(ctx context.Context, glRepository string) (bool, error) {
+ defer prometheus.NewTimer(c.latencyMetric.WithLabelValues("pre-receive")).ObserveDuration()
+
+ resp, err := c.Post(ctx, "/pre_receive", map[string]string{"gl_repository": glRepository})
if err != nil {
return false, fmt.Errorf("http post to gitlab api /pre_receive endpoint: %w", err)
}
@@ -233,21 +235,17 @@ func (a *gitlabAPI) PreReceive(ctx context.Context, glRepository string) (bool,
return result.ReferenceCounterIncreased, nil
}
-// PostReceiveResponse is the response the GitLab internal api provides on a successful /post_receive call
-type PostReceiveResponse struct {
+// postReceiveResponse is the response the GitLab internal api provides on a successful /post_receive call
+type postReceiveResponse struct {
ReferenceCounterDecreased bool `json:"reference_counter_decreased"`
Messages []PostReceiveMessage `json:"messages"`
}
-// PostReceiveMessage encapsulates a message from the /post_receive endpoint that gets printed to stdout
-type PostReceiveMessage struct {
- Message string `json:"message"`
- Type string `json:"type"`
-}
-
// PostReceive decreases the reference counter for a push for a given gl_repository through the gitlab internal API /post_receive endpoint
-func (a *gitlabAPI) PostReceive(ctx context.Context, glRepository, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
- resp, err := a.client.Post(ctx, "/post_receive", map[string]interface{}{"gl_repository": glRepository, "identifier": glID, "changes": changes, "push_options": pushOptions})
+func (c *HTTPClient) PostReceive(ctx context.Context, glRepository, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error) {
+ defer prometheus.NewTimer(c.latencyMetric.WithLabelValues("post-receive")).ObserveDuration()
+
+ resp, err := c.Post(ctx, "/post_receive", map[string]interface{}{"gl_repository": glRepository, "identifier": glID, "changes": changes, "push_options": pushOptions})
if err != nil {
return false, nil, fmt.Errorf("http post to gitlab api /post_receive endpoint: %w", err)
}
@@ -270,7 +268,7 @@ func (a *gitlabAPI) PostReceive(ctx context.Context, glRepository, glID, changes
return false, nil, fmt.Errorf("/post_receive endpoint respond with unsupported content type: %s", mtype)
}
- var result PostReceiveResponse
+ var result postReceiveResponse
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return false, nil, fmt.Errorf("decoding response from /post_receive endpoint: %w", err)
@@ -279,53 +277,44 @@ func (a *gitlabAPI) PostReceive(ctx context.Context, glRepository, glID, changes
return result.ReferenceCounterDecreased, result.Messages, nil
}
-var glIDRegex = regexp.MustCompile(`\A[0-9]+\z`)
-
-func (a *AllowedRequest) parseAndSetGLID(glID string) error {
- var value string
+// Check performs an HTTP request to the internal/check API endpoint to verify
+// the connection and tokens. It returns basic information of the installed
+// GitLab
+func (c *HTTPClient) Check(ctx context.Context) (*CheckInfo, error) {
+ defer prometheus.NewTimer(c.latencyMetric.WithLabelValues("check")).ObserveDuration()
- switch {
- case strings.HasPrefix(glID, "username-"):
- a.Username = strings.TrimPrefix(glID, "username-")
- return nil
- case strings.HasPrefix(glID, "key-"):
- a.KeyID = strings.TrimPrefix(glID, "key-")
- value = a.KeyID
- case strings.HasPrefix(glID, "user-"):
- a.UserID = strings.TrimPrefix(glID, "user-")
- value = a.UserID
+ resp, err := c.Get(ctx, "/check")
+ if err != nil {
+ return nil, fmt.Errorf("HTTP GET to GitLab endpoint /check failed: %w", err)
}
- if !glIDRegex.MatchString(value) {
- return fmt.Errorf("gl_id='%s' is invalid", glID)
- }
+ defer func() {
+ io.Copy(ioutil.Discard, resp.Body)
+ resp.Body.Close()
+ }()
- return nil
-}
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("Check HTTP request failed with status: %d", resp.StatusCode)
+ }
-// mockAPI is a noop gitlab API client
-type mockAPI struct{}
+ var info CheckInfo
+ if err := json.NewDecoder(resp.Body).Decode(&info); err != nil {
+ return nil, fmt.Errorf("failed to decode response from /check endpoint: %w", err)
+ }
-func (m *mockAPI) Allowed(ctx context.Context, params AllowedParams) (bool, string, error) {
- return true, "", nil
+ return &info, nil
}
-func (m *mockAPI) Check(ctx context.Context) (*CheckInfo, error) {
- return &CheckInfo{
- Version: "v13.5.0",
- Revision: "deadbeef",
- APIVersion: "v4",
- RedisReachable: true,
- }, nil
-}
+// marshallGitObjectDirs generates a json encoded string containing GIT_OBJECT_DIRECTORY_RELATIVE, and GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE
+func marshallGitObjectDirs(gitObjectDirRel string, gitAltObjectDirsRel []string) (string, error) {
+ envString, err := json.Marshal(map[string]interface{}{
+ "GIT_OBJECT_DIRECTORY_RELATIVE": gitObjectDirRel,
+ "GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE": gitAltObjectDirsRel,
+ })
-func (m *mockAPI) PreReceive(ctx context.Context, glRepository string) (bool, error) {
- return true, nil
-}
+ if err != nil {
+ return "", err
+ }
-func (m *mockAPI) PostReceive(ctx context.Context, glRepository, glID, changes string, gitPushOptions ...string) (bool, []PostReceiveMessage, error) {
- return true, nil, nil
+ return string(envString), nil
}
-
-// GitlabAPIStub is a global mock that can be used in testing
-var GitlabAPIStub = &mockAPI{}
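
NewHTTPClient now takes a Prometheus configuration so the per-endpoint latency histogram can be set up, and because HTTPClient implements Describe and Collect, the client itself can be registered as a collector. A minimal construction sketch follows; the URL and secret path are placeholders, and an empty prometheus config leaves the histogram with the default buckets.

package main

import (
	"log"

	"github.com/prometheus/client_golang/prometheus"

	"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
	promcfg "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/prometheus"
	"gitlab.com/gitlab-org/gitaly/internal/gitlab"
)

func main() {
	client, err := gitlab.NewHTTPClient(
		config.Gitlab{
			URL:        "https://gitlab.example.com",     // placeholder
			SecretFile: "/etc/gitlab/gitlab_shell_secret", // placeholder
		},
		config.TLS{},
		promcfg.Config{}, // empty buckets fall back to the Prometheus defaults
	)
	if err != nil {
		log.Fatalf("creating GitLab HTTP client: %v", err)
	}

	// gitaly_gitlab_api_latency_seconds is exposed by registering the client.
	prometheus.MustRegister(client)
}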
diff --git a/internal/gitaly/hook/access_test.go b/internal/gitlab/http_client_test.go
index da34b4c7e..fe87fe722 100644
--- a/internal/gitaly/hook/access_test.go
+++ b/internal/gitlab/http_client_test.go
@@ -1,4 +1,4 @@
-package hook
+package gitlab
import (
"context"
@@ -10,9 +10,11 @@ import (
"testing"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/prometheus"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/promtest"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)
@@ -31,19 +33,18 @@ func TestAccess_verifyParams(t *testing.T) {
secretToken := "topsecret"
glID, glRepository := "key-123", "repo-1"
- testRepo, testRepoPath, cleanup := gittest.CloneRepo(t)
- defer cleanup()
+ _, repo, repoPath := testcfg.BuildWithRepo(t)
changes := "changes1\nchanges2\nchanges3"
protocol := "protocol"
- testRepo.GitObjectDirectory = "object/dir"
- testRepo.GitAlternateObjectDirectories = []string{"alt/object/dir1", "alt/object/dir2"}
+ repo.GitObjectDirectory = "object/dir"
+ repo.GitAlternateObjectDirectories = []string{"alt/object/dir1", "alt/object/dir2"}
- gitObjectDirFull := filepath.Join(testRepoPath, testRepo.GitObjectDirectory)
+ gitObjectDirFull := filepath.Join(repoPath, repo.GitObjectDirectory)
var gitAlternateObjectDirsFull []string
- for _, gitAlternateObjectDirRel := range testRepo.GitAlternateObjectDirectories {
- gitAlternateObjectDirsFull = append(gitAlternateObjectDirsFull, filepath.Join(testRepoPath, gitAlternateObjectDirRel))
+ for _, gitAlternateObjectDirRel := range repo.GitAlternateObjectDirectories {
+ gitAlternateObjectDirsFull = append(gitAlternateObjectDirsFull, filepath.Join(repoPath, gitAlternateObjectDirRel))
}
tempDir := testhelper.TempDir(t)
@@ -63,14 +64,14 @@ func TestAccess_verifyParams(t *testing.T) {
GitPushOptions: nil,
GitObjectDir: gitObjectDirFull,
GitAlternateObjectDirs: gitAlternateObjectDirsFull,
- RepoPath: testRepoPath,
+ RepoPath: repoPath,
ClientCACertPath: "testdata/certs/server.crt",
ServerCertPath: "testdata/certs/server.crt",
ServerKeyPath: "testdata/certs/server.key",
})
defer cleanup()
- c, err := NewGitlabAPI(config.Gitlab{
+ c, err := NewHTTPClient(config.Gitlab{
URL: serverURL,
SecretFile: secretFilePath,
HTTPSettings: config.HTTPSettings{
@@ -81,11 +82,11 @@ func TestAccess_verifyParams(t *testing.T) {
}, config.TLS{
CertPath: "testdata/certs/server.crt",
KeyPath: "testdata/certs/server.key",
- })
+ }, prometheus.Config{})
require.NoError(t, err)
- badRepo := *testRepo
- badRepo.GitObjectDirectory = filepath.Join(testRepoPath, "bad/object/directory")
+ badRepo := *repo
+ badRepo.GitObjectDirectory = filepath.Join(repoPath, "bad/object/directory")
testCases := []struct {
desc string
@@ -95,7 +96,7 @@ func TestAccess_verifyParams(t *testing.T) {
}{
{
desc: "success",
- repo: testRepo,
+ repo: repo,
glRepository: glRepository,
glID: glID,
protocol: protocol,
@@ -133,19 +134,18 @@ func TestAccess_escapedAndRelativeURLs(t *testing.T) {
secretToken := "topsecret"
glID, glRepository := "key-123", "repo-1"
- testRepo, testRepoPath, cleanup := gittest.CloneRepo(t)
- defer cleanup()
+ _, repo, repoPath := testcfg.BuildWithRepo(t)
changes := "changes1\nchanges2\nchanges3"
protocol := "protocol"
- testRepo.GitObjectDirectory = "object/dir"
- testRepo.GitAlternateObjectDirectories = []string{"alt/object/dir1", "alt/object/dir2"}
+ repo.GitObjectDirectory = "object/dir"
+ repo.GitAlternateObjectDirectories = []string{"alt/object/dir1", "alt/object/dir2"}
- gitObjectDirFull := filepath.Join(testRepoPath, testRepo.GitObjectDirectory)
+ gitObjectDirFull := filepath.Join(repoPath, repo.GitObjectDirectory)
var gitAlternateObjectDirsFull []string
- for _, gitAlternateObjectDirRel := range testRepo.GitAlternateObjectDirectories {
- gitAlternateObjectDirsFull = append(gitAlternateObjectDirsFull, filepath.Join(testRepoPath, gitAlternateObjectDirRel))
+ for _, gitAlternateObjectDirRel := range repo.GitAlternateObjectDirectories {
+ gitAlternateObjectDirsFull = append(gitAlternateObjectDirsFull, filepath.Join(repoPath, gitAlternateObjectDirRel))
}
tempDir := testhelper.TempDir(t)
@@ -196,7 +196,7 @@ func TestAccess_escapedAndRelativeURLs(t *testing.T) {
GitPushOptions: nil,
GitObjectDir: gitObjectDirFull,
GitAlternateObjectDirs: gitAlternateObjectDirsFull,
- RepoPath: testRepoPath,
+ RepoPath: repoPath,
RelativeURLRoot: tc.relativeURLRoot,
UnixSocket: tc.unixSocket,
})
@@ -206,7 +206,7 @@ func TestAccess_escapedAndRelativeURLs(t *testing.T) {
serverURL = url.PathEscape(serverURL)
}
- c, err := NewGitlabAPI(config.Gitlab{
+ c, err := NewHTTPClient(config.Gitlab{
URL: serverURL,
RelativeURLRoot: tc.relativeURLRoot,
SecretFile: secretFilePath,
@@ -214,12 +214,12 @@ func TestAccess_escapedAndRelativeURLs(t *testing.T) {
User: user,
Password: password,
},
- }, config.TLS{})
+ }, config.TLS{}, prometheus.Config{})
require.NoError(t, err)
allowed, _, err := c.Allowed(context.Background(), AllowedParams{
- RepoPath: testRepo.RelativePath,
- GitObjectDirectory: testRepo.GitObjectDirectory,
- GitAlternateObjectDirectories: testRepo.GitAlternateObjectDirectories,
+ RepoPath: repo.RelativePath,
+ GitObjectDirectory: repo.GitObjectDirectory,
+ GitAlternateObjectDirectories: repo.GitAlternateObjectDirectories,
GLID: glID,
GLRepository: glRepository,
GLProtocol: protocol,
@@ -232,15 +232,13 @@ func TestAccess_escapedAndRelativeURLs(t *testing.T) {
}
func TestAccess_allowedResponseHandling(t *testing.T) {
- testRepo, testRepoPath, cleanup := gittest.CloneRepo(t)
+ _, repo, repoPath := testcfg.BuildWithRepo(t)
// set git quarantine directories
- gitObjectDir := filepath.Join(testRepoPath, "quarantine", "object", "dir")
- testRepo.GitObjectDirectory = gitObjectDir
- gitAltObjectDir := filepath.Join(testRepoPath, "objects")
- testRepo.GitAlternateObjectDirectories = []string{gitAltObjectDir}
-
- defer cleanup()
+ gitObjectDir := filepath.Join(repoPath, "quarantine", "object", "dir")
+ repo.GitObjectDirectory = gitObjectDir
+ gitAltObjectDir := filepath.Join(repoPath, "objects")
+ repo.GitAlternateObjectDirectories = []string{gitAltObjectDir}
tempDir := testhelper.TempDir(t)
testhelper.WriteShellSecretFile(t, tempDir, "secret_token")
@@ -357,16 +355,19 @@ func TestAccess_allowedResponseHandling(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(tc.allowedHandler))
defer server.Close()
- c, err := NewGitlabAPI(config.Gitlab{
+ c, err := NewHTTPClient(config.Gitlab{
URL: server.URL,
SecretFile: secretFilePath,
- }, config.TLS{})
+ }, config.TLS{}, prometheus.Config{})
require.NoError(t, err)
+ mockHistogramVec := promtest.NewMockHistogramVec()
+ c.latencyMetric = mockHistogramVec
+
allowed, message, err := c.Allowed(context.Background(), AllowedParams{
- RepoPath: testRepo.RelativePath,
- GitObjectDirectory: testRepo.GitObjectDirectory,
- GitAlternateObjectDirectories: testRepo.GitAlternateObjectDirectories,
+ RepoPath: repo.RelativePath,
+ GitObjectDirectory: repo.GitObjectDirectory,
+ GitAlternateObjectDirectories: repo.GitAlternateObjectDirectories,
GLRepository: "repo-1",
GLID: "key-123",
GLProtocol: "http",
@@ -378,6 +379,8 @@ func TestAccess_allowedResponseHandling(t *testing.T) {
} else {
require.Equal(t, tc.errMsg, message)
}
+
+ require.Equal(t, [][]string{{"allowed"}}, mockHistogramVec.LabelsCalled())
})
}
}
@@ -457,17 +460,22 @@ func TestAccess_preReceive(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(tc.prereceiveHandler))
defer server.Close()
- c, err := NewGitlabAPI(config.Gitlab{
+ c, err := NewHTTPClient(config.Gitlab{
URL: server.URL,
SecretFile: secretFilePath,
- }, config.TLS{})
+ }, config.TLS{}, prometheus.Config{})
require.NoError(t, err)
+ mockHistogramVec := promtest.NewMockHistogramVec()
+ c.latencyMetric = mockHistogramVec
+
success, err := c.PreReceive(context.Background(), "key-123")
require.Equal(t, tc.success, success)
if err != nil {
require.Contains(t, err.Error(), tc.errMsg)
}
+
+ require.Equal(t, [][]string{{"pre-receive"}}, mockHistogramVec.LabelsCalled())
})
}
}
@@ -535,12 +543,15 @@ func TestAccess_postReceive(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(tc.postReceiveHandler))
defer server.Close()
- c, err := NewGitlabAPI(config.Gitlab{
+ c, err := NewHTTPClient(config.Gitlab{
URL: server.URL,
SecretFile: secretFilePath,
- }, config.TLS{})
+ }, config.TLS{}, prometheus.Config{})
require.NoError(t, err)
+ mockHistogramVec := promtest.NewMockHistogramVec()
+ c.latencyMetric = mockHistogramVec
+
repositoryID := "project-123"
identifier := "key-123"
changes := "000 000 refs/heads/master"
@@ -554,6 +565,8 @@ func TestAccess_postReceive(t *testing.T) {
require.Equal(t, changes, receivedRequest.Changes)
require.Equal(t, tc.pushOptions, receivedRequest.PushOptions)
}
+
+ require.Equal(t, [][]string{{"post-receive"}}, mockHistogramVec.LabelsCalled())
})
}
}
diff --git a/internal/gitlab/mock_client.go b/internal/gitlab/mock_client.go
new file mode 100644
index 000000000..4490796fd
--- /dev/null
+++ b/internal/gitlab/mock_client.go
@@ -0,0 +1,38 @@
+package gitlab
+
+import (
+ "context"
+)
+
+// MockClient is a mock client of the internal GitLab API.
+type MockClient struct{}
+
+// NewMockClient returns a new mock client for the internal GitLab API.
+func NewMockClient() Client {
+ return &MockClient{}
+}
+
+// Allowed does nothing and always returns true.
+func (m *MockClient) Allowed(ctx context.Context, params AllowedParams) (bool, string, error) {
+ return true, "", nil
+}
+
+// Check does nothing and always returns a CheckInfo prepopulated with static data.
+func (m *MockClient) Check(ctx context.Context) (*CheckInfo, error) {
+ return &CheckInfo{
+ Version: "v13.5.0",
+ Revision: "deadbeef",
+ APIVersion: "v4",
+ RedisReachable: true,
+ }, nil
+}
+
+// PreReceive does nothing and always returns true.
+func (m *MockClient) PreReceive(ctx context.Context, glRepository string) (bool, error) {
+ return true, nil
+}
+
+// PostReceive does nothing and always returns true.
+func (m *MockClient) PostReceive(ctx context.Context, glRepository, glID, changes string, gitPushOptions ...string) (bool, []PostReceiveMessage, error) {
+ return true, nil, nil
+}
diff --git a/internal/gitaly/hook/testdata/certs/server.crt b/internal/gitlab/testdata/certs/server.crt
index af0103e8e..af0103e8e 100644
--- a/internal/gitaly/hook/testdata/certs/server.crt
+++ b/internal/gitlab/testdata/certs/server.crt
diff --git a/internal/gitaly/hook/testdata/certs/server.key b/internal/gitlab/testdata/certs/server.key
index f343b2be7..f343b2be7 100644
--- a/internal/gitaly/hook/testdata/certs/server.key
+++ b/internal/gitlab/testdata/certs/server.key
diff --git a/internal/gitlab/testhelper_test.go b/internal/gitlab/testhelper_test.go
new file mode 100644
index 000000000..9684f3acb
--- /dev/null
+++ b/internal/gitlab/testhelper_test.go
@@ -0,0 +1,15 @@
+package gitlab
+
+import (
+ "os"
+ "testing"
+
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+)
+
+func TestMain(m *testing.M) {
+ defer testhelper.MustHaveNoChildProcess()
+ cleanup := testhelper.Configure()
+ defer cleanup()
+ os.Exit(m.Run())
+}
diff --git a/internal/helper/suppressed_context.go b/internal/helper/suppressed_context.go
new file mode 100644
index 000000000..3806b91d8
--- /dev/null
+++ b/internal/helper/suppressed_context.go
@@ -0,0 +1,18 @@
+package helper
+
+import (
+ "context"
+ "time"
+)
+
+// suppressedContext suppresses cancellation or expiration of the context.
+type suppressedContext struct{ context.Context }
+
+func (suppressedContext) Deadline() (deadline time.Time, ok bool) { return time.Time{}, false }
+
+func (suppressedContext) Done() <-chan struct{} { return nil }
+
+func (suppressedContext) Err() error { return nil }
+
+// SuppressCancellation returns a context that suppresses cancellation or expiration of the parent context.
+func SuppressCancellation(ctx context.Context) context.Context { return suppressedContext{ctx} }
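
Praefect's coordinator (further down in this patch) uses SuppressCancellation to keep running database updates after the request context has been canceled. A minimal standalone sketch of the same pattern:

package main

import (
	"context"
	"fmt"
	"time"

	"gitlab.com/gitlab-org/gitaly/internal/helper"
)

func main() {
	// Simulate a request context that was already canceled.
	requestCtx, cancel := context.WithCancel(context.Background())
	cancel()

	// The suppressed context ignores the parent's cancellation and deadline,
	// so a fresh timeout can be attached for work that must still complete.
	cleanupCtx, cleanupCancel := context.WithTimeout(helper.SuppressCancellation(requestCtx), 30*time.Second)
	defer cleanupCancel()

	fmt.Println(requestCtx.Err()) // context canceled
	fmt.Println(cleanupCtx.Err()) // <nil>
}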
diff --git a/internal/helper/suppressed_context_test.go b/internal/helper/suppressed_context_test.go
new file mode 100644
index 000000000..7c910e6ca
--- /dev/null
+++ b/internal/helper/suppressed_context_test.go
@@ -0,0 +1,58 @@
+package helper
+
+import (
+ "context"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestSuppressCancellation(t *testing.T) {
+ type key struct{}
+
+ parentDeadline := time.Now()
+ parentCtx, cancel := context.WithDeadline(context.WithValue(context.Background(), key{}, "value"), parentDeadline)
+ cancel()
+
+ t.Run("no deadline on suppressed context", func(t *testing.T) {
+ ctx := SuppressCancellation(parentCtx)
+
+ deadline, ok := ctx.Deadline()
+ require.False(t, ok)
+ require.Equal(t, time.Time{}, deadline)
+
+ require.Nil(t, ctx.Done())
+ require.NoError(t, ctx.Err())
+
+ require.Equal(t, ctx.Value(key{}), "value")
+ })
+
+ t.Run("with deadline on suppressed context", func(t *testing.T) {
+ newDeadline := parentDeadline.Add(24 * time.Hour)
+ ctx, cancel := context.WithDeadline(SuppressCancellation(parentCtx), newDeadline)
+
+ deadline, ok := ctx.Deadline()
+ require.True(t, ok)
+ require.Equal(t, newDeadline, deadline)
+
+ require.NoError(t, ctx.Err())
+ select {
+ case <-ctx.Done():
+ t.Fatal("context should not be done yet")
+ default:
+ require.NotNil(t, ctx.Done())
+ }
+
+ require.Equal(t, ctx.Value(key{}), "value")
+
+ cancel()
+
+ require.Equal(t, context.Canceled, ctx.Err())
+ select {
+ case <-ctx.Done():
+ default:
+ t.Fatal("context should have been done")
+ }
+ })
+}
diff --git a/internal/metadata/featureflag/feature_flags.go b/internal/metadata/featureflag/feature_flags.go
index 482ff2113..12378cae9 100644
--- a/internal/metadata/featureflag/feature_flags.go
+++ b/internal/metadata/featureflag/feature_flags.go
@@ -11,43 +11,33 @@ type FeatureFlag struct {
var (
// ReferenceTransactions will handle Git reference updates via the transaction service for strong consistency
ReferenceTransactions = FeatureFlag{Name: "reference_transactions", OnByDefault: true}
- // LogCommandStats will log additional rusage stats for commands
- LogCommandStats = FeatureFlag{Name: "log_command_stats", OnByDefault: true}
- // GoUserCherryPick enables the Go implementation of UserCherryPick
- GoUserCherryPick = FeatureFlag{Name: "go_user_cherry_pick", OnByDefault: true}
// GoUserUpdateBranch enables the Go implementation of UserUpdateBranch
GoUserUpdateBranch = FeatureFlag{Name: "go_user_update_branch", OnByDefault: true}
- // GoResolveConflicts enables the Go implementation of ResolveConflicts
- GoResolveConflicts = FeatureFlag{Name: "go_resolve_conflicts", OnByDefault: false}
- // GoUserUpdateSubmodule enables the Go implementation of
- // UserUpdateSubmodules
- GoUserUpdateSubmodule = FeatureFlag{Name: "go_user_update_submodule", OnByDefault: true}
- // GoUserRevert enables the Go implementation of UserRevert
- GoUserRevert = FeatureFlag{Name: "go_user_revert", OnByDefault: false}
+ // GoUserRebaseConfirmable enables the Go implementation of UserRebaseConfirmable
+ GoUserRebaseConfirmable = FeatureFlag{Name: "go_user_rebase_confirmable", OnByDefault: true}
// GoUpdateRemoteMirror enables the Go implementation of UpdateRemoteMirror
GoUpdateRemoteMirror = FeatureFlag{Name: "go_update_remote_mirror", OnByDefault: false}
// GrpcTreeEntryNotFound makes the TreeEntry gRPC call return NotFound instead of an empty blob
GrpcTreeEntryNotFound = FeatureFlag{Name: "grpc_tree_entry_not_found", OnByDefault: false}
- // BackchannelVoting enables voting via the backchannel connection.
- BackchannelVoting = FeatureFlag{Name: "backchannel_voting", OnByDefault: false}
// FetchInternalRemoteErrors makes FetchInternalRemote return actual errors instead of a boolean
FetchInternalRemoteErrors = FeatureFlag{Name: "fetch_internal_remote_errors", OnByDefault: false}
// GoFindLicense enables Go implementation of FindLicense
GoFindLicense = FeatureFlag{Name: "go_find_license", OnByDefault: false}
+ // TxConfig enables transactional voting for SetConfig and DeleteConfig RPCs.
+ TxConfig = FeatureFlag{Name: "tx_config", OnByDefault: false}
+ // TxRemote enables transactional voting for AddRemote and DeleteRemote.
+ TxRemote = FeatureFlag{Name: "tx_remote", OnByDefault: false}
)
// All includes all feature flags.
var All = []FeatureFlag{
- LogCommandStats,
ReferenceTransactions,
- GoUserCherryPick,
GoUserUpdateBranch,
- GoResolveConflicts,
- GoUserUpdateSubmodule,
- GoUserRevert,
+ GoUserRebaseConfirmable,
GrpcTreeEntryNotFound,
GoUpdateRemoteMirror,
- BackchannelVoting,
FetchInternalRemoteErrors,
GoFindLicense,
+ TxConfig,
+ TxRemote,
}
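
The new TxConfig and TxRemote flags default to off, while GoUserRebaseConfirmable now defaults to on. A small sketch of querying them; as I read the featureflag package, IsEnabled falls back to a flag's OnByDefault value when the context carries no feature-flag metadata.

package main

import (
	"context"
	"fmt"

	"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
)

func main() {
	ctx := context.Background()

	// With no feature-flag metadata on the context the defaults apply.
	fmt.Println(featureflag.IsEnabled(ctx, featureflag.TxConfig))                // false
	fmt.Println(featureflag.IsEnabled(ctx, featureflag.GoUserRebaseConfirmable)) // true
}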
diff --git a/internal/middleware/commandstatshandler/commandstatshandler_test.go b/internal/middleware/commandstatshandler/commandstatshandler_test.go
index 740929ba9..7bbefca4f 100644
--- a/internal/middleware/commandstatshandler/commandstatshandler_test.go
+++ b/internal/middleware/commandstatshandler/commandstatshandler_test.go
@@ -13,11 +13,11 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/ref"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/internal/log"
- "gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
@@ -46,7 +46,15 @@ func createNewServer(t *testing.T, cfg config.Cfg) *grpc.Server {
server := grpc.NewServer(opts...)
- gitalypb.RegisterRefServiceServer(server, ref.NewServer(cfg, config.NewLocator(cfg), git.NewExecCommandFactory(cfg), transaction.NewManager(cfg, backchannel.NewRegistry())))
+ gitCommandFactory := git.NewExecCommandFactory(cfg)
+
+ gitalypb.RegisterRefServiceServer(server, ref.NewServer(
+ cfg,
+ config.NewLocator(cfg),
+ gitCommandFactory,
+ transaction.NewManager(cfg, backchannel.NewRegistry()),
+ catfile.NewCache(cfg),
+ ))
return server
}
@@ -116,8 +124,8 @@ func TestInterceptor(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
logBuffer.Reset()
- ctx := context.TODO()
- ctx = featureflag.OutgoingCtxWithFeatureFlags(ctx, featureflag.LogCommandStats)
+ ctx, cancel := testhelper.Context()
+ defer cancel()
conn, err := grpc.DialContext(ctx, "", grpc.WithContextDialer(getBufDialer(listener)), grpc.WithInsecure())
require.NoError(t, err)
diff --git a/internal/praefect/coordinator.go b/internal/praefect/coordinator.go
index dc1b41529..887116148 100644
--- a/internal/praefect/coordinator.go
+++ b/internal/praefect/coordinator.go
@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"sync"
+ "time"
"github.com/golang/protobuf/proto"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
@@ -18,11 +19,11 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/praefect/config"
"gitlab.com/gitlab-org/gitaly/internal/praefect/datastore"
"gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/praefect/metrics"
"gitlab.com/gitlab-org/gitaly/internal/praefect/nodes"
"gitlab.com/gitlab-org/gitaly/internal/praefect/protoregistry"
"gitlab.com/gitlab-org/gitaly/internal/praefect/transactions"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/labkit/correlation"
"golang.org/x/sync/errgroup"
@@ -38,6 +39,12 @@ type transactionsCondition func(context.Context) bool
func transactionsEnabled(context.Context) bool { return true }
func transactionsDisabled(context.Context) bool { return false }
+func transactionsFlag(flag featureflag.FeatureFlag) transactionsCondition {
+ return func(ctx context.Context) bool {
+ return featureflag.IsEnabled(ctx, flag)
+ }
+}
+
// transactionRPCs contains the list of repository-scoped mutating calls which may take part in
// transactions. An optional feature flag can be added to conditionally enable transactional
// behaviour. If none is given, it's always enabled.
@@ -68,6 +75,7 @@ var transactionRPCs = map[string]transactionsCondition{
"/gitaly.RepositoryService/CloneFromPool": transactionsEnabled,
"/gitaly.RepositoryService/CloneFromPoolInternal": transactionsEnabled,
"/gitaly.RepositoryService/CreateFork": transactionsEnabled,
+ "/gitaly.RepositoryService/CreateRepository": transactionsEnabled,
"/gitaly.RepositoryService/CreateRepositoryFromBundle": transactionsEnabled,
"/gitaly.RepositoryService/CreateRepositoryFromSnapshot": transactionsEnabled,
"/gitaly.RepositoryService/CreateRepositoryFromURL": transactionsEnabled,
@@ -81,6 +89,9 @@ var transactionRPCs = map[string]transactionsCondition{
"/gitaly.WikiService/WikiUpdatePage": transactionsEnabled,
"/gitaly.WikiService/WikiWritePage": transactionsEnabled,
+ "/gitaly.RepositoryService/SetConfig": transactionsFlag(featureflag.TxConfig),
+ "/gitaly.RepositoryService/DeleteConfig": transactionsFlag(featureflag.TxConfig),
+
// The following RPCs don't perform any reference updates and thus
// shouldn't use transactions.
"/gitaly.ObjectPoolService/CreateObjectPool": transactionsDisabled,
@@ -91,9 +102,6 @@ var transactionRPCs = map[string]transactionsCondition{
"/gitaly.ObjectPoolService/UnlinkRepositoryFromObjectPool": transactionsDisabled,
"/gitaly.RefService/PackRefs": transactionsDisabled,
"/gitaly.RepositoryService/Cleanup": transactionsDisabled,
- "/gitaly.RepositoryService/CreateRepository": transactionsDisabled,
- "/gitaly.RepositoryService/DeleteConfig": transactionsDisabled,
- "/gitaly.RepositoryService/Fsck": transactionsDisabled,
"/gitaly.RepositoryService/GarbageCollect": transactionsDisabled,
"/gitaly.RepositoryService/MidxRepack": transactionsDisabled,
"/gitaly.RepositoryService/OptimizeRepository": transactionsDisabled,
@@ -102,7 +110,6 @@ var transactionRPCs = map[string]transactionsCondition{
"/gitaly.RepositoryService/RepackFull": transactionsDisabled,
"/gitaly.RepositoryService/RepackIncremental": transactionsDisabled,
"/gitaly.RepositoryService/RestoreCustomHooks": transactionsDisabled,
- "/gitaly.RepositoryService/SetConfig": transactionsDisabled,
"/gitaly.RepositoryService/WriteCommitGraph": transactionsDisabled,
// These shouldn't ever use transactions for the sake of not creating
@@ -289,7 +296,7 @@ func (c *Coordinator) directRepositoryScopedMessage(ctx context.Context, call gr
"relative_path": call.targetRepo.RelativePath,
})
- praefectServer, err := metadata.PraefectFromConfig(c.conf)
+ praefectServer, err := txinfo.PraefectFromConfig(c.conf)
if err != nil {
return nil, fmt.Errorf("repo scoped: could not create Praefect configuration: %w", err)
}
@@ -458,7 +465,7 @@ func (c *Coordinator) mutatorStreamParameters(ctx context.Context, call grpcCall
errByNode: make(map[string]error),
}
- injectedCtx, err := metadata.InjectTransaction(ctx, transaction.ID(), route.Primary.Storage, true)
+ injectedCtx, err := txinfo.InjectTransaction(ctx, transaction.ID(), route.Primary.Storage, true)
if err != nil {
return nil, err
}
@@ -476,7 +483,7 @@ func (c *Coordinator) mutatorStreamParameters(ctx context.Context, call grpcCall
return nil, err
}
- injectedCtx, err := metadata.InjectTransaction(ctx, transaction.ID(), secondary.Storage, false)
+ injectedCtx, err := txinfo.InjectTransaction(ctx, transaction.ID(), secondary.Storage, false)
if err != nil {
return nil, err
}
@@ -732,7 +739,13 @@ func (c *Coordinator) createTransactionFinalizer(
nodeErrors *nodeErrors,
) func() error {
return func() error {
- updated, outdated := getUpdatedAndOutdatedSecondaries(ctx, route, transaction, nodeErrors)
+ primaryDirtied, updated, outdated := getUpdatedAndOutdatedSecondaries(ctx, route, transaction, nodeErrors)
+ if !primaryDirtied {
+ // If the primary replica was not modified then we don't need to consider the secondaries
+ // outdated. Praefect requires the primary to always be part of the quorum, so no changes
+ // to secondaries would be made without the primary being in agreement.
+ return nil
+ }
return c.newRequestFinalizer(
ctx, virtualStorage, targetRepo, route.Primary.Storage,
@@ -743,9 +756,11 @@ func (c *Coordinator) createTransactionFinalizer(
// getUpdatedAndOutdatedSecondaries returns all nodes which can be considered up-to-date or outdated
// after the given transaction. A node is considered outdated, if one of the following is true:
//
-// - No subtransactions were created. This really is only a safeguard in case the RPC wasn't aware
-// of transactions and thus failed to correctly assert its state matches across nodes. This is
-// rather pessimistic, as it could also indicate that an RPC simply didn't change anything.
+// - No subtransactions were created and the RPC was successful on the primary. This really is only
+// a safeguard in case the RPC wasn't aware of transactions and thus failed to correctly assert
+// its state matches across nodes. This is rather pessimistic, as it could also indicate that an
+// RPC simply didn't change anything. If the RPC was a failure on the primary and there were no
+// subtransactions, we assume no changes were done and that the nodes failed prior to voting.
//
// - The node failed to be part of the quorum. As a special case, if the primary fails the vote, all
// nodes need to get replication jobs.
@@ -760,7 +775,7 @@ func getUpdatedAndOutdatedSecondaries(
route RepositoryMutatorRoute,
transaction transactions.Transaction,
nodeErrors *nodeErrors,
-) (updated []string, outdated []string) {
+) (primaryDirtied bool, updated []string, outdated []string) {
nodeErrors.Lock()
defer nodeErrors.Unlock()
@@ -769,17 +784,31 @@ func getUpdatedAndOutdatedSecondaries(
// for them.
outdated = append(outdated, route.ReplicationTargets...)
+ primaryErr := nodeErrors.errByNode[route.Primary.Storage]
+
+ // If there were subtransactions, we only assume some changes were made if one of the subtransactions
+ // was committed.
+ //
+ // If there were no subtransactions, we assume changes were performed only if the primary successfully
+ // processed the RPC. This might be an RPC that is not correctly casting votes thus we replicate everywhere.
+ //
+ // If there were no subtransactions and the primary failed the RPC, we assume no changes have been made and
+ // the nodes simply failed before voting.
+ primaryDirtied = transaction.DidCommitAnySubtransaction() ||
+ (transaction.CountSubtransactions() == 0 && primaryErr == nil)
+
// If the primary errored, then we need to assume that it has modified on-disk state and
// thus need to replicate those changes to secondaries.
- if err := nodeErrors.errByNode[route.Primary.Storage]; err != nil {
- ctxlogrus.Extract(ctx).WithError(err).Info("primary failed transaction")
+ if primaryErr != nil {
+ ctxlogrus.Extract(ctx).WithError(primaryErr).Info("primary failed transaction")
outdated = append(outdated, routerNodesToStorages(route.Secondaries)...)
return
}
- // If no subtransaction happened, then the called RPC may not be aware of transactions at
- // all. We thus need to assume it changed repository state and need to create replication
- // jobs.
+ // If no subtransaction happened, then the called RPC may not be aware of transactions or
+ // the nodes failed before casting any votes. If the primary failed the RPC, we assume
+ // no changes were done and the nodes hit an error prior to voting. If the primary processed
+ // the RPC successfully, we assume the RPC is not correctly voting and replicate everywhere.
if transaction.CountSubtransactions() == 0 {
ctxlogrus.Extract(ctx).Info("transaction did not create subtransactions")
outdated = append(outdated, routerNodesToStorages(route.Secondaries)...)
@@ -795,9 +824,9 @@ func getUpdatedAndOutdatedSecondaries(
return
}
- // If the primary node did not commit the transaction, then we must assume that it dirtied
- // on-disk state. This modified state may not be what we want, but it's what we got. So in
- // order to ensure a consistent state, we need to replicate.
+ // If the primary node did not commit the transaction but there were some subtransactions committed,
+ // then we must assume that it dirtied on-disk state. This modified state may not be what we want,
+ // but it's what we got. So in order to ensure a consistent state, we need to replicate.
if state := nodeStates[route.Primary.Storage]; state != transactions.VoteCommitted {
if state == transactions.VoteFailed {
ctxlogrus.Extract(ctx).Error("transaction: primary failed vote")
@@ -845,6 +874,13 @@ func (c *Coordinator) newRequestFinalizer(
cause string,
) func() error {
return func() error {
+ // Use a separate timeout for the database operations. If the request times out, the passed in context is
+ // canceled. We need to perform the database updates regardless whether the request was canceled or not as
+ // the primary replica could have been dirtied and secondaries become outdated. Otherwise we'd have no idea of
+ // the possible changes performed on the disk.
+ ctx, cancel := context.WithTimeout(helper.SuppressCancellation(ctx), 30*time.Second)
+ defer cancel()
+
log := ctxlogrus.Extract(ctx).WithFields(logrus.Fields{
"replication.cause": cause,
"replication.change": change,
@@ -897,7 +933,8 @@ func (c *Coordinator) newRequestFinalizer(
virtualStorage,
targetRepo.GetRelativePath(),
primary,
- append(updatedSecondaries, outdatedSecondaries...),
+ updatedSecondaries,
+ outdatedSecondaries,
repositorySpecificPrimariesEnabled,
variableReplicationFactorEnabled,
); err != nil {
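
The replication decision added above boils down to the primaryDirtied expression: replication jobs are only considered when the primary may have modified on-disk state. Here is a standalone sketch of that decision with plain parameters in place of the transactions.Transaction and nodeErrors types used by the patch.

package main

import (
	"errors"
	"fmt"
)

// primaryDirtied mirrors the condition used in getUpdatedAndOutdatedSecondaries.
func primaryDirtied(committedAnySubtransaction bool, subtransactionCount int, primaryErr error) bool {
	// A committed subtransaction means the primary voted and applied changes.
	if committedAnySubtransaction {
		return true
	}
	// Without any subtransactions we only assume changes when the RPC succeeded
	// on the primary, i.e. it may be an RPC that does not cast votes at all.
	return subtransactionCount == 0 && primaryErr == nil
}

func main() {
	fmt.Println(primaryDirtied(true, 2, nil))                        // true: a vote was committed
	fmt.Println(primaryDirtied(false, 0, nil))                       // true: non-voting RPC succeeded on the primary
	fmt.Println(primaryDirtied(false, 0, errors.New("rpc failed")))  // false: failed before any vote
	fmt.Println(primaryDirtied(false, 1, errors.New("vote failed"))) // false: subtransactions exist but none committed
}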
diff --git a/internal/praefect/coordinator_pg_test.go b/internal/praefect/coordinator_pg_test.go
index dfab8af54..df5dec438 100644
--- a/internal/praefect/coordinator_pg_test.go
+++ b/internal/praefect/coordinator_pg_test.go
@@ -4,7 +4,6 @@ package praefect
import (
"context"
- "crypto/sha1"
"errors"
"fmt"
"sync"
@@ -17,13 +16,15 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/praefect/config"
"gitlab.com/gitlab-org/gitaly/internal/praefect/datastore"
"gitlab.com/gitlab-org/gitaly/internal/praefect/datastore/glsql"
- praefect_metadata "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/praefect/nodes"
"gitlab.com/gitlab-org/gitaly/internal/praefect/protoregistry"
"gitlab.com/gitlab-org/gitaly/internal/praefect/transactions"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/promtest"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
+ "google.golang.org/grpc/peer"
)
func getDB(t *testing.T) glsql.DB {
@@ -31,10 +32,14 @@ func getDB(t *testing.T) glsql.DB {
}
func TestStreamDirectorMutator_Transaction(t *testing.T) {
+ type subtransactions []struct {
+ vote string
+ shouldSucceed bool
+ }
+
type node struct {
primary bool
- vote string
- shouldSucceed bool
+ subtransactions subtransactions
shouldGetRepl bool
shouldParticipate bool
generation int
@@ -42,45 +47,63 @@ func TestStreamDirectorMutator_Transaction(t *testing.T) {
}
testcases := []struct {
- desc string
- nodes []node
+ desc string
+ primaryFails bool
+ nodes []node
}{
{
desc: "successful vote should not create replication jobs",
nodes: []node{
- {primary: true, vote: "foobar", shouldSucceed: true, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
- {primary: false, vote: "foobar", shouldSucceed: true, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
- {primary: false, vote: "foobar", shouldSucceed: true, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ {primary: true, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ {primary: false, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ {primary: false, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ },
+ },
+ {
+ desc: "successful vote should create replication jobs if the primary fails",
+ primaryFails: true,
+ nodes: []node{
+ {primary: true, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ {primary: false, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
+ {primary: false, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
+ },
+ },
+ {
+ desc: "failing vote should not create replication jobs without committed subtransactions",
+ nodes: []node{
+ {primary: true, subtransactions: subtransactions{{vote: "foo", shouldSucceed: false}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 0},
+ {primary: false, subtransactions: subtransactions{{vote: "qux", shouldSucceed: false}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 0},
+ {primary: false, subtransactions: subtransactions{{vote: "bar", shouldSucceed: false}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 0},
},
},
{
- desc: "failing vote should not create replication jobs",
+ desc: "failing vote should create replication jobs with committed subtransaction",
nodes: []node{
- {primary: true, vote: "foo", shouldSucceed: false, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
- {primary: false, vote: "qux", shouldSucceed: false, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
- {primary: false, vote: "bar", shouldSucceed: false, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
+ {primary: true, subtransactions: subtransactions{{vote: "foo", shouldSucceed: true}, {vote: "foo", shouldSucceed: false}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ {primary: false, subtransactions: subtransactions{{vote: "foo", shouldSucceed: true}, {vote: "qux", shouldSucceed: false}}, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
+ {primary: false, subtransactions: subtransactions{{vote: "foo", shouldSucceed: true}, {vote: "bar", shouldSucceed: false}}, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
},
},
{
desc: "primary should reach quorum with disagreeing secondary",
nodes: []node{
- {primary: true, vote: "foobar", shouldSucceed: true, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
- {primary: false, vote: "barfoo", shouldSucceed: false, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
+ {primary: true, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ {primary: false, subtransactions: subtransactions{{vote: "barfoo", shouldSucceed: false}}, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
},
},
{
desc: "quorum should create replication jobs for disagreeing node",
nodes: []node{
- {primary: true, vote: "foobar", shouldSucceed: true, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
- {primary: false, vote: "foobar", shouldSucceed: true, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
- {primary: false, vote: "barfoo", shouldSucceed: false, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
+ {primary: true, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ {primary: false, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldGetRepl: false, shouldParticipate: true, expectedGeneration: 1},
+ {primary: false, subtransactions: subtransactions{{vote: "barfoo", shouldSucceed: false}}, shouldGetRepl: true, shouldParticipate: true, expectedGeneration: 0},
},
},
{
desc: "only consistent secondaries should participate",
nodes: []node{
- {primary: true, vote: "foobar", shouldSucceed: true, shouldParticipate: true, generation: 1, expectedGeneration: 2},
- {primary: false, vote: "foobar", shouldSucceed: true, shouldParticipate: true, generation: 1, expectedGeneration: 2},
+ {primary: true, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldParticipate: true, generation: 1, expectedGeneration: 2},
+ {primary: false, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldParticipate: true, generation: 1, expectedGeneration: 2},
{shouldParticipate: false, shouldGetRepl: true, generation: 0, expectedGeneration: 0},
{shouldParticipate: false, shouldGetRepl: true, generation: datastore.GenerationUnknown, expectedGeneration: datastore.GenerationUnknown},
},
@@ -88,30 +111,51 @@ func TestStreamDirectorMutator_Transaction(t *testing.T) {
{
desc: "secondaries should not participate when primary's generation is unknown",
nodes: []node{
- {primary: true, vote: "foobar", shouldSucceed: true, shouldParticipate: true, generation: datastore.GenerationUnknown, expectedGeneration: 0},
+ {primary: true, subtransactions: subtransactions{{vote: "foobar", shouldSucceed: true}}, shouldParticipate: true, generation: datastore.GenerationUnknown, expectedGeneration: 0},
{shouldParticipate: false, shouldGetRepl: true, generation: datastore.GenerationUnknown, expectedGeneration: datastore.GenerationUnknown},
},
},
{
- // If the transaction didn't receive any votes at all, we need to assume
- // that the RPC wasn't aware of transactions and thus need to schedule
- // replication jobs.
- desc: "unstarted transaction should create replication jobs",
+ // All transactional RPCs are expected to cast a vote if they are successful. If they don't, something is wrong
+ // and we should replicate to the secondaries to be sure.
+ desc: "unstarted transaction creates replication jobs if the primary is successful",
nodes: []node{
- {primary: true, shouldSucceed: true, shouldGetRepl: false, expectedGeneration: 1},
- {primary: false, shouldSucceed: false, shouldGetRepl: true, expectedGeneration: 0},
+ {primary: true, shouldGetRepl: false, expectedGeneration: 1},
+ {primary: false, shouldGetRepl: true, expectedGeneration: 0},
},
},
{
- // If the transaction didn't receive any votes at all, we need to assume
- // that the RPC wasn't aware of transactions and thus need to schedule
- // replication jobs.
- desc: "unstarted transaction should create replication jobs for outdated node",
+ // If the RPC fails without any subtransactions, the Gitaly nodes would not have performed any changes yet.
+ // We don't have to consider the secondaries outdated.
+ desc: "unstarted transaction doesn't create replication jobs if the primary fails",
+ primaryFails: true,
nodes: []node{
- {primary: true, shouldSucceed: true, shouldGetRepl: false, generation: 1, expectedGeneration: 2},
- {primary: false, shouldSucceed: false, shouldGetRepl: true, generation: 1, expectedGeneration: 1},
- {primary: false, shouldSucceed: false, shouldGetRepl: true, generation: 0, expectedGeneration: 0},
- {primary: false, shouldSucceed: false, shouldGetRepl: true, generation: datastore.GenerationUnknown, expectedGeneration: datastore.GenerationUnknown},
+ {primary: true, expectedGeneration: 0},
+ {primary: false, expectedGeneration: 0},
+ },
+ },
+ {
+ // If there were no subtransactions and the RPC failed, the primary should not have performed any changes.
+ // We don't need to schedule replication jobs to replication targets either as they'd have jobs
+ // already scheduled by the earlier RPC that made them outdated or by the reconciler.
+ desc: "unstarted transaction should not create replication jobs for outdated node if the primary fails",
+ primaryFails: true,
+ nodes: []node{
+ {primary: true, shouldGetRepl: false, generation: 1, expectedGeneration: 1},
+ {primary: false, shouldGetRepl: false, generation: 1, expectedGeneration: 1},
+ {primary: false, shouldGetRepl: false, generation: 0, expectedGeneration: 0},
+ {primary: false, shouldGetRepl: false, generation: datastore.GenerationUnknown, expectedGeneration: datastore.GenerationUnknown},
+ },
+ },
+ {
+ // If there were no subtransactions and the primary did not fail, we should schedule replication jobs to every secondary.
+ // All transactional RPCs are expected to vote if they are successful.
+ desc: "unstarted transaction should create replication jobs for outdated node if the primary succeeds",
+ nodes: []node{
+ {primary: true, shouldGetRepl: false, generation: 1, expectedGeneration: 2},
+ {primary: false, shouldGetRepl: true, generation: 1, expectedGeneration: 1},
+ {primary: false, shouldGetRepl: true, generation: 0, expectedGeneration: 0},
+ {primary: false, shouldGetRepl: true, generation: datastore.GenerationUnknown, expectedGeneration: datastore.GenerationUnknown},
},
},
}
@@ -121,8 +165,7 @@ func TestStreamDirectorMutator_Transaction(t *testing.T) {
storageNodes := make([]*config.Node, 0, len(tc.nodes))
for i := range tc.nodes {
socket := testhelper.GetTemporaryGitalySocketFileName(t)
- server, _ := testhelper.NewServerWithHealth(t, socket)
- defer server.Stop()
+ testhelper.NewServerWithHealth(t, socket)
node := &config.Node{Address: "unix://" + socket, Storage: fmt.Sprintf("node-%d", i)}
storageNodes = append(storageNodes, node)
}
@@ -196,7 +239,8 @@ func TestStreamDirectorMutator_Transaction(t *testing.T) {
streamParams, err := coordinator.StreamDirector(ctx, fullMethod, peeker)
require.NoError(t, err)
- transaction, err := praefect_metadata.TransactionFromContext(streamParams.Primary().Ctx)
+ txCtx := peer.NewContext(streamParams.Primary().Ctx, &peer.Peer{})
+ transaction, err := txinfo.TransactionFromContext(txCtx)
require.NoError(t, err)
var voterWaitGroup sync.WaitGroup
@@ -216,17 +260,27 @@ func TestStreamDirectorMutator_Transaction(t *testing.T) {
go func() {
defer voterWaitGroup.Done()
- vote := sha1.Sum([]byte(node.vote))
- err := txMgr.VoteTransaction(ctx, transaction.ID, fmt.Sprintf("node-%d", i), vote[:])
- if node.shouldSucceed {
- assert.NoError(t, err)
- } else {
- assert.True(t, errors.Is(err, transactions.ErrTransactionFailed))
+ for _, subtransaction := range node.subtransactions {
+ vote := voting.VoteFromData([]byte(subtransaction.vote))
+ err := txMgr.VoteTransaction(ctx, transaction.ID, fmt.Sprintf("node-%d", i), vote)
+ if subtransaction.shouldSucceed {
+ if !assert.NoError(t, err) {
+ break
+ }
+ } else {
+ if !assert.True(t, errors.Is(err, transactions.ErrTransactionFailed)) {
+ break
+ }
+ }
}
}()
}
voterWaitGroup.Wait()
+ if tc.primaryFails {
+ streamParams.Primary().ErrHandler(errors.New("rpc failure"))
+ }
+
err = streamParams.RequestFinalizer()
require.NoError(t, err)
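The voter goroutines above are the core of the new subtransaction model: a node may cast several votes per transaction, one per subtransaction, and stops as soon as a vote is rejected. A minimal sketch of that per-node pattern, using only the calls visible in this diff; the helper name castVotes, its placement in package praefect, and the transactions.Manager type are illustrative assumptions, not part of the change:

package praefect

import (
	"context"
	"errors"
	"fmt"

	"gitlab.com/gitlab-org/gitaly/internal/praefect/transactions"
	"gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)

// castVotes casts one vote per payload for a single node and stops at the first
// rejected vote, mirroring the voter goroutines in TestStreamDirectorMutator_Transaction.
func castVotes(ctx context.Context, txMgr *transactions.Manager, txID uint64, node string, payloads []string) error {
	for _, payload := range payloads {
		vote := voting.VoteFromData([]byte(payload))
		if err := txMgr.VoteTransaction(ctx, txID, node, vote); err != nil {
			if errors.Is(err, transactions.ErrTransactionFailed) {
				return fmt.Errorf("vote on %q was rejected: %w", node, err)
			}
			return err
		}
	}
	return nil
}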
diff --git a/internal/praefect/coordinator_test.go b/internal/praefect/coordinator_test.go
index 49e60ce89..b0bee67d3 100644
--- a/internal/praefect/coordinator_test.go
+++ b/internal/praefect/coordinator_test.go
@@ -2,7 +2,6 @@ package praefect
import (
"context"
- "crypto/sha1"
"errors"
"fmt"
"io/ioutil"
@@ -17,7 +16,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/client"
- "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/middleware/metadatahandler"
@@ -25,19 +24,23 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/praefect/config"
"gitlab.com/gitlab-org/gitaly/internal/praefect/datastore"
"gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy"
- praefect_metadata "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/praefect/mock"
"gitlab.com/gitlab-org/gitaly/internal/praefect/nodes"
"gitlab.com/gitlab-org/gitaly/internal/praefect/protoregistry"
"gitlab.com/gitlab-org/gitaly/internal/praefect/transactions"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/promtest"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/labkit/correlation"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/health/grpc_health_v1"
"google.golang.org/grpc/metadata"
+ "google.golang.org/grpc/peer"
"google.golang.org/grpc/status"
)
@@ -126,10 +129,8 @@ func TestStreamDirectorReadOnlyEnforcement(t *testing.T) {
func TestStreamDirectorMutator(t *testing.T) {
gitalySocket0, gitalySocket1 := testhelper.GetTemporaryGitalySocketFileName(t), testhelper.GetTemporaryGitalySocketFileName(t)
- srv1, _ := testhelper.NewServerWithHealth(t, gitalySocket0)
- defer srv1.Stop()
- srv2, _ := testhelper.NewServerWithHealth(t, gitalySocket1)
- defer srv2.Stop()
+ testhelper.NewServerWithHealth(t, gitalySocket0)
+ testhelper.NewServerWithHealth(t, gitalySocket1)
primaryAddress, secondaryAddress := "unix://"+gitalySocket0, "unix://"+gitalySocket1
primaryNode := &config.Node{Address: primaryAddress, Storage: "praefect-internal-1"}
@@ -194,7 +195,7 @@ func TestStreamDirectorMutator(t *testing.T) {
md, ok := metadata.FromOutgoingContext(streamParams.Primary().Ctx)
require.True(t, ok)
- require.Contains(t, md, praefect_metadata.PraefectMetadataKey)
+ require.Contains(t, md, txinfo.PraefectMetadataKey)
mi, err := coordinator.registry.LookupMethod(fullMethod)
require.NoError(t, err)
@@ -236,8 +237,7 @@ func TestStreamDirectorMutator(t *testing.T) {
func TestStreamDirectorMutator_StopTransaction(t *testing.T) {
socket := testhelper.GetTemporaryGitalySocketFileName(t)
- server, _ := testhelper.NewServerWithHealth(t, socket)
- defer server.Stop()
+ testhelper.NewServerWithHealth(t, socket)
conf := config.Config{
VirtualStorages: []*config.VirtualStorage{
@@ -300,7 +300,8 @@ func TestStreamDirectorMutator_StopTransaction(t *testing.T) {
streamParams, err := coordinator.StreamDirector(correlation.ContextWithCorrelation(ctx, "my-correlation-id"), fullMethod, peeker)
require.NoError(t, err)
- transaction, err := praefect_metadata.TransactionFromContext(streamParams.Primary().Ctx)
+ txCtx := peer.NewContext(streamParams.Primary().Ctx, &peer.Peer{})
+ transaction, err := txinfo.TransactionFromContext(txCtx)
require.NoError(t, err)
var wg sync.WaitGroup
@@ -312,8 +313,8 @@ func TestStreamDirectorMutator_StopTransaction(t *testing.T) {
go func() {
defer wg.Done()
- vote := sha1.Sum([]byte("vote"))
- err := txMgr.VoteTransaction(ctx, transaction.ID, "primary", vote[:])
+ vote := voting.VoteFromData([]byte("vote"))
+ err := txMgr.VoteTransaction(ctx, transaction.ID, "primary", vote)
require.NoError(t, err)
// Assure that at least one vote was agreed on.
@@ -326,15 +327,15 @@ func TestStreamDirectorMutator_StopTransaction(t *testing.T) {
go func() {
defer wg.Done()
- vote := sha1.Sum([]byte("vote"))
- err := txMgr.VoteTransaction(ctx, transaction.ID, "secondary", vote[:])
+ vote := voting.VoteFromData([]byte("vote"))
+ err := txMgr.VoteTransaction(ctx, transaction.ID, "secondary", vote)
require.NoError(t, err)
// Assure that at least one vote was agreed on.
syncWG.Done()
syncWG.Wait()
- err = txMgr.VoteTransaction(ctx, transaction.ID, "secondary", vote[:])
+ err = txMgr.VoteTransaction(ctx, transaction.ID, "secondary", vote)
assert.True(t, errors.Is(err, transactions.ErrTransactionStopped))
}()
@@ -355,8 +356,7 @@ func (m mockRouter) RouteRepositoryAccessor(ctx context.Context, virtualStorage,
func TestStreamDirectorAccessor(t *testing.T) {
gitalySocket := testhelper.GetTemporaryGitalySocketFileName(t)
- srv, _ := testhelper.NewServerWithHealth(t, gitalySocket)
- defer srv.Stop()
+ testhelper.NewServerWithHealth(t, gitalySocket)
gitalyAddress := "unix://" + gitalySocket
conf := config.Config{
@@ -438,7 +438,7 @@ func TestStreamDirectorAccessor(t *testing.T) {
md, ok := metadata.FromOutgoingContext(streamParams.Primary().Ctx)
require.True(t, ok)
- require.Contains(t, md, praefect_metadata.PraefectMetadataKey)
+ require.Contains(t, md, txinfo.PraefectMetadataKey)
mi, err := coordinator.registry.LookupMethod(fullMethod)
require.NoError(t, err)
@@ -457,10 +457,8 @@ func TestStreamDirectorAccessor(t *testing.T) {
func TestCoordinatorStreamDirector_distributesReads(t *testing.T) {
gitalySocket0, gitalySocket1 := testhelper.GetTemporaryGitalySocketFileName(t), testhelper.GetTemporaryGitalySocketFileName(t)
- srv1, primaryHealthSrv := testhelper.NewServerWithHealth(t, gitalySocket0)
- defer srv1.Stop()
- srv2, healthSrv := testhelper.NewServerWithHealth(t, gitalySocket1)
- defer srv2.Stop()
+ primaryHealthSrv := testhelper.NewServerWithHealth(t, gitalySocket0)
+ healthSrv := testhelper.NewServerWithHealth(t, gitalySocket1)
primaryNodeConf := config.Node{
Address: "unix://" + gitalySocket0,
@@ -545,7 +543,7 @@ func TestCoordinatorStreamDirector_distributesReads(t *testing.T) {
md, ok := metadata.FromOutgoingContext(streamParams.Primary().Ctx)
require.True(t, ok)
- require.Contains(t, md, praefect_metadata.PraefectMetadataKey)
+ require.Contains(t, md, txinfo.PraefectMetadataKey)
mi, err := coordinator.registry.LookupMethod(fullMethod)
require.NoError(t, err)
@@ -594,7 +592,7 @@ func TestCoordinatorStreamDirector_distributesReads(t *testing.T) {
md, ok := metadata.FromOutgoingContext(streamParams.Primary().Ctx)
require.True(t, ok)
- require.Contains(t, md, praefect_metadata.PraefectMetadataKey)
+ require.Contains(t, md, txinfo.PraefectMetadataKey)
mi, err := coordinator.registry.LookupMethod(fullMethod)
require.NoError(t, err)
@@ -643,7 +641,7 @@ func TestCoordinatorStreamDirector_distributesReads(t *testing.T) {
md, ok := metadata.FromOutgoingContext(streamParams.Primary().Ctx)
require.True(t, ok)
- require.Contains(t, md, praefect_metadata.PraefectMetadataKey)
+ require.Contains(t, md, txinfo.PraefectMetadataKey)
mi, err := coordinator.registry.LookupMethod(fullMethod)
require.NoError(t, err)
@@ -687,7 +685,7 @@ func TestCoordinatorStreamDirector_distributesReads(t *testing.T) {
md, ok := metadata.FromOutgoingContext(streamParams.Primary().Ctx)
require.True(t, ok)
- require.Contains(t, md, praefect_metadata.PraefectMetadataKey)
+ require.Contains(t, md, txinfo.PraefectMetadataKey)
mi, err := coordinator.registry.LookupMethod(fullMethod)
require.NoError(t, err)
@@ -741,7 +739,7 @@ func TestCoordinatorStreamDirector_distributesReads(t *testing.T) {
md, ok := metadata.FromOutgoingContext(streamParams.Primary().Ctx)
require.True(t, ok)
- require.Contains(t, md, praefect_metadata.PraefectMetadataKey)
+ require.Contains(t, md, txinfo.PraefectMetadataKey)
mi, err := coordinator.registry.LookupMethod(fullMethod)
require.NoError(t, err)
@@ -815,12 +813,13 @@ func TestStreamDirector_repo_creation(t *testing.T) {
var createRepositoryCalled int64
rs := datastore.MockRepositoryStore{
- CreateRepositoryFunc: func(ctx context.Context, virtualStorage, relativePath, primary string, secondaries []string, storePrimary, storeAssignments bool) error {
+ CreateRepositoryFunc: func(ctx context.Context, virtualStorage, relativePath, primary string, updatedSecondaries, outdatedSecondaries []string, storePrimary, storeAssignments bool) error {
atomic.AddInt64(&createRepositoryCalled, 1)
assert.Equal(t, targetRepo.StorageName, virtualStorage)
assert.Equal(t, targetRepo.RelativePath, relativePath)
assert.Equal(t, rewrittenStorage, primary)
- assert.ElementsMatch(t, []string{healthySecondaryNode.Storage, unhealthySecondaryNode.Storage}, secondaries)
+ assert.Equal(t, []string{healthySecondaryNode.Storage}, updatedSecondaries)
+ assert.Equal(t, []string{unhealthySecondaryNode.Storage}, outdatedSecondaries)
assert.Equal(t, tc.primaryStored, storePrimary)
assert.Equal(t, tc.assignmentsStored, storeAssignments)
return nil
@@ -834,18 +833,15 @@ func TestStreamDirector_repo_creation(t *testing.T) {
case config.ElectionStrategySQL:
gitalySocket0 := testhelper.GetTemporaryGitalySocketFileName(t)
gitalySocket1 := testhelper.GetTemporaryGitalySocketFileName(t)
- gitalySocket3 := testhelper.GetTemporaryGitalySocketFileName(t)
- srv1, _ := testhelper.NewServerWithHealth(t, gitalySocket0)
- defer srv1.Stop()
- srv2, _ := testhelper.NewServerWithHealth(t, gitalySocket1)
- defer srv2.Stop()
- srv3, healthSrv3 := testhelper.NewServerWithHealth(t, gitalySocket3)
- healthSrv3.SetServingStatus("", grpc_health_v1.HealthCheckResponse_NOT_SERVING)
- defer srv3.Stop()
+ gitalySocket2 := testhelper.GetTemporaryGitalySocketFileName(t)
+ testhelper.NewServerWithHealth(t, gitalySocket0)
+ testhelper.NewServerWithHealth(t, gitalySocket1)
+ healthSrv2 := testhelper.NewServerWithHealth(t, gitalySocket2)
+ healthSrv2.SetServingStatus("", grpc_health_v1.HealthCheckResponse_NOT_SERVING)
primaryNode.Address = "unix://" + gitalySocket0
healthySecondaryNode.Address = "unix://" + gitalySocket1
- unhealthySecondaryNode.Address = "unix://" + gitalySocket1
+ unhealthySecondaryNode.Address = "unix://" + gitalySocket2
nodeMgr, err := nodes.NewManager(testhelper.DiscardTestEntry(t), conf, nil, nil, promtest.NewMockHistogramVec(), protoregistry.GitalyProtoPreregistered, nil, nil)
require.NoError(t, err)
@@ -857,6 +853,10 @@ func TestStreamDirector_repo_creation(t *testing.T) {
primaryConnPointer = fmt.Sprintf("%p", node.GetConnection())
continue
}
+
+ if node.GetStorage() == healthySecondaryNode.Storage {
+ secondaryConnPointers = []string{fmt.Sprintf("%p", node.GetConnection())}
+ }
}
case config.ElectionStrategyPerRepository:
conns := Connections{
@@ -867,6 +867,7 @@ func TestStreamDirector_repo_creation(t *testing.T) {
},
}
primaryConnPointer = fmt.Sprintf("%p", conns["praefect"][primaryNode.Storage])
+ secondaryConnPointers = []string{fmt.Sprintf("%p", conns["praefect"][healthySecondaryNode.Storage])}
router = NewPerRepositoryRouter(
conns,
nil,
@@ -922,7 +923,7 @@ func TestStreamDirector_repo_creation(t *testing.T) {
md, ok := metadata.FromOutgoingContext(streamParams.Primary().Ctx)
require.True(t, ok)
- require.Contains(t, md, praefect_metadata.PraefectMetadataKey)
+ require.Contains(t, md, txinfo.PraefectMetadataKey)
mi, err := coordinator.registry.LookupMethod(fullMethod)
require.NoError(t, err)
@@ -934,7 +935,12 @@ func TestStreamDirector_repo_creation(t *testing.T) {
require.NoError(t, err)
require.Equal(t, rewrittenStorage, rewrittenTargetRepo.GetStorageName(), "stream director should have rewritten the storage name")
- replEventWait.Add(2) // expected only one event to be created
+ replEventWait.Add(1)
+
+ vote := voting.VoteFromData([]byte{})
+ require.NoError(t, txMgr.VoteTransaction(ctx, 1, "praefect-internal-1", vote))
+ require.NoError(t, txMgr.VoteTransaction(ctx, 1, "praefect-internal-2", vote))
+
// this call creates new events in the queue and simulates usual flow of the update operation
err = streamParams.RequestFinalizer()
require.NoError(t, err)
@@ -942,7 +948,7 @@ func TestStreamDirector_repo_creation(t *testing.T) {
replEventWait.Wait() // wait until event persisted (async operation)
var expectedEvents, actualEvents []datastore.ReplicationEvent
- for _, target := range []string{healthySecondaryNode.Storage, unhealthySecondaryNode.Storage} {
+ for _, target := range []string{unhealthySecondaryNode.Storage} {
actual, err := queueInterceptor.Dequeue(ctx, "praefect", target, 10)
require.NoError(t, err)
require.Len(t, actual, 1)
@@ -1000,8 +1006,8 @@ func (m *mockPeeker) Modify(payload []byte) error {
func TestAbsentCorrelationID(t *testing.T) {
gitalySocket0, gitalySocket1 := testhelper.GetTemporaryGitalySocketFileName(t), testhelper.GetTemporaryGitalySocketFileName(t)
- _, healthSrv0 := testhelper.NewServerWithHealth(t, gitalySocket0)
- _, healthSrv1 := testhelper.NewServerWithHealth(t, gitalySocket1)
+ healthSrv0 := testhelper.NewServerWithHealth(t, gitalySocket0)
+ healthSrv1 := testhelper.NewServerWithHealth(t, gitalySocket1)
healthSrv0.SetServingStatus("", grpc_health_v1.HealthCheckResponse_SERVING)
healthSrv1.SetServingStatus("", grpc_health_v1.HealthCheckResponse_SERVING)
@@ -1151,10 +1157,8 @@ func TestCoordinatorEnqueueFailure(t *testing.T) {
func TestStreamDirectorStorageScope(t *testing.T) {
// stubs health-check requests because nodes.NewManager establishes connection on creation
gitalySocket0, gitalySocket1 := testhelper.GetTemporaryGitalySocketFileName(t), testhelper.GetTemporaryGitalySocketFileName(t)
- srv1, _ := testhelper.NewServerWithHealth(t, gitalySocket0)
- defer srv1.Stop()
- srv2, _ := testhelper.NewServerWithHealth(t, gitalySocket1)
- defer srv2.Stop()
+ testhelper.NewServerWithHealth(t, gitalySocket0)
+ testhelper.NewServerWithHealth(t, gitalySocket1)
primaryAddress, secondaryAddress := "unix://"+gitalySocket0, "unix://"+gitalySocket1
primaryGitaly := &config.Node{Address: primaryAddress, Storage: "gitaly-1"}
@@ -1416,12 +1420,10 @@ func TestCoordinator_grpcErrorHandling(t *testing.T) {
type gitalyNode struct {
mock *nodes.MockNode
- grpcServer *grpc.Server
operationServer *mockOperationServer
}
- repoProto, _, cleanup := gittest.CloneRepo(t)
- defer cleanup()
+ _, repoProto, _ := testcfg.BuildWithRepo(t)
for _, tc := range []struct {
desc string
@@ -1460,19 +1462,18 @@ func TestCoordinator_grpcErrorHandling(t *testing.T) {
for _, gitaly := range []string{"primary", "secondary-1", "secondary-2"} {
gitaly := gitaly
- grpcServer := testhelper.NewTestGrpcServer(t, nil, nil)
+ cfg := testcfg.Build(t, testcfg.WithStorages(gitaly))
+ cfg.ListenAddr = ":0"
operationServer := &mockOperationServer{
t: t,
wg: &wg,
}
- gitalypb.RegisterOperationServiceServer(grpcServer, operationServer)
-
- listener, address := testhelper.GetLocalhostListener(t)
- go grpcServer.Serve(listener)
- defer grpcServer.Stop()
+ addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterOperationServiceServer(srv, operationServer)
+ })
- conn, err := client.DialContext(ctx, "tcp://"+address, []grpc.DialOption{
+ conn, err := client.DialContext(ctx, addr, []grpc.DialOption{
grpc.WithDefaultCallOptions(grpc.ForceCodec(proxy.NewCodec())),
})
require.NoError(t, err)
@@ -1483,12 +1484,11 @@ func TestCoordinator_grpcErrorHandling(t *testing.T) {
Healthy: true,
GetStorageMethod: func() string { return gitaly },
},
- grpcServer: grpcServer,
operationServer: operationServer,
}
praefectConfig.VirtualStorages[0].Nodes = append(praefectConfig.VirtualStorages[0].Nodes, &config.Node{
- Address: "tcp://" + address,
+ Address: addr,
Storage: gitaly,
})
}
@@ -1539,8 +1539,9 @@ func TestCoordinator_grpcErrorHandling(t *testing.T) {
}
type mockTransaction struct {
- nodeStates map[string]transactions.VoteResult
- subtransactions int
+ nodeStates map[string]transactions.VoteResult
+ subtransactions int
+ didCommitAnySubtransaction bool
}
func (t mockTransaction) ID() uint64 {
@@ -1551,6 +1552,10 @@ func (t mockTransaction) CountSubtransactions() int {
return t.subtransactions
}
+func (t mockTransaction) DidCommitAnySubtransaction() bool {
+ return t.didCommitAnySubtransaction
+}
+
func (t mockTransaction) State() (map[string]transactions.VoteResult, error) {
return t.nodeStates, nil
}
@@ -1568,13 +1573,15 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
anyErr := errors.New("arbitrary error")
for _, tc := range []struct {
- desc string
- primary node
- secondaries []node
- replicas []string
- subtransactions int
- expectedOutdated []string
- expectedUpdated []string
+ desc string
+ primary node
+ secondaries []node
+ replicas []string
+ subtransactions int
+ didCommitAnySubtransaction bool
+ expectedPrimaryDirtied bool
+ expectedOutdated []string
+ expectedUpdated []string
}{
{
desc: "single committed node",
@@ -1582,7 +1589,9 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
name: "primary",
state: transactions.VoteCommitted,
},
- subtransactions: 1,
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
},
{
desc: "single failed node",
@@ -1604,7 +1613,8 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
primary: node{
name: "primary",
},
- subtransactions: 0,
+ subtransactions: 0,
+ expectedPrimaryDirtied: true,
},
{
desc: "single successful node with replica",
@@ -1612,9 +1622,11 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
name: "primary",
state: transactions.VoteCommitted,
},
- replicas: []string{"replica"},
- subtransactions: 1,
- expectedOutdated: []string{"replica"},
+ replicas: []string{"replica"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedOutdated: []string{"replica"},
},
{
desc: "single failing node with replica",
@@ -1633,18 +1645,21 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
state: transactions.VoteCommitted,
err: anyErr,
},
- replicas: []string{"replica"},
- subtransactions: 1,
- expectedOutdated: []string{"replica"},
+ replicas: []string{"replica"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedOutdated: []string{"replica"},
},
{
desc: "single node without transaction with replica",
primary: node{
name: "primary",
},
- replicas: []string{"replica"},
- subtransactions: 0,
- expectedOutdated: []string{"replica"},
+ replicas: []string{"replica"},
+ subtransactions: 0,
+ expectedPrimaryDirtied: true,
+ expectedOutdated: []string{"replica"},
},
{
desc: "multiple committed nodes",
@@ -1656,8 +1671,10 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
{name: "s1", state: transactions.VoteCommitted},
{name: "s2", state: transactions.VoteCommitted},
},
- subtransactions: 1,
- expectedUpdated: []string{"s1", "s2"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedUpdated: []string{"s1", "s2"},
},
{
desc: "multiple committed nodes with primary err",
@@ -1670,8 +1687,10 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
{name: "s1", state: transactions.VoteCommitted},
{name: "s2", state: transactions.VoteCommitted},
},
- subtransactions: 1,
- expectedOutdated: []string{"s1", "s2"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedOutdated: []string{"s1", "s2"},
},
{
desc: "multiple committed nodes with secondary err",
@@ -1683,9 +1702,11 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
{name: "s1", state: transactions.VoteCommitted, err: anyErr},
{name: "s2", state: transactions.VoteCommitted},
},
- subtransactions: 1,
- expectedUpdated: []string{"s2"},
- expectedOutdated: []string{"s1"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedUpdated: []string{"s2"},
+ expectedOutdated: []string{"s1"},
},
{
desc: "partial success",
@@ -1697,9 +1718,11 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
{name: "s1", state: transactions.VoteFailed},
{name: "s2", state: transactions.VoteCommitted},
},
- subtransactions: 1,
- expectedUpdated: []string{"s2"},
- expectedOutdated: []string{"s1"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedUpdated: []string{"s2"},
+ expectedOutdated: []string{"s1"},
},
{
desc: "failure with (impossible) secondary success",
@@ -1711,8 +1734,10 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
{name: "s1", state: transactions.VoteFailed},
{name: "s2", state: transactions.VoteCommitted},
},
- subtransactions: 1,
- expectedOutdated: []string{"s1", "s2"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedOutdated: []string{"s1", "s2"},
},
{
desc: "multiple nodes without subtransactions",
@@ -1724,8 +1749,9 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
{name: "s1", state: transactions.VoteFailed},
{name: "s2", state: transactions.VoteCommitted},
},
- subtransactions: 0,
- expectedOutdated: []string{"s1", "s2"},
+ subtransactions: 0,
+ expectedPrimaryDirtied: true,
+ expectedOutdated: []string{"s1", "s2"},
},
{
desc: "multiple nodes with replica and partial failures",
@@ -1737,10 +1763,12 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
{name: "s1", state: transactions.VoteFailed},
{name: "s2", state: transactions.VoteCommitted},
},
- replicas: []string{"r1", "r2"},
- subtransactions: 1,
- expectedOutdated: []string{"s1", "r1", "r2"},
- expectedUpdated: []string{"s2"},
+ replicas: []string{"r1", "r2"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedOutdated: []string{"s1", "r1", "r2"},
+ expectedUpdated: []string{"s2"},
},
{
desc: "multiple nodes with replica and partial err",
@@ -1752,9 +1780,11 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
{name: "s1", state: transactions.VoteFailed},
{name: "s2", state: transactions.VoteCommitted, err: anyErr},
},
- replicas: []string{"r1", "r2"},
- subtransactions: 1,
- expectedOutdated: []string{"s1", "s2", "r1", "r2"},
+ replicas: []string{"r1", "r2"},
+ didCommitAnySubtransaction: true,
+ subtransactions: 1,
+ expectedPrimaryDirtied: true,
+ expectedOutdated: []string{"s1", "s2", "r1", "r2"},
},
} {
t.Run(tc.desc, func(t *testing.T) {
@@ -1776,8 +1806,9 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
}
transaction := mockTransaction{
- nodeStates: states,
- subtransactions: tc.subtransactions,
+ nodeStates: states,
+ subtransactions: tc.subtransactions,
+ didCommitAnySubtransaction: tc.didCommitAnySubtransaction,
}
route := RepositoryMutatorRoute{
@@ -1792,9 +1823,102 @@ func TestGetUpdatedAndOutdatedSecondaries(t *testing.T) {
}
route.ReplicationTargets = append(route.ReplicationTargets, tc.replicas...)
- updated, outdated := getUpdatedAndOutdatedSecondaries(ctx, route, transaction, nodeErrors)
+ primaryDirtied, updated, outdated := getUpdatedAndOutdatedSecondaries(ctx, route, transaction, nodeErrors)
+ require.Equal(t, tc.expectedPrimaryDirtied, primaryDirtied)
require.ElementsMatch(t, tc.expectedUpdated, updated)
require.ElementsMatch(t, tc.expectedOutdated, outdated)
})
}
}
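The table above pins down when replication work is needed at all. As a condensed restatement of that rule for a single secondary (not the production code, which lives in getUpdatedAndOutdatedSecondaries; the helper below is a hypothetical simplification):

package praefect

// secondaryNeedsReplication is an illustrative reduction of the rule the test cases
// above encode: nothing is replicated while the primary is clean, and a dirtied
// primary makes every secondary that failed its vote or returned an error outdated.
func secondaryNeedsReplication(primaryDirtied, secondaryCommitted bool, secondaryErr error) bool {
	if !primaryDirtied {
		// The primary performed no changes, so there is nothing to catch up on.
		return false
	}
	return !secondaryCommitted || secondaryErr != nil
}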
+
+func TestNewRequestFinalizer_contextIsDisjointedFromTheRPC(t *testing.T) {
+ type ctxKey struct{}
+
+ parentDeadline := time.Now()
+ ctx, cancel := context.WithDeadline(context.WithValue(context.Background(), ctxKey{}, "value"), parentDeadline)
+ cancel()
+
+ requireSuppressedCancellation := func(t testing.TB, ctx context.Context) {
+ deadline, ok := ctx.Deadline()
+ require.True(t, ok)
+ require.NotEqual(t, parentDeadline, deadline)
+ require.Equal(t, ctx.Value(ctxKey{}), "value")
+ require.Nil(t, ctx.Err())
+ select {
+ case <-ctx.Done():
+ t.Fatal("context should not be canceled if the parent is canceled")
+ default:
+ require.NotNil(t, ctx.Done())
+ }
+ }
+
+ err := errors.New("error")
+
+ for _, tc := range []struct {
+ change datastore.ChangeType
+ errMsg string
+ }{
+ {
+ change: datastore.UpdateRepo,
+ errMsg: "increment generation: error",
+ },
+ {
+ change: datastore.RenameRepo,
+ errMsg: "rename repository: error",
+ },
+ {
+ change: datastore.DeleteRepo,
+ errMsg: "delete repository: error",
+ },
+ {
+ change: "replication jobs only",
+ errMsg: "enqueue replication event: error",
+ },
+ } {
+ t.Run(string(tc.change), func(t *testing.T) {
+ require.EqualError(t,
+ NewCoordinator(
+ &datastore.MockReplicationEventQueue{
+ EnqueueFunc: func(ctx context.Context, _ datastore.ReplicationEvent) (datastore.ReplicationEvent, error) {
+ requireSuppressedCancellation(t, ctx)
+ return datastore.ReplicationEvent{}, err
+ },
+ },
+ datastore.MockRepositoryStore{
+ IncrementGenerationFunc: func(ctx context.Context, _, _, _ string, _ []string) error {
+ requireSuppressedCancellation(t, ctx)
+ return err
+ },
+ RenameRepositoryFunc: func(ctx context.Context, _, _, _, _ string) error {
+ requireSuppressedCancellation(t, ctx)
+ return err
+ },
+ DeleteRepositoryFunc: func(ctx context.Context, _, _, _ string) error {
+ requireSuppressedCancellation(t, ctx)
+ return err
+ },
+ CreateRepositoryFunc: func(ctx context.Context, _, _, _ string, _, _ []string, _, _ bool) error {
+ requireSuppressedCancellation(t, ctx)
+ return err
+ },
+ },
+ nil,
+ nil,
+ config.Config{},
+ nil,
+ ).newRequestFinalizer(
+ ctx,
+ "virtual storage",
+ &gitalypb.Repository{},
+ "primary",
+ []string{},
+ []string{"secondary"},
+ tc.change,
+ datastore.Params{"RelativePath": "relative-path"},
+ "rpc-name",
+ )(),
+ tc.errMsg,
+ )
+ })
+ }
+}
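The assertions in requireSuppressedCancellation describe a context that keeps the parent's values but sheds its deadline and cancellation before a fresh timeout is attached. One way to build such a context is sketched below; this is not the helper the coordinator actually uses, only an illustration of the property the test checks:

package praefect

import (
	"context"
	"time"
)

// detachedContext forwards Value lookups to the parent but hides its deadline,
// cancellation signal and error, matching what requireSuppressedCancellation asserts.
type detachedContext struct{ context.Context }

func (detachedContext) Deadline() (time.Time, bool) { return time.Time{}, false }
func (detachedContext) Done() <-chan struct{}       { return nil }
func (detachedContext) Err() error                  { return nil }

// detach returns a context that ignores the parent's cancellation and deadline while
// still exposing its values, with a fresh timeout of its own.
func detach(parent context.Context, timeout time.Duration) (context.Context, context.CancelFunc) {
	return context.WithTimeout(detachedContext{parent}, timeout)
}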
diff --git a/internal/praefect/datastore/repository_store.go b/internal/praefect/datastore/repository_store.go
index 36a259041..623dd308f 100644
--- a/internal/praefect/datastore/repository_store.go
+++ b/internal/praefect/datastore/repository_store.go
@@ -90,15 +90,16 @@ type RepositoryStore interface {
// downgrade, a DowngradeAttemptedError is returned.
GetReplicatedGeneration(ctx context.Context, virtualStorage, relativePath, source, target string) (int, error)
// CreateRepository creates a record for a repository in the specified virtual storage and relative path.
- // Primary is the storage the repository was created on. Returns RepositoryExistsError when trying to create
- // a repository which already exists in the store.
+ // Primary is the storage the repository was created on. UpdatedSecondaries are secondaries that participated
+ // and successfully completed the transaction. OutdatedSecondaries are secondaries that were outdated or failed
+ // the transaction. Returns RepositoryExistsError when trying to create a repository which already exists in the store.
//
// storePrimary should be set when repository specific primaries are enabled. When set, the primary is stored as
// the repository's primary.
//
// storeAssignments should be set when variable replication factor is enabled. When set, the primary and the
// secondaries are stored as the assigned hosts of the repository.
- CreateRepository(ctx context.Context, virtualStorage, relativePath, primary string, secondaries []string, storePrimary, storeAssignments bool) error
+ CreateRepository(ctx context.Context, virtualStorage, relativePath, primary string, updatedSecondaries, outdatedSecondaries []string, storePrimary, storeAssignments bool) error
// DeleteRepository deletes the repository from the virtual storage and the storage. Returns
// RepositoryNotExistsError when trying to delete a repository which has no record in the virtual storage
// or the storage.
@@ -297,7 +298,7 @@ AND storage = ANY($3)
//nolint:stylecheck
//nolint:golint
-func (rs *PostgresRepositoryStore) CreateRepository(ctx context.Context, virtualStorage, relativePath, primary string, secondaries []string, storePrimary, storeAssignments bool) error {
+func (rs *PostgresRepositoryStore) CreateRepository(ctx context.Context, virtualStorage, relativePath, primary string, updatedSecondaries, outdatedSecondaries []string, storePrimary, storeAssignments bool) error {
const q = `
WITH repo AS (
INSERT INTO repositories (
@@ -316,11 +317,13 @@ assignments AS (
)
SELECT $1, $2, storage
FROM (
- SELECT unnest($5::text[]) AS storage
+ SELECT $3 AS storage
UNION
- SELECT $3
+ SELECT unnest($5::text[])
+ UNION
+ SELECT unnest($6::text[])
) AS storages
- WHERE $6
+ WHERE $7
)
INSERT INTO storage_repositories (
@@ -329,11 +332,23 @@ INSERT INTO storage_repositories (
storage,
generation
)
-VALUES ($1, $2, $3, 0)
+SELECT $1, $2, storage, 0
+FROM (
+ SELECT $3 AS storage
+ UNION
+ SELECT unnest($5::text[])
+) AS updated_storages
`
_, err := rs.db.ExecContext(ctx, q,
- virtualStorage, relativePath, primary, storePrimary, pq.StringArray(secondaries), storeAssignments)
+ virtualStorage,
+ relativePath,
+ primary,
+ storePrimary,
+ pq.StringArray(updatedSecondaries),
+ pq.StringArray(outdatedSecondaries),
+ storeAssignments,
+ )
var pqerr *pq.Error
if errors.As(err, &pqerr) && pqerr.Code.Name() == "unique_violation" {
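With the split into updated and outdated secondaries, only the primary and the updated secondaries get a generation record at 0, while outdated secondaries are merely assigned and left to replication. A hedged example of the new call shape; the storage names and the wrapper function are made up:

package datastore

import "context"

// createExampleRepository illustrates the new CreateRepository signature; it is not
// part of this change. Updated secondaries receive a generation record of 0, outdated
// secondaries only become assignments and must be brought up to date by replication.
func createExampleRepository(ctx context.Context, rs RepositoryStore) error {
	return rs.CreateRepository(ctx,
		"praefect",               // virtual storage
		"@hashed/ab/cd/abcd.git", // relative path
		"gitaly-1",               // primary
		[]string{"gitaly-2"},     // updatedSecondaries
		[]string{"gitaly-3"},     // outdatedSecondaries
		true,                     // storePrimary
		true,                     // storeAssignments
	)
}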
diff --git a/internal/praefect/datastore/repository_store_mock.go b/internal/praefect/datastore/repository_store_mock.go
index 13dcd2eab..015648c5b 100644
--- a/internal/praefect/datastore/repository_store_mock.go
+++ b/internal/praefect/datastore/repository_store_mock.go
@@ -9,7 +9,7 @@ type MockRepositoryStore struct {
IncrementGenerationFunc func(ctx context.Context, virtualStorage, relativePath, primary string, secondaries []string) error
GetReplicatedGenerationFunc func(ctx context.Context, virtualStorage, relativePath, source, target string) (int, error)
SetGenerationFunc func(ctx context.Context, virtualStorage, relativePath, storage string, generation int) error
- CreateRepositoryFunc func(ctx context.Context, virtualStorage, relativePath, primary string, secondaries []string, storePrimary, storeAssignments bool) error
+ CreateRepositoryFunc func(ctx context.Context, virtualStorage, relativePath, primary string, updatedSecondaries, outdatedSecondaries []string, storePrimary, storeAssignments bool) error
DeleteRepositoryFunc func(ctx context.Context, virtualStorage, relativePath, storage string) error
DeleteReplicaFunc func(ctx context.Context, virtualStorage, relativePath, storage string) error
RenameRepositoryFunc func(ctx context.Context, virtualStorage, relativePath, storage, newRelativePath string) error
@@ -53,12 +53,12 @@ func (m MockRepositoryStore) SetGeneration(ctx context.Context, virtualStorage,
//nolint:stylecheck
//nolint:golint
-func (m MockRepositoryStore) CreateRepository(ctx context.Context, virtualStorage, relativePath, primary string, secondaries []string, storePrimary, storeAssignments bool) error {
+func (m MockRepositoryStore) CreateRepository(ctx context.Context, virtualStorage, relativePath, primary string, updatedSecondaries, outdatedSecondaries []string, storePrimary, storeAssignments bool) error {
if m.CreateRepositoryFunc == nil {
return nil
}
- return m.CreateRepositoryFunc(ctx, virtualStorage, relativePath, primary, secondaries, storePrimary, storeAssignments)
+ return m.CreateRepositoryFunc(ctx, virtualStorage, relativePath, primary, updatedSecondaries, outdatedSecondaries, storePrimary, storeAssignments)
}
func (m MockRepositoryStore) DeleteRepository(ctx context.Context, virtualStorage, relativePath, storage string) error {
diff --git a/internal/praefect/datastore/repository_store_test.go b/internal/praefect/datastore/repository_store_test.go
index 69222db51..2cba83009 100644
--- a/internal/praefect/datastore/repository_store_test.go
+++ b/internal/praefect/datastore/repository_store_test.go
@@ -311,32 +311,44 @@ func testRepositoryStore(t *testing.T, newStore repositoryStoreFactory) {
t.Run("successfully created", func(t *testing.T) {
for _, tc := range []struct {
desc string
- secondaries []string
+ updatedSecondaries []string
+ outdatedSecondaries []string
storePrimary bool
storeAssignments bool
expectedPrimary string
expectedAssignments []string
}{
{
- desc: "store only repository record",
+ desc: "store only repository record for primary",
},
{
- desc: "primary stored",
- secondaries: []string{"secondary-1", "secondary-2"},
- storePrimary: true,
- expectedPrimary: "primary",
+ desc: "store only repository records for primary and outdated secondaries",
+ outdatedSecondaries: []string{"secondary-1", "secondary-2"},
+ },
+ {
+ desc: "store only repository records for primary and updated secondaries",
+ updatedSecondaries: []string{"secondary-1", "secondary-2"},
+ },
+ {
+ desc: "primary stored",
+ updatedSecondaries: []string{"secondary-1"},
+ outdatedSecondaries: []string{"secondary-2"},
+ storePrimary: true,
+ expectedPrimary: "primary",
},
{
desc: "assignments stored",
storeAssignments: true,
- secondaries: []string{"secondary-1", "secondary-2"},
+ updatedSecondaries: []string{"secondary-1"},
+ outdatedSecondaries: []string{"secondary-2"},
expectedAssignments: []string{"primary", "secondary-1", "secondary-2"},
},
{
desc: "store primary and assignments",
storePrimary: true,
storeAssignments: true,
- secondaries: []string{"secondary-1", "secondary-2"},
+ updatedSecondaries: []string{"secondary-1"},
+ outdatedSecondaries: []string{"secondary-2"},
expectedPrimary: "primary",
expectedAssignments: []string{"primary", "secondary-1", "secondary-2"},
},
@@ -344,7 +356,8 @@ func testRepositoryStore(t *testing.T, newStore repositoryStoreFactory) {
desc: "store primary and no secondaries",
storePrimary: true,
storeAssignments: true,
- secondaries: []string{},
+ updatedSecondaries: []string{},
+ outdatedSecondaries: []string{},
expectedPrimary: "primary",
expectedAssignments: []string{"primary"},
},
@@ -359,7 +372,20 @@ func testRepositoryStore(t *testing.T, newStore repositoryStoreFactory) {
t.Run(tc.desc, func(t *testing.T) {
rs, requireState := newStore(t, nil)
- require.NoError(t, rs.CreateRepository(ctx, vs, repo, "primary", tc.secondaries, tc.storePrimary, tc.storeAssignments))
+ require.NoError(t, rs.CreateRepository(ctx, vs, repo, "primary", tc.updatedSecondaries, tc.outdatedSecondaries, tc.storePrimary, tc.storeAssignments))
+
+ expectedStorageState := storageState{
+ vs: {
+ repo: {
+ "primary": 0,
+ },
+ },
+ }
+
+ for _, updatedSecondary := range tc.updatedSecondaries {
+ expectedStorageState[vs][repo][updatedSecondary] = 0
+ }
+
requireState(t, ctx,
virtualStorageState{
vs: {
@@ -369,13 +395,7 @@ func testRepositoryStore(t *testing.T, newStore repositoryStoreFactory) {
},
},
},
- storageState{
- vs: {
- repo: {
- "primary": 0,
- },
- },
- },
+ expectedStorageState,
)
})
}
@@ -384,10 +404,10 @@ func testRepositoryStore(t *testing.T, newStore repositoryStoreFactory) {
t.Run("conflict", func(t *testing.T) {
rs, _ := newStore(t, nil)
- require.NoError(t, rs.CreateRepository(ctx, vs, repo, stor, nil, false, false))
+ require.NoError(t, rs.CreateRepository(ctx, vs, repo, stor, nil, nil, false, false))
require.Equal(t,
RepositoryExistsError{vs, repo, stor},
- rs.CreateRepository(ctx, vs, repo, stor, nil, false, false),
+ rs.CreateRepository(ctx, vs, repo, stor, nil, nil, false, false),
)
})
})
diff --git a/internal/praefect/grpc-proxy/proxy/handler.go b/internal/praefect/grpc-proxy/proxy/handler.go
index aeac70181..743aedf8a 100644
--- a/internal/praefect/grpc-proxy/proxy/handler.go
+++ b/internal/praefect/grpc-proxy/proxy/handler.go
@@ -10,6 +10,7 @@ package proxy
import (
"context"
"errors"
+ "fmt"
"io"
"gitlab.com/gitlab-org/gitaly/internal/middleware/sentryhandler"
@@ -96,6 +97,20 @@ type streamAndDestination struct {
cancel func()
}
+// failDestinationsWithError marks all of the destinations in the StreamParameters as
+// having failed with the given error.
+func failDestinationsWithError(params *StreamParameters, err error) {
+ if params.Primary().ErrHandler != nil {
+ _ = params.Primary().ErrHandler(err)
+ }
+
+ for _, secondary := range params.Secondaries() {
+ if secondary.ErrHandler != nil {
+ _ = secondary.ErrHandler(err)
+ }
+ }
+}
+
// handler is where the real magic of proxying happens.
// It is invoked like any gRPC server stream and uses the gRPC server framing to get and receive bytes from the wire,
// forwarding it to a ClientStream established against the relevant ClientConn.
@@ -127,6 +142,7 @@ func (s *handler) handler(srv interface{}, serverStream grpc.ServerStream) (fina
primaryClientStream, err := grpc.NewClientStream(clientCtx, clientStreamDescForProxying, params.Primary().Conn, fullMethodName, params.CallOptions()...)
if err != nil {
+ failDestinationsWithError(params, fmt.Errorf("initiate primary stream: %w", err))
return err
}
@@ -143,6 +159,7 @@ func (s *handler) handler(srv interface{}, serverStream grpc.ServerStream) (fina
secondaryClientStream, err := grpc.NewClientStream(clientCtx, clientStreamDescForProxying, destination.Conn, fullMethodName, params.CallOptions()...)
if err != nil {
+ failDestinationsWithError(params, fmt.Errorf("initiate secondary stream: %w", err))
return err
}
secondaryStreams = append(secondaryStreams, streamAndDestination{
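failDestinationsWithError makes a failure to even establish the primary or a secondary stream surface through the same ErrHandler channel as an RPC failure, so those nodes are later counted as failed. A hypothetical test sketch of that behaviour, assuming the NewStreamParameters signature used by the director code in these tests:

package proxy

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/require"
)

// TestFailDestinationsWithError_sketch is illustrative only and not part of this change:
// every destination with a non-nil ErrHandler should observe the stream-setup error.
func TestFailDestinationsWithError_sketch(t *testing.T) {
	var primaryErr, secondaryErr error

	params := NewStreamParameters(
		Destination{ErrHandler: func(err error) error { primaryErr = err; return nil }},
		[]Destination{{ErrHandler: func(err error) error { secondaryErr = err; return nil }}},
		nil,
		nil,
	)

	failDestinationsWithError(params, errors.New("initiate primary stream: boom"))

	require.EqualError(t, primaryErr, "initiate primary stream: boom")
	require.EqualError(t, secondaryErr, "initiate primary stream: boom")
}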
diff --git a/internal/praefect/grpc-proxy/proxy/handler_ext_test.go b/internal/praefect/grpc-proxy/proxy/handler_ext_test.go
new file mode 100644
index 000000000..eb47a6240
--- /dev/null
+++ b/internal/praefect/grpc-proxy/proxy/handler_ext_test.go
@@ -0,0 +1,523 @@
+// Copyright 2017 Michal Witkowski. All Rights Reserved.
+// See LICENSE for licensing terms.
+
+package proxy_test
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "path/filepath"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/getsentry/sentry-go"
+ grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
+ grpc_ctxtags "github.com/grpc-ecosystem/go-grpc-middleware/tags"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/stretchr/testify/suite"
+ "gitlab.com/gitlab-org/gitaly/client"
+ "gitlab.com/gitlab-org/gitaly/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/internal/helper/fieldextractors"
+ "gitlab.com/gitlab-org/gitaly/internal/middleware/sentryhandler"
+ "gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy"
+ pb "gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/testdata"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "go.uber.org/goleak"
+ "google.golang.org/grpc"
+ "google.golang.org/grpc/codes"
+ "google.golang.org/grpc/metadata"
+ "google.golang.org/grpc/status"
+)
+
+const (
+ pingDefaultValue = "I like kittens."
+ clientMdKey = "test-client-header"
+ serverHeaderMdKey = "test-client-header"
+ serverTrailerMdKey = "test-client-trailer"
+
+ rejectingMdKey = "test-reject-rpc-if-in-context"
+
+ countListResponses = 20
+)
+
+func TestMain(m *testing.M) {
+ defer testhelper.MustHaveNoChildProcess()
+ cleanup := testhelper.Configure()
+ defer cleanup()
+
+ goleak.VerifyTestMain(m)
+}
+
+// assertingService is implemented on the server side and asserts that requests and metadata arrive as expected.
+type assertingService struct {
+ t *testing.T
+}
+
+func (s *assertingService) PingEmpty(ctx context.Context, _ *pb.Empty) (*pb.PingResponse, error) {
+ // Check that this call has client's metadata.
+ md, ok := metadata.FromIncomingContext(ctx)
+ assert.True(s.t, ok, "PingEmpty call must have metadata in context")
+ _, ok = md[clientMdKey]
+ assert.True(s.t, ok, "PingEmpty call must have client's custom headers in metadata")
+ return &pb.PingResponse{Value: pingDefaultValue, Counter: 42}, nil
+}
+
+func (s *assertingService) Ping(ctx context.Context, ping *pb.PingRequest) (*pb.PingResponse, error) {
+ // Send user trailers and headers.
+ require.NoError(s.t, grpc.SendHeader(ctx, metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
+ require.NoError(s.t, grpc.SetTrailer(ctx, metadata.Pairs(serverTrailerMdKey, "I like ending turtles.")))
+ return &pb.PingResponse{Value: ping.Value, Counter: 42}, nil
+}
+
+func (s *assertingService) PingError(ctx context.Context, ping *pb.PingRequest) (*pb.Empty, error) {
+ return nil, status.Errorf(codes.ResourceExhausted, "Userspace error.")
+}
+
+func (s *assertingService) PingList(ping *pb.PingRequest, stream pb.TestService_PingListServer) error {
+ // Send user trailers and headers.
+ require.NoError(s.t, stream.SendHeader(metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
+ for i := 0; i < countListResponses; i++ {
+ require.NoError(s.t, stream.Send(&pb.PingResponse{Value: ping.Value, Counter: int32(i)}))
+ }
+ stream.SetTrailer(metadata.Pairs(serverTrailerMdKey, "I like ending turtles."))
+ return nil
+}
+
+func (s *assertingService) PingStream(stream pb.TestService_PingStreamServer) error {
+ require.NoError(s.t, stream.SendHeader(metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
+ counter := int32(0)
+ for {
+ ping, err := stream.Recv()
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ require.NoError(s.t, err, "can't fail reading stream")
+ return err
+ }
+ pong := &pb.PingResponse{Value: ping.Value, Counter: counter}
+ if err := stream.Send(pong); err != nil {
+ require.NoError(s.t, err, "can't fail sending back a pong")
+ }
+ counter++
+ }
+ stream.SetTrailer(metadata.Pairs(serverTrailerMdKey, "I like ending turtles."))
+ return nil
+}
+
+// ProxyHappySuite tests the "happy" path of handling: that everything works in absence of connection issues.
+type ProxyHappySuite struct {
+ suite.Suite
+
+ serverListener net.Listener
+ server *grpc.Server
+ proxyListener net.Listener
+ proxy *grpc.Server
+ serverClientConn *grpc.ClientConn
+
+ client *grpc.ClientConn
+ testClient pb.TestServiceClient
+ testClientConn *grpc.ClientConn
+}
+
+func (s *ProxyHappySuite) ctx() context.Context {
+ // Make all RPC calls last at most 2 minutes, so async issues or deadlocks do not hang the tests.
+ ctx, _ := context.WithTimeout(context.TODO(), 120*time.Second) // nolint: govet
+ return ctx
+}
+
+func (s *ProxyHappySuite) TestPingEmptyCarriesClientMetadata() {
+ ctx := metadata.NewOutgoingContext(s.ctx(), metadata.Pairs(clientMdKey, "true"))
+ out, err := s.testClient.PingEmpty(ctx, &pb.Empty{})
+ require.NoError(s.T(), err, "PingEmpty should succeed without errors")
+ require.Equal(s.T(), &pb.PingResponse{Value: pingDefaultValue, Counter: 42}, out)
+}
+
+func (s *ProxyHappySuite) TestPingEmpty_StressTest() {
+ for i := 0; i < 50; i++ {
+ s.TestPingEmptyCarriesClientMetadata()
+ }
+}
+
+func (s *ProxyHappySuite) TestPingCarriesServerHeadersAndTrailers() {
+ headerMd := make(metadata.MD)
+ trailerMd := make(metadata.MD)
+ // This is an awkward calling convention... but meh.
+ out, err := s.testClient.Ping(s.ctx(), &pb.PingRequest{Value: "foo"}, grpc.Header(&headerMd), grpc.Trailer(&trailerMd))
+ require.NoError(s.T(), err, "Ping should succeed without errors")
+ require.Equal(s.T(), &pb.PingResponse{Value: "foo", Counter: 42}, out)
+ assert.Contains(s.T(), headerMd, serverHeaderMdKey, "server response headers must contain server data")
+ assert.Len(s.T(), trailerMd, 1, "server response trailers must contain server data")
+}
+
+func (s *ProxyHappySuite) TestPingErrorPropagatesAppError() {
+ sentryTriggered := 0
+ sentrySrv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ sentryTriggered++
+ }))
+ defer sentrySrv.Close()
+
+ // minimal required sentry client configuration
+ sentryURL, err := url.Parse(sentrySrv.URL)
+ require.NoError(s.T(), err)
+ sentryURL.User = url.UserPassword("stub", "stub")
+ sentryURL.Path = "/stub/1"
+
+ require.NoError(s.T(), sentry.Init(sentry.ClientOptions{
+ Dsn: sentryURL.String(),
+ Transport: sentry.NewHTTPSyncTransport(),
+ }))
+
+ sentry.CaptureEvent(sentry.NewEvent())
+ require.Equal(s.T(), 1, sentryTriggered, "sentry configured incorrectly")
+
+ _, err = s.testClient.PingError(s.ctx(), &pb.PingRequest{Value: "foo"})
+ require.Error(s.T(), err, "PingError should never succeed")
+ assert.Equal(s.T(), codes.ResourceExhausted, status.Code(err))
+ assert.Equal(s.T(), "Userspace error.", status.Convert(err).Message())
+ require.Equal(s.T(), 1, sentryTriggered, "sentry must not be triggered because errors from remote must be just propagated")
+}
+
+func (s *ProxyHappySuite) TestDirectorErrorIsPropagated() {
+ // See SetupSuite where the StreamDirector has a special case.
+ ctx := metadata.NewOutgoingContext(s.ctx(), metadata.Pairs(rejectingMdKey, "true"))
+ _, err := s.testClient.Ping(ctx, &pb.PingRequest{Value: "foo"})
+ require.Error(s.T(), err, "Director should reject this RPC")
+ assert.Equal(s.T(), codes.PermissionDenied, status.Code(err))
+ assert.Equal(s.T(), "testing rejection", status.Convert(err).Message())
+}
+
+func (s *ProxyHappySuite) TestPingStream_FullDuplexWorks() {
+ stream, err := s.testClient.PingStream(s.ctx())
+ require.NoError(s.T(), err, "PingStream request should be successful.")
+
+ for i := 0; i < countListResponses; i++ {
+ ping := &pb.PingRequest{Value: fmt.Sprintf("foo:%d", i)}
+ require.NoError(s.T(), stream.Send(ping), "sending to PingStream must not fail")
+ resp, err := stream.Recv()
+ if err == io.EOF {
+ break
+ }
+ if i == 0 {
+ // Check that the header arrives before all entries.
+ headerMd, err := stream.Header()
+ require.NoError(s.T(), err, "PingStream headers should not error.")
+ assert.Contains(s.T(), headerMd, serverHeaderMdKey, "PingStream response headers must contain metadata")
+ }
+ assert.EqualValues(s.T(), i, resp.Counter, "ping roundtrip must succeed with the correct id")
+ }
+ require.NoError(s.T(), stream.CloseSend(), "no error on close send")
+ _, err = stream.Recv()
+ require.Equal(s.T(), io.EOF, err, "stream should close with io.EOF, meaning OK")
+ // Check that the trailer headers are here.
+ trailerMd := stream.Trailer()
+ assert.Len(s.T(), trailerMd, 1, "PingList trailer headers must contain metadata")
+}
+
+func (s *ProxyHappySuite) TestPingStream_StressTest() {
+ for i := 0; i < 50; i++ {
+ s.TestPingStream_FullDuplexWorks()
+ }
+}
+
+func (s *ProxyHappySuite) SetupSuite() {
+ var err error
+
+ s.proxyListener, err = net.Listen("tcp", "127.0.0.1:0")
+ require.NoError(s.T(), err, "must be able to allocate a port for proxyListener")
+ s.serverListener, err = net.Listen("tcp", "127.0.0.1:0")
+ require.NoError(s.T(), err, "must be able to allocate a port for serverListener")
+
+ s.server = grpc.NewServer()
+ pb.RegisterTestServiceServer(s.server, &assertingService{t: s.T()})
+
+ // Setup of the proxy's Director.
+ s.serverClientConn, err = grpc.Dial(s.serverListener.Addr().String(), grpc.WithInsecure(), grpc.WithDefaultCallOptions(grpc.ForceCodec(proxy.NewCodec())))
+ require.NoError(s.T(), err, "must not error on deferred client Dial")
+ director := func(ctx context.Context, fullName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
+ payload, err := peeker.Peek()
+ if err != nil {
+ return nil, err
+ }
+
+ md, ok := metadata.FromIncomingContext(ctx)
+ if ok {
+ if _, exists := md[rejectingMdKey]; exists {
+ return proxy.NewStreamParameters(proxy.Destination{Ctx: helper.IncomingToOutgoing(ctx), Msg: payload}, nil, nil, nil), status.Errorf(codes.PermissionDenied, "testing rejection")
+ }
+ }
+
+ // Explicitly copy the metadata, otherwise the tests will fail.
+ return proxy.NewStreamParameters(proxy.Destination{Ctx: helper.IncomingToOutgoing(ctx), Conn: s.serverClientConn, Msg: payload}, nil, nil, nil), nil
+ }
+
+ s.proxy = grpc.NewServer(
+ grpc.CustomCodec(proxy.NewCodec()),
+ grpc.StreamInterceptor(
+ grpc_middleware.ChainStreamServer(
+ // context tags usage is required by sentryhandler.StreamLogHandler
+ grpc_ctxtags.StreamServerInterceptor(grpc_ctxtags.WithFieldExtractorForInitialReq(fieldextractors.FieldExtractor)),
+ // sentry middleware to capture errors
+ sentryhandler.StreamLogHandler,
+ ),
+ ),
+ grpc.UnknownServiceHandler(proxy.TransparentHandler(director)),
+ )
+ // Ping handler is handled as an explicit registration and not as a TransparentHandler.
+ proxy.RegisterService(s.proxy, director,
+ "mwitkow.testproto.TestService",
+ "Ping")
+
+ // Start the serving loops.
+ go func() {
+ s.server.Serve(s.serverListener)
+ }()
+ go func() {
+ s.proxy.Serve(s.proxyListener)
+ }()
+
+ ctx, cancel := context.WithTimeout(context.TODO(), 1*time.Second)
+ defer cancel()
+
+ s.testClientConn, err = grpc.DialContext(ctx, strings.Replace(s.proxyListener.Addr().String(), "127.0.0.1", "localhost", 1), grpc.WithInsecure())
+ require.NoError(s.T(), err, "must not error on deferred client Dial")
+ s.testClient = pb.NewTestServiceClient(s.testClientConn)
+}
+
+func (s *ProxyHappySuite) TearDownSuite() {
+ if s.client != nil {
+ s.client.Close()
+ }
+ if s.testClientConn != nil {
+ s.testClientConn.Close()
+ }
+ if s.serverClientConn != nil {
+ s.serverClientConn.Close()
+ }
+ // Close all transports so the logs don't get spammy.
+ time.Sleep(10 * time.Millisecond)
+ if s.proxy != nil {
+ s.proxy.Stop()
+ s.proxyListener.Close()
+ }
+ if s.serverListener != nil {
+ s.server.Stop()
+ s.serverListener.Close()
+ }
+}
+
+func TestProxyHappySuite(t *testing.T) {
+ suite.Run(t, &ProxyHappySuite{})
+}
+
+func TestProxyErrorPropagation(t *testing.T) {
+ errBackend := status.Error(codes.InvalidArgument, "backend error")
+ errDirector := status.Error(codes.FailedPrecondition, "director error")
+ errRequestFinalizer := status.Error(codes.Internal, "request finalizer error")
+
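+ // Each case wires up a backend, director and request finalizer that can fail independently
+ // and asserts which error the proxy surfaces to the client.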
+ for _, tc := range []struct {
+ desc string
+ backendError error
+ directorError error
+ requestFinalizerError error
+ returnedError error
+ errHandler func(error) error
+ }{
+ {
+ desc: "backend error is propagated",
+ backendError: errBackend,
+ returnedError: errBackend,
+ },
+ {
+ desc: "director error is propagated",
+ directorError: errDirector,
+ returnedError: errDirector,
+ },
+ {
+ desc: "request finalizer error is propagated",
+ requestFinalizerError: errRequestFinalizer,
+ returnedError: errRequestFinalizer,
+ },
+ {
+ desc: "director error cancels proxying",
+ backendError: errBackend,
+ requestFinalizerError: errRequestFinalizer,
+ directorError: errDirector,
+ returnedError: errDirector,
+ },
+ {
+ desc: "backend error prioritized over request finalizer error",
+ backendError: errBackend,
+ requestFinalizerError: errRequestFinalizer,
+ returnedError: errBackend,
+ },
+ {
+ desc: "err handler gets error",
+ backendError: errBackend,
+ requestFinalizerError: errRequestFinalizer,
+ returnedError: errBackend,
+ errHandler: func(err error) error {
+ require.Equal(t, errBackend, err)
+ return errBackend
+ },
+ },
+ {
+ desc: "err handler can swallow error",
+ backendError: errBackend,
+ returnedError: io.EOF,
+ errHandler: func(err error) error {
+ require.Equal(t, errBackend, err)
+ return nil
+ },
+ },
+ {
+ desc: "swallowed error surfaces request finalizer error",
+ backendError: errBackend,
+ requestFinalizerError: errRequestFinalizer,
+ returnedError: errRequestFinalizer,
+ errHandler: func(err error) error {
+ require.Equal(t, errBackend, err)
+ return nil
+ },
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ tmpDir := testhelper.TempDir(t)
+
+ backendListener, err := net.Listen("unix", filepath.Join(tmpDir, "backend"))
+ require.NoError(t, err)
+
+ backendServer := grpc.NewServer(grpc.UnknownServiceHandler(func(interface{}, grpc.ServerStream) error {
+ return tc.backendError
+ }))
+ go func() { backendServer.Serve(backendListener) }()
+ defer backendServer.Stop()
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ backendClientConn, err := grpc.DialContext(ctx, "unix://"+backendListener.Addr().String(),
+ grpc.WithInsecure(), grpc.WithDefaultCallOptions(grpc.ForceCodec(proxy.NewCodec())))
+ require.NoError(t, err)
+ defer func() {
+ require.NoError(t, backendClientConn.Close())
+ }()
+
+ proxyListener, err := net.Listen("unix", filepath.Join(tmpDir, "proxy"))
+ require.NoError(t, err)
+
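+ // The proxy forwards every stream to the backend; the director error, request finalizer
+ // and ErrHandler are supplied by the test case.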
+ proxyServer := grpc.NewServer(
+ grpc.CustomCodec(proxy.NewCodec()),
+ grpc.UnknownServiceHandler(proxy.TransparentHandler(func(ctx context.Context, fullMethodName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
+ return proxy.NewStreamParameters(
+ proxy.Destination{
+ Ctx: ctx,
+ Conn: backendClientConn,
+ ErrHandler: tc.errHandler,
+ },
+ nil,
+ func() error { return tc.requestFinalizerError },
+ nil,
+ ), tc.directorError
+ })),
+ )
+
+ go func() { proxyServer.Serve(proxyListener) }()
+ defer proxyServer.Stop()
+
+ proxyClientConn, err := grpc.DialContext(ctx, "unix://"+proxyListener.Addr().String(), grpc.WithInsecure())
+ require.NoError(t, err)
+ defer func() {
+ require.NoError(t, proxyClientConn.Close())
+ }()
+
+ resp, err := pb.NewTestServiceClient(proxyClientConn).Ping(ctx, &pb.PingRequest{})
+ require.Equal(t, tc.returnedError, err)
+ require.Nil(t, resp)
+ })
+ }
+}
+
+func TestRegisterStreamHandlers(t *testing.T) {
+ directorCalledError := errors.New("director was called")
+
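+ // The transparent handler's director is only reached by methods that do not have an
+ // explicitly registered stream handler.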
+ server := grpc.NewServer(
+ grpc.CustomCodec(proxy.NewCodec()),
+ grpc.UnknownServiceHandler(proxy.TransparentHandler(func(ctx context.Context, fullMethodName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
+ return nil, directorCalledError
+ })),
+ )
+
+ var pingStreamHandlerCalled, pingEmptyStreamHandlerCalled bool
+
+ pingValue := "hello"
+
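+ // The handlers below record that they were invoked so the test can verify that explicitly
+ // registered methods bypass the UnknownServiceHandler.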
+ pingStreamHandler := func(srv interface{}, stream grpc.ServerStream) error {
+ pingStreamHandlerCalled = true
+ var req pb.PingRequest
+
+ if err := stream.RecvMsg(&req); err != nil {
+ return err
+ }
+
+ require.Equal(t, pingValue, req.Value)
+
+ return stream.SendMsg(nil)
+ }
+
+ pingEmptyStreamHandler := func(srv interface{}, stream grpc.ServerStream) error {
+ pingEmptyStreamHandlerCalled = true
+ var req pb.Empty
+
+ if err := stream.RecvMsg(&req); err != nil {
+ return err
+ }
+
+ return stream.SendMsg(nil)
+ }
+
+ streamers := map[string]grpc.StreamHandler{
+ "Ping": pingStreamHandler,
+ "PingEmpty": pingEmptyStreamHandler,
+ }
+
+ proxy.RegisterStreamHandlers(server, "mwitkow.testproto.TestService", streamers)
+
+ serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
+
+ listener, err := net.Listen("unix", serverSocketPath)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ go server.Serve(listener)
+ defer server.Stop()
+
+ cc, err := client.Dial("unix://"+serverSocketPath, []grpc.DialOption{grpc.WithBlock()})
+ require.NoError(t, err)
+ defer cc.Close()
+
+ testServiceClient := pb.NewTestServiceClient(cc)
+
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ _, err = testServiceClient.Ping(ctx, &pb.PingRequest{Value: pingValue})
+ require.NoError(t, err)
+ require.True(t, pingStreamHandlerCalled)
+
+ _, err = testServiceClient.PingEmpty(ctx, &pb.Empty{})
+ require.NoError(t, err)
+ require.True(t, pingEmptyStreamHandlerCalled)
+
+ // since PingError was never registered with its own streamer, it should get sent to the UnknownServiceHandler
+ _, err = testServiceClient.PingError(ctx, &pb.PingRequest{})
+ require.Equal(t, status.Error(codes.Unknown, directorCalledError.Error()), err)
+}
diff --git a/internal/praefect/grpc-proxy/proxy/handler_test.go b/internal/praefect/grpc-proxy/proxy/handler_test.go
index e20678708..b33fcba43 100644
--- a/internal/praefect/grpc-proxy/proxy/handler_test.go
+++ b/internal/praefect/grpc-proxy/proxy/handler_test.go
@@ -1,523 +1,39 @@
-// Copyright 2017 Michal Witkowski. All Rights Reserved.
-// See LICENSE for licensing terms.
-
-package proxy_test
+package proxy
import (
- "context"
"errors"
- "fmt"
- "io"
- "net"
- "net/http"
- "net/http/httptest"
- "net/url"
- "path/filepath"
- "strings"
"testing"
- "time"
- "github.com/getsentry/sentry-go"
- grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
- grpc_ctxtags "github.com/grpc-ecosystem/go-grpc-middleware/tags"
- "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "github.com/stretchr/testify/suite"
- "gitlab.com/gitlab-org/gitaly/client"
- "gitlab.com/gitlab-org/gitaly/internal/helper"
- "gitlab.com/gitlab-org/gitaly/internal/helper/fieldextractors"
- "gitlab.com/gitlab-org/gitaly/internal/middleware/sentryhandler"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy"
- pb "gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/testdata"
- "gitlab.com/gitlab-org/gitaly/internal/testhelper"
- "go.uber.org/goleak"
- "google.golang.org/grpc"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/metadata"
- "google.golang.org/grpc/status"
-)
-
-const (
- pingDefaultValue = "I like kittens."
- clientMdKey = "test-client-header"
- serverHeaderMdKey = "test-client-header"
- serverTrailerMdKey = "test-client-trailer"
-
- rejectingMdKey = "test-reject-rpc-if-in-context"
-
- countListResponses = 20
)
-func TestMain(m *testing.M) {
- defer testhelper.MustHaveNoChildProcess()
- cleanup := testhelper.Configure()
- defer cleanup()
-
- goleak.VerifyTestMain(m)
-}
-
-// asserting service is implemented on the server side and serves as a handler for stuff
-type assertingService struct {
- t *testing.T
-}
-
-func (s *assertingService) PingEmpty(ctx context.Context, _ *pb.Empty) (*pb.PingResponse, error) {
- // Check that this call has client's metadata.
- md, ok := metadata.FromIncomingContext(ctx)
- assert.True(s.t, ok, "PingEmpty call must have metadata in context")
- _, ok = md[clientMdKey]
- assert.True(s.t, ok, "PingEmpty call must have clients's custom headers in metadata")
- return &pb.PingResponse{Value: pingDefaultValue, Counter: 42}, nil
-}
-
-func (s *assertingService) Ping(ctx context.Context, ping *pb.PingRequest) (*pb.PingResponse, error) {
- // Send user trailers and headers.
- require.NoError(s.t, grpc.SendHeader(ctx, metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
- require.NoError(s.t, grpc.SetTrailer(ctx, metadata.Pairs(serverTrailerMdKey, "I like ending turtles.")))
- return &pb.PingResponse{Value: ping.Value, Counter: 42}, nil
-}
-
-func (s *assertingService) PingError(ctx context.Context, ping *pb.PingRequest) (*pb.Empty, error) {
- return nil, status.Errorf(codes.ResourceExhausted, "Userspace error.")
-}
-
-func (s *assertingService) PingList(ping *pb.PingRequest, stream pb.TestService_PingListServer) error {
- // Send user trailers and headers.
- require.NoError(s.t, stream.SendHeader(metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
- for i := 0; i < countListResponses; i++ {
- require.NoError(s.t, stream.Send(&pb.PingResponse{Value: ping.Value, Counter: int32(i)}))
- }
- stream.SetTrailer(metadata.Pairs(serverTrailerMdKey, "I like ending turtles."))
- return nil
-}
-
-func (s *assertingService) PingStream(stream pb.TestService_PingStreamServer) error {
- require.NoError(s.t, stream.SendHeader(metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
- counter := int32(0)
- for {
- ping, err := stream.Recv()
- if err == io.EOF {
- break
- } else if err != nil {
- require.NoError(s.t, err, "can't fail reading stream")
- return err
- }
- pong := &pb.PingResponse{Value: ping.Value, Counter: counter}
- if err := stream.Send(pong); err != nil {
- require.NoError(s.t, err, "can't fail sending back a pong")
- }
- counter++
- }
- stream.SetTrailer(metadata.Pairs(serverTrailerMdKey, "I like ending turtles."))
- return nil
-}
-
-// ProxyHappySuite tests the "happy" path of handling: that everything works in absence of connection issues.
-type ProxyHappySuite struct {
- suite.Suite
-
- serverListener net.Listener
- server *grpc.Server
- proxyListener net.Listener
- proxy *grpc.Server
- serverClientConn *grpc.ClientConn
-
- client *grpc.ClientConn
- testClient pb.TestServiceClient
- testClientConn *grpc.ClientConn
-}
-
-func (s *ProxyHappySuite) ctx() context.Context {
- // Make all RPC calls last at most 1 sec, meaning all async issues or deadlock will not kill tests.
- ctx, _ := context.WithTimeout(context.TODO(), 120*time.Second) // nolint: govet
- return ctx
-}
-
-func (s *ProxyHappySuite) TestPingEmptyCarriesClientMetadata() {
- ctx := metadata.NewOutgoingContext(s.ctx(), metadata.Pairs(clientMdKey, "true"))
- out, err := s.testClient.PingEmpty(ctx, &pb.Empty{})
- require.NoError(s.T(), err, "PingEmpty should succeed without errors")
- require.Equal(s.T(), &pb.PingResponse{Value: pingDefaultValue, Counter: 42}, out)
-}
-
-func (s *ProxyHappySuite) TestPingEmpty_StressTest() {
- for i := 0; i < 50; i++ {
- s.TestPingEmptyCarriesClientMetadata()
- }
-}
-
-func (s *ProxyHappySuite) TestPingCarriesServerHeadersAndTrailers() {
- headerMd := make(metadata.MD)
- trailerMd := make(metadata.MD)
- // This is an awkward calling convention... but meh.
- out, err := s.testClient.Ping(s.ctx(), &pb.PingRequest{Value: "foo"}, grpc.Header(&headerMd), grpc.Trailer(&trailerMd))
- require.NoError(s.T(), err, "Ping should succeed without errors")
- require.Equal(s.T(), &pb.PingResponse{Value: "foo", Counter: 42}, out)
- assert.Contains(s.T(), headerMd, serverHeaderMdKey, "server response headers must contain server data")
- assert.Len(s.T(), trailerMd, 1, "server response trailers must contain server data")
-}
-
-func (s *ProxyHappySuite) TestPingErrorPropagatesAppError() {
- sentryTriggered := 0
- sentrySrv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- sentryTriggered++
- }))
- defer sentrySrv.Close()
-
- // minimal required sentry client configuration
- sentryURL, err := url.Parse(sentrySrv.URL)
- require.NoError(s.T(), err)
- sentryURL.User = url.UserPassword("stub", "stub")
- sentryURL.Path = "/stub/1"
-
- require.NoError(s.T(), sentry.Init(sentry.ClientOptions{
- Dsn: sentryURL.String(),
- Transport: sentry.NewHTTPSyncTransport(),
- }))
-
- sentry.CaptureEvent(sentry.NewEvent())
- require.Equal(s.T(), 1, sentryTriggered, "sentry configured incorrectly")
-
- _, err = s.testClient.PingError(s.ctx(), &pb.PingRequest{Value: "foo"})
- require.Error(s.T(), err, "PingError should never succeed")
- assert.Equal(s.T(), codes.ResourceExhausted, status.Code(err))
- assert.Equal(s.T(), "Userspace error.", status.Convert(err).Message())
- require.Equal(s.T(), 1, sentryTriggered, "sentry must not be triggered because errors from remote must be just propagated")
-}
-
-func (s *ProxyHappySuite) TestDirectorErrorIsPropagated() {
- // See SetupSuite where the StreamDirector has a special case.
- ctx := metadata.NewOutgoingContext(s.ctx(), metadata.Pairs(rejectingMdKey, "true"))
- _, err := s.testClient.Ping(ctx, &pb.PingRequest{Value: "foo"})
- require.Error(s.T(), err, "Director should reject this RPC")
- assert.Equal(s.T(), codes.PermissionDenied, status.Code(err))
- assert.Equal(s.T(), "testing rejection", status.Convert(err).Message())
-}
-
-func (s *ProxyHappySuite) TestPingStream_FullDuplexWorks() {
- stream, err := s.testClient.PingStream(s.ctx())
- require.NoError(s.T(), err, "PingStream request should be successful.")
-
- for i := 0; i < countListResponses; i++ {
- ping := &pb.PingRequest{Value: fmt.Sprintf("foo:%d", i)}
- require.NoError(s.T(), stream.Send(ping), "sending to PingStream must not fail")
- resp, err := stream.Recv()
- if err == io.EOF {
- break
- }
- if i == 0 {
- // Check that the header arrives before all entries.
- headerMd, err := stream.Header()
- require.NoError(s.T(), err, "PingStream headers should not error.")
- assert.Contains(s.T(), headerMd, serverHeaderMdKey, "PingStream response headers user contain metadata")
- }
- assert.EqualValues(s.T(), i, resp.Counter, "ping roundtrip must succeed with the correct id")
- }
- require.NoError(s.T(), stream.CloseSend(), "no error on close send")
- _, err = stream.Recv()
- require.Equal(s.T(), io.EOF, err, "stream should close with io.EOF, meaining OK")
- // Check that the trailer headers are here.
- trailerMd := stream.Trailer()
- assert.Len(s.T(), trailerMd, 1, "PingList trailer headers user contain metadata")
-}
-
-func (s *ProxyHappySuite) TestPingStream_StressTest() {
- for i := 0; i < 50; i++ {
- s.TestPingStream_FullDuplexWorks()
- }
-}
-
-func (s *ProxyHappySuite) SetupSuite() {
- var err error
-
- s.proxyListener, err = net.Listen("tcp", "127.0.0.1:0")
- require.NoError(s.T(), err, "must be able to allocate a port for proxyListener")
- s.serverListener, err = net.Listen("tcp", "127.0.0.1:0")
- require.NoError(s.T(), err, "must be able to allocate a port for serverListener")
+func TestFailDestinationWithError(t *testing.T) {
+ expectedErr := errors.New("some error")
- s.server = grpc.NewServer()
- pb.RegisterTestServiceServer(s.server, &assertingService{t: s.T()})
-
- // Setup of the proxy's Director.
- s.serverClientConn, err = grpc.Dial(s.serverListener.Addr().String(), grpc.WithInsecure(), grpc.WithDefaultCallOptions(grpc.ForceCodec(proxy.NewCodec())))
- require.NoError(s.T(), err, "must not error on deferred client Dial")
- director := func(ctx context.Context, fullName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
- payload, err := peeker.Peek()
- if err != nil {
- return nil, err
- }
-
- md, ok := metadata.FromIncomingContext(ctx)
- if ok {
- if _, exists := md[rejectingMdKey]; exists {
- return proxy.NewStreamParameters(proxy.Destination{Ctx: helper.IncomingToOutgoing(ctx), Msg: payload}, nil, nil, nil), status.Errorf(codes.PermissionDenied, "testing rejection")
- }
- }
-
- // Explicitly copy the metadata, otherwise the tests will fail.
- return proxy.NewStreamParameters(proxy.Destination{Ctx: helper.IncomingToOutgoing(ctx), Conn: s.serverClientConn, Msg: payload}, nil, nil, nil), nil
- }
-
- s.proxy = grpc.NewServer(
- grpc.CustomCodec(proxy.NewCodec()),
- grpc.StreamInterceptor(
- grpc_middleware.ChainStreamServer(
- // context tags usage is required by sentryhandler.StreamLogHandler
- grpc_ctxtags.StreamServerInterceptor(grpc_ctxtags.WithFieldExtractorForInitialReq(fieldextractors.FieldExtractor)),
- // sentry middleware to capture errors
- sentryhandler.StreamLogHandler,
- ),
- ),
- grpc.UnknownServiceHandler(proxy.TransparentHandler(director)),
- )
- // Ping handler is handled as an explicit registration and not as a TransparentHandler.
- proxy.RegisterService(s.proxy, director,
- "mwitkow.testproto.TestService",
- "Ping")
-
- // Start the serving loops.
- go func() {
- s.server.Serve(s.serverListener)
- }()
- go func() {
- s.proxy.Serve(s.proxyListener)
- }()
-
- ctx, cancel := context.WithTimeout(context.TODO(), 1*time.Second)
- defer cancel()
-
- s.testClientConn, err = grpc.DialContext(ctx, strings.Replace(s.proxyListener.Addr().String(), "127.0.0.1", "localhost", 1), grpc.WithInsecure())
- require.NoError(s.T(), err, "must not error on deferred client Dial")
- s.testClient = pb.NewTestServiceClient(s.testClientConn)
-}
-
-func (s *ProxyHappySuite) TearDownSuite() {
- if s.client != nil {
- s.client.Close()
- }
- if s.testClientConn != nil {
- s.testClientConn.Close()
- }
- if s.serverClientConn != nil {
- s.serverClientConn.Close()
- }
- // Close all transports so the logs don't get spammy.
- time.Sleep(10 * time.Millisecond)
- if s.proxy != nil {
- s.proxy.Stop()
- s.proxyListener.Close()
- }
- if s.serverListener != nil {
- s.server.Stop()
- s.serverListener.Close()
- }
-}
-
-func TestProxyHappySuite(t *testing.T) {
- suite.Run(t, &ProxyHappySuite{})
-}
+ t.Run("works with nil ErrHandlers", func(t *testing.T) {
+ require.NotPanics(t, func() {
+ failDestinationsWithError(&StreamParameters{
+ primary: Destination{},
+ secondaries: []Destination{{}},
+ }, expectedErr)
+ })
+ })
-func TestProxyErrorPropagation(t *testing.T) {
- errBackend := status.Error(codes.InvalidArgument, "backend error")
- errDirector := status.Error(codes.FailedPrecondition, "director error")
- errRequestFinalizer := status.Error(codes.Internal, "request finalizer error")
+ t.Run("fails both primary and secondaries", func(t *testing.T) {
+ var primaryErr, secondaryErr error
- for _, tc := range []struct {
- desc string
- backendError error
- directorError error
- requestFinalizerError error
- returnedError error
- errHandler func(error) error
- }{
- {
- desc: "backend error is propagated",
- backendError: errBackend,
- returnedError: errBackend,
- },
- {
- desc: "director error is propagated",
- directorError: errDirector,
- returnedError: errDirector,
- },
- {
- desc: "request finalizer error is propagated",
- requestFinalizerError: errRequestFinalizer,
- returnedError: errRequestFinalizer,
- },
- {
- desc: "director error cancels proxying",
- backendError: errBackend,
- requestFinalizerError: errRequestFinalizer,
- directorError: errDirector,
- returnedError: errDirector,
- },
- {
- desc: "backend error prioritized over request finalizer error",
- backendError: errBackend,
- requestFinalizerError: errRequestFinalizer,
- returnedError: errBackend,
- },
- {
- desc: "err handler gets error",
- backendError: errBackend,
- requestFinalizerError: errRequestFinalizer,
- returnedError: errBackend,
- errHandler: func(err error) error {
- require.Equal(t, errBackend, err)
- return errBackend
- },
- },
- {
- desc: "err handler can swallow error",
- backendError: errBackend,
- returnedError: io.EOF,
- errHandler: func(err error) error {
- require.Equal(t, errBackend, err)
+ failDestinationsWithError(&StreamParameters{
+ primary: Destination{ErrHandler: func(err error) error {
+ primaryErr = err
return nil
- },
- },
- {
- desc: "swallowed error surfaces request finalizer error",
- backendError: errBackend,
- requestFinalizerError: errRequestFinalizer,
- returnedError: errRequestFinalizer,
- errHandler: func(err error) error {
- require.Equal(t, errBackend, err)
+ }},
+ secondaries: []Destination{{ErrHandler: func(err error) error {
+ secondaryErr = err
return nil
- },
- },
- } {
- t.Run(tc.desc, func(t *testing.T) {
- tmpDir := testhelper.TempDir(t)
-
- backendListener, err := net.Listen("unix", filepath.Join(tmpDir, "backend"))
- require.NoError(t, err)
-
- backendServer := grpc.NewServer(grpc.UnknownServiceHandler(func(interface{}, grpc.ServerStream) error {
- return tc.backendError
- }))
- go func() { backendServer.Serve(backendListener) }()
- defer backendServer.Stop()
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- backendClientConn, err := grpc.DialContext(ctx, "unix://"+backendListener.Addr().String(),
- grpc.WithInsecure(), grpc.WithDefaultCallOptions(grpc.ForceCodec(proxy.NewCodec())))
- require.NoError(t, err)
- defer func() {
- require.NoError(t, backendClientConn.Close())
- }()
-
- proxyListener, err := net.Listen("unix", filepath.Join(tmpDir, "proxy"))
- require.NoError(t, err)
-
- proxyServer := grpc.NewServer(
- grpc.CustomCodec(proxy.NewCodec()),
- grpc.UnknownServiceHandler(proxy.TransparentHandler(func(ctx context.Context, fullMethodName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
- return proxy.NewStreamParameters(
- proxy.Destination{
- Ctx: ctx,
- Conn: backendClientConn,
- ErrHandler: tc.errHandler,
- },
- nil,
- func() error { return tc.requestFinalizerError },
- nil,
- ), tc.directorError
- })),
- )
-
- go func() { proxyServer.Serve(proxyListener) }()
- defer proxyServer.Stop()
-
- proxyClientConn, err := grpc.DialContext(ctx, "unix://"+proxyListener.Addr().String(), grpc.WithInsecure())
- require.NoError(t, err)
- defer func() {
- require.NoError(t, proxyClientConn.Close())
- }()
-
- resp, err := pb.NewTestServiceClient(proxyClientConn).Ping(ctx, &pb.PingRequest{})
- require.Equal(t, tc.returnedError, err)
- require.Nil(t, resp)
- })
- }
-}
-
-func TestRegisterStreamHandlers(t *testing.T) {
- directorCalledError := errors.New("director was called")
-
- server := grpc.NewServer(
- grpc.CustomCodec(proxy.NewCodec()),
- grpc.UnknownServiceHandler(proxy.TransparentHandler(func(ctx context.Context, fullMethodName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
- return nil, directorCalledError
- })),
- )
-
- var pingStreamHandlerCalled, pingEmptyStreamHandlerCalled bool
-
- pingValue := "hello"
-
- pingStreamHandler := func(srv interface{}, stream grpc.ServerStream) error {
- pingStreamHandlerCalled = true
- var req pb.PingRequest
-
- if err := stream.RecvMsg(&req); err != nil {
- return err
- }
-
- require.Equal(t, pingValue, req.Value)
-
- return stream.SendMsg(nil)
- }
-
- pingEmptyStreamHandler := func(srv interface{}, stream grpc.ServerStream) error {
- pingEmptyStreamHandlerCalled = true
- var req pb.Empty
-
- if err := stream.RecvMsg(&req); err != nil {
- return err
- }
-
- return stream.SendMsg(nil)
- }
-
- streamers := map[string]grpc.StreamHandler{
- "Ping": pingStreamHandler,
- "PingEmpty": pingEmptyStreamHandler,
- }
-
- proxy.RegisterStreamHandlers(server, "mwitkow.testproto.TestService", streamers)
-
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
-
- listener, err := net.Listen("unix", serverSocketPath)
- if err != nil {
- t.Fatal(err)
- }
-
- go server.Serve(listener)
- defer server.Stop()
-
- cc, err := client.Dial("unix://"+serverSocketPath, []grpc.DialOption{grpc.WithBlock()})
- require.NoError(t, err)
- defer cc.Close()
-
- testServiceClient := pb.NewTestServiceClient(cc)
-
- ctx, cancel := testhelper.Context()
- defer cancel()
-
- _, err = testServiceClient.Ping(ctx, &pb.PingRequest{Value: pingValue})
- require.NoError(t, err)
- require.True(t, pingStreamHandlerCalled)
-
- _, err = testServiceClient.PingEmpty(ctx, &pb.Empty{})
- require.NoError(t, err)
- require.True(t, pingEmptyStreamHandlerCalled)
+ }}},
+ }, expectedErr)
- // since PingError was never registered with its own streamer, it should get sent to the UnknownServiceHandler
- _, err = testServiceClient.PingError(ctx, &pb.PingRequest{})
- testhelper.GrpcErrorHasMessage(t, err, directorCalledError.Error())
+ require.Equal(t, expectedErr, primaryErr)
+ require.Equal(t, expectedErr, secondaryErr)
+ })
}
diff --git a/internal/praefect/info_service_test.go b/internal/praefect/info_service_test.go
index d1ae681c5..7c5386673 100644
--- a/internal/praefect/info_service_test.go
+++ b/internal/praefect/info_service_test.go
@@ -32,7 +32,14 @@ func TestInfoService_RepositoryReplicas(t *testing.T) {
}
cfgs = append(cfgs, cfg)
cfgs[i].SocketPath = testserver.RunGitalyServer(t, cfgs[i], nil, func(srv *grpc.Server, deps *service.Dependencies) {
- gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(deps.GetCfg(), deps.GetRubyServer(), deps.GetLocator(), deps.GetTxManager(), deps.GetGitCmdFactory()))
+ gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ ))
}, testserver.WithDisablePraefect())
cfgNodes = append(cfgNodes, &config.Node{
Storage: cfgs[i].Storages[0].Name,
@@ -47,7 +54,7 @@ func TestInfoService_RepositoryReplicas(t *testing.T) {
}
// create a commit in the second replica so we can check that its checksum is different than the primary
- gittest.CreateCommit(t, cfgs[1], filepath.Join(cfgs[1].Storages[0].Path, testRepo.GetRelativePath()), "master", nil)
+ gittest.WriteCommit(t, cfgs[1], filepath.Join(cfgs[1].Storages[0].Path, testRepo.GetRelativePath()), gittest.WithBranch("master"))
nodeManager, err := nodes.NewManager(testhelper.DiscardTestEntry(t), conf, nil, nil, promtest.NewMockHistogramVec(), protoregistry.GitalyProtoPreregistered, nil, nil)
require.NoError(t, err)
diff --git a/internal/praefect/middleware/errorhandler_test.go b/internal/praefect/middleware/errorhandler_test.go
index 72e969f5c..d258463ea 100644
--- a/internal/praefect/middleware/errorhandler_test.go
+++ b/internal/praefect/middleware/errorhandler_test.go
@@ -10,13 +10,13 @@ import (
"github.com/golang/protobuf/ptypes/empty"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy"
"gitlab.com/gitlab-org/gitaly/internal/praefect/mock"
"gitlab.com/gitlab-org/gitaly/internal/praefect/nodes/tracker"
"gitlab.com/gitlab-org/gitaly/internal/praefect/protoregistry"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"google.golang.org/grpc"
)
@@ -98,16 +98,15 @@ func TestStreamInterceptor(t *testing.T) {
simpleClient := mock.NewSimpleServiceClient(praefectCC)
- testRepo, _, cleanup := gittest.CloneRepo(t)
- defer cleanup()
+ _, repo, _ := testcfg.BuildWithRepo(t)
for i := 0; i < threshold; i++ {
_, err = simpleClient.RepoAccessorUnary(ctx, &mock.RepoRequest{
- Repo: testRepo,
+ Repo: repo,
})
require.NoError(t, err)
_, err = simpleClient.RepoMutatorUnary(ctx, &mock.RepoRequest{
- Repo: testRepo,
+ Repo: repo,
})
require.NoError(t, err)
}
@@ -133,11 +132,11 @@ func TestStreamInterceptor(t *testing.T) {
for i := 0; i < threshold; i++ {
_, err = simpleClient.RepoAccessorUnary(ctx, &mock.RepoRequest{
- Repo: testRepo,
+ Repo: repo,
})
require.NoError(t, err)
_, err = simpleClient.RepoMutatorUnary(ctx, &mock.RepoRequest{
- Repo: testRepo,
+ Repo: repo,
})
require.NoError(t, err)
}
diff --git a/internal/praefect/nodes/local_elector_test.go b/internal/praefect/nodes/local_elector_test.go
index a8b848321..46b649d53 100644
--- a/internal/praefect/nodes/local_elector_test.go
+++ b/internal/praefect/nodes/local_elector_test.go
@@ -12,9 +12,9 @@ import (
"google.golang.org/grpc"
)
-func setupElector(t *testing.T) (*localElector, []*nodeStatus, *grpc.ClientConn, *grpc.Server) {
+func setupElector(t *testing.T) (*localElector, []*nodeStatus, *grpc.ClientConn) {
socket := testhelper.GetTemporaryGitalySocketFileName(t)
- svr, _ := testhelper.NewServerWithHealth(t, socket)
+ testhelper.NewServerWithHealth(t, socket)
cc, err := grpc.Dial(
"unix://"+socket,
@@ -34,12 +34,11 @@ func setupElector(t *testing.T) (*localElector, []*nodeStatus, *grpc.ClientConn,
strategy.bootstrap(time.Second)
- return strategy, ns, cc, svr
+ return strategy, ns, cc
}
func TestGetShard(t *testing.T) {
- strategy, ns, _, svr := setupElector(t)
- defer svr.Stop()
+ strategy, ns, _ := setupElector(t)
ctx, cancel := testhelper.Context()
defer cancel()
@@ -52,8 +51,7 @@ func TestGetShard(t *testing.T) {
}
func TestConcurrentCheckWithPrimary(t *testing.T) {
- strategy, ns, _, svr := setupElector(t)
- defer svr.Stop()
+ strategy, ns, _ := setupElector(t)
iterations := 10
var wg sync.WaitGroup
diff --git a/internal/praefect/nodes/manager_test.go b/internal/praefect/nodes/manager_test.go
index a550049de..6aa11a59c 100644
--- a/internal/praefect/nodes/manager_test.go
+++ b/internal/praefect/nodes/manager_test.go
@@ -57,8 +57,7 @@ func assertShard(t *testing.T, exp shardAssertion, act Shard) {
func TestNodeStatus(t *testing.T) {
socket := testhelper.GetTemporaryGitalySocketFileName(t)
- svr, healthSvr := testhelper.NewServerWithHealth(t, socket)
- defer svr.Stop()
+ healthSvr := testhelper.NewServerWithHealth(t, socket)
cc, err := grpc.Dial(
"unix://"+socket,
@@ -112,11 +111,8 @@ func TestManagerFailoverDisabledElectionStrategySQL(t *testing.T) {
},
}
- srv0, healthSrv := testhelper.NewServerWithHealth(t, socket0)
- defer srv0.Stop()
-
- srv1, _ := testhelper.NewServerWithHealth(t, socket1)
- defer srv1.Stop()
+ healthSrv := testhelper.NewServerWithHealth(t, socket0)
+ testhelper.NewServerWithHealth(t, socket1)
conf := config.Config{
Failover: config.Failover{Enabled: false, ElectionStrategy: config.ElectionStrategySQL},
@@ -168,8 +164,7 @@ func TestDialWithUnhealthyNode(t *testing.T) {
},
}
- srv, _ := testhelper.NewHealthServerWithListener(t, primaryLn)
- defer srv.Stop()
+ testhelper.NewHealthServerWithListener(t, primaryLn)
mgr, err := NewManager(testhelper.DiscardTestEntry(t), conf, nil, nil, promtest.NewMockHistogramVec(), protoregistry.GitalyProtoPreregistered, nil, nil)
require.NoError(t, err)
@@ -189,11 +184,8 @@ func TestDialWithUnhealthyNode(t *testing.T) {
func TestNodeManager(t *testing.T) {
internalSocket0, internalSocket1 := testhelper.GetTemporaryGitalySocketFileName(t), testhelper.GetTemporaryGitalySocketFileName(t)
- srv0, healthSrv0 := testhelper.NewServerWithHealth(t, internalSocket0)
- defer srv0.Stop()
-
- srv1, healthSrv1 := testhelper.NewServerWithHealth(t, internalSocket1)
- defer srv1.Stop()
+ healthSrv0 := testhelper.NewServerWithHealth(t, internalSocket0)
+ healthSrv1 := testhelper.NewServerWithHealth(t, internalSocket1)
node1 := &config.Node{
Storage: "praefect-internal-0",
@@ -314,13 +306,11 @@ func TestMgr_GetSyncedNode(t *testing.T) {
const virtualStorage = "virtual-storage-0"
const repoPath = "path/1"
- var srvs [count]*grpc.Server
var healthSrvs [count]*health.Server
var nodes [count]*config.Node
for i := 0; i < count; i++ {
socket := testhelper.GetTemporaryGitalySocketFileName(t)
- srvs[i], healthSrvs[i] = testhelper.NewServerWithHealth(t, socket)
- defer srvs[i].Stop()
+ healthSrvs[i] = testhelper.NewServerWithHealth(t, socket)
nodes[i] = &config.Node{Storage: fmt.Sprintf("gitaly-%d", i), Address: "unix://" + socket}
}
@@ -479,8 +469,7 @@ func TestNodeStatus_IsHealthy(t *testing.T) {
socket := testhelper.GetTemporaryGitalySocketFileName(t)
address := "unix://" + socket
- srv, healthSrv := testhelper.NewServerWithHealth(t, socket)
- defer srv.Stop()
+ healthSrv := testhelper.NewServerWithHealth(t, socket)
clientConn, err := client.Dial(address, nil)
require.NoError(t, err)
diff --git a/internal/praefect/nodes/per_repository_test.go b/internal/praefect/nodes/per_repository_test.go
index 546386fa5..067b132e4 100644
--- a/internal/praefect/nodes/per_repository_test.go
+++ b/internal/praefect/nodes/per_repository_test.go
@@ -502,24 +502,52 @@ func TestPerRepositoryElector(t *testing.T) {
}
for _, step := range tc.steps {
- logger, hook := test.NewNullLogger()
- elector := NewPerRepositoryElector(logrus.NewEntry(logger), db,
- HealthConsensusFunc(func() map[string][]string { return step.healthyNodes }),
- )
- elector.handleError = func(err error) error { return err }
+ runElection := func(tx *sql.Tx, matchLogs logMatcher) {
+ // Run a single election within the given transaction and verify the elected primary and the logged entries.
+ logger, hook := test.NewNullLogger()
+ elector := NewPerRepositoryElector(logrus.NewEntry(logger), tx,
+ HealthConsensusFunc(func() map[string][]string { return step.healthyNodes }),
+ )
+ elector.handleError = func(err error) error { return err }
+
+ trigger := make(chan struct{}, 1)
+ trigger <- struct{}{}
+ close(trigger)
+
+ require.NoError(t, elector.Run(ctx, trigger))
+
+ primary, err := elector.GetPrimary(ctx, "virtual-storage-1", "relative-path-1")
+ assert.Equal(t, step.error, err)
+ step.primary(t, primary)
+
+ require.Len(t, hook.Entries, 3)
+ matchLogs(t, hook.Entries[1])
+ }
+
+ // Run every step with two concurrent transactions to ensure two Praefects running the
+ // election at the same time do not elect the primary multiple times. We begin both
+ // transactions at the same time to ensure they have the same snapshot of the
+ // database. The second transaction would be blocked until the first transaction commits.
+ // To verify concurrent election runs do not elect the primary multiple times, we assert
+ // the second transaction performed no changes and the primary is what the first run elected
+ // it to be.
+ txFirst, err := db.Begin()
+ require.NoError(t, err)
+ defer txFirst.Rollback()
+
+ txSecond, err := db.Begin()
+ require.NoError(t, err)
+ defer txSecond.Rollback()
- trigger := make(chan struct{}, 1)
- trigger <- struct{}{}
- close(trigger)
+ runElection(txFirst, step.matchLogs)
- require.NoError(t, elector.Run(ctx, trigger))
+ require.NoError(t, txFirst.Commit())
- primary, err := elector.GetPrimary(ctx, "virtual-storage-1", "relative-path-1")
- assert.Equal(t, step.error, err)
- step.primary(t, primary)
+ // Run the second election on the same database snapshot. This should result in no changes.
+ // Running this prior to the first transaction committing would block.
+ runElection(txSecond, noChanges)
- require.Len(t, hook.Entries, 3)
- step.matchLogs(t, hook.Entries[1])
+ require.NoError(t, txSecond.Commit())
}
})
}
diff --git a/internal/praefect/nodes/sql_elector_test.go b/internal/praefect/nodes/sql_elector_test.go
index 025df6588..c8b203f54 100644
--- a/internal/praefect/nodes/sql_elector_test.go
+++ b/internal/praefect/nodes/sql_elector_test.go
@@ -40,8 +40,7 @@ func TestGetPrimaryAndSecondaries(t *testing.T) {
}
internalSocket0 := testhelper.GetTemporaryGitalySocketFileName(t)
- srv0, _ := testhelper.NewServerWithHealth(t, internalSocket0)
- defer srv0.Stop()
+ testhelper.NewServerWithHealth(t, internalSocket0)
cc0, err := grpc.Dial(
"unix://"+internalSocket0,
@@ -77,8 +76,7 @@ func TestSqlElector_slow_execution(t *testing.T) {
logger := testhelper.NewTestLogger(t).WithField("test", t.Name())
gitalySocket := testhelper.GetTemporaryGitalySocketFileName(t)
- gitalySrv, _ := testhelper.NewServerWithHealth(t, gitalySocket)
- defer gitalySrv.Stop()
+ testhelper.NewServerWithHealth(t, gitalySocket)
gitalyConn, err := grpc.Dial(
"unix://"+gitalySocket,
@@ -118,11 +116,8 @@ func TestBasicFailover(t *testing.T) {
conf := config.Config{SocketPath: socketName}
internalSocket0, internalSocket1 := testhelper.GetTemporaryGitalySocketFileName(t), testhelper.GetTemporaryGitalySocketFileName(t)
- srv0, healthSrv0 := testhelper.NewServerWithHealth(t, internalSocket0)
- defer srv0.Stop()
-
- srv1, healthSrv1 := testhelper.NewServerWithHealth(t, internalSocket1)
- defer srv1.Stop()
+ healthSrv0 := testhelper.NewServerWithHealth(t, internalSocket0)
+ healthSrv1 := testhelper.NewServerWithHealth(t, internalSocket1)
addr0 := "unix://" + internalSocket0
cc0, err := grpc.Dial(
diff --git a/internal/praefect/protoregistry/protoregistry_test.go b/internal/praefect/protoregistry/protoregistry_test.go
index 1d3d12e15..7d5a95033 100644
--- a/internal/praefect/protoregistry/protoregistry_test.go
+++ b/internal/praefect/protoregistry/protoregistry_test.go
@@ -16,11 +16,9 @@ func TestNewProtoRegistry(t *testing.T) {
expectedResults := map[string]map[string]protoregistry.OpType{
"BlobService": map[string]protoregistry.OpType{
- "GetBlob": protoregistry.OpAccessor,
- "GetBlobs": protoregistry.OpAccessor,
- "GetLFSPointers": protoregistry.OpAccessor,
- "GetNewLFSPointers": protoregistry.OpAccessor,
- "GetAllLFSPointers": protoregistry.OpAccessor,
+ "GetBlob": protoregistry.OpAccessor,
+ "GetBlobs": protoregistry.OpAccessor,
+ "GetLFSPointers": protoregistry.OpAccessor,
},
"CleanupService": map[string]protoregistry.OpType{
"ApplyBfgObjectMapStream": protoregistry.OpMutator,
@@ -125,7 +123,7 @@ func TestNewProtoRegistry(t *testing.T) {
"GetArchive": protoregistry.OpAccessor,
"HasLocalBranches": protoregistry.OpAccessor,
"FetchSourceBranch": protoregistry.OpMutator,
- "Fsck": protoregistry.OpMutator,
+ "Fsck": protoregistry.OpAccessor,
"WriteRef": protoregistry.OpMutator,
"FindMergeBase": protoregistry.OpAccessor,
"CreateFork": protoregistry.OpMutator,
@@ -160,14 +158,12 @@ func TestNewProtoRegistry(t *testing.T) {
"SSHUploadArchive": protoregistry.OpAccessor,
},
"WikiService": map[string]protoregistry.OpType{
- "WikiGetPageVersions": protoregistry.OpAccessor,
- "WikiWritePage": protoregistry.OpMutator,
- "WikiUpdatePage": protoregistry.OpMutator,
- "WikiDeletePage": protoregistry.OpMutator,
- "WikiFindPage": protoregistry.OpAccessor,
- "WikiFindFile": protoregistry.OpAccessor,
- "WikiGetAllPages": protoregistry.OpAccessor,
- "WikiListPages": protoregistry.OpAccessor,
+ "WikiWritePage": protoregistry.OpMutator,
+ "WikiUpdatePage": protoregistry.OpMutator,
+ "WikiDeletePage": protoregistry.OpMutator,
+ "WikiFindPage": protoregistry.OpAccessor,
+ "WikiGetAllPages": protoregistry.OpAccessor,
+ "WikiListPages": protoregistry.OpAccessor,
},
}
diff --git a/internal/praefect/replicator_test.go b/internal/praefect/replicator_test.go
index 0c88bd461..4a5081c1a 100644
--- a/internal/praefect/replicator_test.go
+++ b/internal/praefect/replicator_test.go
@@ -2,7 +2,6 @@ package praefect
import (
"context"
- "net"
"os"
"path/filepath"
"strings"
@@ -21,6 +20,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/internal/git/objectpool"
gconfig "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/setup"
"gitlab.com/gitlab-org/gitaly/internal/middleware/metadatahandler"
"gitlab.com/gitlab-org/gitaly/internal/praefect/config"
@@ -38,10 +38,7 @@ import (
"gitlab.com/gitlab-org/labkit/correlation"
"golang.org/x/sync/errgroup"
"google.golang.org/grpc"
- "google.golang.org/grpc/health"
- healthpb "google.golang.org/grpc/health/grpc_health_v1"
"google.golang.org/grpc/metadata"
- "google.golang.org/grpc/reflection"
)
func TestMain(m *testing.M) {
@@ -92,7 +89,7 @@ func TestReplMgr_ProcessBacklog(t *testing.T) {
// create object pool on the source
objectPoolPath := gittest.NewObjectPoolName(t)
- pool, err := objectpool.NewObjectPool(primaryCfg, gconfig.NewLocator(primaryCfg), git.NewExecCommandFactory(primaryCfg), testRepo.GetStorageName(), objectPoolPath)
+ pool, err := objectpool.NewObjectPool(primaryCfg, gconfig.NewLocator(primaryCfg), git.NewExecCommandFactory(primaryCfg), nil, testRepo.GetStorageName(), objectPoolPath)
require.NoError(t, err)
poolCtx, cancel := testhelper.Context()
@@ -145,9 +142,7 @@ func TestReplMgr_ProcessBacklog(t *testing.T) {
}
require.Len(t, events, 1)
- commitID := gittest.CreateCommit(t, primaryCfg, testRepoPath, "master", &gittest.CreateCommitOpts{
- Message: "a commit",
- })
+ commitID := gittest.WriteCommit(t, primaryCfg, testRepoPath, gittest.WithBranch("master"))
var mockReplicationLatencyHistogramVec promtest.MockHistogramVec
var mockReplicationDelayHistogramVec promtest.MockHistogramVec
@@ -201,8 +196,8 @@ func TestReplMgr_ProcessBacklog(t *testing.T) {
replicatedPath := filepath.Join(backupCfg.Storages[0].Path, testRepo.GetRelativePath())
- testhelper.MustRunCommand(t, nil, "git", "-C", replicatedPath, "cat-file", "-e", commitID)
- testhelper.MustRunCommand(t, nil, "git", "-C", replicatedPath, "gc")
+ gittest.Exec(t, backupCfg, "-C", replicatedPath, "cat-file", "-e", commitID.String())
+ gittest.Exec(t, backupCfg, "-C", replicatedPath, "gc")
require.Less(t, gittest.GetGitPackfileDirSize(t, replicatedPath), int64(100), "expect a small pack directory")
require.Equal(t, mockReplicationLatencyHistogramVec.LabelsCalled(), [][]string{{"update"}})
@@ -273,13 +268,14 @@ func TestReplicatorDowngradeAttempt(t *testing.T) {
}
func TestPropagateReplicationJob(t *testing.T) {
- primaryServer, primarySocketPath, cleanup := runMockRepositoryServer(t)
- defer cleanup()
+ primaryStorage, secondaryStorage := "internal-gitaly-0", "internal-gitaly-1"
- secondaryServer, secondarySocketPath, cleanup := runMockRepositoryServer(t)
- defer cleanup()
+ primCfg := testcfg.Build(t, testcfg.WithStorages(primaryStorage))
+ primaryServer, primarySocketPath := runMockRepositoryServer(t, primCfg)
+
+ secCfg := testcfg.Build(t, testcfg.WithStorages(secondaryStorage))
+ secondaryServer, secondarySocketPath := runMockRepositoryServer(t, secCfg)
- primaryStorage, secondaryStorage := "internal-gitaly-0", "internal-gitaly-1"
conf := config.Config{
VirtualStorages: []*config.VirtualStorage{
{
@@ -464,23 +460,14 @@ func (m *mockServer) PackRefs(ctx context.Context, in *gitalypb.PackRefsRequest)
return &gitalypb.PackRefsResponse{}, nil
}
-func runMockRepositoryServer(t *testing.T) (*mockServer, string, func()) {
- server := testhelper.NewTestGrpcServer(t, nil, nil)
- serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
-
- listener, err := net.Listen("unix", serverSocketPath)
- require.NoError(t, err)
-
+func runMockRepositoryServer(t *testing.T, cfg gconfig.Cfg) (*mockServer, string) {
mockServer := newMockRepositoryServer()
- gitalypb.RegisterRepositoryServiceServer(server, mockServer)
- gitalypb.RegisterRefServiceServer(server, mockServer)
- healthpb.RegisterHealthServer(server, health.NewServer())
- reflection.Register(server)
-
- go server.Serve(listener)
-
- return mockServer, "unix://" + serverSocketPath, server.Stop
+ addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterRepositoryServiceServer(srv, mockServer)
+ gitalypb.RegisterRefServiceServer(srv, mockServer)
+ })
+ return mockServer, addr
}
func waitForRequest(t *testing.T, ch chan proto.Message, expected proto.Message, timeout time.Duration) {
@@ -502,7 +489,7 @@ func TestConfirmReplication(t *testing.T) {
cfg, testRepoA, testRepoAPath := testcfg.BuildWithRepo(t)
srvSocketPath := testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll, testserver.WithDisablePraefect())
- testRepoB, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg.Storages[0], "second")
+ testRepoB, _, cleanupFn := gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], "second")
t.Cleanup(cleanupFn)
connOpts := []grpc.DialOption{
@@ -517,9 +504,7 @@ func TestConfirmReplication(t *testing.T) {
require.NoError(t, err)
require.True(t, equal)
- gittest.CreateCommit(t, cfg, testRepoAPath, "master", &gittest.CreateCommitOpts{
- Message: "a commit",
- })
+ gittest.WriteCommit(t, cfg, testRepoAPath, gittest.WithBranch("master"))
equal, err = confirmChecksums(ctx, testhelper.DiscardTestLogger(t), gitalypb.NewRepositoryServiceClient(conn), gitalypb.NewRepositoryServiceClient(conn), testRepoA, testRepoB)
require.NoError(t, err)
@@ -812,8 +797,7 @@ func TestProcessBacklog_Success(t *testing.T) {
t.Fatal("time limit expired for job to complete")
}
- _, serr := os.Stat(fullNewPath1)
- require.True(t, os.IsNotExist(serr), "repository must be moved from %q to the new location", fullNewPath1)
+ require.NoDirExists(t, fullNewPath1, "repository must be moved from %q to the new location", fullNewPath1)
require.True(t, storage.IsGitDirectory(fullNewPath2), "repository must exist at new last RenameRepository location")
}
diff --git a/internal/praefect/repository_exists_test.go b/internal/praefect/repository_exists_test.go
index 4bf760236..dfb0b4772 100644
--- a/internal/praefect/repository_exists_test.go
+++ b/internal/praefect/repository_exists_test.go
@@ -72,7 +72,7 @@ func TestRepositoryExistsStreamInterceptor(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- require.NoError(t, rs.CreateRepository(ctx, "virtual-storage", "relative-path", "storage", nil, false, false))
+ require.NoError(t, rs.CreateRepository(ctx, "virtual-storage", "relative-path", "storage", nil, nil, false, false))
electionStrategy := config.ElectionStrategyPerRepository
if tc.routeToGitaly {
diff --git a/internal/praefect/router_node_manager.go b/internal/praefect/router_node_manager.go
index f176bd7b6..7952615bf 100644
--- a/internal/praefect/router_node_manager.go
+++ b/internal/praefect/router_node_manager.go
@@ -117,8 +117,29 @@ func (r *nodeManagerRouter) RouteRepositoryMutator(ctx context.Context, virtualS
}, nil
}
+// RouteRepositoryCreation includes healthy secondaries in the transaction and sets the unhealthy secondaries as
+// replication targets. The virtual storage's primary acts as the primary for every repository.
func (r *nodeManagerRouter) RouteRepositoryCreation(ctx context.Context, virtualStorage string) (RepositoryMutatorRoute, error) {
- // nodeManagerRouter doesn't support repository assignments nor repository specific primaries. It
- // is sufficient to route the requests as normal mutators.
- return r.RouteRepositoryMutator(ctx, virtualStorage, "")
+ shard, err := r.mgr.GetShard(ctx, virtualStorage)
+ if err != nil {
+ return RepositoryMutatorRoute{}, fmt.Errorf("get shard: %w", err)
+ }
+
+ var secondaries []RouterNode
+ var replicationTargets []string
+
+ for _, secondary := range shard.Secondaries {
+ if secondary.IsHealthy() {
+ secondaries = append(secondaries, toRouterNode(secondary))
+ continue
+ }
+
+ replicationTargets = append(replicationTargets, secondary.GetStorage())
+ }
+
+ return RepositoryMutatorRoute{
+ Primary: toRouterNode(shard.Primary),
+ Secondaries: secondaries,
+ ReplicationTargets: replicationTargets,
+ }, nil
}
diff --git a/internal/praefect/router_per_repository.go b/internal/praefect/router_per_repository.go
index eabf4fa61..56e561b04 100644
--- a/internal/praefect/router_per_repository.go
+++ b/internal/praefect/router_per_repository.go
@@ -234,7 +234,8 @@ func (r *PerRepositoryRouter) RouteRepositoryMutator(ctx context.Context, virtua
return route, nil
}
-// RouteRepositoryCreation picks a random healthy node to act as the primary node and sets other nodes as
+// RouteRepositoryCreation picks a random healthy node to act as the primary node and selects the secondary nodes
+// if assignments are enabled. Healthy secondaries take part in the transaction; unhealthy secondaries are set as
// replication targets.
func (r *PerRepositoryRouter) RouteRepositoryCreation(ctx context.Context, virtualStorage string) (RepositoryMutatorRoute, error) {
healthyNodes, err := r.healthyNodes(virtualStorage)
@@ -252,31 +253,51 @@ func (r *PerRepositoryRouter) RouteRepositoryCreation(ctx context.Context, virtu
return RepositoryMutatorRoute{Primary: primary}, nil
}
- // NodeManagerRouter doesn't consider any secondaries as consistent when creating a repository,
- // thus the primary is the only participant in the transaction and the secondaries get replicated to.
- // PerRepositoryRouter matches that behavior here for consistency.
- var replicationTargets []string
- for storage := range r.conns[virtualStorage] {
+ var secondaryNodes []RouterNode
+ for storage, conn := range r.conns[virtualStorage] {
if storage == primary.Storage {
continue
}
- replicationTargets = append(replicationTargets, storage)
+ secondaryNodes = append(secondaryNodes, RouterNode{
+ Storage: storage,
+ Connection: conn,
+ })
}
// replicationFactor being zero indicates it has not been configured. If so, we fallback to the behavior
- // of no assignments and replicate everywhere.
- if replicationFactor > 1 {
- r.rand.Shuffle(len(replicationTargets), func(i, j int) {
- replicationTargets[i], replicationTargets[j] = replicationTargets[j], replicationTargets[i]
+ // of no assignments, replicate everywhere and do not select assigned secondaries below.
+ if replicationFactor > 0 {
+ // Select random secondaries according to the default replication factor.
+ r.rand.Shuffle(len(secondaryNodes), func(i, j int) {
+ secondaryNodes[i], secondaryNodes[j] = secondaryNodes[j], secondaryNodes[i]
})
- // deduct one as the primary is also hosting the repository
- replicationTargets = replicationTargets[:replicationFactor-1]
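+ // Deduct one as the primary also hosts the repository.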
+ secondaryNodes = secondaryNodes[:replicationFactor-1]
+ }
+
+ var secondaries []RouterNode
+ var replicationTargets []string
+ for _, secondaryNode := range secondaryNodes {
+ isHealthy := false
+ for _, healthyNode := range healthyNodes {
+ if healthyNode == secondaryNode {
+ isHealthy = true
+ break
+ }
+ }
+
+ if isHealthy {
+ secondaries = append(secondaries, secondaryNode)
+ continue
+ }
+
+ replicationTargets = append(replicationTargets, secondaryNode.Storage)
}
return RepositoryMutatorRoute{
Primary: primary,
+ Secondaries: secondaries,
ReplicationTargets: replicationTargets,
}, nil
}
diff --git a/internal/praefect/router_per_repository_test.go b/internal/praefect/router_per_repository_test.go
index d97431df8..a2dbc4e8e 100644
--- a/internal/praefect/router_per_repository_test.go
+++ b/internal/praefect/router_per_repository_test.go
@@ -399,109 +399,150 @@ func TestPerRepositoryRouter_RouteRepositoryCreation(t *testing.T) {
"virtual-storage-1": {"primary", "secondary-1", "secondary-2"},
}
- type matcher func(*testing.T, []string)
-
- requireOneOf := func(expected ...[]string) func(*testing.T, []string) {
- return func(t *testing.T, actual []string) {
- sort.Strings(actual)
+ type matcher func(*testing.T, RepositoryMutatorRoute)
+
+ requireOneOf := func(expected ...RepositoryMutatorRoute) matcher {
+ return func(t *testing.T, actual RepositoryMutatorRoute) {
+ sort.Slice(actual.Secondaries, func(i, j int) bool {
+ return actual.Secondaries[i].Storage < actual.Secondaries[j].Storage
+ })
+ sort.Strings(actual.ReplicationTargets)
require.Contains(t, expected, actual)
}
}
- requireNil := func(t *testing.T, actual []string) {
- require.Nil(t, actual)
- }
+ primaryConn := &grpc.ClientConn{}
+ secondary1Conn := &grpc.ClientConn{}
+ secondary2Conn := &grpc.ClientConn{}
for _, tc := range []struct {
- desc string
- virtualStorage string
- healthyNodes StaticHealthChecker
- replicationFactor int
- primaryCandidates int
- primaryPick int
- secondaryCandidates int
- primary string
- requireReplicationTargets matcher
- error error
+ desc string
+ virtualStorage string
+ healthyNodes StaticHealthChecker
+ replicationFactor int
+ primaryCandidates int
+ primaryPick int
+ secondaryCandidates int
+ matchRoute matcher
+ error error
}{
{
- desc: "no healthy nodes",
- virtualStorage: "virtual-storage-1",
- healthyNodes: StaticHealthChecker{},
- error: ErrNoHealthyNodes,
- requireReplicationTargets: requireNil,
+ desc: "no healthy nodes",
+ virtualStorage: "virtual-storage-1",
+ healthyNodes: StaticHealthChecker{},
+ error: ErrNoHealthyNodes,
+ },
+ {
+ desc: "invalid virtual storage",
+ virtualStorage: "invalid",
+ error: nodes.ErrVirtualStorageNotExist,
},
{
- desc: "invalid virtual storage",
- virtualStorage: "invalid",
- error: nodes.ErrVirtualStorageNotExist,
- requireReplicationTargets: requireNil,
+ desc: "no healthy secondaries",
+ virtualStorage: "virtual-storage-1",
+ healthyNodes: StaticHealthChecker{"virtual-storage-1": {"primary"}},
+ primaryCandidates: 1,
+ primaryPick: 0,
+ matchRoute: requireOneOf(
+ RepositoryMutatorRoute{
+ Primary: RouterNode{Storage: "primary", Connection: primaryConn},
+ ReplicationTargets: []string{"secondary-1", "secondary-2"},
+ },
+ ),
},
{
- desc: "no healthy secondaries",
- virtualStorage: "virtual-storage-1",
- healthyNodes: StaticHealthChecker{"virtual-storage-1": {"primary"}},
- primaryCandidates: 1,
- primaryPick: 0,
- primary: "primary",
- requireReplicationTargets: requireOneOf([]string{"secondary-1", "secondary-2"}),
+ desc: "success with all secondaries healthy",
+ virtualStorage: "virtual-storage-1",
+ healthyNodes: StaticHealthChecker(configuredNodes),
+ primaryCandidates: 3,
+ primaryPick: 0,
+ matchRoute: requireOneOf(
+ RepositoryMutatorRoute{
+ Primary: RouterNode{Storage: "primary", Connection: primaryConn},
+ Secondaries: []RouterNode{
+ {Storage: "secondary-1", Connection: secondary1Conn},
+ {Storage: "secondary-2", Connection: secondary2Conn},
+ },
+ },
+ ),
},
{
- desc: "success",
- virtualStorage: "virtual-storage-1",
- healthyNodes: StaticHealthChecker(configuredNodes),
- primaryCandidates: 3,
- primaryPick: 0,
- primary: "primary",
- requireReplicationTargets: requireOneOf([]string{"secondary-1", "secondary-2"}),
+ desc: "success with one secondary unhealthy",
+ virtualStorage: "virtual-storage-1",
+ healthyNodes: StaticHealthChecker{"virtual-storage-1": {"primary", "secondary-1"}},
+ primaryCandidates: 2,
+ primaryPick: 0,
+ matchRoute: requireOneOf(
+ RepositoryMutatorRoute{
+ Primary: RouterNode{Storage: "primary", Connection: primaryConn},
+ Secondaries: []RouterNode{
+ {Storage: "secondary-1", Connection: secondary1Conn},
+ },
+ ReplicationTargets: []string{"secondary-2"},
+ },
+ ),
},
{
- desc: "replication factor of one configured",
- virtualStorage: "virtual-storage-1",
- healthyNodes: StaticHealthChecker(configuredNodes),
- replicationFactor: 1,
- primaryCandidates: 3,
- primaryPick: 0,
- primary: "primary",
- requireReplicationTargets: requireNil,
+ desc: "replication factor of one configured",
+ virtualStorage: "virtual-storage-1",
+ healthyNodes: StaticHealthChecker(configuredNodes),
+ replicationFactor: 1,
+ primaryCandidates: 3,
+ primaryPick: 0,
+ matchRoute: requireOneOf(
+ RepositoryMutatorRoute{
+ Primary: RouterNode{Storage: "primary", Connection: primaryConn},
+ },
+ ),
},
{
- desc: "replication factor of two configured",
- virtualStorage: "virtual-storage-1",
- healthyNodes: StaticHealthChecker(configuredNodes),
- replicationFactor: 2,
- primaryCandidates: 3,
- primaryPick: 0,
- secondaryCandidates: 2,
- primary: "primary",
- requireReplicationTargets: requireOneOf([]string{"secondary-1"}, []string{"secondary-2"}),
+ desc: "replication factor of two configured",
+ virtualStorage: "virtual-storage-1",
+ healthyNodes: StaticHealthChecker(configuredNodes),
+ replicationFactor: 2,
+ primaryCandidates: 3,
+ primaryPick: 0,
+ secondaryCandidates: 2,
+ matchRoute: requireOneOf(
+ RepositoryMutatorRoute{
+ Primary: RouterNode{Storage: "primary", Connection: primaryConn},
+ Secondaries: []RouterNode{{Storage: "secondary-1", Connection: secondary1Conn}},
+ },
+ RepositoryMutatorRoute{
+ Primary: RouterNode{Storage: "primary", Connection: primaryConn},
+ Secondaries: []RouterNode{{Storage: "secondary-2", Connection: secondary1Conn}},
+ },
+ ),
},
{
- desc: "replication factor of three configured with unhealthy secondary",
- virtualStorage: "virtual-storage-1",
- healthyNodes: StaticHealthChecker{"virtual-storage-1": {"primary", "secondary-1"}},
- replicationFactor: 3,
- primaryCandidates: 2,
- primaryPick: 0,
- secondaryCandidates: 2,
- primary: "primary",
- requireReplicationTargets: requireOneOf([]string{"secondary-1", "secondary-2"}),
+ desc: "replication factor of three configured with unhealthy secondary",
+ virtualStorage: "virtual-storage-1",
+ healthyNodes: StaticHealthChecker{"virtual-storage-1": {"primary", "secondary-1"}},
+ replicationFactor: 3,
+ primaryCandidates: 2,
+ primaryPick: 0,
+ secondaryCandidates: 2,
+ matchRoute: requireOneOf(
+ RepositoryMutatorRoute{
+ Primary: RouterNode{Storage: "primary", Connection: primaryConn},
+ Secondaries: []RouterNode{{Storage: "secondary-1", Connection: secondary1Conn}},
+ ReplicationTargets: []string{"secondary-2"},
+ },
+ ),
},
} {
t.Run(tc.desc, func(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- conns := Connections{
- "virtual-storage-1": {
- "primary": &grpc.ClientConn{},
- "secondary-1": &grpc.ClientConn{},
- "secondary-2": &grpc.ClientConn{},
- },
- }
-
route, err := NewPerRepositoryRouter(
- conns,
+ Connections{
+ "virtual-storage-1": {
+ "primary": primaryConn,
+ "secondary-1": secondary1Conn,
+ "secondary-2": secondary2Conn,
+ },
+ },
nil,
tc.healthyNodes,
mockRandom{
@@ -517,16 +558,13 @@ func TestPerRepositoryRouter_RouteRepositoryCreation(t *testing.T) {
nil,
map[string]int{"virtual-storage-1": tc.replicationFactor},
).RouteRepositoryCreation(ctx, tc.virtualStorage)
- require.Equal(t, tc.error, err)
-
- // assert replication targets separately as the picked secondary
- // is random
- tc.requireReplicationTargets(t, route.ReplicationTargets)
- route.ReplicationTargets = nil
+ if tc.error != nil {
+ require.Equal(t, tc.error, err)
+ return
+ }
- require.Equal(t, RepositoryMutatorRoute{
- Primary: RouterNode{Storage: tc.primary, Connection: conns[tc.virtualStorage][tc.primary]},
- }, route)
+ require.NoError(t, err)
+ tc.matchRoute(t, route)
})
}
}
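
The test refactor above folds the previous primary and replication-target assertions into a single matchRoute callback: because the router picks the replicating secondary at random, the produced RepositoryMutatorRoute has to be compared against a set of acceptable candidates rather than one fixed expected value. The following is a minimal, self-contained sketch of that one-of matching pattern; the route type and helper name are stand-ins for illustration, not the actual requireOneOf helper from the test file.

package main

import (
	"fmt"
	"reflect"
)

// route stands in for the router's RepositoryMutatorRoute; the point of the sketch
// is the "one of" matching pattern, not the concrete type.
type route struct {
	Primary            string
	Secondaries        []string
	ReplicationTargets []string
}

// matchesOneOf accepts the actual route if it deep-equals any of the candidates,
// which is what a requireOneOf-style matcher needs when part of the route is random.
func matchesOneOf(actual route, candidates ...route) bool {
	for _, candidate := range candidates {
		if reflect.DeepEqual(actual, candidate) {
			return true
		}
	}
	return false
}

func main() {
	actual := route{Primary: "primary", Secondaries: []string{"secondary-2"}}

	fmt.Println(matchesOneOf(actual,
		route{Primary: "primary", Secondaries: []string{"secondary-1"}},
		route{Primary: "primary", Secondaries: []string{"secondary-2"}},
	)) // true: the randomly picked secondary matched the second candidate
}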
diff --git a/internal/praefect/server_factory_test.go b/internal/praefect/server_factory_test.go
index edaaa0bb8..d778de292 100644
--- a/internal/praefect/server_factory_test.go
+++ b/internal/praefect/server_factory_test.go
@@ -4,7 +4,6 @@ import (
"context"
"crypto/tls"
"crypto/x509"
- "io/ioutil"
"net"
"os"
"testing"
@@ -13,6 +12,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/bootstrap/starter"
+ "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
gconfig "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/setup"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
@@ -58,7 +58,7 @@ func TestServerFactory(t *testing.T) {
}
repo.StorageName = conf.VirtualStorages[0].Name // storage must be re-written to virtual to be properly redirected by praefect
- revision := text.ChompBytes(testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "rev-parse", "HEAD"))
+ revision := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "HEAD"))
logger := testhelper.DiscardTestEntry(t)
queue := datastore.NewMemoryReplicationEventQueue(conf)
@@ -93,9 +93,12 @@ func TestServerFactory(t *testing.T) {
t.Helper()
commitClient := gitalypb.NewCommitServiceClient(cc)
- resp, err := commitClient.CommitLanguages(ctx, &gitalypb.CommitLanguagesRequest{Repository: repo, Revision: []byte(revision)})
+ resp, err := commitClient.FindCommit(ctx, &gitalypb.FindCommitRequest{
+ Repository: repo,
+ Revision: []byte(revision),
+ })
require.NoError(t, err)
- require.Len(t, resp.Languages, 4)
+ require.Equal(t, revision, resp.Commit.Id)
}
t.Run("insecure", func(t *testing.T) {
@@ -143,8 +146,7 @@ func TestServerFactory(t *testing.T) {
certPool, err := x509.SystemCertPool()
require.NoError(t, err)
- pem, err := ioutil.ReadFile(conf.TLS.CertPath)
- require.NoError(t, err)
+ pem := testhelper.MustReadFile(t, conf.TLS.CertPath)
require.True(t, certPool.AppendCertsFromPEM(pem))
diff --git a/internal/praefect/server_test.go b/internal/praefect/server_test.go
index 991b54a76..b54163a36 100644
--- a/internal/praefect/server_test.go
+++ b/internal/praefect/server_test.go
@@ -3,7 +3,6 @@ package praefect
import (
"bytes"
"context"
- "crypto/sha1"
"errors"
"io"
"net"
@@ -22,15 +21,14 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/internal/git"
- "gitlab.com/gitlab-org/gitaly/internal/git/gittest"
gconfig "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/setup"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/praefect/config"
"gitlab.com/gitlab-org/gitaly/internal/praefect/datastore"
"gitlab.com/gitlab-org/gitaly/internal/praefect/grpc-proxy/proxy"
- "gitlab.com/gitlab-org/gitaly/internal/praefect/metadata"
"gitlab.com/gitlab-org/gitaly/internal/praefect/mock"
"gitlab.com/gitlab-org/gitaly/internal/praefect/nodes"
"gitlab.com/gitlab-org/gitaly/internal/praefect/nodes/tracker"
@@ -41,11 +39,12 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/testhelper/promtest"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/internal/testhelper/testserver"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/txinfo"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/internal/version"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
- "google.golang.org/grpc/health"
"google.golang.org/grpc/health/grpc_health_v1"
grpc_metadata "google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
@@ -70,7 +69,9 @@ func TestNewBackchannelServerFactory(t *testing.T) {
}
resp, err := gitalypb.NewRefTransactionClient(backchannelConn).VoteTransaction(
- stream.Context(), &gitalypb.VoteTransactionRequest{},
+ stream.Context(), &gitalypb.VoteTransactionRequest{
+ ReferenceUpdatesHash: voting.VoteFromData([]byte{}).Bytes(),
+ },
)
assert.Nil(t, resp)
@@ -198,7 +199,7 @@ func TestGitalyServerInfo(t *testing.T) {
func TestGitalyServerInfoBadNode(t *testing.T) {
gitalySocket := testhelper.GetTemporaryGitalySocketFileName(t)
- _, healthSrv := testhelper.NewServerWithHealth(t, gitalySocket)
+ healthSrv := testhelper.NewServerWithHealth(t, gitalySocket)
healthSrv.SetServingStatus("", grpc_health_v1.HealthCheckResponse_UNKNOWN)
conf := config.Config{
@@ -677,13 +678,13 @@ func (m *mockSmartHTTP) PostReceivePack(stream gitalypb.SmartHTTPService_PostRec
ctx := stream.Context()
- tx, err := metadata.TransactionFromContext(ctx)
+ tx, err := txinfo.TransactionFromContext(ctx)
if err != nil {
return helper.ErrInternal(err)
}
- hash := sha1.Sum([]byte{})
- if err := m.txMgr.VoteTransaction(ctx, tx.ID, tx.Node, hash[:]); err != nil {
+ vote := voting.VoteFromData([]byte{})
+ if err := m.txMgr.VoteTransaction(ctx, tx.ID, tx.Node, vote); err != nil {
return helper.ErrInternal(err)
}
@@ -697,21 +698,10 @@ func (m *mockSmartHTTP) Called(method string) int {
return m.methodsCalled[method]
}
-func newSmartHTTPGrpcServer(t *testing.T, srv gitalypb.SmartHTTPServiceServer) (string, *grpc.Server) {
- socketPath := testhelper.GetTemporaryGitalySocketFileName(t)
- listener, err := net.Listen("unix", socketPath)
- require.NoError(t, err)
-
- grpcServer := testhelper.NewTestGrpcServer(t, nil, nil)
-
- healthSrvr := health.NewServer()
- grpc_health_v1.RegisterHealthServer(grpcServer, healthSrvr)
- healthSrvr.SetServingStatus("", grpc_health_v1.HealthCheckResponse_SERVING)
- gitalypb.RegisterSmartHTTPServiceServer(grpcServer, srv)
-
- go grpcServer.Serve(listener)
-
- return socketPath, grpcServer
+func newSmartHTTPGrpcServer(t *testing.T, cfg gconfig.Cfg, smartHTTPService gitalypb.SmartHTTPServiceServer) string {
+ return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ gitalypb.RegisterSmartHTTPServiceServer(srv, smartHTTPService)
+ }, testserver.WithDisablePraefect())
}
func TestProxyWrites(t *testing.T) {
@@ -719,12 +709,14 @@ func TestProxyWrites(t *testing.T) {
smartHTTP0, smartHTTP1, smartHTTP2 := &mockSmartHTTP{txMgr: txMgr}, &mockSmartHTTP{txMgr: txMgr}, &mockSmartHTTP{txMgr: txMgr}
- socket0, srv0 := newSmartHTTPGrpcServer(t, smartHTTP0)
- defer srv0.Stop()
- socket1, srv1 := newSmartHTTPGrpcServer(t, smartHTTP1)
- defer srv1.Stop()
- socket2, srv2 := newSmartHTTPGrpcServer(t, smartHTTP2)
- defer srv2.Stop()
+ cfg0 := testcfg.Build(t, testcfg.WithStorages("praefect-internal-0"))
+ addr0 := newSmartHTTPGrpcServer(t, cfg0, smartHTTP0)
+
+ cfg1 := testcfg.Build(t, testcfg.WithStorages("praefect-internal-1"))
+ addr1 := newSmartHTTPGrpcServer(t, cfg1, smartHTTP1)
+
+ cfg2 := testcfg.Build(t, testcfg.WithStorages("praefect-internal-2"))
+ addr2 := newSmartHTTPGrpcServer(t, cfg2, smartHTTP2)
conf := config.Config{
VirtualStorages: []*config.VirtualStorage{
@@ -732,16 +724,16 @@ func TestProxyWrites(t *testing.T) {
Name: "default",
Nodes: []*config.Node{
{
- Storage: "praefect-internal-0",
- Address: "unix://" + socket0,
+ Storage: cfg0.Storages[0].Name,
+ Address: addr0,
},
{
- Storage: "praefect-internal-1",
- Address: "unix://" + socket1,
+ Storage: cfg1.Storages[0].Name,
+ Address: addr1,
},
{
- Storage: "praefect-internal-2",
- Address: "unix://" + socket2,
+ Storage: cfg2.Storages[0].Name,
+ Address: addr2,
},
},
},
@@ -758,12 +750,11 @@ func TestProxyWrites(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
- testRepo, _, cleanup := gittest.CloneRepo(t)
- defer cleanup()
+ _, repo, _ := testcfg.BuildWithRepo(t)
rs := datastore.MockRepositoryStore{
GetConsistentStoragesFunc: func(ctx context.Context, virtualStorage, relativePath string) (map[string]struct{}, error) {
- return map[string]struct{}{"praefect-internal-0": {}, "praefect-internal-1": {}, "praefect-internal-2": {}}, nil
+ return map[string]struct{}{cfg0.Storages[0].Name: {}, cfg1.Storages[0].Name: {}, cfg2.Storages[0].Name: {}}, nil
},
}
@@ -805,7 +796,7 @@ func TestProxyWrites(t *testing.T) {
payload := "some pack data"
for i := 0; i < 10; i++ {
require.NoError(t, stream.Send(&gitalypb.PostReceivePackRequest{
- Repository: testRepo,
+ Repository: repo,
Data: []byte(payload),
}))
}
@@ -950,8 +941,7 @@ func TestErrorThreshold(t *testing.T) {
require.NoError(t, err)
cli := mock.NewSimpleServiceClient(conn)
- repo, _, cleanup := gittest.CloneRepo(t)
- defer cleanup()
+ _, repo, _ := testcfg.BuildWithRepo(t)
node := nodeMgr.Nodes()["default"][0]
require.Equal(t, "praefect-internal-0", node.GetStorage())
diff --git a/internal/praefect/service/info/consistencycheck_test.go b/internal/praefect/service/info/consistencycheck_test.go
index 78cb84b41..5c16be716 100644
--- a/internal/praefect/service/info/consistencycheck_test.go
+++ b/internal/praefect/service/info/consistencycheck_test.go
@@ -45,7 +45,7 @@ func TestServer_ConsistencyCheck(t *testing.T) {
secondRepoPath = "2.git"
thirdRepoPath = "3.git"
- checksum = "06c4db1a33b2e48dac0bf940c7c20429d00a04ea"
+ checksum = "13d09299a4516e741be34e3252e3a35041b6b062"
targetStorageName = "target"
referenceStorageName = "reference"
@@ -57,8 +57,8 @@ func TestServer_ConsistencyCheck(t *testing.T) {
targetCfg := testcfg.Build(t, testcfg.WithStorages(targetStorageName))
// firstRepoPath exists on both storages and has same state
- gittest.CloneRepoAtStorage(t, referenceCfg.Storages[0], firstRepoPath)
- gittest.CloneRepoAtStorage(t, targetCfg.Storages[0], firstRepoPath)
+ gittest.CloneRepoAtStorage(t, referenceCfg, referenceCfg.Storages[0], firstRepoPath)
+ gittest.CloneRepoAtStorage(t, targetCfg, targetCfg.Storages[0], firstRepoPath)
referenceAddr := testserver.RunGitalyServer(t, referenceCfg, nil, setup.RegisterAll, testserver.WithDisablePraefect())
targetGitaly := testserver.StartGitalyServer(t, targetCfg, nil, setup.RegisterAll, testserver.WithDisablePraefect())
@@ -169,9 +169,9 @@ func TestServer_ConsistencyCheck(t *testing.T) {
// secondRepoPath generates an error, but it should not stop other repositories from being processed.
// Order does matter for the test to verify the flow.
- gittest.CloneRepoAtStorage(t, referenceCfg.Storages[0], secondRepoPath)
+ gittest.CloneRepoAtStorage(t, referenceCfg, referenceCfg.Storages[0], secondRepoPath)
// thirdRepoPath exists only on the reference storage (where traversal happens).
- gittest.CloneRepoAtStorage(t, referenceCfg.Storages[0], thirdRepoPath)
+ gittest.CloneRepoAtStorage(t, referenceCfg, referenceCfg.Storages[0], thirdRepoPath)
// not.git is a folder on the reference storage that should be skipped as it is not a git repository.
require.NoError(t, os.MkdirAll(filepath.Join(referenceCfg.Storages[0].Path, "not.git"), os.ModePerm))
diff --git a/internal/praefect/service/transaction/server.go b/internal/praefect/service/transaction/server.go
index ceabc2282..5276c20e2 100644
--- a/internal/praefect/service/transaction/server.go
+++ b/internal/praefect/service/transaction/server.go
@@ -6,6 +6,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/praefect/transactions"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
@@ -24,8 +25,12 @@ func NewServer(txMgr *transactions.Manager) gitalypb.RefTransactionServer {
// transaction, blocking until a vote across all participating nodes has been
// completed.
func (s *Server) VoteTransaction(ctx context.Context, in *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
- err := s.txMgr.VoteTransaction(ctx, in.TransactionId, in.Node, in.ReferenceUpdatesHash)
+ vote, err := voting.VoteFromHash(in.GetReferenceUpdatesHash())
if err != nil {
+ return nil, helper.ErrInvalidArgumentf("invalid reference update hash: %v", err)
+ }
+
+ if err := s.txMgr.VoteTransaction(ctx, in.TransactionId, in.Node, vote); err != nil {
switch {
case errors.Is(err, transactions.ErrNotFound):
return nil, helper.ErrNotFound(err)
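
The server change above converts the raw ReferenceUpdatesHash from the request into a typed voting.Vote before handing it to the transaction manager, so malformed hashes are rejected with an invalid-argument error at the RPC boundary. A small sketch of that round trip, assuming the voting package behaves as its usage in this diff suggests (VoteFromData derives a vote from payload data, Bytes produces the wire form accepted by VoteFromHash); it only builds inside the Gitaly module since the package is internal.

package main

import (
	"fmt"

	"gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)

func main() {
	// Assumption based on the usage in this diff: VoteFromData derives a typed vote
	// from raw payload data, much like the sha1.Sum call it replaces elsewhere.
	vote := voting.VoteFromData([]byte("refs/heads/master deadbeef"))

	// Bytes is what a client would place into VoteTransactionRequest.ReferenceUpdatesHash;
	// VoteFromHash validates the raw form and rebuilds the typed vote, which is the
	// check the server now performs before calling the transaction manager.
	roundTripped, err := voting.VoteFromHash(vote.Bytes())
	if err != nil {
		panic(err)
	}

	fmt.Println(vote.String() == roundTripped.String()) // true
}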
diff --git a/internal/praefect/transactions/manager.go b/internal/praefect/transactions/manager.go
index abfbe8fdc..f84c5198b 100644
--- a/internal/praefect/transactions/manager.go
+++ b/internal/praefect/transactions/manager.go
@@ -2,19 +2,17 @@ package transactions
import (
"context"
- cryptorand "crypto/rand"
- "encoding/binary"
- "encoding/hex"
"errors"
"fmt"
- "math/rand"
"sync"
+ "sync/atomic"
"time"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"github.com/prometheus/client_golang/prometheus"
"github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/internal/praefect/config"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)
var ErrNotFound = errors.New("transaction not found")
@@ -23,7 +21,7 @@ var ErrNotFound = errors.New("transaction not found")
// for Praefect to handle transactions directly instead of having to reach out
// to reference transaction RPCs.
type Manager struct {
- txIDGenerator TransactionIDGenerator
+ idSequence uint64
lock sync.Mutex
transactions map[uint64]*transaction
counterMetric *prometheus.CounterVec
@@ -31,48 +29,10 @@ type Manager struct {
subtransactionsMetric prometheus.Histogram
}
-// TransactionIDGenerator is an interface for types that can generate transaction IDs.
-type TransactionIDGenerator interface {
- // ID generates a new transaction identifier
- ID() uint64
-}
-
-type transactionIDGenerator struct {
- rand *rand.Rand
-}
-
-func newTransactionIDGenerator() *transactionIDGenerator {
- var seed [8]byte
-
- // Ignore any errors. In case we weren't able to generate a seed, the
- // best we can do is to just use the all-zero seed.
- cryptorand.Read(seed[:])
- source := rand.NewSource(int64(binary.LittleEndian.Uint64(seed[:])))
-
- return &transactionIDGenerator{
- rand: rand.New(source),
- }
-}
-
-func (t *transactionIDGenerator) ID() uint64 {
- return rand.Uint64()
-}
-
-// ManagerOpt is a self referential option for Manager
-type ManagerOpt func(*Manager)
-
-// WithTransactionIDGenerator is an option to set the transaction ID generator
-func WithTransactionIDGenerator(generator TransactionIDGenerator) ManagerOpt {
- return func(mgr *Manager) {
- mgr.txIDGenerator = generator
- }
-}
-
// NewManager creates a new transactions Manager.
-func NewManager(cfg config.Config, opts ...ManagerOpt) *Manager {
- mgr := &Manager{
- txIDGenerator: newTransactionIDGenerator(),
- transactions: make(map[uint64]*transaction),
+func NewManager(cfg config.Config) *Manager {
+ return &Manager{
+ transactions: make(map[uint64]*transaction),
counterMetric: prometheus.NewCounterVec(
prometheus.CounterOpts{
Namespace: "gitaly",
@@ -100,12 +60,6 @@ func NewManager(cfg config.Config, opts ...ManagerOpt) *Manager {
},
),
}
-
- for _, opt := range opts {
- opt(mgr)
- }
-
- return mgr
}
func (mgr *Manager) Describe(descs chan<- *prometheus.Desc) {
@@ -135,11 +89,7 @@ func (mgr *Manager) RegisterTransaction(ctx context.Context, voters []Voter, thr
mgr.lock.Lock()
defer mgr.lock.Unlock()
- // Use a random transaction ID. Using monotonic incrementing counters
- // that reset on restart of Praefect would be suboptimal, as the chance
- // for collisions is a lot higher in case Praefect restarts when Gitaly
- // nodes still have in-flight transactions.
- transactionID := mgr.txIDGenerator.ID()
+ transactionID := atomic.AddUint64(&mgr.idSequence, 1)
transaction, err := newTransaction(transactionID, voters, threshold)
if err != nil {
@@ -193,7 +143,7 @@ func (mgr *Manager) cancelTransaction(ctx context.Context, transaction *transact
return nil
}
-func (mgr *Manager) voteTransaction(ctx context.Context, transactionID uint64, node string, hash []byte) error {
+func (mgr *Manager) voteTransaction(ctx context.Context, transactionID uint64, node string, vote voting.Vote) error {
mgr.lock.Lock()
transaction, ok := mgr.transactions[transactionID]
mgr.lock.Unlock()
@@ -202,7 +152,7 @@ func (mgr *Manager) voteTransaction(ctx context.Context, transactionID uint64, n
return fmt.Errorf("%w: %d", ErrNotFound, transactionID)
}
- if err := transaction.vote(ctx, node, hash); err != nil {
+ if err := transaction.vote(ctx, node, vote); err != nil {
return err
}
@@ -211,7 +161,7 @@ func (mgr *Manager) voteTransaction(ctx context.Context, transactionID uint64, n
// VoteTransaction is called by a client who's casting a vote on a reference
// transaction. It waits until quorum was reached on the given transaction.
-func (mgr *Manager) VoteTransaction(ctx context.Context, transactionID uint64, node string, hash []byte) error {
+func (mgr *Manager) VoteTransaction(ctx context.Context, transactionID uint64, node string, vote voting.Vote) error {
start := time.Now()
defer func() {
delay := time.Since(start)
@@ -221,13 +171,13 @@ func (mgr *Manager) VoteTransaction(ctx context.Context, transactionID uint64, n
logger := mgr.log(ctx).WithFields(logrus.Fields{
"transaction.id": transactionID,
"transaction.voter": node,
- "transaction.hash": hex.EncodeToString(hash),
+ "transaction.hash": vote.String(),
})
mgr.counterMetric.WithLabelValues("started").Inc()
logger.Debug("VoteTransaction")
- if err := mgr.voteTransaction(ctx, transactionID, node, hash); err != nil {
+ if err := mgr.voteTransaction(ctx, transactionID, node, vote); err != nil {
var counterLabel string
if errors.Is(err, ErrTransactionStopped) {
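
The manager now derives transaction IDs from a process-local counter bumped with atomic.AddUint64 instead of the injectable random generator it replaces. A standalone sketch of that pattern, not taken from the repository, showing that the counter is safe for concurrent callers without additional locking:

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// idSource mirrors the pattern adopted above: a plain uint64 field incremented with
// atomic.AddUint64, which hands out unique IDs to concurrent callers.
type idSource struct {
	sequence uint64
}

func (s *idSource) next() uint64 {
	return atomic.AddUint64(&s.sequence, 1)
}

func main() {
	var (
		src idSource
		wg  sync.WaitGroup
	)

	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(src.next()) // prints 1..4 in some order, each exactly once
		}()
	}
	wg.Wait()
}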
diff --git a/internal/praefect/transactions/subtransaction.go b/internal/praefect/transactions/subtransaction.go
index 4b596fc09..2ce44bff2 100644
--- a/internal/praefect/transactions/subtransaction.go
+++ b/internal/praefect/transactions/subtransaction.go
@@ -2,10 +2,11 @@ package transactions
import (
"context"
- "crypto/sha1"
- "encoding/hex"
+ "errors"
"fmt"
"sync"
+
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)
// VoteResult represents the outcome of a transaction for a single voter.
@@ -26,24 +27,6 @@ const (
VoteStopped
)
-type vote [sha1.Size]byte
-
-func voteFromHash(hash []byte) (vote, error) {
- var vote vote
-
- if len(hash) != sha1.Size {
- return vote, fmt.Errorf("invalid voting hash: %q", hash)
- }
-
- copy(vote[:], hash)
- return vote, nil
-}
-
-// String returns the hexadecimal string representation of the vote.
-func (v vote) String() string {
- return hex.EncodeToString(v[:])
-}
-
// subtransaction is a single session where voters are voting for a certain outcome.
type subtransaction struct {
doneCh chan interface{}
@@ -52,8 +35,7 @@ type subtransaction struct {
lock sync.RWMutex
votersByNode map[string]*Voter
- voteCounts map[vote]uint
- isDone bool
+ voteCounts map[voting.Vote]uint
}
func newSubtransaction(voters []Voter, threshold uint) (*subtransaction, error) {
@@ -67,7 +49,7 @@ func newSubtransaction(voters []Voter, threshold uint) (*subtransaction, error)
doneCh: make(chan interface{}),
threshold: threshold,
votersByNode: votersByNode,
- voteCounts: make(map[vote]uint, len(voters)),
+ voteCounts: make(map[voting.Vote]uint, len(voters)),
}, nil
}
@@ -84,8 +66,7 @@ func (t *subtransaction) cancel() {
}
}
- if !t.isDone {
- t.isDone = true
+ if !t.isDone() {
close(t.doneCh)
}
}
@@ -111,8 +92,7 @@ func (t *subtransaction) stop() error {
}
}
- if !t.isDone {
- t.isDone = true
+ if !t.isDone() {
close(t.doneCh)
}
@@ -131,12 +111,7 @@ func (t *subtransaction) state() map[string]VoteResult {
return results
}
-func (t *subtransaction) vote(node string, hash []byte) error {
- vote, err := voteFromHash(hash)
- if err != nil {
- return err
- }
-
+func (t *subtransaction) vote(node string, vote voting.Vote) error {
t.lock.Lock()
defer t.lock.Unlock()
@@ -175,7 +150,6 @@ func (t *subtransaction) vote(node string, hash []byte) error {
t.updateVoterStates()
if t.mustSignalVoters() {
- t.isDone = true
close(t.doneCh)
}
@@ -185,7 +159,7 @@ func (t *subtransaction) vote(node string, hash []byte) error {
// updateVoterStates updates undecided voters. Voters are updated either as
// soon as quorum was reached or alternatively when all votes were cast.
func (t *subtransaction) updateVoterStates() {
- var majorityVote *vote
+ var majorityVote *voting.Vote
for v, voteCount := range t.voteCounts {
if voteCount >= t.threshold {
v := v
@@ -239,7 +213,7 @@ func (t *subtransaction) updateVoterStates() {
func (t *subtransaction) mustSignalVoters() bool {
// If somebody else already notified voters, then we mustn't do so
// again.
- if t.isDone {
+ if t.isDone() {
return false
}
@@ -266,22 +240,46 @@ func (t *subtransaction) mustSignalVoters() bool {
return true
}
+// cancelVote cancels a node's vote if the subtransaction is still ongoing. This
+// has to be called with the lock acquired as collectVotes does.
+func (t *subtransaction) cancelVote(voter *Voter) error {
+ if t.isDone() {
+ // If the transaction is already done, it's too late to cancel our vote.
+ // Other nodes may have committed their changes already.
+ return errors.New("subtransaction was already finished")
+ }
+
+ // Remove the voter's support for the vote so it's not counted towards the
+ // majority. The node is not going to commit the subtransaction anyway.
+ t.voteCounts[*voter.vote] -= voter.Votes
+ voter.result = VoteCanceled
+ return nil
+}
+
func (t *subtransaction) collectVotes(ctx context.Context, node string) error {
select {
case <-ctx.Done():
- return ctx.Err()
case <-t.doneCh:
- break
}
- t.lock.RLock()
- defer t.lock.RUnlock()
+ t.lock.Lock()
+ defer t.lock.Unlock()
voter, ok := t.votersByNode[node]
if !ok {
return fmt.Errorf("invalid node for transaction: %q", node)
}
+ // If the waiting stopped due to the context being canceled, we need to cancel
+ // this voter's votes.
+ if err := ctx.Err(); err != nil {
+ if err := t.cancelVote(voter); err != nil {
+ return fmt.Errorf("cancel vote: %w", err)
+ }
+
+ return ctx.Err()
+ }
+
switch voter.result {
case VoteCommitted:
// Happy case, we are part of the quorum.
@@ -309,6 +307,15 @@ func (t *subtransaction) collectVotes(ctx context.Context, node string) error {
}
}
+func (t *subtransaction) isDone() bool {
+ select {
+ case <-t.doneCh:
+ return true
+ default:
+ return false
+ }
+}
+
func (t *subtransaction) getResult(node string) (VoteResult, error) {
t.lock.RLock()
defer t.lock.RUnlock()
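
In the subtransaction the separate isDone boolean is replaced by the doneCh channel itself: closing the channel marks the subtransaction as finished, and isDone becomes a non-blocking receive. A self-contained sketch of that closed-channel flag, with names chosen for illustration; in the real code cancel and stop run under the subtransaction's mutex, which is what keeps the check-then-close free of races.

package main

import "fmt"

// doneFlag mirrors the change above: "done" is represented by a closed channel, and
// isDone is a non-blocking receive instead of a boolean guarded separately.
type doneFlag struct {
	doneCh chan struct{}
}

func newDoneFlag() *doneFlag {
	return &doneFlag{doneCh: make(chan struct{})}
}

// finish closes the channel once; callers are expected to hold the owning lock,
// as cancel and stop do in the subtransaction.
func (f *doneFlag) finish() {
	if !f.isDone() {
		close(f.doneCh)
	}
}

func (f *doneFlag) isDone() bool {
	select {
	case <-f.doneCh:
		return true
	default:
		return false
	}
}

func main() {
	f := newDoneFlag()
	fmt.Println(f.isDone()) // false
	f.finish()
	fmt.Println(f.isDone()) // true
}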
diff --git a/internal/praefect/transactions/subtransaction_test.go b/internal/praefect/transactions/subtransaction_test.go
index 32a5849f0..e3d60e3eb 100644
--- a/internal/praefect/transactions/subtransaction_test.go
+++ b/internal/praefect/transactions/subtransaction_test.go
@@ -1,14 +1,18 @@
package transactions
import (
+ "context"
"crypto/sha1"
"errors"
"fmt"
"sync"
"testing"
+ "time"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)
func TestSubtransaction_cancel(t *testing.T) {
@@ -22,7 +26,7 @@ func TestSubtransaction_cancel(t *testing.T) {
s.cancel()
- require.True(t, s.isDone)
+ require.True(t, s.isDone())
require.Equal(t, VoteCanceled, s.votersByNode["1"].result)
require.Equal(t, VoteCommitted, s.votersByNode["2"].result)
require.Equal(t, VoteFailed, s.votersByNode["3"].result)
@@ -40,7 +44,7 @@ func TestSubtransaction_stop(t *testing.T) {
require.NoError(t, s.stop())
- require.True(t, s.isDone)
+ require.True(t, s.isDone())
require.Equal(t, VoteStopped, s.votersByNode["1"].result)
require.Equal(t, VoteCommitted, s.votersByNode["2"].result)
require.Equal(t, VoteFailed, s.votersByNode["3"].result)
@@ -56,7 +60,7 @@ func TestSubtransaction_stop(t *testing.T) {
require.NoError(t, err)
require.Equal(t, s.stop(), ErrTransactionCanceled)
- require.False(t, s.isDone)
+ require.False(t, s.isDone())
})
t.Run("stop of stopped transaction fails", func(t *testing.T) {
@@ -69,7 +73,7 @@ func TestSubtransaction_stop(t *testing.T) {
require.NoError(t, err)
require.Equal(t, s.stop(), ErrTransactionStopped)
- require.False(t, s.isDone)
+ require.False(t, s.isDone())
})
}
@@ -117,7 +121,7 @@ func TestSubtransaction_getResult(t *testing.T) {
}
func TestSubtransaction_vote(t *testing.T) {
- var zeroVote vote
+ var zeroVote voting.Vote
voteA := newVote(t, "a")
voteB := newVote(t, "b")
@@ -126,9 +130,9 @@ func TestSubtransaction_vote(t *testing.T) {
voters []Voter
threshold uint
voterName string
- vote vote
+ vote voting.Vote
expectedVoterState []Voter
- expectedVoteCounts map[vote]uint
+ expectedVoteCounts map[voting.Vote]uint
expectedErr error
}{
{
@@ -142,7 +146,7 @@ func TestSubtransaction_vote(t *testing.T) {
expectedVoterState: []Voter{
{Name: "1", Votes: 1, result: VoteCommitted, vote: &voteA},
},
- expectedVoteCounts: map[vote]uint{
+ expectedVoteCounts: map[voting.Vote]uint{
voteA: 1,
},
},
@@ -157,7 +161,7 @@ func TestSubtransaction_vote(t *testing.T) {
expectedVoterState: []Voter{
{Name: "1", Votes: 1, vote: &voteA},
},
- expectedVoteCounts: map[vote]uint{
+ expectedVoteCounts: map[voting.Vote]uint{
voteA: 1,
},
expectedErr: errors.New("node already cast a vote: \"1\""),
@@ -173,7 +177,7 @@ func TestSubtransaction_vote(t *testing.T) {
expectedVoterState: []Voter{
{Name: "1", Votes: 1, result: VoteCommitted, vote: &zeroVote},
},
- expectedVoteCounts: map[vote]uint{
+ expectedVoteCounts: map[voting.Vote]uint{
zeroVote: 1,
},
},
@@ -188,7 +192,7 @@ func TestSubtransaction_vote(t *testing.T) {
expectedVoterState: []Voter{
{Name: "1", Votes: 1, result: VoteCanceled},
},
- expectedVoteCounts: map[vote]uint{},
+ expectedVoteCounts: map[voting.Vote]uint{},
expectedErr: ErrTransactionCanceled,
},
{
@@ -202,7 +206,7 @@ func TestSubtransaction_vote(t *testing.T) {
expectedVoterState: []Voter{
{Name: "1", Votes: 1, result: VoteStopped},
},
- expectedVoteCounts: map[vote]uint{},
+ expectedVoteCounts: map[voting.Vote]uint{},
expectedErr: ErrTransactionStopped,
},
{
@@ -220,7 +224,7 @@ func TestSubtransaction_vote(t *testing.T) {
{Name: "2", Votes: 1, result: VoteCommitted, vote: &voteA},
{Name: "3", Votes: 1, result: VoteCommitted, vote: &voteA},
},
- expectedVoteCounts: map[vote]uint{
+ expectedVoteCounts: map[voting.Vote]uint{
voteA: 3,
},
},
@@ -239,7 +243,7 @@ func TestSubtransaction_vote(t *testing.T) {
{Name: "2", Votes: 1},
{Name: "3", Votes: 1, vote: &voteA},
},
- expectedVoteCounts: map[vote]uint{
+ expectedVoteCounts: map[voting.Vote]uint{
voteA: 2,
},
},
@@ -258,7 +262,7 @@ func TestSubtransaction_vote(t *testing.T) {
{Name: "2", Votes: 1, result: VoteFailed, vote: &voteA},
{Name: "3", Votes: 1, result: VoteFailed, vote: &voteB},
},
- expectedVoteCounts: map[vote]uint{
+ expectedVoteCounts: map[voting.Vote]uint{
voteA: 2,
voteB: 1,
},
@@ -278,7 +282,7 @@ func TestSubtransaction_vote(t *testing.T) {
{Name: "2", Votes: 1, result: VoteCommitted, vote: &voteA},
{Name: "3", Votes: 1, result: VoteFailed, vote: &voteB},
},
- expectedVoteCounts: map[vote]uint{
+ expectedVoteCounts: map[voting.Vote]uint{
voteA: 2,
voteB: 1,
},
@@ -288,7 +292,7 @@ func TestSubtransaction_vote(t *testing.T) {
s, err := newSubtransaction(tc.voters, tc.threshold)
require.NoError(t, err)
- voteCounts := make(map[vote]uint)
+ voteCounts := make(map[voting.Vote]uint)
for _, voter := range tc.voters {
if voter.vote != nil {
voteCounts[*voter.vote] += voter.Votes
@@ -302,7 +306,7 @@ func TestSubtransaction_vote(t *testing.T) {
expectedVoterState[voter.Name] = &voter
}
- require.Equal(t, tc.expectedErr, s.vote(tc.voterName, tc.vote[:]))
+ require.Equal(t, tc.expectedErr, s.vote(tc.voterName, tc.vote))
require.Equal(t, expectedVoterState, s.votersByNode)
require.Equal(t, tc.expectedVoteCounts, s.voteCounts)
})
@@ -392,7 +396,7 @@ func TestSubtransaction_mustSignalVoters(t *testing.T) {
s, err := newSubtransaction(tc.voters, tc.threshold)
require.NoError(t, err)
- voteCounts := make(map[vote]uint)
+ voteCounts := make(map[voting.Vote]uint)
for _, voter := range tc.voters {
if voter.vote != nil {
voteCounts[*voter.vote] += voter.Votes
@@ -400,13 +404,135 @@ func TestSubtransaction_mustSignalVoters(t *testing.T) {
}
s.voteCounts = voteCounts
- s.isDone = tc.isDone
+ if tc.isDone {
+ close(s.doneCh)
+ }
require.Equal(t, tc.mustSignal, s.mustSignalVoters())
})
}
}
+func TestSubtransaction_voterStopsWaiting(t *testing.T) {
+ ctx, cancel := testhelper.Context()
+ defer cancel()
+
+ agreeingVote := newVote(t, "agreeing")
+ disagreeingVote := newVote(t, "disagreeing")
+
+ errorMessageForVote := func(agreeingVotes uint, threshold uint, vote voting.Vote) string {
+ return fmt.Sprintf("transaction did not reach quorum: got %d/%d votes for %s", agreeingVotes, threshold, vote)
+ }
+
+ type outcomes []struct {
+ drops bool
+ vote voting.Vote
+ weight uint
+ errorMessage string
+ result VoteResult
+ }
+
+ for _, tc := range []struct {
+ desc string
+ outcomes outcomes
+ }{
+ {
+ desc: "quorum not reached",
+ outcomes: outcomes{
+ {weight: 1, vote: agreeingVote, drops: true, errorMessage: context.Canceled.Error(), result: VoteCanceled},
+ {weight: 1, vote: agreeingVote, errorMessage: errorMessageForVote(1, 2, agreeingVote), result: VoteFailed},
+ {weight: 1, vote: disagreeingVote, errorMessage: errorMessageForVote(1, 2, disagreeingVote), result: VoteFailed},
+ },
+ },
+ {
+ desc: "quorum reached",
+ outcomes: outcomes{
+ {weight: 1, vote: agreeingVote, drops: true, errorMessage: context.Canceled.Error(), result: VoteCanceled},
+ {weight: 1, vote: agreeingVote, result: VoteCommitted},
+ {weight: 1, vote: agreeingVote, result: VoteCommitted},
+ },
+ },
+ {
+ desc: "can't cancel a finished transaction",
+ outcomes: outcomes{
+ {weight: 1, vote: agreeingVote, result: VoteCommitted},
+ {weight: 1, vote: agreeingVote, result: VoteCommitted},
+ {weight: 1, vote: agreeingVote, drops: true, result: VoteCommitted, errorMessage: "cancel vote: subtransaction was already finished"},
+ },
+ },
+ {
+ desc: "primary cancels its vote before transaction is finished",
+ outcomes: outcomes{
+ {weight: 2, vote: agreeingVote, drops: true, result: VoteCanceled, errorMessage: context.Canceled.Error()},
+ {weight: 1, vote: agreeingVote, result: VoteFailed, errorMessage: errorMessageForVote(2, 3, agreeingVote)},
+ {weight: 1, vote: agreeingVote, result: VoteFailed, errorMessage: errorMessageForVote(2, 3, agreeingVote)},
+ },
+ },
+ {
+ desc: "secondary cancels its vote after crossing the threshold",
+ outcomes: outcomes{
+ {weight: 2, vote: agreeingVote, result: VoteCommitted},
+ {weight: 1, vote: agreeingVote, drops: true, result: VoteCommitted, errorMessage: "cancel vote: subtransaction was already finished"},
+ {weight: 1, vote: disagreeingVote, result: VoteFailed, errorMessage: errorMessageForVote(1, 3, disagreeingVote)},
+ },
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ ctx, cancel := context.WithTimeout(ctx, 45*time.Second)
+ defer cancel()
+
+ var totalWeight uint
+ var voters []Voter
+ for i, outcome := range tc.outcomes {
+ totalWeight += outcome.weight
+ voters = append(voters, Voter{Name: fmt.Sprintf("voter-%d", i), Votes: outcome.weight})
+ }
+
+ s, err := newSubtransaction(voters, totalWeight/2+1)
+ require.NoError(t, err)
+
+ results := make([]chan error, len(tc.outcomes))
+ for i, outcome := range tc.outcomes {
+ voterName := voters[i].Name
+ resultCh := make(chan error, 1)
+ results[i] = resultCh
+
+ collectVotes := func(ctx context.Context) { resultCh <- s.collectVotes(ctx, voterName) }
+
+ require.NoError(t, s.vote(voterName, outcome.vote))
+
+ if outcome.drops {
+ ctx, dropVoter := context.WithCancel(ctx)
+ dropVoter()
+
+ // Run the dropping node's collectVotes in sync just to ensure
+ // we get the correct error back. If we ran all of the collectVotes
+ // async, the agreeing nodes could finish the transaction and
+ // we would not get a context.Canceled when the vote is successfully
+ // canceled.
+ collectVotes(ctx)
+ continue
+ }
+
+ go collectVotes(ctx)
+ }
+
+ for i, outcome := range tc.outcomes {
+ voterName := voters[i].Name
+ assert.Equal(t, outcome.result, s.state()[voterName], "Node: %q", voterName)
+
+ err := <-results[i]
+ if outcome.errorMessage != "" {
+ assert.EqualError(t, err, outcome.errorMessage)
+ continue
+ }
+
+ assert.NoError(t, err)
+ }
+ })
+ }
+}
+
func TestSubtransaction_race(t *testing.T) {
ctx, cancel := testhelper.Context()
defer cancel()
@@ -433,7 +559,7 @@ func TestSubtransaction_race(t *testing.T) {
require.NoError(t, err)
require.Equal(t, VoteUndecided, result)
- require.NoError(t, s.vote(voter.Name, voteA[:]))
+ require.NoError(t, s.vote(voter.Name, voteA))
require.NoError(t, s.collectVotes(ctx, voter.Name))
result, err = s.getResult(voter.Name)
@@ -447,9 +573,9 @@ func TestSubtransaction_race(t *testing.T) {
}
}
-func newVote(t *testing.T, s string) vote {
+func newVote(t *testing.T, s string) voting.Vote {
hash := sha1.Sum([]byte(s))
- vote, err := voteFromHash(hash[:])
+ vote, err := voting.VoteFromHash(hash[:])
require.NoError(t, err)
return vote
}
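
The voterStopsWaiting test above exercises weighted voting with a threshold of totalWeight/2 + 1, which is where error messages such as "got 2/3 votes" come from. A tiny worked example of that arithmetic, independent of the repository code:

package main

import "fmt"

func main() {
	// Voter weights as in the "primary cancels its vote" case: 2, 1 and 1.
	weights := []uint{2, 1, 1}

	var total uint
	for _, w := range weights {
		total += w
	}
	threshold := total/2 + 1 // 4/2 + 1 = 3

	// If the weight-2 voter drops out and its vote is canceled, only 2 agreeing
	// votes remain, which is below the threshold and matches the
	// "transaction did not reach quorum: got 2/3 votes" error in the test.
	agreeing := total - weights[0]
	fmt.Printf("threshold=%d agreeing=%d quorum reached=%v\n", threshold, agreeing, agreeing >= threshold)
}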
diff --git a/internal/praefect/transactions/transaction.go b/internal/praefect/transactions/transaction.go
index 2545ed4f6..840710d29 100644
--- a/internal/praefect/transactions/transaction.go
+++ b/internal/praefect/transactions/transaction.go
@@ -4,6 +4,8 @@ import (
"context"
"errors"
"sync"
+
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)
var (
@@ -43,7 +45,7 @@ type Voter struct {
// this voter.
Votes uint
- vote *vote
+ vote *voting.Vote
result VoteResult
}
@@ -55,6 +57,8 @@ type Transaction interface {
CountSubtransactions() int
// State returns the state of each voter part of the transaction.
State() (map[string]VoteResult, error)
+ // DidCommitAnySubtransaction returns whether the transaction committed at least one subtransaction.
+ DidCommitAnySubtransaction() bool
}
// transaction is a session where a set of voters votes on one or more
@@ -182,6 +186,28 @@ func (t *transaction) CountSubtransactions() int {
return len(t.subtransactions)
}
+// DidCommitAnySubtransaction returns whether the transaction committed at least one subtransaction.
+func (t *transaction) DidCommitAnySubtransaction() bool {
+ t.lock.Lock()
+ defer t.lock.Unlock()
+
+ if len(t.subtransactions) == 0 {
+ return false
+ }
+
+ // We only need to check the first subtransaction. If it failed, there would
+ // be no further subtransactions.
+ for _, result := range t.subtransactions[0].state() {
+ // It's sufficient to find a single commit in the subtransaction
+ // to say it was committed.
+ if result == VoteCommitted {
+ return true
+ }
+ }
+
+ return false
+}
+
// getOrCreateSubtransaction gets an ongoing subtransaction on which the given
// node hasn't yet voted on or creates a new one if the node has succeeded on
// all subtransactions. In case the node has failed on any of the
@@ -244,13 +270,13 @@ func (t *transaction) getOrCreateSubtransaction(node string) (*subtransaction, e
return subtransaction, nil
}
-func (t *transaction) vote(ctx context.Context, node string, hash []byte) error {
+func (t *transaction) vote(ctx context.Context, node string, vote voting.Vote) error {
subtransaction, err := t.getOrCreateSubtransaction(node)
if err != nil {
return err
}
- if err := subtransaction.vote(node, hash); err != nil {
+ if err := subtransaction.vote(node, vote); err != nil {
return err
}
diff --git a/internal/praefect/transactions/transaction_test.go b/internal/praefect/transactions/transaction_test.go
index 6a9ff1e53..c20213c10 100644
--- a/internal/praefect/transactions/transaction_test.go
+++ b/internal/praefect/transactions/transaction_test.go
@@ -1,11 +1,11 @@
package transactions
import (
- "crypto/sha1"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)
func TestTransactionCancellationWithEmptyTransaction(t *testing.T) {
@@ -17,12 +17,10 @@ func TestTransactionCancellationWithEmptyTransaction(t *testing.T) {
}, 1)
require.NoError(t, err)
- hash := sha1.Sum([]byte{})
-
tx.cancel()
// When canceling a transaction, no more votes may happen.
- err = tx.vote(ctx, "voter", hash[:])
+ err = tx.vote(ctx, "voter", voting.VoteFromData([]byte{}))
require.Error(t, err)
require.Equal(t, err, ErrTransactionCanceled)
}
diff --git a/internal/prometheus/metrics/metrics.go b/internal/prometheus/metrics/metrics.go
index 62b72d374..04db58cdb 100644
--- a/internal/prometheus/metrics/metrics.go
+++ b/internal/prometheus/metrics/metrics.go
@@ -21,6 +21,9 @@ type Histogram interface {
Observe(float64)
}
+// HistogramVec is a subset of a prometheus HistogramVec
type HistogramVec interface {
WithLabelValues(lvs ...string) prometheus.Observer
+ Collect(chan<- prometheus.Metric)
+ Describe(chan<- *prometheus.Desc)
}
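
Extending the HistogramVec test interface with Collect and Describe makes its method set a superset of prometheus.Collector, so any implementation, including the real *prometheus.HistogramVec and the mock adjusted later in this diff, can be registered or gathered in tests without adapters. A short sketch of that property using the upstream client library; the metric name and label are illustrative only.

package main

import "github.com/prometheus/client_golang/prometheus"

// histogramVec is a local copy of the narrowed interface above. Because it lists
// Collect and Describe with the Collector signatures, any value satisfying it also
// satisfies prometheus.Collector.
type histogramVec interface {
	WithLabelValues(lvs ...string) prometheus.Observer
	Collect(chan<- prometheus.Metric)
	Describe(chan<- *prometheus.Desc)
}

func main() {
	// *prometheus.HistogramVec satisfies the interface; the metric name and label
	// here are made up for the example.
	var vec histogramVec = prometheus.NewHistogramVec(prometheus.HistogramOpts{
		Name: "example_duration_seconds",
		Help: "Example histogram used to illustrate the narrowed interface.",
	}, []string{"status"})

	// The same value can be registered as a collector without further wrapping.
	var collector prometheus.Collector = vec
	prometheus.MustRegister(collector)

	vec.WithLabelValues("ok").Observe(0.42)
}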
diff --git a/internal/safe/file_writer_test.go b/internal/safe/file_writer_test.go
index 7899fd0a9..1af0915ca 100644
--- a/internal/safe/file_writer_test.go
+++ b/internal/safe/file_writer_test.go
@@ -25,12 +25,11 @@ func TestFile(t *testing.T) {
_, err = io.Copy(file, bytes.NewBufferString(fileContents))
require.NoError(t, err)
- testhelper.AssertPathNotExists(t, filePath)
+ require.NoFileExists(t, filePath)
require.NoError(t, file.Commit())
- writtenContents, err := ioutil.ReadFile(filePath)
- require.NoError(t, err)
+ writtenContents := testhelper.MustReadFile(t, filePath)
require.Equal(t, fileContents, string(writtenContents))
filesInTempDir, err := ioutil.ReadDir(dir)
diff --git a/internal/tempdir/tempdir_test.go b/internal/tempdir/tempdir_test.go
index 4342e724e..377482a3b 100644
--- a/internal/tempdir/tempdir_test.go
+++ b/internal/tempdir/tempdir_test.go
@@ -104,7 +104,7 @@ func TestCleanSuccess(t *testing.T) {
func TestCleanTempDir(t *testing.T) {
cfg := testcfg.Build(t, testcfg.WithStorages("first", "second"))
- gittest.CloneRepoAtStorage(t, cfg.Storages[0], t.Name())
+ gittest.CloneRepoAtStorage(t, cfg, cfg.Storages[0], t.Name())
logrus.SetLevel(logrus.InfoLevel)
logrus.SetOutput(ioutil.Discard)
diff --git a/internal/testhelper/configure.go b/internal/testhelper/configure.go
index 8a92e54d7..7b011a76c 100644
--- a/internal/testhelper/configure.go
+++ b/internal/testhelper/configure.go
@@ -36,39 +36,8 @@ func Configure() func() {
log.Fatal(err)
}
- config.Config.Logging.Dir = filepath.Join(testDirectory, "log")
- if err := os.Mkdir(config.Config.Logging.Dir, 0755); err != nil {
- os.RemoveAll(testDirectory)
- log.Fatal(err)
- }
-
- config.Config.Storages = []config.Storage{
- {Name: "default", Path: GitlabTestStoragePath()},
- }
- if err := os.Mkdir(config.Config.Storages[0].Path, 0755); err != nil {
- os.RemoveAll(testDirectory)
- log.Fatal(err)
- }
-
- config.Config.SocketPath = "/bogus"
- config.Config.GitlabShell.Dir = "/"
-
- config.Config.InternalSocketDir = filepath.Join(testDirectory, "internal-socket")
- if err := os.Mkdir(config.Config.InternalSocketDir, 0755); err != nil {
- os.RemoveAll(testDirectory)
- log.Fatal(err)
- }
-
- config.Config.BinDir = filepath.Join(testDirectory, "bin")
- if err := os.Mkdir(config.Config.BinDir, 0755); err != nil {
- os.RemoveAll(testDirectory)
- log.Fatal(err)
- }
-
for _, f := range []func() error{
- func() error { return ConfigureRuby(&config.Config) },
ConfigureGit,
- func() error { return config.Config.Validate() },
} {
if err := f(); err != nil {
os.RemoveAll(testDirectory)
@@ -132,29 +101,14 @@ func ConfigureRuby(cfg *config.Cfg) error {
return nil
}
-// ConfigureGitalyGit2Go configures the gitaly-git2go command for tests
-func ConfigureGitalyGit2Go(outputDir string) {
- buildCommand(nil, outputDir, "gitaly-git2go")
-}
-
// ConfigureGitalyGit2GoBin configures the gitaly-git2go command for tests
func ConfigureGitalyGit2GoBin(t testing.TB, cfg config.Cfg) {
buildBinary(t, cfg.BinDir, "gitaly-git2go")
}
// ConfigureGitalyLfsSmudge configures the gitaly-lfs-smudge command for tests
-func ConfigureGitalyLfsSmudge(outputDir string) {
- buildCommand(nil, outputDir, "gitaly-lfs-smudge")
-}
-
-// ConfigureGitalySSH configures the gitaly-ssh command for tests
-func ConfigureGitalySSH(outputDir string) {
- buildCommand(nil, outputDir, "gitaly-ssh")
-}
-
-// ConfigureGitalyHooksBinary builds gitaly-hooks command for tests
-func ConfigureGitalyHooksBinary(outputDir string) {
- buildCommand(nil, outputDir, "gitaly-hooks")
+func ConfigureGitalyLfsSmudge(t *testing.T, outputDir string) {
+ buildCommand(t, outputDir, "gitaly-lfs-smudge")
}
// ConfigureGitalyHooksBin builds gitaly-hooks command for tests for the cfg.
diff --git a/internal/testhelper/grpc.go b/internal/testhelper/grpc.go
index 97cdf5bec..a67211df7 100644
--- a/internal/testhelper/grpc.go
+++ b/internal/testhelper/grpc.go
@@ -44,16 +44,6 @@ func RequireGrpcError(t testing.TB, err error, expectedCode codes.Code) {
}
}
-// GrpcErrorHasMessage checks whether the GRPC error's message matches the
-// given message.
-func GrpcErrorHasMessage(t testing.TB, grpcError error, msg string) {
- t.Helper()
-
- st, ok := status.FromError(grpcError)
- require.Truef(t, ok, "passed err is not a status.Status: %T", grpcError)
- require.Equal(t, msg, st.Message())
-}
-
// MergeOutgoingMetadata merges provided metadata-s and returns context with resulting value.
func MergeOutgoingMetadata(ctx context.Context, md ...metadata.MD) context.Context {
ctxmd, ok := metadata.FromOutgoingContext(ctx)
diff --git a/internal/testhelper/promtest/histogram.go b/internal/testhelper/promtest/histogram.go
index 35208128f..2d88d01cf 100644
--- a/internal/testhelper/promtest/histogram.go
+++ b/internal/testhelper/promtest/histogram.go
@@ -19,16 +19,19 @@ func (m *MockHistogram) Observe(v float64) {
m.Values = append(m.Values, v)
}
-func NewMockHistogramVec() *MockHistogramVec {
- return &MockHistogramVec{}
-}
-
+// MockHistogramVec implements a subset of the prometheus.HistogramVec interface.
type MockHistogramVec struct {
m sync.RWMutex
labelsCalled [][]string
observer MockObserver
}
+// NewMockHistogramVec returns a new MockHistogramVec.
+func NewMockHistogramVec() *MockHistogramVec {
+ return &MockHistogramVec{}
+}
+
+// LabelsCalled returns the set of labels which have been observed.
func (m *MockHistogramVec) LabelsCalled() [][]string {
m.m.RLock()
defer m.m.RUnlock()
@@ -36,10 +39,19 @@ func (m *MockHistogramVec) LabelsCalled() [][]string {
return m.labelsCalled
}
+// Observer returns the mocked observer.
func (m *MockHistogramVec) Observer() *MockObserver {
return &m.observer
}
+// Collect does nothing.
+func (m *MockHistogramVec) Collect(chan<- prometheus.Metric) {}
+
+// Describe does nothing.
+func (m *MockHistogramVec) Describe(chan<- *prometheus.Desc) {}
+
+// WithLabelValues records the given labels such that `LabelsCalled()` will return the set of
+// observed labels.
func (m *MockHistogramVec) WithLabelValues(lvs ...string) prometheus.Observer {
m.m.Lock()
defer m.m.Unlock()
@@ -48,11 +60,13 @@ func (m *MockHistogramVec) WithLabelValues(lvs ...string) prometheus.Observer {
return &m.observer
}
+// MockObserver implements a subset of the prometheus.Observer interface.
type MockObserver struct {
m sync.RWMutex
observed []float64
}
+// Observe records the given value in its observed values.
func (m *MockObserver) Observe(v float64) {
m.m.Lock()
defer m.m.Unlock()
@@ -60,6 +74,7 @@ func (m *MockObserver) Observe(v float64) {
m.observed = append(m.observed, v)
}
+// Observed returns all observed values.
func (m *MockObserver) Observed() []float64 {
m.m.RLock()
defer m.m.RUnlock()
diff --git a/internal/testhelper/testcfg/gitaly_builder.go b/internal/testhelper/testcfg/gitaly_builder.go
index ed2553730..1f48df715 100644
--- a/internal/testhelper/testcfg/gitaly_builder.go
+++ b/internal/testhelper/testcfg/gitaly_builder.go
@@ -137,7 +137,7 @@ func (gc *GitalyCfgBuilder) BuildWithRepoAt(t testing.TB, relativePath string) (
// clone the test repo to the each storage
repos := make([]*gitalypb.Repository, len(cfg.Storages))
for i, gitalyStorage := range cfg.Storages {
- repos[i] = gittest.CloneRepoAtStorageRoot(t, gitalyStorage.Path, relativePath)
+ repos[i] = gittest.CloneRepoAtStorageRoot(t, cfg, gitalyStorage.Path, relativePath)
repos[i].StorageName = gitalyStorage.Name
}
diff --git a/internal/testhelper/testhelper.go b/internal/testhelper/testhelper.go
index 41fee9aab..4e65088ba 100644
--- a/internal/testhelper/testhelper.go
+++ b/internal/testhelper/testhelper.go
@@ -1,5 +1,6 @@
package testhelper
+//nolint: gci
import (
"context"
"crypto/ecdsa"
@@ -31,8 +32,11 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/internal/storage"
- "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"google.golang.org/grpc/metadata"
+
+ // The goleak import only exists such that this test-only dependency is properly being
+ // attributed in our NOTICE file.
+ _ "go.uber.org/goleak"
)
const (
@@ -42,18 +46,6 @@ const (
RepositoryAuthToken = "the-secret-token"
// DefaultStorageName is the default name of the Gitaly storage.
DefaultStorageName = "default"
- // GlID is the ID of the default user.
- GlID = "user-123"
-)
-
-var (
- // TestUser is the default user for tests.
- TestUser = &gitalypb.User{
- Name: []byte("Jane Doe"),
- Email: []byte("janedoe@gitlab.com"),
- GlId: GlID,
- GlUsername: "janedoe",
- }
)
// MustReadFile returns the content of a file or fails at once.
@@ -74,24 +66,6 @@ func GitlabTestStoragePath() string {
return filepath.Join(testDirectory, "storage")
}
-// GitalyServersMetadata returns a metadata pair for gitaly-servers to be used in
-// inter-gitaly operations.
-func GitalyServersMetadata(t testing.TB, serverSocketPath string) metadata.MD {
- gitalyServers := storage.GitalyServers{
- "default": storage.ServerInfo{
- Address: serverSocketPath,
- Token: RepositoryAuthToken,
- },
- }
-
- gitalyServersJSON, err := json.Marshal(gitalyServers)
- if err != nil {
- t.Fatal(err)
- }
-
- return metadata.Pairs("gitaly-servers", base64.StdEncoding.EncodeToString(gitalyServersJSON))
-}
-
// GitalyServersMetadataFromCfg returns a metadata pair for gitaly-servers to be used in
// inter-gitaly operations.
func GitalyServersMetadataFromCfg(t testing.TB, cfg config.Cfg) metadata.MD {
@@ -121,29 +95,13 @@ storages:
// MustRunCommand runs a command with an optional standard input and returns the standard output, or fails.
func MustRunCommand(t testing.TB, stdin io.Reader, name string, args ...string) []byte {
- if t != nil {
- t.Helper()
- }
+ t.Helper()
- var cmd *exec.Cmd
- if name == "git" {
- if args[0] == "init" {
- // Many tests depend on the fact "master" is the initial branch.
- // To overcome the case when the user has set anything else in
- // their git-config, override it to be "master".
- args = append([]string{"-c", "init.defaultBranch=master"}, args...)
- }
- cmd = exec.Command(config.Config.Git.BinPath, args...)
- cmd.Env = os.Environ()
- cmd.Env = append(command.GitEnv, cmd.Env...)
- cmd.Env = append(cmd.Env,
- "GIT_AUTHOR_DATE=1572776879 +0100",
- "GIT_COMMITTER_DATE=1572776879 +0100",
- )
- } else {
- cmd = exec.Command(name, args...)
+ if filepath.Base(name) == "git" {
+ require.Fail(t, "Please use gittest.Exec or gittest.ExecStream to run git commands.")
}
+ cmd := exec.Command(name, args...)
if stdin != nil {
cmd.Stdin = stdin
}
@@ -151,15 +109,7 @@ func MustRunCommand(t testing.TB, stdin io.Reader, name string, args ...string)
output, err := cmd.Output()
if err != nil {
stderr := err.(*exec.ExitError).Stderr
- if t == nil {
- log.Print(name, args)
- log.Printf("%s", stderr)
- panic(err)
- } else {
- t.Log(name, args)
- t.Logf("%s", stderr)
- t.Fatal(err)
- }
+ require.NoError(t, err, "%s %s: %s", name, args, stderr)
}
return output
@@ -173,6 +123,20 @@ func MustClose(t testing.TB, closer io.Closer) {
require.NoError(t, closer.Close())
}
+// CopyFile copies a file at the path src to a file at the path dst
+func CopyFile(t testing.TB, src, dst string) {
+ fsrc, err := os.Open(src)
+ require.NoError(t, err)
+ defer MustClose(t, fsrc)
+
+ fdst, err := os.Create(dst)
+ require.NoError(t, err)
+ defer MustClose(t, fdst)
+
+ _, err = io.Copy(fdst, fsrc)
+ require.NoError(t, err)
+}
+
// GetTemporaryGitalySocketFileName will return a unique, useable socket file name
func GetTemporaryGitalySocketFileName(t testing.TB) string {
require.NotEmpty(t, testDirectory, "you must call testhelper.Configure() before GetTemporaryGitalySocketFileName()")
@@ -287,12 +251,6 @@ func Context(opts ...ContextOpt) (context.Context, func()) {
}
}
-// AssertPathNotExists asserts true if the path doesn't exist, false otherwise
-func AssertPathNotExists(t testing.TB, path string) {
- _, err := os.Stat(path)
- assert.True(t, os.IsNotExist(err), "file should not exist: %s", path)
-}
-
// TempDir is a wrapper around ioutil.TempDir that provides a cleanup function.
func TempDir(t testing.TB) string {
if testDirectory == "" {
diff --git a/internal/testhelper/testserver.go b/internal/testhelper/testserver.go
index 59b06419f..646bbde1a 100644
--- a/internal/testhelper/testserver.go
+++ b/internal/testhelper/testserver.go
@@ -1,7 +1,6 @@
package testhelper
import (
- "context"
"crypto/tls"
"crypto/x509"
"encoding/base64"
@@ -13,335 +12,17 @@ import (
"net/http/httptest"
"net/url"
"os"
- "os/exec"
"path/filepath"
"regexp"
"strings"
"testing"
- "time"
- grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
- grpc_logrus "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus"
- grpc_ctxtags "github.com/grpc-ecosystem/go-grpc-middleware/tags"
- "github.com/pelletier/go-toml"
- log "github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
- gitalyauth "gitlab.com/gitlab-org/gitaly/auth"
- "gitlab.com/gitlab-org/gitaly/internal/backchannel"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
- "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/auth"
- gitalylog "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/log"
- serverauth "gitlab.com/gitlab-org/gitaly/internal/gitaly/server/auth"
- "gitlab.com/gitlab-org/gitaly/internal/helper/fieldextractors"
- gitalyinternallog "gitlab.com/gitlab-org/gitaly/internal/log"
- praefectconfig "gitlab.com/gitlab-org/gitaly/internal/praefect/config"
- grpccorrelation "gitlab.com/gitlab-org/labkit/correlation/grpc"
"google.golang.org/grpc"
"google.golang.org/grpc/health"
healthpb "google.golang.org/grpc/health/grpc_health_v1"
)
-// TestServerOpt is an option for TestServer
-type TestServerOpt func(t *TestServer)
-
-// WithToken is a TestServerOpt that provides a security token
-func WithToken(token string) TestServerOpt {
- return func(t *TestServer) {
- t.token = token
- }
-}
-
-// WithStorages is a TestServerOpt that sets the storages for a TestServer
-func WithStorages(storages []string) TestServerOpt {
- return func(t *TestServer) {
- t.storages = storages
- }
-}
-
-// WithInternalSocket is a TestServerOpt that will cause the TestServer to
-// listen on its internal socket.
-func WithInternalSocket(cfg config.Cfg) TestServerOpt {
- return func(t *TestServer) {
- t.withInternalSocketPath = cfg.GitalyInternalSocketPath()
- }
-}
-
-// NewTestServer instantiates a new TestServer
-func NewTestServer(srv *grpc.Server, opts ...TestServerOpt) *TestServer {
- ts := &TestServer{
- grpcServer: srv,
- storages: []string{"default"},
- }
-
- for _, opt := range opts {
- opt(ts)
- }
-
- // the health service needs to be registered in order to support health checks on all
- // gitaly services that are under test.
- // The health check is executed by the praefect in case 'test-with-praefect' verification
- // job is running.
- healthpb.RegisterHealthServer(srv, health.NewServer())
-
- return ts
-}
-
-// NewServerWithAuth creates a new test server with authentication
-func NewServerWithAuth(tb testing.TB, streamInterceptors []grpc.StreamServerInterceptor, unaryInterceptors []grpc.UnaryServerInterceptor, token string, registry *backchannel.Registry, opts ...TestServerOpt) *TestServer {
- if token != "" {
- opts = append(opts, WithToken(token))
- streamInterceptors = append(streamInterceptors, serverauth.StreamServerInterceptor(auth.Config{Token: token}))
- unaryInterceptors = append(unaryInterceptors, serverauth.UnaryServerInterceptor(auth.Config{Token: token}))
- }
-
- return newServerWithLogger(
- tb,
- NewTestLogger(tb),
- streamInterceptors,
- unaryInterceptors,
- registry,
- opts...,
- )
-}
-
-// TestServer wraps a grpc Server and handles automatically putting a praefect in front of a gitaly instance
-// if necessary
-type TestServer struct {
- grpcServer *grpc.Server
- socket string
- process *os.Process
- token string
- storages []string
- waitCh chan struct{}
- withInternalSocketPath string
-}
-
-// GrpcServer returns the underlying grpc.Server
-func (p *TestServer) GrpcServer() *grpc.Server {
- return p.grpcServer
-}
-
-// Stop will stop both the grpc server as well as the praefect process
-func (p *TestServer) Stop() {
- p.grpcServer.Stop()
- if p.process != nil {
- p.process.Kill()
- <-p.waitCh
- }
-}
-
-// Socket returns the socket file the test server is listening on
-func (p *TestServer) Socket() string {
- return p.socket
-}
-
-// Start will start the grpc server as well as spawn a praefect instance if GITALY_TEST_PRAEFECT_BIN is enabled
-func (p *TestServer) Start(t testing.TB) {
- praefectBinPath, ok := os.LookupEnv("GITALY_TEST_PRAEFECT_BIN")
- if !ok {
- p.socket = p.listen(t)
- return
- }
-
- tempDir := TempDir(t)
-
- praefectServerSocketPath := GetTemporaryGitalySocketFileName(t)
-
- c := praefectconfig.Config{
- SocketPath: praefectServerSocketPath,
- Auth: auth.Config{
- Token: p.token,
- },
- MemoryQueueEnabled: true,
- Failover: praefectconfig.Failover{
- Enabled: true,
- ElectionStrategy: praefectconfig.ElectionStrategyLocal,
- BootstrapInterval: config.Duration(time.Microsecond),
- MonitorInterval: config.Duration(time.Second),
- },
- Replication: praefectconfig.DefaultReplicationConfig(),
- Logging: gitalylog.Config{
- Format: "json",
- Level: "panic",
- },
- }
-
- for _, storage := range p.storages {
- gitalyServerSocketPath := p.listen(t)
-
- c.VirtualStorages = append(c.VirtualStorages, &praefectconfig.VirtualStorage{
- Name: storage,
- Nodes: []*praefectconfig.Node{
- {
- Storage: storage,
- Address: "unix:/" + gitalyServerSocketPath,
- Token: p.token,
- },
- },
- })
- }
-
- configFilePath := filepath.Join(tempDir, "config.toml")
- configFile, err := os.Create(configFilePath)
- require.NoError(t, err)
- defer MustClose(t, configFile)
-
- require.NoError(t, toml.NewEncoder(configFile).Encode(&c))
- require.NoError(t, configFile.Sync())
-
- cmd := exec.Command(praefectBinPath, "-config", configFilePath)
- cmd.Stderr = os.Stderr
- cmd.Stdout = os.Stdout
-
- p.socket = praefectServerSocketPath
-
- require.NoError(t, cmd.Start())
-
- p.waitCh = make(chan struct{})
- go func() {
- cmd.Wait()
- close(p.waitCh)
- }()
-
- opts := []grpc.DialOption{grpc.WithInsecure()}
- if p.token != "" {
- opts = append(opts, grpc.WithPerRPCCredentials(gitalyauth.RPCCredentialsV2(p.token)))
- }
-
- conn, err := grpc.Dial("unix://"+praefectServerSocketPath, opts...)
- require.NoError(t, err)
- defer MustClose(t, conn)
-
- waitHealthy(t, conn, 3, time.Second)
-
- p.process = cmd.Process
-}
-
-func (p *TestServer) listen(t testing.TB) string {
- gitalyServerSocketPath := GetTemporaryGitalySocketFileName(t)
-
- sockets := []string{
- gitalyServerSocketPath,
- }
-
- if p.withInternalSocketPath != "" {
- sockets = append(sockets, p.withInternalSocketPath)
- }
-
- for _, socket := range sockets {
- listener, err := net.Listen("unix", socket)
- require.NoError(t, err)
-
- go p.grpcServer.Serve(listener)
-
- opts := []grpc.DialOption{grpc.WithInsecure()}
- if p.token != "" {
- opts = append(opts, grpc.WithPerRPCCredentials(gitalyauth.RPCCredentialsV2(p.token)))
- }
-
- conn, err := grpc.Dial("unix://"+socket, opts...)
- require.NoError(t, err)
- defer MustClose(t, conn)
-
- waitHealthy(t, conn, 3, time.Second)
- }
-
- return gitalyServerSocketPath
-}
-
-// waitHealthy executes health check request `retries` times and awaits each `timeout` period to respond.
-// After `retries` unsuccessful attempts it returns an error.
-// Returns immediately without an error once get a successful health check response.
-func waitHealthy(t testing.TB, conn *grpc.ClientConn, retries int, timeout time.Duration) {
- for i := 0; i < retries; i++ {
- if IsHealthy(conn, timeout) {
- return
- }
- }
-
- require.FailNow(t, "server not yet ready to serve")
-}
-
-// IsHealthy creates a health client to passed in connection and send `Check` request.
-// It waits for `timeout` duration to get response back.
-// It returns `true` only if remote responds with `SERVING` status.
-func IsHealthy(conn *grpc.ClientConn, timeout time.Duration) bool {
- healthClient := healthpb.NewHealthClient(conn)
-
- ctx, cancel := context.WithTimeout(context.Background(), timeout)
- defer cancel()
-
- resp, err := healthClient.Check(ctx, &healthpb.HealthCheckRequest{}, grpc.WaitForReady(true))
- if err != nil {
- return false
- }
-
- if resp.Status != healthpb.HealthCheckResponse_SERVING {
- return false
- }
-
- return true
-}
-
-// NewServer creates a Server for testing purposes
-func NewServer(tb testing.TB, streamInterceptors []grpc.StreamServerInterceptor, unaryInterceptors []grpc.UnaryServerInterceptor, opts ...TestServerOpt) *TestServer {
- logger := NewTestLogger(tb)
-
- return NewServerWithLogger(tb, logger, streamInterceptors, unaryInterceptors, opts...)
-}
-
-// NewServerWithLogger lets you inject a logger into a test server. You
-// can use this to inspect log messages.
-func NewServerWithLogger(tb testing.TB, logger *log.Logger, streamInterceptors []grpc.StreamServerInterceptor, unaryInterceptors []grpc.UnaryServerInterceptor, opts ...TestServerOpt) *TestServer {
- return newServerWithLogger(tb, logger, streamInterceptors, unaryInterceptors, backchannel.NewRegistry(), opts...)
-}
-
-func newServerWithLogger(tb testing.TB, logger *log.Logger, streamInterceptors []grpc.StreamServerInterceptor, unaryInterceptors []grpc.UnaryServerInterceptor, registry *backchannel.Registry, opts ...TestServerOpt) *TestServer {
- logrusEntry := log.NewEntry(logger).WithField("test", tb.Name())
- ctxTagger := grpc_ctxtags.WithFieldExtractorForInitialReq(fieldextractors.FieldExtractor)
-
- streamInterceptors = append([]grpc.StreamServerInterceptor{
- grpc_ctxtags.StreamServerInterceptor(ctxTagger),
- grpccorrelation.StreamServerCorrelationInterceptor(),
- grpc_logrus.StreamServerInterceptor(logrusEntry,
- grpc_logrus.WithTimestampFormat(gitalyinternallog.LogTimestampFormat)),
- }, streamInterceptors...)
-
- unaryInterceptors = append([]grpc.UnaryServerInterceptor{
- grpc_ctxtags.UnaryServerInterceptor(ctxTagger),
- grpccorrelation.UnaryServerCorrelationInterceptor(),
- grpc_logrus.UnaryServerInterceptor(logrusEntry,
- grpc_logrus.WithTimestampFormat(gitalyinternallog.LogTimestampFormat)),
- }, unaryInterceptors...)
-
- return NewTestServer(
- grpc.NewServer(
- grpc.Creds(backchannel.NewServerHandshaker(logrusEntry, backchannel.Insecure(), registry, nil)),
- grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(streamInterceptors...)),
- grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(unaryInterceptors...)),
- ),
- opts...,
- )
-}
-
-// NewTestGrpcServer creates a GRPC Server for testing purposes
-func NewTestGrpcServer(tb testing.TB, streamInterceptors []grpc.StreamServerInterceptor, unaryInterceptors []grpc.UnaryServerInterceptor) *grpc.Server {
- logger := NewTestLogger(tb)
- logrusEntry := log.NewEntry(logger).WithField("test", tb.Name())
-
- ctxTagger := grpc_ctxtags.WithFieldExtractorForInitialReq(fieldextractors.FieldExtractor)
- ctxStreamTagger := grpc_ctxtags.StreamServerInterceptor(ctxTagger)
- ctxUnaryTagger := grpc_ctxtags.UnaryServerInterceptor(ctxTagger)
-
- streamInterceptors = append([]grpc.StreamServerInterceptor{ctxStreamTagger, grpc_logrus.StreamServerInterceptor(logrusEntry)}, streamInterceptors...)
- unaryInterceptors = append([]grpc.UnaryServerInterceptor{ctxUnaryTagger, grpc_logrus.UnaryServerInterceptor(logrusEntry)}, unaryInterceptors...)
-
- return grpc.NewServer(
- grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(streamInterceptors...)),
- grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(unaryInterceptors...)),
- )
-}
-
var changeLineRegex = regexp.MustCompile("^[a-f0-9]{40} [a-f0-9]{40} refs/[^ ]+$")
const secretHeaderName = "Gitlab-Shared-Secret"
@@ -936,26 +617,26 @@ type HTTPSettings struct {
Password string `yaml:"password"`
}
-// NewServerWithHealth creates a new GRPC server with the health server set up.
+// NewServerWithHealth creates a new gRPC server with the health server set up.
// It will listen on the socket identified by `socketName`.
-func NewServerWithHealth(t testing.TB, socketName string) (*grpc.Server, *health.Server) {
+func NewServerWithHealth(t testing.TB, socketName string) *health.Server {
lis, err := net.Listen("unix", socketName)
require.NoError(t, err)
return NewHealthServerWithListener(t, lis)
}
-// NewHealthServerWithListener creates a new GRPC server with the health server
+// NewHealthServerWithListener creates a new gRPC server with the health server
// set up. It will listen on the given listener.
-func NewHealthServerWithListener(t testing.TB, listener net.Listener) (*grpc.Server, *health.Server) {
- srv := NewTestGrpcServer(t, nil, nil)
+func NewHealthServerWithListener(t testing.TB, listener net.Listener) *health.Server {
+ srv := grpc.NewServer()
healthSrvr := health.NewServer()
healthpb.RegisterHealthServer(srv, healthSrvr)
- healthSrvr.SetServingStatus("", healthpb.HealthCheckResponse_SERVING)
- go srv.Serve(listener)
+ t.Cleanup(srv.Stop)
+ go func() { require.NoError(t, srv.Serve(listener)) }()
- return srv, healthSrvr
+ return healthSrvr
}
// SetupAndStartGitlabServer creates a new GitlabTestServer, starts it and sets
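For illustration only: a minimal sketch of how a test might consume the reworked helper, assuming NewServerWithHealth lives in the testhelper package alongside the GetTemporaryGitalySocketFileName and MustClose utilities used elsewhere in this file. Because the gRPC server is now stopped via t.Cleanup, the caller only deals with the returned *health.Server.

package testhelper_test

import (
	"context"
	"testing"
	"time"

	"github.com/stretchr/testify/require"
	"google.golang.org/grpc"
	healthpb "google.golang.org/grpc/health/grpc_health_v1"

	"gitlab.com/gitlab-org/gitaly/internal/testhelper"
)

// TestHealthHelper is a hypothetical test showing the reworked helper: the
// server lifecycle is owned by the helper, so the test only flips the health
// status and asserts what a client observes.
func TestHealthHelper(t *testing.T) {
	socketPath := testhelper.GetTemporaryGitalySocketFileName(t)
	healthSrv := testhelper.NewServerWithHealth(t, socketPath)

	// grpc's health server reports SERVING for the empty service name by
	// default; flip it to simulate an unhealthy backend.
	healthSrv.SetServingStatus("", healthpb.HealthCheckResponse_NOT_SERVING)

	conn, err := grpc.Dial("unix://"+socketPath, grpc.WithInsecure())
	require.NoError(t, err)
	defer testhelper.MustClose(t, conn)

	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
	require.NoError(t, err)
	require.Equal(t, healthpb.HealthCheckResponse_NOT_SERVING, resp.GetStatus())
}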
diff --git a/internal/testhelper/testserver/gitaly.go b/internal/testhelper/testserver/gitaly.go
index 64e288acc..a25413088 100644
--- a/internal/testhelper/testserver/gitaly.go
+++ b/internal/testhelper/testserver/gitaly.go
@@ -16,7 +16,9 @@ import (
gitalyauth "gitlab.com/gitlab-org/gitaly/auth"
"gitlab.com/gitlab-org/gitaly/client"
"gitlab.com/gitlab-org/gitaly/internal/backchannel"
+ "gitlab.com/gitlab-org/gitaly/internal/cache"
"gitlab.com/gitlab-org/gitaly/internal/git"
+ "gitlab.com/gitlab-org/gitaly/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/config/auth"
gitalylog "gitlab.com/gitlab-org/gitaly/internal/gitaly/config/log"
@@ -26,6 +28,7 @@ import (
"gitlab.com/gitlab-org/gitaly/internal/gitaly/server"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/transaction"
+ "gitlab.com/gitlab-org/gitaly/internal/gitlab"
praefectconfig "gitlab.com/gitlab-org/gitaly/internal/praefect/config"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
@@ -201,7 +204,12 @@ func runGitaly(t testing.TB, cfg config.Cfg, rubyServer *rubyserver.Server, regi
deps := gsd.createDependencies(t, cfg, rubyServer)
t.Cleanup(func() { gsd.conns.Close() })
- srv, err := server.New(cfg.TLS.CertPath != "" && cfg.TLS.KeyPath != "", cfg, gsd.logger.WithField("test", t.Name()), deps.GetBackchannelRegistry())
+ srv, err := server.NewGitalyServerFactory(
+ cfg,
+ gsd.logger.WithField("test", t.Name()),
+ deps.GetBackchannelRegistry(),
+ deps.GetDiskCache(),
+ ).CreateExternal(cfg.TLS.CertPath != "" && cfg.TLS.KeyPath != "")
require.NoError(t, err)
t.Cleanup(srv.Stop)
@@ -264,10 +272,12 @@ type gitalyServerDeps struct {
locator storage.Locator
txMgr transaction.Manager
hookMgr hook.Manager
- gitlabAPI hook.GitlabAPI
+ gitlabClient gitlab.Client
gitCmdFactory git.CommandFactory
linguist *linguist.Instance
backchannelReg *backchannel.Registry
+ catfileCache catfile.Cache
+ diskCache *cache.Cache
}
func (gsd *gitalyServerDeps) createDependencies(t testing.TB, cfg config.Cfg, rubyServer *rubyserver.Server) *service.Dependencies {
@@ -283,8 +293,8 @@ func (gsd *gitalyServerDeps) createDependencies(t testing.TB, cfg config.Cfg, ru
gsd.locator = config.NewLocator(cfg)
}
- if gsd.gitlabAPI == nil {
- gsd.gitlabAPI = hook.GitlabAPIStub
+ if gsd.gitlabClient == nil {
+ gsd.gitlabClient = gitlab.NewMockClient()
}
if gsd.backchannelReg == nil {
@@ -296,7 +306,7 @@ func (gsd *gitalyServerDeps) createDependencies(t testing.TB, cfg config.Cfg, ru
}
if gsd.hookMgr == nil {
- gsd.hookMgr = hook.NewManager(gsd.locator, gsd.txMgr, gsd.gitlabAPI, cfg)
+ gsd.hookMgr = hook.NewManager(gsd.locator, gsd.txMgr, gsd.gitlabClient, cfg)
}
if gsd.gitCmdFactory == nil {
@@ -309,6 +319,14 @@ func (gsd *gitalyServerDeps) createDependencies(t testing.TB, cfg config.Cfg, ru
require.NoError(t, err)
}
+ if gsd.catfileCache == nil {
+ gsd.catfileCache = catfile.NewCache(cfg)
+ }
+
+ if gsd.diskCache == nil {
+ gsd.diskCache = cache.New(cfg, gsd.locator)
+ }
+
return &service.Dependencies{
Cfg: cfg,
RubyServer: rubyServer,
@@ -319,7 +337,9 @@ func (gsd *gitalyServerDeps) createDependencies(t testing.TB, cfg config.Cfg, ru
GitCmdFactory: gsd.gitCmdFactory,
Linguist: gsd.linguist,
BackchannelRegistry: gsd.backchannelReg,
- GitlabAPI: gsd.gitlabAPI,
+ GitlabClient: gsd.gitlabClient,
+ CatfileCache: gsd.catfileCache,
+ DiskCache: gsd.diskCache,
}
}
@@ -342,10 +362,10 @@ func WithLocator(locator storage.Locator) GitalyServerOpt {
}
}
-// WithGitLabAPI sets hook.GitlabAPI instance that will be used for gitaly services initialisation.
-func WithGitLabAPI(gitlabAPI hook.GitlabAPI) GitalyServerOpt {
+// WithGitLabClient sets the gitlab.Client instance that will be used for gitaly services initialisation.
+func WithGitLabClient(gitlabClient gitlab.Client) GitalyServerOpt {
return func(deps gitalyServerDeps) gitalyServerDeps {
- deps.gitlabAPI = gitlabAPI
+ deps.gitlabClient = gitlabClient
return deps
}
}
@@ -381,3 +401,11 @@ func WithBackchannelRegistry(backchannelReg *backchannel.Registry) GitalyServerO
return deps
}
}
+
+// WithCatfileCache sets the catfile.Cache instance that will be used for gitaly services initialisation.
+func WithCatfileCache(catfileCache catfile.Cache) GitalyServerOpt {
+ return func(deps gitalyServerDeps) gitalyServerDeps {
+ deps.catfileCache = catfileCache
+ return deps
+ }
+}
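The new options follow the same functional-option pattern as the existing WithLocator and WithBackchannelRegistry helpers: each option swaps one field on the dependency struct before createDependencies fills in a default for anything left nil. A self-contained sketch of that pattern — serverDeps, ServerOpt and WithCache below are hypothetical stand-ins, not the real types:

package main

import "fmt"

// serverDeps is a hypothetical stand-in for gitalyServerDeps: a plain struct
// whose zero fields get replaced by defaults unless an option set them first.
type serverDeps struct {
	cache string
}

// ServerOpt mirrors the shape of GitalyServerOpt from the hunk above.
type ServerOpt func(serverDeps) serverDeps

// WithCache plays the role of WithCatfileCache: inject a caller-owned value.
func WithCache(c string) ServerOpt {
	return func(deps serverDeps) serverDeps {
		deps.cache = c
		return deps
	}
}

func buildDeps(opts ...ServerOpt) serverDeps {
	var deps serverDeps
	for _, opt := range opts {
		deps = opt(deps)
	}
	if deps.cache == "" {
		// Mirrors the `if gsd.catfileCache == nil` fallback above.
		deps.cache = "default catfile cache"
	}
	return deps
}

func main() {
	fmt.Println(buildDeps().cache)                    // default catfile cache
	fmt.Println(buildDeps(WithCache("shared")).cache) // shared
}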
diff --git a/internal/praefect/metadata/server.go b/internal/transaction/txinfo/server.go
index c72bec27a..de9c2cc0a 100644
--- a/internal/praefect/metadata/server.go
+++ b/internal/transaction/txinfo/server.go
@@ -1,4 +1,4 @@
-package metadata
+package txinfo
import (
"context"
@@ -235,6 +235,7 @@ func praefectFromSerialized(serialized string) (*PraefectServer, error) {
return &server, nil
}
+// Address returns the address of the Praefect server which can be used to connect to it.
func (p *PraefectServer) Address() (string, error) {
for _, addr := range []string{p.SocketPath, p.TLSListenAddr, p.ListenAddr} {
if addr != "" {
diff --git a/internal/praefect/metadata/server_test.go b/internal/transaction/txinfo/server_test.go
index cd4ad7067..d7b52ebe8 100644
--- a/internal/praefect/metadata/server_test.go
+++ b/internal/transaction/txinfo/server_test.go
@@ -1,4 +1,4 @@
-package metadata
+package txinfo
import (
"net"
diff --git a/internal/praefect/metadata/transaction.go b/internal/transaction/txinfo/transaction.go
index 08e0a43ba..a7b97a97c 100644
--- a/internal/praefect/metadata/transaction.go
+++ b/internal/transaction/txinfo/transaction.go
@@ -1,11 +1,13 @@
-package metadata
+package txinfo
import (
"context"
"encoding/base64"
"encoding/json"
"errors"
+ "fmt"
+ "gitlab.com/gitlab-org/gitaly/internal/backchannel"
"google.golang.org/grpc/metadata"
)
@@ -24,6 +26,9 @@ var (
// Transaction stores parameters required to identify a reference
// transaction.
type Transaction struct {
+ // BackchannelID is the ID of the backchannel that corresponds to the Praefect
+ // that is handling the transaction. This field is filled in by Gitaly.
+ BackchannelID backchannel.ID `json:"backchannel_id,omitempty"`
// ID is the unique identifier of a transaction
ID uint64 `json:"id"`
// Node is the name used to cast a vote
@@ -94,12 +99,25 @@ func TransactionFromContext(ctx context.Context) (Transaction, error) {
return Transaction{}, ErrTransactionNotFound
}
- return transactionFromSerialized(serialized[0])
+ transaction, err := transactionFromSerialized(serialized[0])
+ if err != nil {
+ return Transaction{}, fmt.Errorf("from serialized: %w", err)
+ }
+
+ // For backwards compatibility during an upgrade, we still need to accept transactions
+ // from non-multiplexed connections. From 14.0 onwards, we can expect every transaction to
+ // originate from a multiplexed connection and should drop the error check below.
+ transaction.BackchannelID, err = backchannel.GetPeerID(ctx)
+ if err != nil && !errors.Is(err, backchannel.ErrNonMultiplexedConnection) {
+ return Transaction{}, fmt.Errorf("get peer id: %w", err)
+ }
+
+ return transaction, nil
}
-// TransactionMetadataFromContext extracts transaction-related metadata from
-// the given context. No error is returned in case no transaction was found.
-func TransactionMetadataFromContext(ctx context.Context) (*Transaction, *PraefectServer, error) {
+// FromContext extracts transaction-related metadata from the given context. No error is returned in
+// case no transaction was found.
+func FromContext(ctx context.Context) (*Transaction, *PraefectServer, error) {
transaction, err := TransactionFromContext(ctx)
if err != nil {
if err != ErrTransactionNotFound {
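The added GetPeerID call follows a tolerate-the-sentinel pattern: during the upgrade window a non-multiplexed connection is expected and simply leaves BackchannelID at its zero value, while any other lookup failure aborts the call. A stripped-down sketch of that pattern — lookupPeerID and errNonMultiplexed are hypothetical stand-ins for backchannel.GetPeerID and backchannel.ErrNonMultiplexedConnection:

package main

import (
	"errors"
	"fmt"
)

var errNonMultiplexed = errors.New("connection is not multiplexed")

// lookupPeerID stands in for backchannel.GetPeerID: it fails with a sentinel
// error when the caller did not set up a multiplexed connection.
func lookupPeerID(multiplexed bool) (uint64, error) {
	if !multiplexed {
		return 0, errNonMultiplexed
	}
	return 42, nil
}

// peerIDOrZero tolerates the sentinel error, mirroring the upgrade-window
// handling added to TransactionFromContext above.
func peerIDOrZero(multiplexed bool) (uint64, error) {
	id, err := lookupPeerID(multiplexed)
	if err != nil && !errors.Is(err, errNonMultiplexed) {
		return 0, fmt.Errorf("get peer id: %w", err)
	}
	return id, nil
}

func main() {
	for _, multiplexed := range []bool{true, false} {
		id, err := peerIDOrZero(multiplexed)
		fmt.Println(multiplexed, id, err)
	}
}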
diff --git a/internal/transaction/voting/testhelper_test.go b/internal/transaction/voting/testhelper_test.go
new file mode 100644
index 000000000..dd6467db4
--- /dev/null
+++ b/internal/transaction/voting/testhelper_test.go
@@ -0,0 +1,21 @@
+package voting
+
+import (
+ "os"
+ "testing"
+
+ "gitlab.com/gitlab-org/gitaly/internal/testhelper"
+)
+
+func TestMain(m *testing.M) {
+ os.Exit(testMain(m))
+}
+
+func testMain(m *testing.M) int {
+ defer testhelper.MustHaveNoChildProcess()
+
+ cleanup := testhelper.Configure()
+ defer cleanup()
+
+ return m.Run()
+}
diff --git a/internal/gitaly/transaction/vote.go b/internal/transaction/voting/vote.go
index a205a3785..dcb0fc32e 100644
--- a/internal/gitaly/transaction/vote.go
+++ b/internal/transaction/voting/vote.go
@@ -1,7 +1,8 @@
-package transaction
+package voting
import (
"crypto/sha1"
+ "encoding/hex"
"fmt"
"hash"
)
@@ -19,6 +20,11 @@ func (v Vote) Bytes() []byte {
return v[:]
}
+// String returns the hex representation of the vote hash.
+func (v Vote) String() string {
+ return hex.EncodeToString(v[:])
+}
+
// VoteFromHash converts the given byte slice containing a hash into a vote.
func VoteFromHash(bytes []byte) (Vote, error) {
if len(bytes) != voteSize {
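The new String method gives votes a stable hex form for logs and test assertions. A hedged sketch of deriving and printing a vote, assuming the vote size matches sha1.Size as the existing sha1 import suggests:

package main

import (
	"crypto/sha1"
	"fmt"

	"gitlab.com/gitlab-org/gitaly/internal/transaction/voting"
)

func main() {
	// Votes are SHA1 digests over the data being voted on; VoteFromHash
	// rejects slices whose length does not match the expected vote size.
	digest := sha1.Sum([]byte("refs/heads/main: update"))

	vote, err := voting.VoteFromHash(digest[:])
	if err != nil {
		panic(err)
	}

	// String renders the vote as lowercase hex, e.g. for log messages.
	fmt.Println(vote.String())
}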
diff --git a/internal/gitaly/transaction/vote_test.go b/internal/transaction/voting/vote_test.go
index 36af20a09..564eb2d84 100644
--- a/internal/gitaly/transaction/vote_test.go
+++ b/internal/transaction/voting/vote_test.go
@@ -1,4 +1,4 @@
-package transaction
+package voting
import (
"bytes"
diff --git a/proto/blob.proto b/proto/blob.proto
index 65cbfe6b3..2f1b03241 100644
--- a/proto/blob.proto
+++ b/proto/blob.proto
@@ -25,36 +25,12 @@ service BlobService {
// GetLFSPointers retrieves LFS pointers from a given set of object IDs.
// This RPC filters all requested objects and only returns those which refer
// to a valid LFS pointer.
- //
- // Deprecated in favor of `ListLFSPointers`, passing object IDs as revisions.
rpc GetLFSPointers(GetLFSPointersRequest) returns (stream GetLFSPointersResponse) {
option (op_type) = {
op: ACCESSOR
};
}
- // GetNewLFSPointers retrieves LFS pointers for a limited subset of the
- // commit graph. It will return all LFS pointers which are reachable by the
- // provided revision, but not reachable by any of the limiting references.
- //
- // Deprecated in favor of `ListLFSPointers`. `NotInAll` can be replaced with
- // `REVISION` `--not` `--all`, while `NotInRefs` can be replaced with
- // `REVISION` `--not` `NotInRevs...`.
- rpc GetNewLFSPointers(GetNewLFSPointersRequest) returns (stream GetNewLFSPointersResponse) {
- option (op_type) = {
- op: ACCESSOR
- };
- }
-
- // GetAllLFSPointers retrieves all LFS pointers of the given repository.
- //
- // Deprecated in favor of `ListLFSPointers`, passing `--all` as revision.
- rpc GetAllLFSPointers(GetAllLFSPointersRequest) returns (stream GetAllLFSPointersResponse) {
- option (op_type) = {
- op: ACCESSOR
- };
- }
-
// ListLFSPointers retrieves LFS pointers reachable from a given set of
// revisions by doing a graph walk. This includes both normal revisions like
// an object ID or branch, but also the pseudo-revisions "--all" and "--not"
@@ -67,9 +43,8 @@ service BlobService {
};
}
- // ListAllLFSPointers retrieves all LFS pointers in the repository. In
- // contrast to `GetAllLFSPointers`, this RPC also includes LFS pointers which
- // are not reachable by any reference.
+ // ListAllLFSPointers retrieves all LFS pointers in the repository, including
+ // those not reachable by any reference.
rpc ListAllLFSPointers(ListAllLFSPointersRequest) returns (stream ListAllLFSPointersResponse) {
option (op_type) = {
op: ACCESSOR
@@ -158,45 +133,6 @@ message GetLFSPointersResponse {
repeated LFSPointer lfs_pointers = 1;
}
-// GetNewLFSPointersRequest is a request for the GetNewLFSPointers RPC.
-message GetNewLFSPointersRequest {
- // Repository is the repository for which LFS pointers should be retrieved
- // from.
- Repository repository = 1[(target_repository)=true];
- // Revision is the revision for which to retrieve new LFS pointers.
- bytes revision = 2;
- // Limit limits the number of LFS pointers returned.
- int32 limit = 3;
- // NotInAll limits the revision graph to not include any commits which are
- // referenced by a git reference. When `not_in_all` is true, `not_in_refs` is
- // ignored.
- bool not_in_all = 4;
- // NotInRefs is a list of references used to limit the revision graph. Any
- // commit reachable by any commit in NotInRefs will not be searched for new
- // LFS pointers. This is ignored if NotInAll is set to `true`.
- repeated bytes not_in_refs = 5;
-}
-
-// GetNewLFSPointersResponse is a response for the GetNewLFSPointers RPC.
-message GetNewLFSPointersResponse {
- // LfsPointers is the list of LFS pointers which were requested.
- repeated LFSPointer lfs_pointers = 1;
-}
-
-// GetAllLFSPointersRequest is a request for the GetAllLFSPointers RPC.
-message GetAllLFSPointersRequest {
- // Repository is the repository for which LFS pointers shoul be retrieved
- // from.
- Repository repository = 1[(target_repository)=true];
- reserved 2;
-}
-
-// GetAllLFSPointersResponse is a response for the GetAllLFSPointers RPC.
-message GetAllLFSPointersResponse {
- // LfsPointers is the list of LFS pointers.
- repeated LFSPointer lfs_pointers = 1;
-}
-
// ListLFSPointersRequest is a request for the ListLFSPointers RPC.
message ListLFSPointersRequest {
// Repository is the repository for which LFS pointers should be retrieved
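The deprecation notes describe the migration path: callers of the removed RPCs switch to ListLFSPointers and express the old behaviour through revisions, e.g. "--all" in place of GetAllLFSPointers. A hedged client-side sketch follows; only the Repository field is visible in this hunk, so the Revisions field name and its string type, as well as the response's GetLfsPointers accessor, are assumptions:

package main

import (
	"context"
	"fmt"
	"io"
	"log"

	"google.golang.org/grpc"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

func main() {
	conn, err := grpc.Dial("unix:///var/run/gitaly.socket", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := gitalypb.NewBlobServiceClient(conn)

	// Where GetAllLFSPointers walked every reference implicitly, the
	// replacement passes "--all" as a revision; "--not" covers the limiting
	// behaviour of the removed GetNewLFSPointers.
	stream, err := client.ListLFSPointers(context.Background(), &gitalypb.ListLFSPointersRequest{
		Repository: &gitalypb.Repository{StorageName: "default", RelativePath: "example.git"},
		Revisions:  []string{"--all"}, // assumed field name and type
	})
	if err != nil {
		log.Fatal(err)
	}

	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		for _, ptr := range resp.GetLfsPointers() {
			fmt.Println(ptr.GetOid())
		}
	}
}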
diff --git a/proto/go/gitalypb/blob.pb.go b/proto/go/gitalypb/blob.pb.go
index 3a76fb489..09d80337d 100644
--- a/proto/go/gitalypb/blob.pb.go
+++ b/proto/go/gitalypb/blob.pb.go
@@ -550,212 +550,6 @@ func (m *GetLFSPointersResponse) GetLfsPointers() []*LFSPointer {
return nil
}
-// GetNewLFSPointersRequest is a request for the GetNewLFSPointers RPC.
-type GetNewLFSPointersRequest struct {
- // Repository is the repository for which LFS pointers should be retrieved
- // from.
- Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
- // Revision is the revision for which to retrieve new LFS pointers.
- Revision []byte `protobuf:"bytes,2,opt,name=revision,proto3" json:"revision,omitempty"`
- // Limit limits the number of LFS pointers returned.
- Limit int32 `protobuf:"varint,3,opt,name=limit,proto3" json:"limit,omitempty"`
- // NotInAll limits the revision graph to not include any commits which are
- // referenced by a git reference. When `not_in_all` is true, `not_in_refs` is
- // ignored.
- NotInAll bool `protobuf:"varint,4,opt,name=not_in_all,json=notInAll,proto3" json:"not_in_all,omitempty"`
- // NotInRefs is a list of references used to limit the revision graph. Any
- // commit reachable by any commit in NotInRefs will not be searched for new
- // LFS pointers. This is ignored if NotInAll is set to `true`.
- NotInRefs [][]byte `protobuf:"bytes,5,rep,name=not_in_refs,json=notInRefs,proto3" json:"not_in_refs,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *GetNewLFSPointersRequest) Reset() { *m = GetNewLFSPointersRequest{} }
-func (m *GetNewLFSPointersRequest) String() string { return proto.CompactTextString(m) }
-func (*GetNewLFSPointersRequest) ProtoMessage() {}
-func (*GetNewLFSPointersRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_6903d1e8a20272e8, []int{8}
-}
-
-func (m *GetNewLFSPointersRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_GetNewLFSPointersRequest.Unmarshal(m, b)
-}
-func (m *GetNewLFSPointersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_GetNewLFSPointersRequest.Marshal(b, m, deterministic)
-}
-func (m *GetNewLFSPointersRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_GetNewLFSPointersRequest.Merge(m, src)
-}
-func (m *GetNewLFSPointersRequest) XXX_Size() int {
- return xxx_messageInfo_GetNewLFSPointersRequest.Size(m)
-}
-func (m *GetNewLFSPointersRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_GetNewLFSPointersRequest.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_GetNewLFSPointersRequest proto.InternalMessageInfo
-
-func (m *GetNewLFSPointersRequest) GetRepository() *Repository {
- if m != nil {
- return m.Repository
- }
- return nil
-}
-
-func (m *GetNewLFSPointersRequest) GetRevision() []byte {
- if m != nil {
- return m.Revision
- }
- return nil
-}
-
-func (m *GetNewLFSPointersRequest) GetLimit() int32 {
- if m != nil {
- return m.Limit
- }
- return 0
-}
-
-func (m *GetNewLFSPointersRequest) GetNotInAll() bool {
- if m != nil {
- return m.NotInAll
- }
- return false
-}
-
-func (m *GetNewLFSPointersRequest) GetNotInRefs() [][]byte {
- if m != nil {
- return m.NotInRefs
- }
- return nil
-}
-
-// GetNewLFSPointersResponse is a response for the GetNewLFSPointers RPC.
-type GetNewLFSPointersResponse struct {
- // LfsPointers is the list of LFS pointers which were requested.
- LfsPointers []*LFSPointer `protobuf:"bytes,1,rep,name=lfs_pointers,json=lfsPointers,proto3" json:"lfs_pointers,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *GetNewLFSPointersResponse) Reset() { *m = GetNewLFSPointersResponse{} }
-func (m *GetNewLFSPointersResponse) String() string { return proto.CompactTextString(m) }
-func (*GetNewLFSPointersResponse) ProtoMessage() {}
-func (*GetNewLFSPointersResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_6903d1e8a20272e8, []int{9}
-}
-
-func (m *GetNewLFSPointersResponse) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_GetNewLFSPointersResponse.Unmarshal(m, b)
-}
-func (m *GetNewLFSPointersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_GetNewLFSPointersResponse.Marshal(b, m, deterministic)
-}
-func (m *GetNewLFSPointersResponse) XXX_Merge(src proto.Message) {
- xxx_messageInfo_GetNewLFSPointersResponse.Merge(m, src)
-}
-func (m *GetNewLFSPointersResponse) XXX_Size() int {
- return xxx_messageInfo_GetNewLFSPointersResponse.Size(m)
-}
-func (m *GetNewLFSPointersResponse) XXX_DiscardUnknown() {
- xxx_messageInfo_GetNewLFSPointersResponse.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_GetNewLFSPointersResponse proto.InternalMessageInfo
-
-func (m *GetNewLFSPointersResponse) GetLfsPointers() []*LFSPointer {
- if m != nil {
- return m.LfsPointers
- }
- return nil
-}
-
-// GetAllLFSPointersRequest is a request for the GetAllLFSPointers RPC.
-type GetAllLFSPointersRequest struct {
- // Repository is the repository for which LFS pointers shoul be retrieved
- // from.
- Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *GetAllLFSPointersRequest) Reset() { *m = GetAllLFSPointersRequest{} }
-func (m *GetAllLFSPointersRequest) String() string { return proto.CompactTextString(m) }
-func (*GetAllLFSPointersRequest) ProtoMessage() {}
-func (*GetAllLFSPointersRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_6903d1e8a20272e8, []int{10}
-}
-
-func (m *GetAllLFSPointersRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_GetAllLFSPointersRequest.Unmarshal(m, b)
-}
-func (m *GetAllLFSPointersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_GetAllLFSPointersRequest.Marshal(b, m, deterministic)
-}
-func (m *GetAllLFSPointersRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_GetAllLFSPointersRequest.Merge(m, src)
-}
-func (m *GetAllLFSPointersRequest) XXX_Size() int {
- return xxx_messageInfo_GetAllLFSPointersRequest.Size(m)
-}
-func (m *GetAllLFSPointersRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_GetAllLFSPointersRequest.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_GetAllLFSPointersRequest proto.InternalMessageInfo
-
-func (m *GetAllLFSPointersRequest) GetRepository() *Repository {
- if m != nil {
- return m.Repository
- }
- return nil
-}
-
-// GetAllLFSPointersResponse is a response for the GetAllLFSPointers RPC.
-type GetAllLFSPointersResponse struct {
- // LfsPointers is the list of LFS pointers.
- LfsPointers []*LFSPointer `protobuf:"bytes,1,rep,name=lfs_pointers,json=lfsPointers,proto3" json:"lfs_pointers,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *GetAllLFSPointersResponse) Reset() { *m = GetAllLFSPointersResponse{} }
-func (m *GetAllLFSPointersResponse) String() string { return proto.CompactTextString(m) }
-func (*GetAllLFSPointersResponse) ProtoMessage() {}
-func (*GetAllLFSPointersResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_6903d1e8a20272e8, []int{11}
-}
-
-func (m *GetAllLFSPointersResponse) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_GetAllLFSPointersResponse.Unmarshal(m, b)
-}
-func (m *GetAllLFSPointersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_GetAllLFSPointersResponse.Marshal(b, m, deterministic)
-}
-func (m *GetAllLFSPointersResponse) XXX_Merge(src proto.Message) {
- xxx_messageInfo_GetAllLFSPointersResponse.Merge(m, src)
-}
-func (m *GetAllLFSPointersResponse) XXX_Size() int {
- return xxx_messageInfo_GetAllLFSPointersResponse.Size(m)
-}
-func (m *GetAllLFSPointersResponse) XXX_DiscardUnknown() {
- xxx_messageInfo_GetAllLFSPointersResponse.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_GetAllLFSPointersResponse proto.InternalMessageInfo
-
-func (m *GetAllLFSPointersResponse) GetLfsPointers() []*LFSPointer {
- if m != nil {
- return m.LfsPointers
- }
- return nil
-}
-
// ListLFSPointersRequest is a request for the ListLFSPointers RPC.
type ListLFSPointersRequest struct {
// Repository is the repository for which LFS pointers should be retrieved
@@ -775,7 +569,7 @@ func (m *ListLFSPointersRequest) Reset() { *m = ListLFSPointersRequest{}
func (m *ListLFSPointersRequest) String() string { return proto.CompactTextString(m) }
func (*ListLFSPointersRequest) ProtoMessage() {}
func (*ListLFSPointersRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_6903d1e8a20272e8, []int{12}
+ return fileDescriptor_6903d1e8a20272e8, []int{8}
}
func (m *ListLFSPointersRequest) XXX_Unmarshal(b []byte) error {
@@ -830,7 +624,7 @@ func (m *ListLFSPointersResponse) Reset() { *m = ListLFSPointersResponse
func (m *ListLFSPointersResponse) String() string { return proto.CompactTextString(m) }
func (*ListLFSPointersResponse) ProtoMessage() {}
func (*ListLFSPointersResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_6903d1e8a20272e8, []int{13}
+ return fileDescriptor_6903d1e8a20272e8, []int{9}
}
func (m *ListLFSPointersResponse) XXX_Unmarshal(b []byte) error {
@@ -874,7 +668,7 @@ func (m *ListAllLFSPointersRequest) Reset() { *m = ListAllLFSPointersReq
func (m *ListAllLFSPointersRequest) String() string { return proto.CompactTextString(m) }
func (*ListAllLFSPointersRequest) ProtoMessage() {}
func (*ListAllLFSPointersRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_6903d1e8a20272e8, []int{14}
+ return fileDescriptor_6903d1e8a20272e8, []int{10}
}
func (m *ListAllLFSPointersRequest) XXX_Unmarshal(b []byte) error {
@@ -922,7 +716,7 @@ func (m *ListAllLFSPointersResponse) Reset() { *m = ListAllLFSPointersRe
func (m *ListAllLFSPointersResponse) String() string { return proto.CompactTextString(m) }
func (*ListAllLFSPointersResponse) ProtoMessage() {}
func (*ListAllLFSPointersResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_6903d1e8a20272e8, []int{15}
+ return fileDescriptor_6903d1e8a20272e8, []int{11}
}
func (m *ListAllLFSPointersResponse) XXX_Unmarshal(b []byte) error {
@@ -960,10 +754,6 @@ func init() {
proto.RegisterType((*NewBlobObject)(nil), "gitaly.NewBlobObject")
proto.RegisterType((*GetLFSPointersRequest)(nil), "gitaly.GetLFSPointersRequest")
proto.RegisterType((*GetLFSPointersResponse)(nil), "gitaly.GetLFSPointersResponse")
- proto.RegisterType((*GetNewLFSPointersRequest)(nil), "gitaly.GetNewLFSPointersRequest")
- proto.RegisterType((*GetNewLFSPointersResponse)(nil), "gitaly.GetNewLFSPointersResponse")
- proto.RegisterType((*GetAllLFSPointersRequest)(nil), "gitaly.GetAllLFSPointersRequest")
- proto.RegisterType((*GetAllLFSPointersResponse)(nil), "gitaly.GetAllLFSPointersResponse")
proto.RegisterType((*ListLFSPointersRequest)(nil), "gitaly.ListLFSPointersRequest")
proto.RegisterType((*ListLFSPointersResponse)(nil), "gitaly.ListLFSPointersResponse")
proto.RegisterType((*ListAllLFSPointersRequest)(nil), "gitaly.ListAllLFSPointersRequest")
@@ -973,55 +763,47 @@ func init() {
func init() { proto.RegisterFile("blob.proto", fileDescriptor_6903d1e8a20272e8) }
var fileDescriptor_6903d1e8a20272e8 = []byte{
- // 756 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xcd, 0x6e, 0xd3, 0x40,
- 0x10, 0x96, 0xe3, 0x24, 0x75, 0x26, 0xe9, 0x0f, 0x2b, 0x68, 0x5d, 0xab, 0x14, 0xd7, 0x42, 0xc8,
- 0x07, 0x48, 0xaa, 0x22, 0x24, 0x4e, 0x48, 0xad, 0x50, 0xa3, 0xd2, 0xaa, 0xad, 0x36, 0x5c, 0xa8,
- 0x90, 0x22, 0xbb, 0xd9, 0xb4, 0x0b, 0x1b, 0xaf, 0xf1, 0x6e, 0x5b, 0x85, 0x33, 0xe2, 0x15, 0xe0,
- 0x81, 0x10, 0x2f, 0xc1, 0x3b, 0xf0, 0x00, 0x9c, 0x90, 0xff, 0x12, 0x27, 0x76, 0x7a, 0x49, 0x6e,
- 0xbb, 0x33, 0x3b, 0xf3, 0x7d, 0x33, 0xf3, 0x79, 0xd7, 0x00, 0x2e, 0xe3, 0x6e, 0xd3, 0x0f, 0xb8,
- 0xe4, 0xa8, 0x7a, 0x45, 0xa5, 0xc3, 0x86, 0x06, 0x30, 0xea, 0xc9, 0xd8, 0x66, 0x34, 0xc4, 0xb5,
- 0x13, 0x90, 0x5e, 0xbc, 0xb3, 0x02, 0x58, 0x69, 0x13, 0x79, 0xc0, 0xb8, 0x8b, 0xc9, 0x97, 0x1b,
- 0x22, 0x24, 0x7a, 0x0d, 0x10, 0x10, 0x9f, 0x0b, 0x2a, 0x79, 0x30, 0xd4, 0x15, 0x53, 0xb1, 0xeb,
- 0x7b, 0xa8, 0x19, 0x27, 0x6a, 0xe2, 0x91, 0xe7, 0xa0, 0xfc, 0xf3, 0xf7, 0x73, 0x05, 0x67, 0xce,
- 0xa2, 0x35, 0x50, 0x39, 0xed, 0xe9, 0x25, 0x53, 0xb1, 0x6b, 0x38, 0x5c, 0xa2, 0x87, 0x50, 0x61,
- 0x74, 0x40, 0xa5, 0xae, 0x9a, 0x8a, 0xad, 0xe2, 0x78, 0x63, 0x1d, 0xc3, 0xea, 0x08, 0x53, 0xf8,
- 0xdc, 0x13, 0x04, 0x21, 0x28, 0x0b, 0xfa, 0x95, 0x44, 0x70, 0x2a, 0x8e, 0xd6, 0xa1, 0xad, 0xe7,
- 0x48, 0x27, 0xca, 0xd7, 0xc0, 0xd1, 0x3a, 0x85, 0x50, 0x47, 0x10, 0xd6, 0x5f, 0x65, 0x94, 0x4d,
- 0xcc, 0x5f, 0xc2, 0x31, 0xac, 0x04, 0xe4, 0x96, 0x0a, 0xca, 0xbd, 0xae, 0xef, 0xc8, 0x6b, 0xa1,
- 0x97, 0x4c, 0xd5, 0xae, 0xef, 0x3d, 0x4d, 0xa3, 0xa7, 0xa0, 0x9a, 0x38, 0x39, 0x7d, 0xee, 0xc8,
- 0x6b, 0xbc, 0x1c, 0x64, 0x76, 0xa2, 0xb8, 0x7a, 0xe3, 0x0d, 0x34, 0xb2, 0x41, 0xc8, 0x00, 0x2d,
- 0x0d, 0x8b, 0xa8, 0xd6, 0xf0, 0x68, 0x1f, 0xb6, 0x20, 0x64, 0x91, 0xb6, 0x20, 0x5c, 0x5b, 0x7f,
- 0x14, 0x58, 0x1b, 0xb3, 0x98, 0xb7, 0x7f, 0x68, 0x07, 0x1a, 0x54, 0x74, 0xc5, 0x8d, 0x3b, 0xe0,
- 0xbd, 0x1b, 0x46, 0xf4, 0xb2, 0xa9, 0xd8, 0x1a, 0xae, 0x53, 0xd1, 0x49, 0x4d, 0x61, 0xa2, 0x01,
- 0xef, 0x11, 0xbd, 0x62, 0x2a, 0x76, 0x05, 0x47, 0xeb, 0x09, 0xd6, 0xd5, 0x19, 0xac, 0x97, 0xc6,
- 0xac, 0xd1, 0x33, 0x28, 0xcb, 0xa1, 0x4f, 0x74, 0xcd, 0x54, 0xec, 0x95, 0xf1, 0x30, 0xce, 0xdc,
- 0x4f, 0xe4, 0x52, 0xbe, 0x1f, 0xfa, 0x04, 0x47, 0x7e, 0xeb, 0x10, 0xe0, 0xe4, 0xb0, 0x73, 0xce,
- 0xa9, 0x27, 0x49, 0x30, 0x87, 0x2c, 0x8e, 0x60, 0xf9, 0x94, 0xdc, 0x85, 0x4d, 0x8a, 0x21, 0x0a,
- 0x53, 0xe5, 0x05, 0x9b, 0x52, 0x57, 0x33, 0x0d, 0x67, 0xf0, 0xa8, 0x4d, 0xe4, 0x98, 0xd5, 0x02,
- 0x64, 0xb6, 0x09, 0x5a, 0xf8, 0x95, 0x76, 0x69, 0x2f, 0x16, 0x58, 0x0d, 0x2f, 0x85, 0xfb, 0xa3,
- 0x9e, 0xb0, 0xce, 0x60, 0x7d, 0x1a, 0x2d, 0x99, 0xf1, 0x2b, 0x68, 0xb0, 0xbe, 0xe8, 0xfa, 0x89,
- 0x5d, 0x57, 0x22, 0x65, 0x8e, 0x00, 0xc7, 0x21, 0xb8, 0xce, 0xfa, 0x22, 0x0d, 0xb7, 0x7e, 0x29,
- 0xa0, 0xb7, 0x89, 0x3c, 0x25, 0x77, 0x0b, 0x2d, 0x21, 0x2b, 0x80, 0x78, 0x14, 0x63, 0x01, 0x4c,
- 0x08, 0xbf, 0x92, 0x08, 0x1f, 0x6d, 0x01, 0x78, 0x5c, 0x76, 0xa9, 0xd7, 0x75, 0x18, 0x4b, 0x74,
- 0xa6, 0x79, 0x5c, 0x1e, 0x79, 0xfb, 0x8c, 0xa1, 0x6d, 0xa8, 0x27, 0xde, 0x80, 0xf4, 0x85, 0x5e,
- 0x31, 0x55, 0xbb, 0x81, 0x6b, 0x91, 0x1b, 0x93, 0xbe, 0xb0, 0x30, 0x6c, 0x16, 0x54, 0x31, 0x5f,
- 0x6b, 0x2e, 0xa2, 0xce, 0xec, 0x33, 0xb6, 0xc8, 0xce, 0xbc, 0x2b, 0x6b, 0xa5, 0x35, 0x35, 0xe1,
- 0x3b, 0x9d, 0x7b, 0x3e, 0xbe, 0xdf, 0x15, 0x58, 0x3f, 0xa1, 0x62, 0xb1, 0x5a, 0xdc, 0x82, 0x5a,
- 0x3a, 0xb8, 0x54, 0x8c, 0x63, 0x43, 0xf1, 0x28, 0xad, 0x73, 0xd8, 0xc8, 0xf1, 0x98, 0xaf, 0xb4,
- 0xcf, 0xb0, 0x19, 0x66, 0x5c, 0xf0, 0x2c, 0x66, 0xd0, 0xef, 0x80, 0x51, 0x04, 0x36, 0x57, 0x05,
- 0x7b, 0xdf, 0x2a, 0x50, 0x0f, 0xef, 0x9b, 0x0e, 0x09, 0x6e, 0xe9, 0x25, 0x41, 0x6f, 0x61, 0x29,
- 0xb9, 0xa6, 0xd1, 0xfa, 0xd4, 0xeb, 0x91, 0xd4, 0x65, 0x6c, 0xe4, 0xec, 0x31, 0x05, 0xab, 0xfa,
- 0xef, 0x87, 0x5d, 0xd2, 0x4a, 0xbb, 0x0a, 0x6a, 0x83, 0x96, 0x5e, 0xf6, 0x68, 0x63, 0xc6, 0x23,
- 0x64, 0xe8, 0x79, 0x47, 0x2e, 0xd1, 0x87, 0xe8, 0xa1, 0xcf, 0xd4, 0x8b, 0x1e, 0x67, 0xa2, 0xf2,
- 0x4d, 0x37, 0xb6, 0x67, 0xb9, 0x73, 0xa9, 0x5d, 0x78, 0x90, 0xfb, 0x34, 0x91, 0x99, 0x09, 0x2f,
- 0xbc, 0x7b, 0x8c, 0x9d, 0x7b, 0x4e, 0xcc, 0xc0, 0x98, 0x9c, 0xd8, 0x04, 0x46, 0xa1, 0x72, 0x26,
- 0x30, 0x8a, 0xc7, 0x9d, 0xc1, 0xf8, 0x08, 0xab, 0x53, 0xaa, 0x46, 0xa3, 0x26, 0x14, 0x7f, 0x76,
- 0xc6, 0x93, 0x99, 0xfe, 0x5c, 0x76, 0x02, 0x28, 0x2f, 0x3a, 0xb4, 0x93, 0x4d, 0x50, 0x5c, 0x83,
- 0x75, 0xdf, 0x91, 0x69, 0x98, 0x83, 0xdd, 0x8b, 0xf0, 0x38, 0x73, 0xdc, 0xe6, 0x25, 0x1f, 0xb4,
- 0xe2, 0xe5, 0x0b, 0x1e, 0x5c, 0xb5, 0xe2, 0x24, 0xad, 0xe8, 0xb7, 0xaf, 0x75, 0xc5, 0x93, 0xbd,
- 0xef, 0xba, 0xd5, 0xc8, 0xf4, 0xf2, 0x7f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbd, 0x71, 0xf0, 0xaa,
- 0x39, 0x0a, 0x00, 0x00,
+ // 639 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xcb, 0x6e, 0xd3, 0x40,
+ 0x14, 0xd5, 0xc4, 0x49, 0x9a, 0xdc, 0xa4, 0x0f, 0x8d, 0xa0, 0x75, 0x2d, 0x1e, 0xae, 0x85, 0x90,
+ 0x17, 0x90, 0x54, 0x45, 0x48, 0xac, 0x90, 0xa8, 0x50, 0xab, 0xaa, 0x15, 0xad, 0x26, 0x6c, 0x40,
+ 0x48, 0x91, 0x5d, 0x4f, 0xdb, 0x81, 0x49, 0xc6, 0x78, 0xa6, 0x45, 0xe1, 0x03, 0xf8, 0x05, 0xf8,
+ 0x18, 0xd6, 0xfc, 0x04, 0xff, 0xc0, 0x07, 0xb0, 0x42, 0x63, 0xc7, 0x8f, 0xc6, 0x0e, 0x9b, 0x64,
+ 0x77, 0x1f, 0x73, 0xcf, 0x3d, 0xf7, 0xce, 0xf1, 0x18, 0xc0, 0xe7, 0xc2, 0xef, 0x85, 0x91, 0x50,
+ 0x02, 0x37, 0x2f, 0x99, 0xf2, 0xf8, 0xc4, 0x02, 0xce, 0xc6, 0x2a, 0x89, 0x59, 0x5d, 0x79, 0xe5,
+ 0x45, 0x34, 0x48, 0x3c, 0x27, 0x82, 0xb5, 0x43, 0xaa, 0xf6, 0xb9, 0xf0, 0x09, 0xfd, 0x7c, 0x4d,
+ 0xa5, 0xc2, 0x2f, 0x00, 0x22, 0x1a, 0x0a, 0xc9, 0x94, 0x88, 0x26, 0x26, 0xb2, 0x91, 0xdb, 0xd9,
+ 0xc3, 0xbd, 0x04, 0xa8, 0x47, 0xb2, 0xcc, 0x7e, 0xfd, 0xc7, 0xaf, 0x27, 0x88, 0x14, 0xce, 0xe2,
+ 0x0d, 0x30, 0x04, 0x0b, 0xcc, 0x9a, 0x8d, 0xdc, 0x36, 0xd1, 0x26, 0xbe, 0x03, 0x0d, 0xce, 0x46,
+ 0x4c, 0x99, 0x86, 0x8d, 0x5c, 0x83, 0x24, 0x8e, 0x73, 0x0c, 0xeb, 0x59, 0x4f, 0x19, 0x8a, 0xb1,
+ 0xa4, 0x18, 0x43, 0x5d, 0xb2, 0xaf, 0x34, 0x6e, 0x67, 0x90, 0xd8, 0xd6, 0xb1, 0xc0, 0x53, 0x5e,
+ 0x8c, 0xd7, 0x25, 0xb1, 0x9d, 0xb6, 0x30, 0xb2, 0x16, 0xce, 0x1f, 0x94, 0xa1, 0xc9, 0xc5, 0x47,
+ 0x38, 0x86, 0xb5, 0x88, 0xde, 0x30, 0xc9, 0xc4, 0x78, 0x18, 0x7a, 0xea, 0x4a, 0x9a, 0x35, 0xdb,
+ 0x70, 0x3b, 0x7b, 0x8f, 0xd2, 0xea, 0x99, 0x56, 0x3d, 0x32, 0x3d, 0x7d, 0xe6, 0xa9, 0x2b, 0xb2,
+ 0x1a, 0x15, 0x3c, 0x59, 0x3d, 0xbd, 0xf5, 0x12, 0xba, 0xc5, 0x22, 0x6c, 0x41, 0x2b, 0x2d, 0x8b,
+ 0xa9, 0xb6, 0x49, 0xe6, 0xeb, 0x15, 0x68, 0x16, 0xe9, 0x0a, 0xb4, 0xed, 0xfc, 0x46, 0xb0, 0x91,
+ 0xb3, 0x58, 0x74, 0x7f, 0x78, 0x07, 0xba, 0x4c, 0x0e, 0xe5, 0xb5, 0x3f, 0x12, 0xc1, 0x35, 0xa7,
+ 0x66, 0xdd, 0x46, 0x6e, 0x8b, 0x74, 0x98, 0x1c, 0xa4, 0x21, 0x0d, 0x34, 0x12, 0x01, 0x35, 0x1b,
+ 0x36, 0x72, 0x1b, 0x24, 0xb6, 0x6f, 0xb1, 0x6e, 0xce, 0x61, 0xbd, 0x92, 0xb3, 0xc6, 0x8f, 0xa1,
+ 0xae, 0x26, 0x21, 0x35, 0x5b, 0x36, 0x72, 0xd7, 0xf2, 0xcb, 0x38, 0xf5, 0x3f, 0xd2, 0x73, 0xf5,
+ 0x76, 0x12, 0x52, 0x12, 0xe7, 0x9d, 0x03, 0x80, 0x93, 0x83, 0xc1, 0x99, 0x60, 0x63, 0x45, 0xa3,
+ 0x05, 0x64, 0x71, 0x04, 0xab, 0x6f, 0xe8, 0x17, 0xbd, 0xa4, 0xa4, 0x45, 0x25, 0x54, 0x59, 0xb0,
+ 0x29, 0x75, 0xa3, 0xb0, 0x70, 0x0e, 0x77, 0x0f, 0xa9, 0xca, 0x59, 0x2d, 0x41, 0x66, 0xdb, 0xd0,
+ 0xd2, 0x5f, 0xe9, 0x90, 0x05, 0x89, 0xc0, 0xda, 0x64, 0x45, 0xfb, 0x47, 0x81, 0x74, 0x4e, 0x61,
+ 0x73, 0xb6, 0xdb, 0xf4, 0x8e, 0x9f, 0x43, 0x97, 0x5f, 0xc8, 0x61, 0x38, 0x8d, 0x9b, 0x28, 0x56,
+ 0x66, 0xd6, 0x30, 0x2f, 0x21, 0x1d, 0x7e, 0x21, 0xd3, 0x72, 0xe7, 0x1b, 0x82, 0xcd, 0x13, 0x26,
+ 0x97, 0x3b, 0xc0, 0x3d, 0x68, 0xa7, 0xd7, 0x9d, 0x4e, 0x90, 0x07, 0x6e, 0x0b, 0xbf, 0x91, 0x7e,
+ 0xf6, 0x67, 0xb0, 0x55, 0xe2, 0xb1, 0xd8, 0x68, 0x9f, 0x60, 0x5b, 0x23, 0xbe, 0xe2, 0x7c, 0xa9,
+ 0xc3, 0x55, 0xd3, 0x1f, 0x80, 0x55, 0xd5, 0x6c, 0xa1, 0x09, 0xf6, 0x7e, 0x1a, 0xd0, 0xd1, 0x22,
+ 0x1d, 0xd0, 0xe8, 0x86, 0x9d, 0x53, 0xfc, 0x1a, 0x56, 0xa6, 0xdf, 0x36, 0xde, 0x9c, 0x79, 0x72,
+ 0xa6, 0x73, 0x59, 0x5b, 0xa5, 0x78, 0x42, 0xc1, 0x69, 0xfe, 0xfd, 0xee, 0xd6, 0x5a, 0xb5, 0x5d,
+ 0x84, 0x0f, 0xa1, 0x95, 0xbe, 0x10, 0x78, 0x6b, 0xce, 0xcb, 0x65, 0x99, 0xe5, 0x44, 0x09, 0xe8,
+ 0x5d, 0xfc, 0x77, 0x28, 0xcc, 0x8b, 0xef, 0x17, 0xaa, 0xca, 0x4b, 0xb7, 0x1e, 0xcc, 0x4b, 0x97,
+ 0xa0, 0x3f, 0xc0, 0xfa, 0x8c, 0x1a, 0x70, 0x56, 0x5c, 0x2d, 0x57, 0xeb, 0xe1, 0xdc, 0x7c, 0x09,
+ 0x9d, 0x02, 0x2e, 0x5f, 0x16, 0xde, 0x29, 0x02, 0x54, 0xaa, 0xc6, 0x72, 0xfe, 0x77, 0x64, 0xb6,
+ 0xcd, 0xfe, 0xee, 0x7b, 0x7d, 0x9c, 0x7b, 0x7e, 0xef, 0x5c, 0x8c, 0xfa, 0x89, 0xf9, 0x54, 0x44,
+ 0x97, 0xfd, 0x04, 0xa4, 0x1f, 0xff, 0x63, 0xfb, 0x97, 0x62, 0xea, 0x87, 0xbe, 0xdf, 0x8c, 0x43,
+ 0xcf, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xcf, 0x77, 0x87, 0x52, 0xa6, 0x07, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
@@ -1044,21 +826,7 @@ type BlobServiceClient interface {
// GetLFSPointers retrieves LFS pointers from a given set of object IDs.
// This RPC filters all requested objects and only returns those which refer
// to a valid LFS pointer.
- //
- // Deprecated in favor of `ListLFSPointers`, passing object IDs as revisions.
GetLFSPointers(ctx context.Context, in *GetLFSPointersRequest, opts ...grpc.CallOption) (BlobService_GetLFSPointersClient, error)
- // GetNewLFSPointers retrieves LFS pointers for a limited subset of the
- // commit graph. It will return all LFS pointers which are reachable by the
- // provided revision, but not reachable by any of the limiting references.
- //
- // Deprecated in favor of `ListLFSPointers`. `NotInAll` can be replaced with
- // `REVISION` `--not` `--all`, while `NotInRefs` can be replaced with
- // `REVISION` `--not` `NotInRevs...`.
- GetNewLFSPointers(ctx context.Context, in *GetNewLFSPointersRequest, opts ...grpc.CallOption) (BlobService_GetNewLFSPointersClient, error)
- // GetAllLFSPointers retrieves all LFS pointers of the given repository.
- //
- // Deprecated in favor of `ListLFSPointers`, passing `--all` as revision.
- GetAllLFSPointers(ctx context.Context, in *GetAllLFSPointersRequest, opts ...grpc.CallOption) (BlobService_GetAllLFSPointersClient, error)
// ListLFSPointers retrieves LFS pointers reachable from a given set of
// revisions by doing a graph walk. This includes both normal revisions like
// an object ID or branch, but also the pseudo-revisions "--all" and "--not"
@@ -1066,9 +834,8 @@ type BlobServiceClient interface {
// transitively reference any LFS pointers are ignored. It is not valid to
// pass revisions which do not resolve to an existing object.
ListLFSPointers(ctx context.Context, in *ListLFSPointersRequest, opts ...grpc.CallOption) (BlobService_ListLFSPointersClient, error)
- // ListAllLFSPointers retrieves all LFS pointers in the repository. In
- // contrast to `GetAllLFSPointers`, this RPC also includes LFS pointers which
- // are not reachable by any reference.
+ // ListAllLFSPointers retrieves all LFS pointers in the repository, including
+ // those not reachable by any reference.
ListAllLFSPointers(ctx context.Context, in *ListAllLFSPointersRequest, opts ...grpc.CallOption) (BlobService_ListAllLFSPointersClient, error)
}
@@ -1176,72 +943,8 @@ func (x *blobServiceGetLFSPointersClient) Recv() (*GetLFSPointersResponse, error
return m, nil
}
-func (c *blobServiceClient) GetNewLFSPointers(ctx context.Context, in *GetNewLFSPointersRequest, opts ...grpc.CallOption) (BlobService_GetNewLFSPointersClient, error) {
- stream, err := c.cc.NewStream(ctx, &_BlobService_serviceDesc.Streams[3], "/gitaly.BlobService/GetNewLFSPointers", opts...)
- if err != nil {
- return nil, err
- }
- x := &blobServiceGetNewLFSPointersClient{stream}
- if err := x.ClientStream.SendMsg(in); err != nil {
- return nil, err
- }
- if err := x.ClientStream.CloseSend(); err != nil {
- return nil, err
- }
- return x, nil
-}
-
-type BlobService_GetNewLFSPointersClient interface {
- Recv() (*GetNewLFSPointersResponse, error)
- grpc.ClientStream
-}
-
-type blobServiceGetNewLFSPointersClient struct {
- grpc.ClientStream
-}
-
-func (x *blobServiceGetNewLFSPointersClient) Recv() (*GetNewLFSPointersResponse, error) {
- m := new(GetNewLFSPointersResponse)
- if err := x.ClientStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
-func (c *blobServiceClient) GetAllLFSPointers(ctx context.Context, in *GetAllLFSPointersRequest, opts ...grpc.CallOption) (BlobService_GetAllLFSPointersClient, error) {
- stream, err := c.cc.NewStream(ctx, &_BlobService_serviceDesc.Streams[4], "/gitaly.BlobService/GetAllLFSPointers", opts...)
- if err != nil {
- return nil, err
- }
- x := &blobServiceGetAllLFSPointersClient{stream}
- if err := x.ClientStream.SendMsg(in); err != nil {
- return nil, err
- }
- if err := x.ClientStream.CloseSend(); err != nil {
- return nil, err
- }
- return x, nil
-}
-
-type BlobService_GetAllLFSPointersClient interface {
- Recv() (*GetAllLFSPointersResponse, error)
- grpc.ClientStream
-}
-
-type blobServiceGetAllLFSPointersClient struct {
- grpc.ClientStream
-}
-
-func (x *blobServiceGetAllLFSPointersClient) Recv() (*GetAllLFSPointersResponse, error) {
- m := new(GetAllLFSPointersResponse)
- if err := x.ClientStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
func (c *blobServiceClient) ListLFSPointers(ctx context.Context, in *ListLFSPointersRequest, opts ...grpc.CallOption) (BlobService_ListLFSPointersClient, error) {
- stream, err := c.cc.NewStream(ctx, &_BlobService_serviceDesc.Streams[5], "/gitaly.BlobService/ListLFSPointers", opts...)
+ stream, err := c.cc.NewStream(ctx, &_BlobService_serviceDesc.Streams[3], "/gitaly.BlobService/ListLFSPointers", opts...)
if err != nil {
return nil, err
}
@@ -1273,7 +976,7 @@ func (x *blobServiceListLFSPointersClient) Recv() (*ListLFSPointersResponse, err
}
func (c *blobServiceClient) ListAllLFSPointers(ctx context.Context, in *ListAllLFSPointersRequest, opts ...grpc.CallOption) (BlobService_ListAllLFSPointersClient, error) {
- stream, err := c.cc.NewStream(ctx, &_BlobService_serviceDesc.Streams[6], "/gitaly.BlobService/ListAllLFSPointers", opts...)
+ stream, err := c.cc.NewStream(ctx, &_BlobService_serviceDesc.Streams[4], "/gitaly.BlobService/ListAllLFSPointers", opts...)
if err != nil {
return nil, err
}
@@ -1314,21 +1017,7 @@ type BlobServiceServer interface {
// GetLFSPointers retrieves LFS pointers from a given set of object IDs.
// This RPC filters all requested objects and only returns those which refer
// to a valid LFS pointer.
- //
- // Deprecated in favor of `ListLFSPointers`, passing object IDs as revisions.
GetLFSPointers(*GetLFSPointersRequest, BlobService_GetLFSPointersServer) error
- // GetNewLFSPointers retrieves LFS pointers for a limited subset of the
- // commit graph. It will return all LFS pointers which are reachable by the
- // provided revision, but not reachable by any of the limiting references.
- //
- // Deprecated in favor of `ListLFSPointers`. `NotInAll` can be replaced with
- // `REVISION` `--not` `--all`, while `NotInRefs` can be replaced with
- // `REVISION` `--not` `NotInRevs...`.
- GetNewLFSPointers(*GetNewLFSPointersRequest, BlobService_GetNewLFSPointersServer) error
- // GetAllLFSPointers retrieves all LFS pointers of the given repository.
- //
- // Deprecated in favor of `ListLFSPointers`, passing `--all` as revision.
- GetAllLFSPointers(*GetAllLFSPointersRequest, BlobService_GetAllLFSPointersServer) error
// ListLFSPointers retrieves LFS pointers reachable from a given set of
// revisions by doing a graph walk. This includes both normal revisions like
// an object ID or branch, but also the pseudo-revisions "--all" and "--not"
@@ -1336,9 +1025,8 @@ type BlobServiceServer interface {
// transitively reference any LFS pointers are ignored. It is not valid to
// pass revisions which do not resolve to an existing object.
ListLFSPointers(*ListLFSPointersRequest, BlobService_ListLFSPointersServer) error
- // ListAllLFSPointers retrieves all LFS pointers in the repository. In
- // contrast to `GetAllLFSPointers`, this RPC also includes LFS pointers which
- // are not reachable by any reference.
+ // ListAllLFSPointers retrieves all LFS pointers in the repository, including
+ // those not reachable by any reference.
ListAllLFSPointers(*ListAllLFSPointersRequest, BlobService_ListAllLFSPointersServer) error
}
@@ -1355,12 +1043,6 @@ func (*UnimplementedBlobServiceServer) GetBlobs(req *GetBlobsRequest, srv BlobSe
func (*UnimplementedBlobServiceServer) GetLFSPointers(req *GetLFSPointersRequest, srv BlobService_GetLFSPointersServer) error {
return status.Errorf(codes.Unimplemented, "method GetLFSPointers not implemented")
}
-func (*UnimplementedBlobServiceServer) GetNewLFSPointers(req *GetNewLFSPointersRequest, srv BlobService_GetNewLFSPointersServer) error {
- return status.Errorf(codes.Unimplemented, "method GetNewLFSPointers not implemented")
-}
-func (*UnimplementedBlobServiceServer) GetAllLFSPointers(req *GetAllLFSPointersRequest, srv BlobService_GetAllLFSPointersServer) error {
- return status.Errorf(codes.Unimplemented, "method GetAllLFSPointers not implemented")
-}
func (*UnimplementedBlobServiceServer) ListLFSPointers(req *ListLFSPointersRequest, srv BlobService_ListLFSPointersServer) error {
return status.Errorf(codes.Unimplemented, "method ListLFSPointers not implemented")
}
@@ -1435,48 +1117,6 @@ func (x *blobServiceGetLFSPointersServer) Send(m *GetLFSPointersResponse) error
return x.ServerStream.SendMsg(m)
}
-func _BlobService_GetNewLFSPointers_Handler(srv interface{}, stream grpc.ServerStream) error {
- m := new(GetNewLFSPointersRequest)
- if err := stream.RecvMsg(m); err != nil {
- return err
- }
- return srv.(BlobServiceServer).GetNewLFSPointers(m, &blobServiceGetNewLFSPointersServer{stream})
-}
-
-type BlobService_GetNewLFSPointersServer interface {
- Send(*GetNewLFSPointersResponse) error
- grpc.ServerStream
-}
-
-type blobServiceGetNewLFSPointersServer struct {
- grpc.ServerStream
-}
-
-func (x *blobServiceGetNewLFSPointersServer) Send(m *GetNewLFSPointersResponse) error {
- return x.ServerStream.SendMsg(m)
-}
-
-func _BlobService_GetAllLFSPointers_Handler(srv interface{}, stream grpc.ServerStream) error {
- m := new(GetAllLFSPointersRequest)
- if err := stream.RecvMsg(m); err != nil {
- return err
- }
- return srv.(BlobServiceServer).GetAllLFSPointers(m, &blobServiceGetAllLFSPointersServer{stream})
-}
-
-type BlobService_GetAllLFSPointersServer interface {
- Send(*GetAllLFSPointersResponse) error
- grpc.ServerStream
-}
-
-type blobServiceGetAllLFSPointersServer struct {
- grpc.ServerStream
-}
-
-func (x *blobServiceGetAllLFSPointersServer) Send(m *GetAllLFSPointersResponse) error {
- return x.ServerStream.SendMsg(m)
-}
-
func _BlobService_ListLFSPointers_Handler(srv interface{}, stream grpc.ServerStream) error {
m := new(ListLFSPointersRequest)
if err := stream.RecvMsg(m); err != nil {
@@ -1540,16 +1180,6 @@ var _BlobService_serviceDesc = grpc.ServiceDesc{
ServerStreams: true,
},
{
- StreamName: "GetNewLFSPointers",
- Handler: _BlobService_GetNewLFSPointers_Handler,
- ServerStreams: true,
- },
- {
- StreamName: "GetAllLFSPointers",
- Handler: _BlobService_GetAllLFSPointers_Handler,
- ServerStreams: true,
- },
- {
StreamName: "ListLFSPointers",
Handler: _BlobService_ListLFSPointers_Handler,
ServerStreams: true,
diff --git a/proto/go/gitalypb/remote.pb.go b/proto/go/gitalypb/remote.pb.go
index fc5e43ff9..23ae90fe1 100644
--- a/proto/go/gitalypb/remote.pb.go
+++ b/proto/go/gitalypb/remote.pb.go
@@ -514,12 +514,25 @@ func (m *FindRemoteRepositoryResponse) GetExists() bool {
return false
}
+// FindRemoteRootRefRequest represents a request for the FindRemoteRootRef RPC.
type FindRemoteRootRefRequest struct {
- Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
- Remote string `protobuf:"bytes,2,opt,name=remote,proto3" json:"remote,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
+ // Repository is the repository in which the request shall be executed. If
+ // a remote name is given, then this is the repository in which the remote
+ // will be looked up.
+ Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
+ // Remote is the name of the remote whose root reference shall be
+ // looked up. The remote must have been created before this call. This
+ // parameter is deprecated in favor of `RemoteUrl`, see
+ // https://gitlab.com/gitlab-org/gitaly/-/issues/1773.
+ Remote string `protobuf:"bytes,2,opt,name=remote,proto3" json:"remote,omitempty"` // Deprecated: Do not use.
+ // RemoteUrl specifies the remote repository URL which should be fetched from.
+ RemoteUrl string `protobuf:"bytes,3,opt,name=remote_url,json=remoteUrl,proto3" json:"remote_url,omitempty"`
+ // HttpAuthorizationHeader is the HTTP header which should be added to the
+ // request in order to authenticate against the repository.
+ HttpAuthorizationHeader string `protobuf:"bytes,4,opt,name=http_authorization_header,json=httpAuthorizationHeader,proto3" json:"http_authorization_header,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
}
func (m *FindRemoteRootRefRequest) Reset() { *m = FindRemoteRootRefRequest{} }
@@ -554,6 +567,7 @@ func (m *FindRemoteRootRefRequest) GetRepository() *Repository {
return nil
}
+// Deprecated: Do not use.
func (m *FindRemoteRootRefRequest) GetRemote() string {
if m != nil {
return m.Remote
@@ -561,7 +575,24 @@ func (m *FindRemoteRootRefRequest) GetRemote() string {
return ""
}
+func (m *FindRemoteRootRefRequest) GetRemoteUrl() string {
+ if m != nil {
+ return m.RemoteUrl
+ }
+ return ""
+}
+
+func (m *FindRemoteRootRefRequest) GetHttpAuthorizationHeader() string {
+ if m != nil {
+ return m.HttpAuthorizationHeader
+ }
+ return ""
+}
+
+// FindRemoteRootRefResponse represents the response for the FindRemoteRootRef
+// request.
type FindRemoteRootRefResponse struct {
+ // Ref is the name of the remote root reference.
Ref string `protobuf:"bytes,1,opt,name=ref,proto3" json:"ref,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
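With the new fields, callers can point FindRemoteRootRef at a URL directly instead of creating a named remote first. A hedged sketch of issuing the RPC with the new fields; the socket address and credentials are placeholders:

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

func main() {
	conn, err := grpc.Dial("unix:///var/run/gitaly.socket", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := gitalypb.NewRemoteServiceClient(conn)

	// RemoteUrl replaces the deprecated Remote name; the optional
	// HttpAuthorizationHeader is forwarded when fetching from that URL.
	resp, err := client.FindRemoteRootRef(context.Background(), &gitalypb.FindRemoteRootRefRequest{
		Repository:              &gitalypb.Repository{StorageName: "default", RelativePath: "example.git"},
		RemoteUrl:               "https://gitlab.com/gitlab-org/gitaly.git",
		HttpAuthorizationHeader: "Basic <redacted>", // placeholder credential
	})
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println("root ref:", resp.GetRef())
}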
@@ -754,56 +785,59 @@ func init() {
func init() { proto.RegisterFile("remote.proto", fileDescriptor_eefc82927d57d89b) }
var fileDescriptor_eefc82927d57d89b = []byte{
- // 779 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0x4d, 0x6f, 0xd3, 0x4c,
- 0x10, 0x96, 0x1d, 0x37, 0x1f, 0x93, 0xb4, 0x4a, 0x36, 0x55, 0x5f, 0xc7, 0xa9, 0xf4, 0xa6, 0x86,
- 0x8a, 0x1c, 0x68, 0x82, 0xca, 0x87, 0x38, 0x20, 0x21, 0x02, 0xaa, 0xca, 0x47, 0x2b, 0x61, 0xe8,
- 0x85, 0x03, 0xc6, 0x49, 0x36, 0x89, 0x15, 0xc7, 0x6b, 0x76, 0x9d, 0x42, 0xfe, 0x01, 0x47, 0x6e,
- 0xe5, 0x2f, 0x70, 0xe1, 0x67, 0x70, 0xe0, 0x27, 0xf5, 0x84, 0xd6, 0xbb, 0x49, 0x9c, 0xd4, 0x09,
- 0x87, 0xc2, 0xcd, 0x3b, 0x33, 0xfb, 0xcc, 0xcc, 0x33, 0x3b, 0x8f, 0x0c, 0x05, 0x8a, 0x47, 0x24,
- 0xc4, 0x8d, 0x80, 0x92, 0x90, 0xa0, 0x74, 0xdf, 0x0d, 0x1d, 0x6f, 0x62, 0x80, 0xe7, 0xfa, 0xa1,
- 0xb0, 0x19, 0x05, 0x36, 0x70, 0x28, 0xee, 0x8a, 0x93, 0xf9, 0x43, 0x81, 0xe2, 0x93, 0x6e, 0xd7,
- 0x8a, 0x6e, 0x59, 0xf8, 0xe3, 0x18, 0xb3, 0x10, 0x3d, 0x04, 0xa0, 0x38, 0x20, 0xcc, 0x0d, 0x09,
- 0x9d, 0xe8, 0x4a, 0x4d, 0xa9, 0xe7, 0x0f, 0x51, 0x43, 0x60, 0x35, 0xac, 0x99, 0xa7, 0xa5, 0x7d,
- 0xfb, 0x79, 0x5b, 0xb1, 0x62, 0xb1, 0x08, 0x81, 0xe6, 0x3b, 0x23, 0xac, 0xab, 0x35, 0xa5, 0x9e,
- 0xb3, 0xa2, 0x6f, 0x54, 0x84, 0xd4, 0x98, 0x7a, 0x7a, 0x2a, 0x32, 0xf1, 0x4f, 0xb4, 0x0f, 0x5b,
- 0x23, 0x97, 0x52, 0x42, 0x6d, 0x8a, 0x7b, 0x23, 0x27, 0x60, 0xfa, 0x46, 0x2d, 0x55, 0xcf, 0x59,
- 0x9b, 0xc2, 0x6a, 0x09, 0xe3, 0x0b, 0x2d, 0xab, 0x15, 0x37, 0xa6, 0x46, 0x19, 0x6a, 0x96, 0xa1,
- 0x14, 0xab, 0x97, 0x05, 0xc4, 0x67, 0xd8, 0xec, 0x40, 0x99, 0x5b, 0xce, 0xf1, 0x3f, 0xec, 0xc3,
- 0x6c, 0xc0, 0xf6, 0x62, 0x12, 0x91, 0x1c, 0xed, 0x40, 0x9a, 0x62, 0x36, 0xf6, 0xc2, 0x28, 0x43,
- 0xd6, 0x92, 0x27, 0xf3, 0x42, 0x01, 0xe3, 0x08, 0x87, 0x9d, 0xc1, 0x73, 0x3f, 0xc4, 0xd4, 0x77,
- 0xbc, 0xbf, 0x55, 0xdc, 0x63, 0x28, 0x89, 0x29, 0xdb, 0x31, 0x00, 0x75, 0x15, 0x80, 0x55, 0xa4,
- 0x32, 0xef, 0xd4, 0x62, 0xde, 0x87, 0x6a, 0x62, 0x61, 0x7f, 0x68, 0xe8, 0xab, 0x0a, 0x95, 0xb3,
- 0xa0, 0xeb, 0x84, 0x92, 0x81, 0x13, 0x39, 0xad, 0xeb, 0xf6, 0x53, 0x81, 0x2c, 0xc5, 0x3d, 0x3b,
- 0x46, 0x78, 0x86, 0xe2, 0xde, 0x29, 0x7f, 0x3b, 0xf7, 0x60, 0x87, 0xf8, 0xde, 0xc4, 0x6e, 0x53,
- 0xc7, 0xef, 0x0c, 0x30, 0xb3, 0x47, 0x4e, 0xd8, 0x19, 0xb8, 0x7e, 0x5f, 0x4f, 0xd5, 0x52, 0xf5,
- 0x82, 0xb5, 0xcd, 0xbd, 0x2d, 0xe9, 0x3c, 0x91, 0x3e, 0xf4, 0x1f, 0x64, 0x18, 0x1b, 0xd8, 0x43,
- 0x3c, 0xd1, 0xb5, 0x08, 0x2f, 0xcd, 0xd8, 0xe0, 0x25, 0x9e, 0xa0, 0xff, 0x21, 0x3f, 0xf4, 0xc9,
- 0x27, 0xdf, 0x1e, 0x10, 0x16, 0xf2, 0x57, 0xc7, 0x9d, 0x10, 0x99, 0x8e, 0xb9, 0x05, 0x35, 0xa0,
- 0x3c, 0xc4, 0x38, 0xb0, 0xbb, 0xee, 0x39, 0xa6, 0x7d, 0xec, 0x87, 0xfc, 0xd9, 0x31, 0x3d, 0x1d,
- 0xf1, 0x50, 0xe2, 0xae, 0x67, 0x53, 0x8f, 0x85, 0x7b, 0xcc, 0x7c, 0x0a, 0x46, 0x12, 0x23, 0x92,
- 0xc8, 0x7d, 0xd8, 0x5a, 0x02, 0x52, 0xa2, 0xaa, 0x37, 0xbb, 0x0b, 0x20, 0xef, 0xa1, 0x7a, 0xe4,
- 0xfa, 0xb3, 0x37, 0x3d, 0x1b, 0x9c, 0x24, 0x36, 0x1a, 0x07, 0x77, 0x45, 0xa4, 0xe6, 0x2c, 0x79,
- 0x42, 0xb7, 0xa0, 0xc0, 0x42, 0x42, 0x9d, 0x3e, 0x8e, 0x51, 0xd7, 0xd2, 0xbe, 0x70, 0x7a, 0xf3,
- 0xd2, 0xc3, 0x49, 0x34, 0x1f, 0xc0, 0x6e, 0x32, 0xfe, 0x7c, 0xde, 0xf8, 0xb3, 0xcb, 0x09, 0x91,
- 0xf3, 0x16, 0x27, 0xd3, 0x03, 0x3d, 0x76, 0x8f, 0x10, 0x5e, 0xed, 0xf5, 0xa7, 0x3d, 0x6f, 0x47,
- 0x8d, 0xb7, 0x63, 0x1e, 0x40, 0x25, 0x21, 0x9b, 0x2c, 0xb1, 0x08, 0x29, 0x8a, 0x7b, 0x92, 0x00,
- 0xfe, 0x69, 0x9e, 0x02, 0x7a, 0xe5, 0xb2, 0x50, 0x84, 0xb3, 0x6b, 0x97, 0x65, 0x7e, 0x57, 0xa0,
- 0xbc, 0x00, 0x28, 0x33, 0x3f, 0x82, 0x8c, 0x28, 0x50, 0x0c, 0x2f, 0x7f, 0x68, 0x4e, 0xe1, 0x12,
- 0xa2, 0x1b, 0xb2, 0xfa, 0xe9, 0x15, 0xe3, 0x2d, 0xa4, 0x85, 0x69, 0xa6, 0x28, 0x4a, 0x4c, 0x19,
- 0xab, 0x90, 0xeb, 0xf1, 0x3d, 0xb4, 0xb9, 0x3e, 0x0a, 0x36, 0xb2, 0x91, 0xe1, 0x8c, 0x7a, 0x7c,
- 0x2b, 0x82, 0x31, 0x13, 0x3e, 0xa1, 0x9d, 0x19, 0x7e, 0x3e, 0xa3, 0xde, 0xe1, 0x2f, 0x0d, 0x36,
- 0x05, 0xec, 0x1b, 0x4c, 0xcf, 0xdd, 0x0e, 0x46, 0xc7, 0x90, 0x9b, 0xa9, 0x22, 0xd2, 0xa7, 0x15,
- 0x2e, 0x0b, 0xbb, 0x51, 0x49, 0xf0, 0x48, 0x09, 0x4d, 0x5f, 0x5e, 0xd4, 0xd5, 0xac, 0x82, 0x7a,
- 0x50, 0x4e, 0xd0, 0x06, 0x34, 0xeb, 0x7a, 0xb5, 0xa2, 0x19, 0x37, 0xd6, 0xc6, 0x2c, 0xe5, 0x79,
- 0x0d, 0x85, 0xb8, 0x9a, 0xa2, 0xea, 0x7c, 0x4a, 0x57, 0x84, 0xdc, 0xd8, 0x4d, 0x76, 0x2e, 0x41,
- 0x62, 0x40, 0x57, 0x97, 0x11, 0xed, 0x4d, 0xef, 0xae, 0x94, 0x2e, 0xc3, 0x5c, 0x17, 0xb2, 0x90,
- 0x44, 0xad, 0x2b, 0x68, 0x08, 0xdb, 0x49, 0xeb, 0x84, 0xe6, 0xed, 0xaf, 0x5e, 0x66, 0xe3, 0xe6,
- 0xfa, 0x20, 0x99, 0x2c, 0x7b, 0x79, 0x51, 0xd7, 0xb2, 0x6a, 0x51, 0x45, 0x1f, 0xa0, 0x74, 0x65,
- 0x2b, 0x50, 0x2d, 0x01, 0x64, 0x61, 0x3d, 0x8d, 0xbd, 0x35, 0x11, 0x8b, 0x0d, 0xb5, 0xee, 0xbc,
- 0xe3, 0xb1, 0x9e, 0xd3, 0x6e, 0x74, 0xc8, 0xa8, 0x29, 0x3e, 0x0f, 0x08, 0xed, 0x37, 0x05, 0x42,
- 0x33, 0xfa, 0x4f, 0x68, 0xf6, 0x89, 0x3c, 0x07, 0xed, 0x76, 0x3a, 0x32, 0xdd, 0xfd, 0x1d, 0x00,
- 0x00, 0xff, 0xff, 0x03, 0xe4, 0xf2, 0x17, 0x6c, 0x08, 0x00, 0x00,
+ // 831 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xcd, 0x8e, 0xe3, 0x44,
+ 0x10, 0x96, 0x1d, 0x6f, 0xc6, 0xa9, 0x64, 0x56, 0x49, 0x67, 0xb4, 0xeb, 0x38, 0x8b, 0xc8, 0x1a,
+ 0x56, 0xe4, 0xc0, 0x26, 0x68, 0xf8, 0x11, 0x42, 0x48, 0x68, 0x03, 0x5a, 0x0d, 0x3f, 0xbb, 0x12,
+ 0x86, 0xb9, 0x70, 0xc0, 0x38, 0x49, 0x27, 0xb6, 0xe2, 0xb8, 0x4d, 0x77, 0x67, 0x20, 0x3c, 0x01,
+ 0x47, 0x6e, 0xc3, 0x2b, 0x70, 0xe1, 0x31, 0x38, 0xc0, 0x1b, 0xcd, 0x09, 0xf5, 0x4f, 0x1c, 0x27,
+ 0xe3, 0x84, 0xc3, 0xb0, 0xb7, 0xee, 0xfa, 0xaa, 0xbf, 0xaa, 0xfa, 0xaa, 0xbb, 0x6c, 0x68, 0x50,
+ 0xbc, 0x24, 0x1c, 0x0f, 0x32, 0x4a, 0x38, 0x41, 0xd5, 0x79, 0xcc, 0xc3, 0x64, 0xed, 0x42, 0x12,
+ 0xa7, 0x5c, 0xd9, 0xdc, 0x06, 0x8b, 0x42, 0x8a, 0xa7, 0x6a, 0xe7, 0xfd, 0x69, 0x40, 0xf3, 0xd9,
+ 0x74, 0xea, 0xcb, 0x53, 0x3e, 0xfe, 0x71, 0x85, 0x19, 0x47, 0x1f, 0x02, 0x50, 0x9c, 0x11, 0x16,
+ 0x73, 0x42, 0xd7, 0x8e, 0xd1, 0x33, 0xfa, 0xf5, 0x73, 0x34, 0x50, 0x5c, 0x03, 0x3f, 0x47, 0x46,
+ 0xd6, 0xef, 0x7f, 0xbd, 0x6d, 0xf8, 0x05, 0x5f, 0x84, 0xc0, 0x4a, 0xc3, 0x25, 0x76, 0xcc, 0x9e,
+ 0xd1, 0xaf, 0xf9, 0x72, 0x8d, 0x9a, 0x50, 0x59, 0xd1, 0xc4, 0xa9, 0x48, 0x93, 0x58, 0xa2, 0x27,
+ 0x70, 0x7f, 0x19, 0x53, 0x4a, 0x68, 0x40, 0xf1, 0x6c, 0x19, 0x66, 0xcc, 0xb9, 0xd7, 0xab, 0xf4,
+ 0x6b, 0xfe, 0xa9, 0xb2, 0xfa, 0xca, 0xf8, 0x85, 0x65, 0x5b, 0xcd, 0x7b, 0x1b, 0xa3, 0x76, 0xf5,
+ 0xda, 0xd0, 0x2a, 0xe4, 0xcb, 0x32, 0x92, 0x32, 0xec, 0x4d, 0xa0, 0x2d, 0x2c, 0x57, 0xf8, 0x15,
+ 0xd6, 0xe1, 0x0d, 0xe0, 0x6c, 0x37, 0x88, 0x0a, 0x8e, 0x1e, 0x40, 0x95, 0x62, 0xb6, 0x4a, 0xb8,
+ 0x8c, 0x60, 0xfb, 0x7a, 0xe7, 0x5d, 0x1b, 0xe0, 0x3e, 0xc7, 0x7c, 0x12, 0x7d, 0x9e, 0x72, 0x4c,
+ 0xd3, 0x30, 0xf9, 0xbf, 0x92, 0xfb, 0x04, 0x5a, 0xaa, 0xcb, 0x41, 0x81, 0xc0, 0x3c, 0x44, 0xe0,
+ 0x37, 0xa9, 0x8e, 0xbb, 0xb1, 0x78, 0xef, 0x43, 0xb7, 0x34, 0xb1, 0xff, 0x28, 0xe8, 0x37, 0x13,
+ 0x3a, 0x97, 0xd9, 0x34, 0xe4, 0x5a, 0x81, 0x17, 0xba, 0x5b, 0x77, 0xad, 0xa7, 0x03, 0x36, 0xc5,
+ 0xb3, 0xa0, 0x20, 0xf8, 0x09, 0xc5, 0xb3, 0x97, 0xe2, 0xee, 0xbc, 0x07, 0x0f, 0x48, 0x9a, 0xac,
+ 0x83, 0x31, 0x0d, 0xd3, 0x49, 0x84, 0x59, 0xb0, 0x0c, 0xf9, 0x24, 0x8a, 0xd3, 0xb9, 0x53, 0xe9,
+ 0x55, 0xfa, 0x0d, 0xff, 0x4c, 0xa0, 0x23, 0x0d, 0xbe, 0xd0, 0x18, 0x7a, 0x08, 0x27, 0x8c, 0x45,
+ 0xc1, 0x02, 0xaf, 0x1d, 0x4b, 0xf2, 0x55, 0x19, 0x8b, 0xbe, 0xc4, 0x6b, 0xf4, 0x3a, 0xd4, 0x17,
+ 0x29, 0xf9, 0x29, 0x0d, 0x22, 0xc2, 0xb8, 0xb8, 0x75, 0x02, 0x04, 0x69, 0xba, 0x10, 0x16, 0x34,
+ 0x80, 0xf6, 0x02, 0xe3, 0x2c, 0x98, 0xc6, 0x57, 0x98, 0xce, 0x71, 0xca, 0xc5, 0xb5, 0x63, 0x4e,
+ 0x55, 0xea, 0xd0, 0x12, 0xd0, 0x67, 0x1b, 0xc4, 0xc7, 0x33, 0xe6, 0x7d, 0x0a, 0x6e, 0x99, 0x22,
+ 0x5a, 0xc8, 0x27, 0x70, 0x7f, 0x8f, 0xc8, 0x90, 0x59, 0x9f, 0x4e, 0x77, 0x48, 0xbe, 0x87, 0xee,
+ 0xf3, 0x38, 0xcd, 0xef, 0x74, 0xde, 0x38, 0x2d, 0xac, 0x6c, 0x87, 0x80, 0xa4, 0xa8, 0x35, 0x5f,
+ 0xef, 0xd0, 0x5b, 0xd0, 0x60, 0x9c, 0xd0, 0x70, 0x8e, 0x0b, 0xd2, 0x8d, 0xac, 0x5f, 0x85, 0xbc,
+ 0x75, 0x8d, 0x08, 0x11, 0xbd, 0x0f, 0xe0, 0x51, 0x39, 0xff, 0xb6, 0xdf, 0xf8, 0xe7, 0x58, 0x08,
+ 0xa2, 0xfb, 0xad, 0x76, 0xde, 0x3f, 0x06, 0x38, 0x85, 0x83, 0x84, 0x88, 0x74, 0xef, 0xde, 0x6e,
+ 0x37, 0xaf, 0x47, 0x65, 0x6c, 0x3a, 0x46, 0x5e, 0xd3, 0x6b, 0x82, 0x55, 0x5e, 0xed, 0xed, 0xc8,
+ 0xa8, 0x29, 0xcb, 0x25, 0x4d, 0xd0, 0x47, 0xd0, 0x89, 0x38, 0xcf, 0x82, 0x70, 0xc5, 0x23, 0x42,
+ 0xe3, 0x5f, 0x42, 0x1e, 0x93, 0x34, 0x88, 0x70, 0x38, 0xc5, 0x54, 0xb7, 0xfa, 0xa1, 0x70, 0x78,
+ 0x56, 0xc4, 0x2f, 0x24, 0xec, 0x3d, 0x85, 0x4e, 0x49, 0x31, 0x5a, 0x82, 0x26, 0x54, 0x28, 0x9e,
+ 0x69, 0x81, 0xc5, 0xd2, 0x7b, 0x09, 0xe8, 0xab, 0x98, 0x71, 0xe5, 0xce, 0xee, 0x5c, 0xb5, 0xf7,
+ 0x87, 0x01, 0xed, 0x1d, 0x42, 0x1d, 0xf9, 0x63, 0x38, 0x51, 0xf5, 0xa9, 0xcb, 0x51, 0x3f, 0xf7,
+ 0x36, 0x74, 0x25, 0xde, 0x03, 0x9d, 0xfd, 0xe6, 0x88, 0xfb, 0x2d, 0x54, 0x95, 0x29, 0x9f, 0x58,
+ 0x46, 0x61, 0xf2, 0x76, 0xa1, 0x36, 0x13, 0xef, 0x5c, 0x8a, 0xa9, 0x5e, 0x96, 0x2d, 0x0d, 0x42,
+ 0xcb, 0x0e, 0xd8, 0xd9, 0x8a, 0x45, 0x05, 0xa1, 0x4f, 0xc4, 0xfe, 0x92, 0x26, 0xe7, 0x7f, 0x5b,
+ 0x70, 0xaa, 0x68, 0xbf, 0xc1, 0xf4, 0x2a, 0x9e, 0x60, 0x74, 0x01, 0xb5, 0x7c, 0xea, 0x22, 0x67,
+ 0x93, 0xe1, 0xfe, 0x87, 0xc3, 0xed, 0x94, 0x20, 0x7a, 0x44, 0x57, 0x6f, 0xae, 0xfb, 0xa6, 0x6d,
+ 0xa0, 0x19, 0xb4, 0x4b, 0x66, 0x0f, 0xca, 0xab, 0x3e, 0x3c, 0x31, 0xdd, 0x37, 0x8e, 0xfa, 0xec,
+ 0xc5, 0xf9, 0x1a, 0x1a, 0xc5, 0x69, 0x8d, 0xba, 0xdb, 0x2e, 0xdd, 0xfa, 0x50, 0xb8, 0x8f, 0xca,
+ 0xc1, 0x3d, 0x4a, 0x0c, 0xe8, 0xf6, 0x63, 0x47, 0x8f, 0x37, 0x67, 0x0f, 0x8e, 0x46, 0xd7, 0x3b,
+ 0xe6, 0xb2, 0x13, 0xc4, 0xec, 0x1b, 0x68, 0x01, 0x67, 0x65, 0xcf, 0x15, 0x6d, 0xcb, 0x3f, 0x3c,
+ 0x2c, 0xdc, 0x37, 0x8f, 0x3b, 0xe9, 0x60, 0xf6, 0xcd, 0x75, 0xdf, 0xb2, 0xcd, 0xa6, 0x89, 0x7e,
+ 0x80, 0xd6, 0xad, 0x57, 0x81, 0x7a, 0x25, 0x24, 0x3b, 0xaf, 0xdf, 0x7d, 0x7c, 0xc4, 0x63, 0xb7,
+ 0xa0, 0xd1, 0x3b, 0xdf, 0x09, 0xdf, 0x24, 0x1c, 0x0f, 0x26, 0x64, 0x39, 0x54, 0xcb, 0xa7, 0x84,
+ 0xce, 0x87, 0x8a, 0x61, 0x28, 0xff, 0x43, 0x86, 0x73, 0xa2, 0xf7, 0xd9, 0x78, 0x5c, 0x95, 0xa6,
+ 0x77, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xbe, 0xfc, 0x7f, 0x70, 0xcc, 0x08, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
@@ -828,6 +862,11 @@ type RemoteServiceClient interface {
// the patterns specified in the requests.
UpdateRemoteMirror(ctx context.Context, opts ...grpc.CallOption) (RemoteService_UpdateRemoteMirrorClient, error)
FindRemoteRepository(ctx context.Context, in *FindRemoteRepositoryRequest, opts ...grpc.CallOption) (*FindRemoteRepositoryResponse, error)
+	// FindRemoteRootRef tries to find the root reference of a remote
+	// repository. The root reference is the default branch as pointed to by
+	// the remote's HEAD reference. Returns an InvalidArgument error if the
+	// specified remote does not exist and a NotFound error if no HEAD
+	// branch was found.
FindRemoteRootRef(ctx context.Context, in *FindRemoteRootRefRequest, opts ...grpc.CallOption) (*FindRemoteRootRefResponse, error)
}
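The comment above documents the error contract of the new FindRemoteRootRef RPC. As a rough illustration only (not part of the generated code; the dial address is a placeholder and the request fields are left empty), a client might distinguish the documented status codes like this:

package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	gitalypb "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

func main() {
	// Placeholder address; a real client would also configure credentials.
	conn, err := grpc.Dial("localhost:9999", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := gitalypb.NewRemoteServiceClient(conn)

	// Request fields are intentionally left empty here; fill in the
	// repository and remote as required by FindRemoteRootRefRequest.
	resp, err := client.FindRemoteRootRef(context.Background(), &gitalypb.FindRemoteRootRefRequest{})
	switch status.Code(err) {
	case codes.OK:
		log.Printf("root ref response: %v", resp)
	case codes.InvalidArgument:
		log.Print("the specified remote does not exist")
	case codes.NotFound:
		log.Print("no HEAD branch was found on the remote")
	default:
		log.Fatalf("FindRemoteRootRef: %v", err)
	}
}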
@@ -930,6 +969,11 @@ type RemoteServiceServer interface {
// the patterns specified in the requests.
UpdateRemoteMirror(RemoteService_UpdateRemoteMirrorServer) error
FindRemoteRepository(context.Context, *FindRemoteRepositoryRequest) (*FindRemoteRepositoryResponse, error)
+	// FindRemoteRootRef tries to find the root reference of a remote
+	// repository. The root reference is the default branch as pointed to by
+	// the remote's HEAD reference. Returns an InvalidArgument error if the
+	// specified remote does not exist and a NotFound error if no HEAD
+	// branch was found.
FindRemoteRootRef(context.Context, *FindRemoteRootRefRequest) (*FindRemoteRootRefResponse, error)
}
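The same contract applies on the server side. A hypothetical handler skeleton (the lookups are stubbed out; the real implementation lives elsewhere in the repository, and a complete server must satisfy the full RemoteServiceServer interface) could surface those codes as follows:

package remote

import (
	"context"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	gitalypb "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

// server is a hypothetical receiver type used only for this sketch.
type server struct{}

// FindRemoteRootRef sketches how a handler could map the two documented
// failure modes onto gRPC status codes.
func (s *server) FindRemoteRootRef(ctx context.Context, req *gitalypb.FindRemoteRootRefRequest) (*gitalypb.FindRemoteRootRefResponse, error) {
	// A real handler would resolve the remote named in the request and read
	// the branch its HEAD points at; both lookups are stubbed out here.
	remoteExists, headRef := false, ""

	if !remoteExists {
		return nil, status.Error(codes.InvalidArgument, "remote does not exist")
	}
	if headRef == "" {
		return nil, status.Error(codes.NotFound, "no HEAD branch found")
	}
	return &gitalypb.FindRemoteRootRefResponse{}, nil
}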
diff --git a/proto/go/gitalypb/repository-service.pb.go b/proto/go/gitalypb/repository-service.pb.go
index 1f781c0fd..5f93de8f2 100644
--- a/proto/go/gitalypb/repository-service.pb.go
+++ b/proto/go/gitalypb/repository-service.pb.go
@@ -24,6 +24,31 @@ var _ = math.Inf
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
+type WriteCommitGraphRequest_SplitStrategy int32
+
+const (
+	// SizeMultiple uses the '--split --size-multiple=4' strategy to create/update the commit graph.
+	// https://git-scm.com/docs/git-commit-graph#Documentation/git-commit-graph.txt-emwriteem
+	// It is the default; there is no need to set it explicitly in the request.
+ WriteCommitGraphRequest_SizeMultiple WriteCommitGraphRequest_SplitStrategy = 0
+)
+
+var WriteCommitGraphRequest_SplitStrategy_name = map[int32]string{
+ 0: "SizeMultiple",
+}
+
+var WriteCommitGraphRequest_SplitStrategy_value = map[string]int32{
+ "SizeMultiple": 0,
+}
+
+func (x WriteCommitGraphRequest_SplitStrategy) String() string {
+ return proto.EnumName(WriteCommitGraphRequest_SplitStrategy_name, int32(x))
+}
+
+func (WriteCommitGraphRequest_SplitStrategy) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_e9b1768cf174c79b, []int{10, 0}
+}
+
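The SizeMultiple comment above ties the strategy to concrete git flags. Purely as an illustration (the helper name and package are made up; the real mapping lives in Gitaly's repository service code, not in this generated file), a server-side translation could look like:

package repository

import (
	"fmt"

	gitalypb "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

// commitGraphWriteFlags is a hypothetical helper that turns the requested
// split strategy into arguments for `git commit-graph write`.
func commitGraphWriteFlags(strategy gitalypb.WriteCommitGraphRequest_SplitStrategy) ([]string, error) {
	switch strategy {
	case gitalypb.WriteCommitGraphRequest_SizeMultiple:
		// The documented default strategy: '--split --size-multiple=4'.
		return []string{"--split", "--size-multiple=4"}, nil
	default:
		return nil, fmt.Errorf("unsupported split strategy: %d", strategy)
	}
}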
type GetArchiveRequest_Format int32
const (
@@ -92,7 +117,7 @@ func (x GetRawChangesResponse_RawChange_Operation) String() string {
}
func (GetRawChangesResponse_RawChange_Operation) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{65, 0, 0}
+ return fileDescriptor_e9b1768cf174c79b, []int{67, 0, 0}
}
type RepositoryExistsRequest struct {
@@ -483,10 +508,12 @@ func (m *GarbageCollectResponse) XXX_DiscardUnknown() {
var xxx_messageInfo_GarbageCollectResponse proto.InternalMessageInfo
type WriteCommitGraphRequest struct {
- Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
+ Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
+	// SplitStrategy is the strategy used to create/update the commit graph.
+ SplitStrategy WriteCommitGraphRequest_SplitStrategy `protobuf:"varint,2,opt,name=splitStrategy,proto3,enum=gitaly.WriteCommitGraphRequest_SplitStrategy" json:"splitStrategy,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
}
func (m *WriteCommitGraphRequest) Reset() { *m = WriteCommitGraphRequest{} }
@@ -521,6 +548,13 @@ func (m *WriteCommitGraphRequest) GetRepository() *Repository {
return nil
}
+func (m *WriteCommitGraphRequest) GetSplitStrategy() WriteCommitGraphRequest_SplitStrategy {
+ if m != nil {
+ return m.SplitStrategy
+ }
+ return WriteCommitGraphRequest_SizeMultiple
+}
+
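Because SizeMultiple is the enum's zero value and GetSplitStrategy falls back to it on a nil message, leaving the field unset is equivalent to setting it explicitly. A small sketch (the constructor function is hypothetical, not part of the generated code):

package example

import (
	gitalypb "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

// newWriteCommitGraphRequest builds a request with the default split strategy
// spelled out; omitting SplitStrategy entirely would yield the same message.
func newWriteCommitGraphRequest(repo *gitalypb.Repository) *gitalypb.WriteCommitGraphRequest {
	return &gitalypb.WriteCommitGraphRequest{
		Repository:    repo,
		SplitStrategy: gitalypb.WriteCommitGraphRequest_SizeMultiple,
	}
}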
type WriteCommitGraphResponse struct {
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
@@ -2020,6 +2054,89 @@ func (m *CreateBundleResponse) GetData() []byte {
return nil
}
+// GetConfigRequest is a request for the GetConfig RPC.
+type GetConfigRequest struct {
+	// Repository is the repository from which the configuration should be
+	// read.
+ Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetConfigRequest) Reset() { *m = GetConfigRequest{} }
+func (m *GetConfigRequest) String() string { return proto.CompactTextString(m) }
+func (*GetConfigRequest) ProtoMessage() {}
+func (*GetConfigRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_e9b1768cf174c79b, []int{44}
+}
+
+func (m *GetConfigRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_GetConfigRequest.Unmarshal(m, b)
+}
+func (m *GetConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_GetConfigRequest.Marshal(b, m, deterministic)
+}
+func (m *GetConfigRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_GetConfigRequest.Merge(m, src)
+}
+func (m *GetConfigRequest) XXX_Size() int {
+ return xxx_messageInfo_GetConfigRequest.Size(m)
+}
+func (m *GetConfigRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_GetConfigRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetConfigRequest proto.InternalMessageInfo
+
+func (m *GetConfigRequest) GetRepository() *Repository {
+ if m != nil {
+ return m.Repository
+ }
+ return nil
+}
+
+// GetConfigResponse is a response for the GetConfig RPC.
+type GetConfigResponse struct {
+	// Data contains the contents of the gitconfig.
+ Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetConfigResponse) Reset() { *m = GetConfigResponse{} }
+func (m *GetConfigResponse) String() string { return proto.CompactTextString(m) }
+func (*GetConfigResponse) ProtoMessage() {}
+func (*GetConfigResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_e9b1768cf174c79b, []int{45}
+}
+
+func (m *GetConfigResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_GetConfigResponse.Unmarshal(m, b)
+}
+func (m *GetConfigResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_GetConfigResponse.Marshal(b, m, deterministic)
+}
+func (m *GetConfigResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_GetConfigResponse.Merge(m, src)
+}
+func (m *GetConfigResponse) XXX_Size() int {
+ return xxx_messageInfo_GetConfigResponse.Size(m)
+}
+func (m *GetConfigResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_GetConfigResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetConfigResponse proto.InternalMessageInfo
+
+func (m *GetConfigResponse) GetData() []byte {
+ if m != nil {
+ return m.Data
+ }
+ return nil
+}
+
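GetConfigResponse carries the gitconfig contents in a Data field, which suggests chunked delivery. Assuming GetConfig is exposed on RepositoryServiceClient as a server-streaming RPC (the client-side method and its signature are not shown in this excerpt and are assumptions here), the chunks could be reassembled like this:

package example

import (
	"context"
	"io"

	gitalypb "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

// readGitconfig concatenates the streamed Data chunks into the full gitconfig.
// The GetConfig client method and its streaming shape are assumed.
func readGitconfig(ctx context.Context, client gitalypb.RepositoryServiceClient, repo *gitalypb.Repository) ([]byte, error) {
	stream, err := client.GetConfig(ctx, &gitalypb.GetConfigRequest{Repository: repo})
	if err != nil {
		return nil, err
	}

	var config []byte
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			return config, nil
		}
		if err != nil {
			return nil, err
		}
		config = append(config, resp.GetData()...)
	}
}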
type SetConfigRequest struct {
Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
Entries []*SetConfigRequest_Entry `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries,omitempty"`
@@ -2032,7 +2149,7 @@ func (m *SetConfigRequest) Reset() { *m = SetConfigRequest{} }
func (m *SetConfigRequest) String() string { return proto.CompactTextString(m) }
func (*SetConfigRequest) ProtoMessage() {}
func (*SetConfigRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{44}
+ return fileDescriptor_e9b1768cf174c79b, []int{46}
}
func (m *SetConfigRequest) XXX_Unmarshal(b []byte) error {
@@ -2083,7 +2200,7 @@ func (m *SetConfigRequest_Entry) Reset() { *m = SetConfigRequest_Entry{}
func (m *SetConfigRequest_Entry) String() string { return proto.CompactTextString(m) }
func (*SetConfigRequest_Entry) ProtoMessage() {}
func (*SetConfigRequest_Entry) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{44, 0}
+ return fileDescriptor_e9b1768cf174c79b, []int{46, 0}
}
func (m *SetConfigRequest_Entry) XXX_Unmarshal(b []byte) error {
@@ -2180,7 +2297,7 @@ func (m *SetConfigResponse) Reset() { *m = SetConfigResponse{} }
func (m *SetConfigResponse) String() string { return proto.CompactTextString(m) }
func (*SetConfigResponse) ProtoMessage() {}
func (*SetConfigResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{45}
+ return fileDescriptor_e9b1768cf174c79b, []int{47}
}
func (m *SetConfigResponse) XXX_Unmarshal(b []byte) error {
@@ -2213,7 +2330,7 @@ func (m *DeleteConfigRequest) Reset() { *m = DeleteConfigRequest{} }
func (m *DeleteConfigRequest) String() string { return proto.CompactTextString(m) }
func (*DeleteConfigRequest) ProtoMessage() {}
func (*DeleteConfigRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{46}
+ return fileDescriptor_e9b1768cf174c79b, []int{48}
}
func (m *DeleteConfigRequest) XXX_Unmarshal(b []byte) error {
@@ -2258,7 +2375,7 @@ func (m *DeleteConfigResponse) Reset() { *m = DeleteConfigResponse{} }
func (m *DeleteConfigResponse) String() string { return proto.CompactTextString(m) }
func (*DeleteConfigResponse) ProtoMessage() {}
func (*DeleteConfigResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{47}
+ return fileDescriptor_e9b1768cf174c79b, []int{49}
}
func (m *DeleteConfigResponse) XXX_Unmarshal(b []byte) error {
@@ -2291,7 +2408,7 @@ func (m *RestoreCustomHooksRequest) Reset() { *m = RestoreCustomHooksReq
func (m *RestoreCustomHooksRequest) String() string { return proto.CompactTextString(m) }
func (*RestoreCustomHooksRequest) ProtoMessage() {}
func (*RestoreCustomHooksRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{48}
+ return fileDescriptor_e9b1768cf174c79b, []int{50}
}
func (m *RestoreCustomHooksRequest) XXX_Unmarshal(b []byte) error {
@@ -2336,7 +2453,7 @@ func (m *RestoreCustomHooksResponse) Reset() { *m = RestoreCustomHooksRe
func (m *RestoreCustomHooksResponse) String() string { return proto.CompactTextString(m) }
func (*RestoreCustomHooksResponse) ProtoMessage() {}
func (*RestoreCustomHooksResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{49}
+ return fileDescriptor_e9b1768cf174c79b, []int{51}
}
func (m *RestoreCustomHooksResponse) XXX_Unmarshal(b []byte) error {
@@ -2368,7 +2485,7 @@ func (m *BackupCustomHooksRequest) Reset() { *m = BackupCustomHooksReque
func (m *BackupCustomHooksRequest) String() string { return proto.CompactTextString(m) }
func (*BackupCustomHooksRequest) ProtoMessage() {}
func (*BackupCustomHooksRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{50}
+ return fileDescriptor_e9b1768cf174c79b, []int{52}
}
func (m *BackupCustomHooksRequest) XXX_Unmarshal(b []byte) error {
@@ -2407,7 +2524,7 @@ func (m *BackupCustomHooksResponse) Reset() { *m = BackupCustomHooksResp
func (m *BackupCustomHooksResponse) String() string { return proto.CompactTextString(m) }
func (*BackupCustomHooksResponse) ProtoMessage() {}
func (*BackupCustomHooksResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{51}
+ return fileDescriptor_e9b1768cf174c79b, []int{53}
}
func (m *BackupCustomHooksResponse) XXX_Unmarshal(b []byte) error {
@@ -2448,7 +2565,7 @@ func (m *CreateRepositoryFromBundleRequest) Reset() { *m = CreateReposit
func (m *CreateRepositoryFromBundleRequest) String() string { return proto.CompactTextString(m) }
func (*CreateRepositoryFromBundleRequest) ProtoMessage() {}
func (*CreateRepositoryFromBundleRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{52}
+ return fileDescriptor_e9b1768cf174c79b, []int{54}
}
func (m *CreateRepositoryFromBundleRequest) XXX_Unmarshal(b []byte) error {
@@ -2493,7 +2610,7 @@ func (m *CreateRepositoryFromBundleResponse) Reset() { *m = CreateReposi
func (m *CreateRepositoryFromBundleResponse) String() string { return proto.CompactTextString(m) }
func (*CreateRepositoryFromBundleResponse) ProtoMessage() {}
func (*CreateRepositoryFromBundleResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{53}
+ return fileDescriptor_e9b1768cf174c79b, []int{55}
}
func (m *CreateRepositoryFromBundleResponse) XXX_Unmarshal(b []byte) error {
@@ -2525,7 +2642,7 @@ func (m *FindLicenseRequest) Reset() { *m = FindLicenseRequest{} }
func (m *FindLicenseRequest) String() string { return proto.CompactTextString(m) }
func (*FindLicenseRequest) ProtoMessage() {}
func (*FindLicenseRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{54}
+ return fileDescriptor_e9b1768cf174c79b, []int{56}
}
func (m *FindLicenseRequest) XXX_Unmarshal(b []byte) error {
@@ -2564,7 +2681,7 @@ func (m *FindLicenseResponse) Reset() { *m = FindLicenseResponse{} }
func (m *FindLicenseResponse) String() string { return proto.CompactTextString(m) }
func (*FindLicenseResponse) ProtoMessage() {}
func (*FindLicenseResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{55}
+ return fileDescriptor_e9b1768cf174c79b, []int{57}
}
func (m *FindLicenseResponse) XXX_Unmarshal(b []byte) error {
@@ -2603,7 +2720,7 @@ func (m *GetInfoAttributesRequest) Reset() { *m = GetInfoAttributesReque
func (m *GetInfoAttributesRequest) String() string { return proto.CompactTextString(m) }
func (*GetInfoAttributesRequest) ProtoMessage() {}
func (*GetInfoAttributesRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{56}
+ return fileDescriptor_e9b1768cf174c79b, []int{58}
}
func (m *GetInfoAttributesRequest) XXX_Unmarshal(b []byte) error {
@@ -2642,7 +2759,7 @@ func (m *GetInfoAttributesResponse) Reset() { *m = GetInfoAttributesResp
func (m *GetInfoAttributesResponse) String() string { return proto.CompactTextString(m) }
func (*GetInfoAttributesResponse) ProtoMessage() {}
func (*GetInfoAttributesResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{57}
+ return fileDescriptor_e9b1768cf174c79b, []int{59}
}
func (m *GetInfoAttributesResponse) XXX_Unmarshal(b []byte) error {
@@ -2681,7 +2798,7 @@ func (m *CalculateChecksumRequest) Reset() { *m = CalculateChecksumReque
func (m *CalculateChecksumRequest) String() string { return proto.CompactTextString(m) }
func (*CalculateChecksumRequest) ProtoMessage() {}
func (*CalculateChecksumRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{58}
+ return fileDescriptor_e9b1768cf174c79b, []int{60}
}
func (m *CalculateChecksumRequest) XXX_Unmarshal(b []byte) error {
@@ -2720,7 +2837,7 @@ func (m *CalculateChecksumResponse) Reset() { *m = CalculateChecksumResp
func (m *CalculateChecksumResponse) String() string { return proto.CompactTextString(m) }
func (*CalculateChecksumResponse) ProtoMessage() {}
func (*CalculateChecksumResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{59}
+ return fileDescriptor_e9b1768cf174c79b, []int{61}
}
func (m *CalculateChecksumResponse) XXX_Unmarshal(b []byte) error {
@@ -2759,7 +2876,7 @@ func (m *GetSnapshotRequest) Reset() { *m = GetSnapshotRequest{} }
func (m *GetSnapshotRequest) String() string { return proto.CompactTextString(m) }
func (*GetSnapshotRequest) ProtoMessage() {}
func (*GetSnapshotRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{60}
+ return fileDescriptor_e9b1768cf174c79b, []int{62}
}
func (m *GetSnapshotRequest) XXX_Unmarshal(b []byte) error {
@@ -2798,7 +2915,7 @@ func (m *GetSnapshotResponse) Reset() { *m = GetSnapshotResponse{} }
func (m *GetSnapshotResponse) String() string { return proto.CompactTextString(m) }
func (*GetSnapshotResponse) ProtoMessage() {}
func (*GetSnapshotResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{61}
+ return fileDescriptor_e9b1768cf174c79b, []int{63}
}
func (m *GetSnapshotResponse) XXX_Unmarshal(b []byte) error {
@@ -2839,7 +2956,7 @@ func (m *CreateRepositoryFromSnapshotRequest) Reset() { *m = CreateRepos
func (m *CreateRepositoryFromSnapshotRequest) String() string { return proto.CompactTextString(m) }
func (*CreateRepositoryFromSnapshotRequest) ProtoMessage() {}
func (*CreateRepositoryFromSnapshotRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{62}
+ return fileDescriptor_e9b1768cf174c79b, []int{64}
}
func (m *CreateRepositoryFromSnapshotRequest) XXX_Unmarshal(b []byte) error {
@@ -2891,7 +3008,7 @@ func (m *CreateRepositoryFromSnapshotResponse) Reset() { *m = CreateRepo
func (m *CreateRepositoryFromSnapshotResponse) String() string { return proto.CompactTextString(m) }
func (*CreateRepositoryFromSnapshotResponse) ProtoMessage() {}
func (*CreateRepositoryFromSnapshotResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{63}
+ return fileDescriptor_e9b1768cf174c79b, []int{65}
}
func (m *CreateRepositoryFromSnapshotResponse) XXX_Unmarshal(b []byte) error {
@@ -2925,7 +3042,7 @@ func (m *GetRawChangesRequest) Reset() { *m = GetRawChangesRequest{} }
func (m *GetRawChangesRequest) String() string { return proto.CompactTextString(m) }
func (*GetRawChangesRequest) ProtoMessage() {}
func (*GetRawChangesRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{64}
+ return fileDescriptor_e9b1768cf174c79b, []int{66}
}
func (m *GetRawChangesRequest) XXX_Unmarshal(b []byte) error {
@@ -2978,7 +3095,7 @@ func (m *GetRawChangesResponse) Reset() { *m = GetRawChangesResponse{} }
func (m *GetRawChangesResponse) String() string { return proto.CompactTextString(m) }
func (*GetRawChangesResponse) ProtoMessage() {}
func (*GetRawChangesResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{65}
+ return fileDescriptor_e9b1768cf174c79b, []int{67}
}
func (m *GetRawChangesResponse) XXX_Unmarshal(b []byte) error {
@@ -3028,7 +3145,7 @@ func (m *GetRawChangesResponse_RawChange) Reset() { *m = GetRawChangesRe
func (m *GetRawChangesResponse_RawChange) String() string { return proto.CompactTextString(m) }
func (*GetRawChangesResponse_RawChange) ProtoMessage() {}
func (*GetRawChangesResponse_RawChange) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{65, 0}
+ return fileDescriptor_e9b1768cf174c79b, []int{67, 0}
}
func (m *GetRawChangesResponse_RawChange) XXX_Unmarshal(b []byte) error {
@@ -3140,7 +3257,7 @@ func (m *SearchFilesByNameRequest) Reset() { *m = SearchFilesByNameReque
func (m *SearchFilesByNameRequest) String() string { return proto.CompactTextString(m) }
func (*SearchFilesByNameRequest) ProtoMessage() {}
func (*SearchFilesByNameRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{66}
+ return fileDescriptor_e9b1768cf174c79b, []int{68}
}
func (m *SearchFilesByNameRequest) XXX_Unmarshal(b []byte) error {
@@ -3200,7 +3317,7 @@ func (m *SearchFilesByNameResponse) Reset() { *m = SearchFilesByNameResp
func (m *SearchFilesByNameResponse) String() string { return proto.CompactTextString(m) }
func (*SearchFilesByNameResponse) ProtoMessage() {}
func (*SearchFilesByNameResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{67}
+ return fileDescriptor_e9b1768cf174c79b, []int{69}
}
func (m *SearchFilesByNameResponse) XXX_Unmarshal(b []byte) error {
@@ -3242,7 +3359,7 @@ func (m *SearchFilesByContentRequest) Reset() { *m = SearchFilesByConten
func (m *SearchFilesByContentRequest) String() string { return proto.CompactTextString(m) }
func (*SearchFilesByContentRequest) ProtoMessage() {}
func (*SearchFilesByContentRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{68}
+ return fileDescriptor_e9b1768cf174c79b, []int{70}
}
func (m *SearchFilesByContentRequest) XXX_Unmarshal(b []byte) error {
@@ -3304,7 +3421,7 @@ func (m *SearchFilesByContentResponse) Reset() { *m = SearchFilesByConte
func (m *SearchFilesByContentResponse) String() string { return proto.CompactTextString(m) }
func (*SearchFilesByContentResponse) ProtoMessage() {}
func (*SearchFilesByContentResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{69}
+ return fileDescriptor_e9b1768cf174c79b, []int{71}
}
func (m *SearchFilesByContentResponse) XXX_Unmarshal(b []byte) error {
@@ -3374,7 +3491,7 @@ func (m *Remote) Reset() { *m = Remote{} }
func (m *Remote) String() string { return proto.CompactTextString(m) }
func (*Remote) ProtoMessage() {}
func (*Remote) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{70}
+ return fileDescriptor_e9b1768cf174c79b, []int{72}
}
func (m *Remote) XXX_Unmarshal(b []byte) error {
@@ -3427,7 +3544,7 @@ func (m *GetObjectDirectorySizeRequest) Reset() { *m = GetObjectDirector
func (m *GetObjectDirectorySizeRequest) String() string { return proto.CompactTextString(m) }
func (*GetObjectDirectorySizeRequest) ProtoMessage() {}
func (*GetObjectDirectorySizeRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{71}
+ return fileDescriptor_e9b1768cf174c79b, []int{73}
}
func (m *GetObjectDirectorySizeRequest) XXX_Unmarshal(b []byte) error {
@@ -3467,7 +3584,7 @@ func (m *GetObjectDirectorySizeResponse) Reset() { *m = GetObjectDirecto
func (m *GetObjectDirectorySizeResponse) String() string { return proto.CompactTextString(m) }
func (*GetObjectDirectorySizeResponse) ProtoMessage() {}
func (*GetObjectDirectorySizeResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{72}
+ return fileDescriptor_e9b1768cf174c79b, []int{74}
}
func (m *GetObjectDirectorySizeResponse) XXX_Unmarshal(b []byte) error {
@@ -3508,7 +3625,7 @@ func (m *CloneFromPoolRequest) Reset() { *m = CloneFromPoolRequest{} }
func (m *CloneFromPoolRequest) String() string { return proto.CompactTextString(m) }
func (*CloneFromPoolRequest) ProtoMessage() {}
func (*CloneFromPoolRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{73}
+ return fileDescriptor_e9b1768cf174c79b, []int{75}
}
func (m *CloneFromPoolRequest) XXX_Unmarshal(b []byte) error {
@@ -3560,7 +3677,7 @@ func (m *CloneFromPoolResponse) Reset() { *m = CloneFromPoolResponse{} }
func (m *CloneFromPoolResponse) String() string { return proto.CompactTextString(m) }
func (*CloneFromPoolResponse) ProtoMessage() {}
func (*CloneFromPoolResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{74}
+ return fileDescriptor_e9b1768cf174c79b, []int{76}
}
func (m *CloneFromPoolResponse) XXX_Unmarshal(b []byte) error {
@@ -3594,7 +3711,7 @@ func (m *CloneFromPoolInternalRequest) Reset() { *m = CloneFromPoolInter
func (m *CloneFromPoolInternalRequest) String() string { return proto.CompactTextString(m) }
func (*CloneFromPoolInternalRequest) ProtoMessage() {}
func (*CloneFromPoolInternalRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{75}
+ return fileDescriptor_e9b1768cf174c79b, []int{77}
}
func (m *CloneFromPoolInternalRequest) XXX_Unmarshal(b []byte) error {
@@ -3646,7 +3763,7 @@ func (m *CloneFromPoolInternalResponse) Reset() { *m = CloneFromPoolInte
func (m *CloneFromPoolInternalResponse) String() string { return proto.CompactTextString(m) }
func (*CloneFromPoolInternalResponse) ProtoMessage() {}
func (*CloneFromPoolInternalResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{76}
+ return fileDescriptor_e9b1768cf174c79b, []int{78}
}
func (m *CloneFromPoolInternalResponse) XXX_Unmarshal(b []byte) error {
@@ -3678,7 +3795,7 @@ func (m *RemoveRepositoryRequest) Reset() { *m = RemoveRepositoryRequest
func (m *RemoveRepositoryRequest) String() string { return proto.CompactTextString(m) }
func (*RemoveRepositoryRequest) ProtoMessage() {}
func (*RemoveRepositoryRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{77}
+ return fileDescriptor_e9b1768cf174c79b, []int{79}
}
func (m *RemoveRepositoryRequest) XXX_Unmarshal(b []byte) error {
@@ -3716,7 +3833,7 @@ func (m *RemoveRepositoryResponse) Reset() { *m = RemoveRepositoryRespon
func (m *RemoveRepositoryResponse) String() string { return proto.CompactTextString(m) }
func (*RemoveRepositoryResponse) ProtoMessage() {}
func (*RemoveRepositoryResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{78}
+ return fileDescriptor_e9b1768cf174c79b, []int{80}
}
func (m *RemoveRepositoryResponse) XXX_Unmarshal(b []byte) error {
@@ -3749,7 +3866,7 @@ func (m *RenameRepositoryRequest) Reset() { *m = RenameRepositoryRequest
func (m *RenameRepositoryRequest) String() string { return proto.CompactTextString(m) }
func (*RenameRepositoryRequest) ProtoMessage() {}
func (*RenameRepositoryRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{79}
+ return fileDescriptor_e9b1768cf174c79b, []int{81}
}
func (m *RenameRepositoryRequest) XXX_Unmarshal(b []byte) error {
@@ -3794,7 +3911,7 @@ func (m *RenameRepositoryResponse) Reset() { *m = RenameRepositoryRespon
func (m *RenameRepositoryResponse) String() string { return proto.CompactTextString(m) }
func (*RenameRepositoryResponse) ProtoMessage() {}
func (*RenameRepositoryResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{80}
+ return fileDescriptor_e9b1768cf174c79b, []int{82}
}
func (m *RenameRepositoryResponse) XXX_Unmarshal(b []byte) error {
@@ -3827,7 +3944,7 @@ func (m *ReplicateRepositoryRequest) Reset() { *m = ReplicateRepositoryR
func (m *ReplicateRepositoryRequest) String() string { return proto.CompactTextString(m) }
func (*ReplicateRepositoryRequest) ProtoMessage() {}
func (*ReplicateRepositoryRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{81}
+ return fileDescriptor_e9b1768cf174c79b, []int{83}
}
func (m *ReplicateRepositoryRequest) XXX_Unmarshal(b []byte) error {
@@ -3872,7 +3989,7 @@ func (m *ReplicateRepositoryResponse) Reset() { *m = ReplicateRepository
func (m *ReplicateRepositoryResponse) String() string { return proto.CompactTextString(m) }
func (*ReplicateRepositoryResponse) ProtoMessage() {}
func (*ReplicateRepositoryResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{82}
+ return fileDescriptor_e9b1768cf174c79b, []int{84}
}
func (m *ReplicateRepositoryResponse) XXX_Unmarshal(b []byte) error {
@@ -3904,7 +4021,7 @@ func (m *OptimizeRepositoryRequest) Reset() { *m = OptimizeRepositoryReq
func (m *OptimizeRepositoryRequest) String() string { return proto.CompactTextString(m) }
func (*OptimizeRepositoryRequest) ProtoMessage() {}
func (*OptimizeRepositoryRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{83}
+ return fileDescriptor_e9b1768cf174c79b, []int{85}
}
func (m *OptimizeRepositoryRequest) XXX_Unmarshal(b []byte) error {
@@ -3942,7 +4059,7 @@ func (m *OptimizeRepositoryResponse) Reset() { *m = OptimizeRepositoryRe
func (m *OptimizeRepositoryResponse) String() string { return proto.CompactTextString(m) }
func (*OptimizeRepositoryResponse) ProtoMessage() {}
func (*OptimizeRepositoryResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_e9b1768cf174c79b, []int{84}
+ return fileDescriptor_e9b1768cf174c79b, []int{86}
}
func (m *OptimizeRepositoryResponse) XXX_Unmarshal(b []byte) error {
@@ -3964,6 +4081,7 @@ func (m *OptimizeRepositoryResponse) XXX_DiscardUnknown() {
var xxx_messageInfo_OptimizeRepositoryResponse proto.InternalMessageInfo
func init() {
+ proto.RegisterEnum("gitaly.WriteCommitGraphRequest_SplitStrategy", WriteCommitGraphRequest_SplitStrategy_name, WriteCommitGraphRequest_SplitStrategy_value)
proto.RegisterEnum("gitaly.GetArchiveRequest_Format", GetArchiveRequest_Format_name, GetArchiveRequest_Format_value)
proto.RegisterEnum("gitaly.GetRawChangesResponse_RawChange_Operation", GetRawChangesResponse_RawChange_Operation_name, GetRawChangesResponse_RawChange_Operation_value)
proto.RegisterType((*RepositoryExistsRequest)(nil), "gitaly.RepositoryExistsRequest")
@@ -4010,6 +4128,8 @@ func init() {
proto.RegisterType((*CreateRepositoryFromURLResponse)(nil), "gitaly.CreateRepositoryFromURLResponse")
proto.RegisterType((*CreateBundleRequest)(nil), "gitaly.CreateBundleRequest")
proto.RegisterType((*CreateBundleResponse)(nil), "gitaly.CreateBundleResponse")
+ proto.RegisterType((*GetConfigRequest)(nil), "gitaly.GetConfigRequest")
+ proto.RegisterType((*GetConfigResponse)(nil), "gitaly.GetConfigResponse")
proto.RegisterType((*SetConfigRequest)(nil), "gitaly.SetConfigRequest")
proto.RegisterType((*SetConfigRequest_Entry)(nil), "gitaly.SetConfigRequest.Entry")
proto.RegisterType((*SetConfigResponse)(nil), "gitaly.SetConfigResponse")
@@ -4058,201 +4178,206 @@ func init() {
func init() { proto.RegisterFile("repository-service.proto", fileDescriptor_e9b1768cf174c79b) }
var fileDescriptor_e9b1768cf174c79b = []byte{
- // 3094 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5a, 0x4b, 0x6f, 0xdc, 0xc8,
- 0xb5, 0x76, 0x4b, 0xad, 0x7e, 0x9c, 0x6e, 0xdb, 0xad, 0x92, 0x6c, 0xb5, 0x68, 0xc9, 0x0f, 0xda,
- 0xe3, 0xf1, 0x78, 0x3c, 0xf2, 0x8c, 0x7d, 0x81, 0xeb, 0x7b, 0x2f, 0x2e, 0x02, 0xb5, 0xde, 0xb6,
- 0xf5, 0x18, 0x4a, 0xce, 0x60, 0x0c, 0x0c, 0x38, 0x6c, 0x76, 0xb5, 0x9a, 0x11, 0x9b, 0x6c, 0x17,
- 0xab, 0x2d, 0x6b, 0x80, 0x2c, 0x12, 0x20, 0x8b, 0x00, 0xc1, 0xac, 0x82, 0x4c, 0x96, 0x59, 0xe7,
- 0x17, 0x64, 0x93, 0x45, 0x36, 0xf9, 0x0f, 0x83, 0xfc, 0x83, 0x00, 0xf9, 0x03, 0x59, 0x05, 0xf5,
- 0x20, 0x8b, 0x6c, 0x92, 0x3d, 0x0e, 0xba, 0x31, 0xd9, 0xb1, 0xce, 0xa9, 0x3a, 0xe7, 0xd4, 0xa9,
- 0x53, 0x8f, 0xf3, 0x1d, 0x42, 0x93, 0xe0, 0x81, 0x1f, 0x38, 0xd4, 0x27, 0x17, 0x9f, 0x04, 0x98,
- 0xbc, 0x75, 0x6c, 0xbc, 0x36, 0x20, 0x3e, 0xf5, 0x51, 0xe9, 0xd4, 0xa1, 0x96, 0x7b, 0xa1, 0x81,
- 0xeb, 0x78, 0x54, 0xd0, 0xb4, 0x7a, 0xd0, 0xb3, 0x08, 0xee, 0x88, 0x96, 0x7e, 0x0c, 0x4b, 0x46,
- 0x34, 0x7a, 0xeb, 0x9d, 0x13, 0xd0, 0xc0, 0xc0, 0x6f, 0x86, 0x38, 0xa0, 0xe8, 0x19, 0x80, 0x12,
- 0xdc, 0x2c, 0xdc, 0x2e, 0x3c, 0xa8, 0x3d, 0x41, 0x6b, 0x42, 0xe2, 0x9a, 0x1a, 0xd4, 0x2a, 0xfe,
- 0xfe, 0xaf, 0x8f, 0x0a, 0x46, 0xac, 0xaf, 0xfe, 0x04, 0x9a, 0x69, 0xa1, 0xc1, 0xc0, 0xf7, 0x02,
- 0x8c, 0xae, 0x43, 0x09, 0x73, 0x0a, 0x97, 0x58, 0x31, 0x64, 0x4b, 0x3f, 0xe1, 0x63, 0x2c, 0xfb,
- 0x6c, 0xcf, 0xb3, 0x09, 0xee, 0x63, 0x8f, 0x5a, 0xee, 0xe4, 0x96, 0xdc, 0x80, 0xe5, 0x0c, 0xa9,
- 0xc2, 0x14, 0x9d, 0xc0, 0xbc, 0x60, 0x6e, 0x0f, 0xdd, 0xc9, 0x75, 0xa1, 0xbb, 0x70, 0xd9, 0x26,
- 0xd8, 0xa2, 0xd8, 0x6c, 0x3b, 0xb4, 0x6f, 0x0d, 0x9a, 0x33, 0x7c, 0x82, 0x75, 0x41, 0x6c, 0x71,
- 0x9a, 0xbe, 0x08, 0x28, 0xae, 0x53, 0x5a, 0xb2, 0x0f, 0xf3, 0xfb, 0x4e, 0xe7, 0x9d, 0xe0, 0x4c,
- 0x3e, 0xeb, 0x45, 0x40, 0x71, 0x71, 0x52, 0xc9, 0x6f, 0x0a, 0x70, 0x6d, 0xc7, 0x22, 0x6d, 0xeb,
- 0x14, 0x6f, 0xf8, 0xae, 0x8b, 0x6d, 0xfa, 0xe3, 0xcc, 0x19, 0x2d, 0xc2, 0xdc, 0x80, 0x0c, 0x3d,
- 0xdc, 0x9c, 0xe5, 0x4c, 0xd1, 0xd0, 0x9b, 0x70, 0x7d, 0xd4, 0x1a, 0x69, 0xe8, 0x31, 0x2c, 0x7d,
- 0x41, 0x1c, 0x8a, 0x37, 0xfc, 0x7e, 0xdf, 0xa1, 0x3b, 0xc4, 0x1a, 0xf4, 0x26, 0xf7, 0x89, 0x06,
- 0xcd, 0xb4, 0x50, 0xa9, 0xf0, 0x39, 0x5c, 0xd9, 0x70, 0xb1, 0xe5, 0x0d, 0x07, 0x93, 0xeb, 0x99,
- 0x87, 0xab, 0x91, 0x2c, 0x29, 0xfe, 0x73, 0xb8, 0xa6, 0x86, 0x1c, 0x3b, 0xdf, 0xe0, 0xc9, 0xb5,
- 0x3c, 0x82, 0xeb, 0xa3, 0x22, 0xe5, 0xfe, 0x42, 0x50, 0x0c, 0x9c, 0x6f, 0x30, 0x97, 0x36, 0x6b,
- 0xf0, 0x6f, 0xfd, 0x0d, 0x2c, 0xaf, 0x0f, 0x06, 0xee, 0xc5, 0x8e, 0x43, 0x2d, 0x4a, 0x89, 0xd3,
- 0x1e, 0x52, 0x3c, 0xf9, 0x36, 0x47, 0x1a, 0x54, 0x08, 0x7e, 0xeb, 0x04, 0x8e, 0xef, 0xf1, 0x75,
- 0xaf, 0x1b, 0x51, 0x5b, 0x5f, 0x01, 0x2d, 0x4b, 0xa5, 0xf4, 0xc8, 0x3f, 0x66, 0x00, 0x6d, 0x63,
- 0x6a, 0xf7, 0x0c, 0xdc, 0xf7, 0xe9, 0xe4, 0xfe, 0x60, 0xa7, 0x0a, 0xe1, 0xa2, 0xb8, 0x21, 0x55,
- 0x43, 0xb6, 0x58, 0xe8, 0x75, 0x7d, 0x62, 0x47, 0xa1, 0xc7, 0x1b, 0x68, 0x09, 0xca, 0x9e, 0x6f,
- 0x52, 0xeb, 0x34, 0x68, 0x16, 0xc5, 0x21, 0xe4, 0xf9, 0x27, 0xd6, 0x69, 0x80, 0x9a, 0x50, 0xa6,
- 0x4e, 0x1f, 0xfb, 0x43, 0xda, 0x9c, 0xbb, 0x5d, 0x78, 0x30, 0x67, 0x84, 0x4d, 0x36, 0x24, 0x08,
- 0x7a, 0xe6, 0x19, 0xbe, 0x68, 0x96, 0x84, 0x86, 0x20, 0xe8, 0xbd, 0xc0, 0x17, 0xe8, 0x16, 0xd4,
- 0xce, 0x3c, 0xff, 0xdc, 0x33, 0x7b, 0x3e, 0x3b, 0xd4, 0xca, 0x9c, 0x09, 0x9c, 0xb4, 0xcb, 0x28,
- 0x68, 0x19, 0x2a, 0x9e, 0x6f, 0x8a, 0x0d, 0x50, 0xe5, 0xda, 0xca, 0x9e, 0x7f, 0xc4, 0x9a, 0xe8,
- 0x29, 0x5c, 0x16, 0x76, 0x9a, 0x03, 0x8b, 0x58, 0xfd, 0xa0, 0x09, 0x7c, 0xca, 0x57, 0xd4, 0x94,
- 0xb9, 0x77, 0xea, 0xa2, 0xd3, 0x11, 0xef, 0x83, 0x1e, 0x01, 0xb2, 0x7b, 0xd8, 0x3e, 0xe3, 0xf6,
- 0x9b, 0x76, 0xcf, 0xf2, 0x4e, 0x71, 0xa7, 0x59, 0xe3, 0x92, 0x1b, 0x9c, 0xc3, 0xa6, 0xb2, 0x21,
- 0xe8, 0xcf, 0x8b, 0x95, 0x4a, 0xa3, 0xaa, 0x3f, 0x83, 0x85, 0x84, 0xbb, 0x65, 0xac, 0xdc, 0x81,
- 0x7a, 0x42, 0x88, 0x38, 0x91, 0x6b, 0x54, 0x8d, 0x67, 0x7b, 0x71, 0x83, 0xef, 0x65, 0xe5, 0xfe,
- 0xa9, 0xec, 0xc5, 0xb4, 0x50, 0x19, 0x1a, 0x7f, 0x9f, 0x81, 0xf9, 0x1d, 0x4c, 0xd7, 0x89, 0xdd,
- 0x73, 0xde, 0x4e, 0x21, 0x32, 0x6e, 0x40, 0xd5, 0xe6, 0x5b, 0xde, 0x74, 0x3a, 0x32, 0x38, 0x2a,
- 0x82, 0xb0, 0xd7, 0x61, 0x61, 0x33, 0x20, 0xb8, 0xeb, 0xbc, 0xe3, 0xf1, 0x51, 0x35, 0x64, 0x0b,
- 0x3d, 0x83, 0x52, 0xd7, 0x27, 0x7d, 0x8b, 0xf2, 0xf8, 0xb8, 0xf2, 0xe4, 0x76, 0xa8, 0x2a, 0x65,
- 0xd9, 0xda, 0x36, 0xef, 0x67, 0xc8, 0xfe, 0x6c, 0xfb, 0x0d, 0x2c, 0xda, 0xe3, 0xe1, 0x53, 0x37,
- 0xf8, 0x37, 0x8b, 0x2a, 0xfc, 0xce, 0x76, 0x87, 0x1d, 0xdc, 0x2c, 0xdd, 0x9e, 0x7d, 0x50, 0x37,
- 0xc2, 0x26, 0x5a, 0x05, 0xc0, 0xae, 0xd3, 0x61, 0xeb, 0x4f, 0x7b, 0x3c, 0x76, 0x2a, 0x46, 0x95,
- 0x53, 0x8e, 0xd8, 0xc0, 0x87, 0x30, 0xef, 0x78, 0xbc, 0xa7, 0xe9, 0x76, 0x03, 0xb3, 0xed, 0xfa,
- 0xed, 0xa0, 0x59, 0xe1, 0xbd, 0xae, 0x4a, 0xc6, 0xcb, 0x6e, 0xd0, 0x62, 0x64, 0xfd, 0x29, 0x94,
- 0x84, 0x29, 0xa8, 0x0c, 0xb3, 0xaf, 0xf7, 0x8e, 0x1a, 0x97, 0xd8, 0xc7, 0xc9, 0xba, 0xd1, 0x28,
- 0x20, 0x80, 0xd2, 0xc9, 0xba, 0x61, 0xee, 0xbc, 0x6e, 0xcc, 0xa0, 0x1a, 0x94, 0xd9, 0x77, 0xeb,
- 0xf5, 0x93, 0xc6, 0xac, 0xfe, 0x00, 0x50, 0x7c, 0x46, 0xea, 0x08, 0xe9, 0x58, 0xd4, 0xe2, 0x6e,
- 0xae, 0x1b, 0xfc, 0x9b, 0xc5, 0xc1, 0xae, 0x15, 0xbc, 0xf4, 0x6d, 0xcb, 0x6d, 0x11, 0xcb, 0xb3,
- 0x7b, 0x53, 0x38, 0x40, 0xf4, 0x4f, 0xa1, 0x99, 0x16, 0x2a, 0x8d, 0x58, 0x84, 0xb9, 0xb7, 0x96,
- 0x3b, 0xc4, 0x32, 0x28, 0x45, 0x43, 0xff, 0xbe, 0x00, 0x4d, 0x1e, 0xc9, 0xc7, 0xfe, 0x90, 0xd8,
- 0x58, 0x8c, 0x9a, 0x3c, 0x48, 0x7e, 0x02, 0xf3, 0x01, 0x17, 0x68, 0xc6, 0x04, 0xcc, 0xe4, 0x09,
- 0x30, 0x1a, 0xa2, 0xb3, 0x91, 0xb8, 0x07, 0xa5, 0x80, 0x36, 0x37, 0x89, 0xc7, 0x53, 0xdd, 0xa8,
- 0x07, 0x31, 0x33, 0xd9, 0x6a, 0x53, 0x8b, 0x9c, 0x62, 0x6a, 0x12, 0xdc, 0xe5, 0x91, 0x55, 0x37,
- 0xaa, 0x82, 0x62, 0xe0, 0xae, 0xfe, 0x14, 0x96, 0x33, 0xa6, 0xa6, 0x9e, 0x4d, 0x04, 0x07, 0x43,
- 0x97, 0x86, 0xcf, 0x26, 0xd1, 0xd2, 0x77, 0xa0, 0xb6, 0x1d, 0x4c, 0xe3, 0xcd, 0x70, 0x0f, 0xea,
- 0x42, 0x90, 0xf2, 0x3f, 0x26, 0xc4, 0x27, 0x32, 0x0a, 0x44, 0x43, 0xff, 0x53, 0x01, 0xae, 0xf2,
- 0x6b, 0xd4, 0xc0, 0xdd, 0xc9, 0xdd, 0xde, 0x80, 0x59, 0xe6, 0x09, 0x71, 0x77, 0xb0, 0xcf, 0xc4,
- 0x95, 0x32, 0x9b, 0xbc, 0x52, 0xd8, 0x69, 0xe5, 0xbb, 0x1d, 0x33, 0xe2, 0x0b, 0x07, 0xd6, 0x7c,
- 0xb7, 0x63, 0x84, 0x5d, 0xa2, 0xe3, 0x7e, 0x2e, 0x76, 0xdc, 0x3f, 0x2f, 0x56, 0x4a, 0x8d, 0xb2,
- 0xde, 0x84, 0x86, 0xb2, 0x5c, 0x4c, 0xf2, 0x79, 0xb1, 0x52, 0x68, 0xcc, 0xe8, 0x1e, 0x2c, 0x6e,
- 0x3b, 0x5e, 0x67, 0x1f, 0x93, 0x53, 0xdc, 0xb2, 0x82, 0x29, 0x1c, 0x3a, 0x2b, 0x50, 0x0d, 0xcd,
- 0x0c, 0x9a, 0x33, 0x7c, 0xcf, 0x2b, 0x82, 0xfe, 0x31, 0x5c, 0x1b, 0xd1, 0xa7, 0x36, 0x5e, 0xdb,
- 0x0a, 0x44, 0xc8, 0x57, 0x0d, 0xfe, 0xad, 0x7f, 0x5b, 0x80, 0x79, 0x71, 0x58, 0x6e, 0xfb, 0xe4,
- 0xec, 0x3f, 0x1f, 0xea, 0xec, 0x71, 0x19, 0xb7, 0x27, 0x7a, 0x4b, 0x2f, 0xef, 0x05, 0x06, 0x66,
- 0x26, 0xef, 0x79, 0x47, 0xc4, 0x3f, 0x25, 0x38, 0x08, 0xa6, 0x72, 0x7a, 0x13, 0x2e, 0x34, 0x76,
- 0x7a, 0x0b, 0xc2, 0x5e, 0x47, 0xff, 0x7f, 0xd0, 0xb2, 0x74, 0x4a, 0x67, 0xde, 0x82, 0x9a, 0xe3,
- 0x99, 0x03, 0x49, 0x96, 0xdb, 0x06, 0x9c, 0xa8, 0xa3, 0x30, 0xf9, 0xf8, 0xcd, 0xd0, 0x0a, 0x7a,
- 0x53, 0x36, 0x39, 0xe0, 0x42, 0x63, 0x26, 0x0b, 0x42, 0x68, 0x72, 0x5a, 0xe7, 0xfb, 0x9a, 0xec,
- 0xc2, 0xcd, 0xd1, 0x8b, 0x73, 0x9b, 0xf8, 0xfd, 0x57, 0xc6, 0xcb, 0xa9, 0x6c, 0xc6, 0x21, 0x71,
- 0xa5, 0xc5, 0xec, 0x53, 0xbf, 0x03, 0xb7, 0x72, 0xb5, 0xc9, 0x65, 0x3f, 0x84, 0x05, 0xd1, 0xa5,
- 0x35, 0xf4, 0x3a, 0xee, 0x14, 0x1e, 0xb6, 0x0f, 0x61, 0x31, 0x29, 0x70, 0xcc, 0x9d, 0xf4, 0xed,
- 0x0c, 0x34, 0x8e, 0x31, 0xdd, 0xf0, 0xbd, 0xae, 0x73, 0x3a, 0xb9, 0x03, 0x9e, 0x41, 0x19, 0x7b,
- 0x94, 0x38, 0x58, 0x6c, 0xd9, 0xda, 0x93, 0x9b, 0xe1, 0xb0, 0x51, 0x25, 0x6b, 0x5b, 0x1e, 0x25,
- 0x17, 0x46, 0xd8, 0x5d, 0xfb, 0x55, 0x01, 0xe6, 0x38, 0x89, 0x39, 0x91, 0x3d, 0x11, 0xc5, 0x06,
- 0x66, 0x9f, 0x68, 0x15, 0xaa, 0xfc, 0xea, 0x32, 0x03, 0x4a, 0x84, 0x73, 0x77, 0x2f, 0x19, 0x15,
- 0x4e, 0x3a, 0xa6, 0x04, 0xdd, 0x81, 0x9a, 0x60, 0x3b, 0x1e, 0x7d, 0xfa, 0x84, 0x9f, 0x79, 0x73,
- 0xbb, 0x97, 0x0c, 0xe0, 0xc4, 0x3d, 0x46, 0x43, 0xb7, 0x40, 0xb4, 0xcc, 0xb6, 0xef, 0xbb, 0xe2,
- 0xc1, 0xba, 0x7b, 0xc9, 0x10, 0x52, 0x5b, 0xbe, 0xef, 0xb6, 0xca, 0xf2, 0xaa, 0xd4, 0x17, 0x60,
- 0x3e, 0x66, 0xaa, 0x5c, 0x22, 0x1b, 0x16, 0x36, 0xb1, 0x8b, 0x59, 0xe6, 0x33, 0x1d, 0x3f, 0x21,
- 0x28, 0x9e, 0xe1, 0x0b, 0xe1, 0xa4, 0xaa, 0xc1, 0xbf, 0xf5, 0xeb, 0xb0, 0x98, 0x54, 0x22, 0x95,
- 0x3b, 0x2c, 0xff, 0x0e, 0xa8, 0x4f, 0xf0, 0xc6, 0x30, 0xa0, 0x7e, 0x7f, 0xd7, 0xf7, 0xcf, 0x82,
- 0xa9, 0x98, 0xc0, 0xa3, 0x61, 0x26, 0x16, 0x0d, 0x2b, 0xa0, 0x65, 0xa9, 0x92, 0x86, 0x9c, 0x40,
- 0xb3, 0x65, 0xd9, 0x67, 0xc3, 0xc1, 0x34, 0xed, 0xd0, 0x1f, 0xc3, 0x72, 0x86, 0xd4, 0x31, 0x21,
- 0xfb, 0x06, 0xee, 0x64, 0x6d, 0xa9, 0x29, 0xed, 0x9e, 0x4c, 0xbf, 0xdc, 0x03, 0x7d, 0x9c, 0x4a,
- 0xe9, 0x9f, 0x03, 0x40, 0xec, 0x4e, 0x7a, 0xe9, 0xd8, 0xd8, 0x9b, 0xc2, 0x0d, 0xa8, 0x6f, 0xc0,
- 0x42, 0x42, 0x9e, 0xf4, 0xc9, 0x23, 0x40, 0xae, 0x20, 0x99, 0x41, 0xcf, 0x27, 0xd4, 0xf4, 0xac,
- 0x7e, 0x78, 0xdf, 0x35, 0x24, 0xe7, 0x98, 0x31, 0x0e, 0xac, 0x3e, 0x5f, 0xb4, 0x1d, 0x4c, 0xf7,
- 0xbc, 0xae, 0xbf, 0x3e, 0xbd, 0xb4, 0x55, 0xff, 0x3f, 0x58, 0xce, 0x90, 0x2a, 0x0d, 0xbc, 0x09,
- 0xa0, 0xf2, 0x55, 0xb9, 0x74, 0x31, 0x0a, 0x33, 0x69, 0xc3, 0x72, 0xed, 0xa1, 0x6b, 0x51, 0xbc,
- 0xc1, 0x92, 0xad, 0x60, 0xd8, 0x9f, 0xdc, 0xa4, 0xff, 0x86, 0xe5, 0x0c, 0xa9, 0xd2, 0x24, 0x0d,
- 0x2a, 0xb6, 0xa4, 0x49, 0x4f, 0x45, 0x6d, 0xb6, 0x6c, 0x3b, 0x98, 0x1e, 0x7b, 0xd6, 0x20, 0xe8,
- 0xf9, 0x93, 0xe3, 0x39, 0xfa, 0x47, 0xb0, 0x90, 0x90, 0x37, 0x26, 0x94, 0xbf, 0x2b, 0xc0, 0xdd,
- 0xac, 0xc0, 0x9a, 0x9a, 0x31, 0x2c, 0x73, 0xee, 0x51, 0x3a, 0x30, 0xd5, 0xb5, 0x54, 0x66, 0xed,
- 0x57, 0xc4, 0x65, 0x97, 0x2c, 0x67, 0x59, 0x43, 0xda, 0x93, 0xb9, 0x1b, 0xef, 0xbb, 0x3e, 0xa4,
- 0x3d, 0xfd, 0x3e, 0xdc, 0x1b, 0x6f, 0x98, 0x8c, 0xf9, 0xdf, 0x15, 0x60, 0x71, 0x07, 0x53, 0xc3,
- 0x3a, 0x17, 0xd9, 0x6e, 0x30, 0x15, 0x3c, 0xac, 0x4b, 0xfc, 0xbe, 0x99, 0xc0, 0x45, 0xaa, 0x46,
- 0x9d, 0x11, 0xa3, 0x57, 0xea, 0x2d, 0xa8, 0x51, 0xdf, 0x4c, 0xbc, 0x73, 0xab, 0x06, 0x50, 0x3f,
- 0xec, 0xa0, 0xff, 0xb9, 0x08, 0xd7, 0x46, 0x0c, 0x93, 0x0b, 0xb1, 0x0b, 0x35, 0x62, 0x9d, 0xcb,
- 0x84, 0x9d, 0xc5, 0x27, 0xbb, 0xa7, 0x3e, 0x8c, 0x65, 0xa7, 0xe9, 0x31, 0x6b, 0x11, 0xc9, 0x00,
- 0x12, 0x71, 0xb5, 0xef, 0x67, 0xa1, 0x1a, 0x71, 0xd0, 0x12, 0x94, 0x59, 0x76, 0xc9, 0x9e, 0x2c,
- 0x22, 0xc4, 0x4a, 0xac, 0xb9, 0xd7, 0x89, 0xe0, 0xa4, 0x19, 0x05, 0x27, 0xa1, 0x55, 0xa8, 0x78,
- 0xf8, 0x5c, 0xe4, 0xac, 0xdc, 0xf8, 0xd6, 0x4c, 0xb3, 0x60, 0x94, 0x3d, 0x7c, 0xce, 0xb3, 0xd6,
- 0x55, 0xa8, 0xb0, 0x77, 0x3a, 0x67, 0x17, 0x15, 0xdb, 0x77, 0x3b, 0x9c, 0x7d, 0x08, 0x55, 0x7f,
- 0x80, 0x89, 0x45, 0xd9, 0xdc, 0xe7, 0x78, 0x7a, 0xfd, 0xd9, 0x7b, 0x4e, 0x60, 0xed, 0x30, 0x1c,
- 0x68, 0x28, 0x19, 0xcc, 0xe7, 0xcc, 0x27, 0x4a, 0xa8, 0x00, 0x68, 0xea, 0xc4, 0x3a, 0x8f, 0xfa,
- 0xb3, 0x58, 0x62, 0x46, 0xf5, 0xfd, 0x0e, 0xe6, 0x79, 0xf6, 0x1c, 0x37, 0x68, 0xdf, 0xef, 0x60,
- 0x0e, 0xd0, 0xe0, 0x73, 0xc1, 0xaa, 0x08, 0x96, 0x87, 0xcf, 0x39, 0xeb, 0x1e, 0x5c, 0x09, 0x67,
- 0x6a, 0xb6, 0x2f, 0xd8, 0x89, 0x50, 0x15, 0x79, 0x9d, 0x9c, 0x6b, 0x8b, 0xd1, 0x58, 0xaf, 0x70,
- 0xc2, 0xb2, 0x17, 0x88, 0x5e, 0x72, 0xca, 0xbc, 0x97, 0xee, 0x40, 0x55, 0x99, 0x53, 0x83, 0xf2,
- 0xab, 0x83, 0x17, 0x07, 0x87, 0x5f, 0x1c, 0x34, 0x2e, 0xa1, 0x2a, 0xcc, 0xad, 0x6f, 0x6e, 0x6e,
- 0x6d, 0x8a, 0x4c, 0x7d, 0xe3, 0xf0, 0x68, 0x6f, 0x6b, 0x53, 0x64, 0xea, 0x9b, 0x5b, 0x2f, 0xb7,
- 0x4e, 0xb6, 0x36, 0x1b, 0xb3, 0xa8, 0x0e, 0x95, 0xfd, 0xc3, 0xcd, 0xbd, 0x6d, 0xc6, 0x2a, 0x32,
- 0x96, 0xb1, 0x75, 0xb0, 0xbe, 0xbf, 0xb5, 0xd9, 0x98, 0x43, 0x0d, 0xa8, 0x9f, 0x7c, 0x79, 0xb4,
- 0x65, 0x6e, 0xec, 0xae, 0x1f, 0xec, 0x6c, 0x6d, 0x36, 0x4a, 0xfa, 0x6f, 0x0b, 0xd0, 0x3c, 0xc6,
- 0x16, 0xb1, 0x7b, 0xdb, 0x8e, 0x8b, 0x83, 0xd6, 0x05, 0x3b, 0x4d, 0x27, 0x0f, 0xee, 0x45, 0x98,
- 0x7b, 0x33, 0xc4, 0x32, 0x5d, 0xa8, 0x1a, 0xa2, 0x11, 0x26, 0x71, 0xb3, 0x2a, 0x89, 0xbb, 0x0e,
- 0xa5, 0xae, 0xe3, 0x52, 0x4c, 0xc4, 0xf2, 0x1b, 0xb2, 0xa5, 0x7f, 0x06, 0xcb, 0x19, 0x56, 0xa9,
- 0x7c, 0xb3, 0xcb, 0xc8, 0x3c, 0xa6, 0xeb, 0x86, 0x68, 0xe8, 0x7f, 0x2c, 0xc0, 0x8d, 0xc4, 0x98,
- 0x0d, 0xdf, 0xa3, 0xd8, 0xa3, 0x3f, 0xde, 0x64, 0x3e, 0x82, 0x86, 0xdd, 0x1b, 0x7a, 0x67, 0x98,
- 0x65, 0x9e, 0xc2, 0x56, 0x09, 0x1a, 0x5e, 0x95, 0xf4, 0xe8, 0x3c, 0xb9, 0x80, 0x95, 0x6c, 0x5b,
- 0xe5, 0x14, 0x9b, 0x50, 0xee, 0x5b, 0xd4, 0xee, 0x45, 0x93, 0x0c, 0x9b, 0x68, 0x15, 0x80, 0x7f,
- 0x9a, 0xb1, 0xdb, 0xbb, 0xca, 0x29, 0x9b, 0x16, 0xb5, 0xd0, 0x6d, 0xa8, 0x63, 0xaf, 0x63, 0xfa,
- 0x5d, 0x93, 0xd3, 0x24, 0x98, 0x09, 0xd8, 0xeb, 0x1c, 0x76, 0xf7, 0x19, 0x45, 0xff, 0x75, 0x01,
- 0x4a, 0x02, 0xdc, 0x0b, 0xdf, 0xf1, 0x85, 0xe8, 0x1d, 0x8f, 0xfe, 0x17, 0x96, 0xa3, 0xc3, 0xd2,
- 0x27, 0xce, 0x37, 0x3c, 0x04, 0xcd, 0x1e, 0xb6, 0x3a, 0x98, 0xc8, 0xd3, 0x67, 0x29, 0x3c, 0x3c,
- 0x23, 0xfe, 0x2e, 0x67, 0xa3, 0x0f, 0xe0, 0x4a, 0xdf, 0x61, 0xa9, 0xbf, 0x49, 0x70, 0xb7, 0x6f,
- 0x0d, 0x82, 0x66, 0x91, 0x3f, 0xfb, 0x2e, 0x0b, 0xaa, 0x21, 0x88, 0xcf, 0x8b, 0x95, 0x99, 0xc6,
- 0xac, 0x51, 0x64, 0xb7, 0xb9, 0xfe, 0x25, 0xac, 0xee, 0x60, 0x7a, 0xd8, 0xfe, 0x19, 0xb6, 0xe9,
- 0xa6, 0x43, 0xb0, 0x3d, 0x3d, 0xd8, 0xfb, 0xbf, 0xe0, 0x66, 0x9e, 0xe8, 0x31, 0xf0, 0xf7, 0x1f,
- 0x0a, 0xb0, 0xb8, 0xe1, 0xfa, 0x1e, 0x66, 0xb7, 0xc0, 0x91, 0xef, 0x4f, 0xa1, 0xd6, 0x73, 0x1f,
- 0x8a, 0x03, 0xf6, 0x1a, 0x1f, 0x49, 0x9c, 0x85, 0x65, 0x5c, 0x05, 0xe7, 0xa3, 0xfb, 0x11, 0x2e,
- 0x3d, 0x9b, 0x09, 0xed, 0x4a, 0xae, 0xbe, 0x04, 0xd7, 0x46, 0x2c, 0x94, 0x31, 0xf5, 0x97, 0x02,
- 0xac, 0x24, 0x38, 0x7b, 0x1e, 0xc5, 0xc4, 0xb3, 0x7e, 0xc4, 0x39, 0x64, 0x22, 0x06, 0xb3, 0xff,
- 0x06, 0x62, 0x70, 0x0b, 0x56, 0x73, 0xa6, 0xa0, 0x0a, 0x3e, 0xcc, 0x1f, 0x6f, 0xa7, 0x0d, 0x32,
- 0xa7, 0x85, 0x4a, 0x85, 0xef, 0x98, 0x42, 0x8f, 0x1f, 0x3f, 0x53, 0x53, 0xc8, 0xef, 0x21, 0xec,
- 0x5a, 0xd4, 0x79, 0x2b, 0xf1, 0x5c, 0x79, 0xf7, 0x87, 0x44, 0x76, 0x15, 0x08, 0xab, 0x46, 0x35,
- 0x4b, 0xab, 0x7e, 0x59, 0x60, 0x29, 0xcc, 0xc0, 0x75, 0xec, 0xe9, 0xe2, 0xed, 0xe8, 0x21, 0x94,
- 0xc4, 0xa2, 0x8c, 0x01, 0x7a, 0x64, 0x0f, 0x7d, 0x15, 0x6e, 0x64, 0xda, 0x20, 0x6d, 0x7c, 0x05,
- 0xcb, 0x87, 0x03, 0xea, 0xf4, 0xf9, 0x9e, 0x9b, 0xde, 0x62, 0xad, 0x80, 0x96, 0x25, 0x56, 0x28,
- 0x7d, 0xf2, 0xb7, 0x9b, 0xbc, 0x52, 0x1b, 0x96, 0xbb, 0x44, 0x89, 0x1b, 0x7d, 0x05, 0x8d, 0xd1,
- 0x2a, 0x33, 0xba, 0x95, 0xd6, 0x96, 0x28, 0x6a, 0x6b, 0xb7, 0xf3, 0x3b, 0xc8, 0x19, 0x96, 0xfe,
- 0xf9, 0xdd, 0x83, 0x99, 0xca, 0x0c, 0xfa, 0x3a, 0xac, 0x0e, 0xc7, 0x4a, 0xc7, 0x28, 0x3e, 0x3c,
- 0xb3, 0x56, 0xad, 0xdd, 0x19, 0xd3, 0x23, 0xa1, 0xa1, 0x80, 0x5e, 0x00, 0xa8, 0x5a, 0x30, 0x5a,
- 0x4e, 0x0e, 0x8c, 0xd5, 0xa4, 0x35, 0x2d, 0x8b, 0x95, 0x16, 0xa6, 0x6a, 0xbe, 0x4a, 0x58, 0xaa,
- 0xac, 0xac, 0x84, 0x65, 0x94, 0x88, 0x43, 0x61, 0x5f, 0xc0, 0x95, 0x64, 0x6d, 0x16, 0xad, 0x46,
- 0x4f, 0xb4, 0xac, 0x0a, 0xb2, 0x76, 0x33, 0x8f, 0x3d, 0x22, 0xf8, 0x2b, 0x09, 0xc2, 0xc6, 0xaa,
- 0xb0, 0x6a, 0xcd, 0x72, 0x8a, 0xbe, 0x6a, 0xcd, 0x72, 0x0b, 0xb8, 0x31, 0xbb, 0x93, 0x65, 0x51,
- 0x65, 0x77, 0x66, 0x05, 0x56, 0xd9, 0x9d, 0x5d, 0x4d, 0x8d, 0x82, 0xc1, 0x06, 0x94, 0x2e, 0x67,
- 0xa2, 0x68, 0xad, 0x73, 0xab, 0xab, 0x9a, 0x3e, 0xae, 0xcb, 0x88, 0xf5, 0x07, 0x50, 0x8b, 0x55,
- 0xe9, 0x50, 0xb4, 0x50, 0xe9, 0x4a, 0xa9, 0x76, 0x23, 0x93, 0x97, 0x76, 0xf6, 0x68, 0x1e, 0xa4,
- 0x9c, 0x9d, 0x53, 0xd5, 0x53, 0xce, 0xce, 0xad, 0xd0, 0x85, 0xe2, 0xf7, 0x01, 0x54, 0xf1, 0x48,
- 0x45, 0x5c, 0xaa, 0x44, 0xa6, 0x22, 0x2e, 0x5d, 0x6b, 0x0a, 0x1d, 0xfc, 0x29, 0xb7, 0x76, 0xb4,
- 0x18, 0xa4, 0xac, 0xcd, 0xa9, 0x3d, 0x29, 0x6b, 0xf3, 0xea, 0x48, 0xf1, 0xed, 0x9c, 0xaa, 0xae,
- 0xa8, 0xed, 0x9c, 0x57, 0x53, 0x52, 0xdb, 0x39, 0xb7, 0x34, 0x13, 0xf9, 0xe3, 0x7f, 0xa0, 0xb8,
- 0x1d, 0xd8, 0x67, 0x68, 0x21, 0x1a, 0xa2, 0x0a, 0x33, 0xda, 0x62, 0x92, 0x38, 0x32, 0x74, 0x0b,
- 0x2a, 0x61, 0x6d, 0x02, 0x2d, 0x25, 0xa2, 0x5d, 0xd5, 0x59, 0xb4, 0x66, 0x9a, 0x31, 0x22, 0xe6,
- 0x04, 0x2e, 0x27, 0x0a, 0x0b, 0x68, 0x25, 0xd2, 0x9a, 0x51, 0xdf, 0xd0, 0x56, 0x73, 0xb8, 0x23,
- 0x9e, 0x7b, 0x01, 0xa0, 0x00, 0x7f, 0xb5, 0xce, 0xa9, 0xa2, 0x84, 0x5a, 0xe7, 0x8c, 0xfa, 0x40,
- 0x68, 0xa2, 0x0d, 0x28, 0x8d, 0xd9, 0xab, 0x8d, 0x94, 0x5b, 0x43, 0x50, 0x1b, 0x29, 0x1f, 0xf2,
- 0x8f, 0xef, 0xd6, 0x34, 0xca, 0x1e, 0x57, 0x92, 0x83, 0xfa, 0xc7, 0x95, 0xe4, 0x81, 0xf4, 0x91,
- 0x12, 0x92, 0xae, 0x8c, 0x4b, 0x74, 0x1c, 0xdd, 0xcf, 0xdb, 0x43, 0x49, 0xb0, 0x5e, 0xfb, 0xf0,
- 0x07, 0xfb, 0x8d, 0x78, 0xef, 0x18, 0xea, 0x71, 0x74, 0x1c, 0xdd, 0x48, 0x0a, 0x48, 0xc0, 0x88,
- 0xda, 0x4a, 0x36, 0x33, 0xb5, 0xf1, 0x7e, 0x0e, 0x5a, 0x3e, 0x40, 0x88, 0x3e, 0x1a, 0x67, 0x63,
- 0x52, 0xe1, 0xc3, 0xf7, 0xe9, 0x9a, 0x9c, 0xd1, 0x83, 0x02, 0xda, 0x85, 0x6a, 0x04, 0x5a, 0xa3,
- 0x66, 0x1e, 0xe4, 0xae, 0x2d, 0x67, 0x70, 0x46, 0xbc, 0xf3, 0x39, 0xd4, 0xe3, 0x20, 0xb4, 0xf2,
- 0x4e, 0x06, 0xfe, 0xad, 0xbc, 0x93, 0x89, 0x5b, 0xc7, 0x8f, 0x64, 0x05, 0x63, 0xc6, 0x8e, 0xe4,
- 0x14, 0x56, 0x1a, 0x3b, 0x92, 0xd3, 0xb8, 0x67, 0x14, 0x34, 0x6d, 0xfe, 0x73, 0x43, 0x12, 0x7b,
- 0x44, 0xf1, 0xbf, 0x0b, 0x32, 0xc1, 0x4e, 0x75, 0x0a, 0xe5, 0x02, 0x97, 0xb1, 0xf5, 0xfc, 0x1a,
- 0xe6, 0x53, 0x60, 0xa2, 0xd2, 0x91, 0x87, 0x5e, 0x2a, 0x1d, 0xb9, 0x48, 0x64, 0x34, 0x8b, 0x16,
- 0x94, 0xe5, 0x3f, 0x4e, 0xe8, 0x7a, 0x34, 0x2a, 0xf1, 0x03, 0x95, 0xb6, 0x94, 0xa2, 0x8f, 0x78,
- 0xf6, 0x08, 0x6a, 0x31, 0xa4, 0x11, 0xc5, 0xef, 0x88, 0x11, 0x04, 0x51, 0x79, 0x36, 0x03, 0x9a,
- 0x8c, 0xcd, 0xfb, 0x17, 0x2c, 0x55, 0x1a, 0x83, 0xfb, 0xa1, 0x8f, 0xc7, 0xc5, 0xe7, 0xa8, 0xd2,
- 0x47, 0xef, 0xd7, 0x79, 0x64, 0x56, 0x3f, 0x85, 0xcb, 0x09, 0x0c, 0x4b, 0x9d, 0xc0, 0x59, 0x40,
- 0xa3, 0x3a, 0x81, 0x33, 0x81, 0xaf, 0xd8, 0xdc, 0xce, 0x60, 0x31, 0x0b, 0x5a, 0x40, 0x77, 0xd5,
- 0xae, 0xc8, 0x05, 0x49, 0xb4, 0x7b, 0xe3, 0x3b, 0xa5, 0x94, 0xb5, 0x61, 0x3e, 0x85, 0xd3, 0xa8,
- 0x00, 0xca, 0x03, 0x96, 0x54, 0x00, 0xe5, 0x82, 0x3c, 0x31, 0x1d, 0x18, 0x50, 0xba, 0x5a, 0x83,
- 0x62, 0x8f, 0xe7, 0x9c, 0xa2, 0x91, 0x3a, 0xa2, 0xc7, 0x14, 0x7b, 0xd4, 0xe1, 0xd2, 0x86, 0xf9,
- 0x54, 0x81, 0x46, 0x4d, 0x25, 0xaf, 0x22, 0xa4, 0xa6, 0x92, 0x5b, 0xdd, 0x89, 0x4d, 0xc5, 0x87,
- 0xeb, 0xd9, 0xa0, 0x04, 0xfa, 0x20, 0xb6, 0xbc, 0xf9, 0x78, 0x88, 0x76, 0xff, 0x87, 0xba, 0x8d,
- 0x6c, 0xbf, 0x13, 0xb8, 0x9c, 0xc8, 0xa7, 0x55, 0x90, 0x65, 0xa1, 0x1c, 0x2a, 0xc8, 0xb2, 0x11,
- 0x86, 0x30, 0x74, 0xdd, 0x11, 0x08, 0x22, 0xcc, 0xd2, 0xd1, 0xbd, 0xcc, 0xf1, 0x23, 0x38, 0x84,
- 0xf6, 0xc1, 0x0f, 0xf4, 0x4a, 0xbf, 0x4d, 0x47, 0xb3, 0xf3, 0x78, 0xf2, 0x96, 0x09, 0x06, 0xc4,
- 0x93, 0xb7, 0x9c, 0xc4, 0x3e, 0x21, 0x3e, 0x99, 0x66, 0xc7, 0xc5, 0x67, 0xa6, 0xfe, 0x71, 0xf1,
- 0x39, 0x19, 0x7a, 0x28, 0xbe, 0x0b, 0x0b, 0x19, 0x49, 0x32, 0x8a, 0xc5, 0x66, 0x5e, 0x16, 0xaf,
- 0xdd, 0x1d, 0xdb, 0x27, 0xfd, 0x5a, 0x4a, 0xa7, 0xc5, 0x6a, 0x97, 0xe4, 0x66, 0xe2, 0x6a, 0x97,
- 0xe4, 0x67, 0xd5, 0xa1, 0x92, 0xd6, 0xa7, 0xaf, 0x59, 0x67, 0xd7, 0x6a, 0xaf, 0xd9, 0x7e, 0xff,
- 0xb1, 0xf8, 0xfc, 0xc4, 0x27, 0xa7, 0x8f, 0x85, 0x88, 0xc7, 0xfc, 0x47, 0xf1, 0xc7, 0xa7, 0xbe,
- 0x6c, 0x0f, 0xda, 0xed, 0x12, 0x27, 0x3d, 0xfd, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc5, 0x6f,
- 0x97, 0x8b, 0x79, 0x2e, 0x00, 0x00,
+ // 3177 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5a, 0x4b, 0x6f, 0x1b, 0xc9,
+ 0x11, 0x36, 0xf5, 0xe0, 0xa3, 0x48, 0xd9, 0x54, 0x4b, 0xb6, 0xa8, 0xb1, 0x64, 0xd9, 0x63, 0xaf,
+ 0xd7, 0xeb, 0xf5, 0xca, 0xbb, 0x72, 0x80, 0x38, 0x09, 0x82, 0x40, 0xd4, 0xdb, 0xb6, 0x1e, 0x3b,
+ 0x94, 0xb3, 0x58, 0x03, 0x0b, 0xee, 0x70, 0xd8, 0x14, 0x27, 0x1a, 0xce, 0xd0, 0x3d, 0x4d, 0xcb,
+ 0x5a, 0x20, 0x87, 0x04, 0xc8, 0x21, 0x40, 0xb0, 0xa7, 0x20, 0x9b, 0x63, 0x6e, 0x01, 0xf2, 0x0b,
+ 0x72, 0xc9, 0x21, 0x97, 0x5c, 0xf2, 0x0b, 0xf6, 0x2f, 0x04, 0xc8, 0x1f, 0xc8, 0x29, 0xe8, 0xc7,
+ 0x4c, 0xcf, 0x70, 0x66, 0xb8, 0x0e, 0x48, 0x6c, 0x6e, 0xd3, 0x55, 0xdd, 0x55, 0xd5, 0xdd, 0xd5,
+ 0xd5, 0x5d, 0x5f, 0x0d, 0xd4, 0x08, 0xee, 0x7b, 0xbe, 0x4d, 0x3d, 0x72, 0xf9, 0x91, 0x8f, 0xc9,
+ 0x1b, 0xdb, 0xc2, 0xeb, 0x7d, 0xe2, 0x51, 0x0f, 0xe5, 0xcf, 0x6c, 0x6a, 0x3a, 0x97, 0x1a, 0x38,
+ 0xb6, 0x4b, 0x05, 0x4d, 0xab, 0xf8, 0x5d, 0x93, 0xe0, 0xb6, 0x68, 0xe9, 0x0d, 0x58, 0x32, 0xc2,
+ 0xd1, 0x3b, 0x6f, 0x6d, 0x9f, 0xfa, 0x06, 0x7e, 0x3d, 0xc0, 0x3e, 0x45, 0x4f, 0x01, 0x94, 0xe0,
+ 0x5a, 0xee, 0x76, 0xee, 0x41, 0x79, 0x03, 0xad, 0x0b, 0x89, 0xeb, 0x6a, 0x50, 0x7d, 0xe6, 0x8f,
+ 0xff, 0x78, 0x94, 0x33, 0x22, 0x7d, 0xf5, 0x0d, 0xa8, 0x25, 0x85, 0xfa, 0x7d, 0xcf, 0xf5, 0x31,
+ 0xba, 0x01, 0x79, 0xcc, 0x29, 0x5c, 0x62, 0xd1, 0x90, 0x2d, 0xfd, 0x94, 0x8f, 0x31, 0xad, 0xf3,
+ 0x03, 0xd7, 0x22, 0xb8, 0x87, 0x5d, 0x6a, 0x3a, 0xe3, 0x5b, 0x72, 0x13, 0x96, 0x53, 0xa4, 0x0a,
+ 0x53, 0x74, 0x02, 0xf3, 0x82, 0xb9, 0x3b, 0x70, 0xc6, 0xd7, 0x85, 0xee, 0xc2, 0x9c, 0x45, 0xb0,
+ 0x49, 0x71, 0xb3, 0x65, 0xd3, 0x9e, 0xd9, 0xaf, 0x4d, 0xf1, 0x09, 0x56, 0x04, 0xb1, 0xce, 0x69,
+ 0xfa, 0x22, 0xa0, 0xa8, 0x4e, 0x69, 0xc9, 0x21, 0xcc, 0x1f, 0xda, 0xed, 0xb7, 0x82, 0x33, 0xfe,
+ 0xac, 0x17, 0x01, 0x45, 0xc5, 0x49, 0x25, 0xbf, 0xcb, 0xc1, 0xf5, 0x3d, 0x93, 0xb4, 0xcc, 0x33,
+ 0xbc, 0xe5, 0x39, 0x0e, 0xb6, 0xe8, 0xf7, 0x33, 0x67, 0xb4, 0x08, 0xb3, 0x7d, 0x32, 0x70, 0x71,
+ 0x6d, 0x9a, 0x33, 0x45, 0x43, 0xaf, 0xc1, 0x8d, 0x61, 0x6b, 0xa4, 0xa1, 0xff, 0xcc, 0xc1, 0xd2,
+ 0x67, 0xc4, 0xa6, 0x78, 0xcb, 0xeb, 0xf5, 0x6c, 0xba, 0x47, 0xcc, 0x7e, 0x77, 0x7c, 0x53, 0x1b,
+ 0x30, 0xe7, 0xf7, 0x1d, 0x9b, 0x36, 0x28, 0x31, 0x29, 0x3e, 0xbb, 0xe4, 0xa6, 0x5e, 0xdd, 0xf8,
+ 0x28, 0x18, 0x9c, 0xa1, 0x71, 0xbd, 0x11, 0x1d, 0x64, 0xc4, 0x65, 0xe8, 0x77, 0x60, 0x2e, 0xc6,
+ 0x47, 0x55, 0xa8, 0x34, 0xec, 0xaf, 0xf0, 0xe1, 0xc0, 0xa1, 0x76, 0xdf, 0xc1, 0xd5, 0x2b, 0xba,
+ 0x06, 0xb5, 0xa4, 0x68, 0x39, 0xd3, 0x67, 0x70, 0x75, 0xcb, 0xc1, 0xa6, 0x3b, 0xe8, 0x8f, 0xbf,
+ 0xe9, 0xf3, 0x70, 0x2d, 0x94, 0x25, 0xc5, 0x7f, 0x0a, 0xd7, 0xd5, 0x10, 0x66, 0xd6, 0xf8, 0x5a,
+ 0x1e, 0xc1, 0x8d, 0x61, 0x91, 0xf2, 0x60, 0x23, 0x98, 0xf1, 0xed, 0xaf, 0x30, 0x97, 0x36, 0x6d,
+ 0xf0, 0x6f, 0xfd, 0x35, 0x2c, 0x6f, 0xf6, 0xfb, 0xce, 0xe5, 0x9e, 0x4d, 0x4d, 0x4a, 0x89, 0xdd,
+ 0x1a, 0x50, 0x3c, 0x7e, 0x7c, 0x41, 0x1a, 0x14, 0x09, 0x7e, 0x63, 0xfb, 0xb6, 0xe7, 0xf2, 0x5d,
+ 0xac, 0x18, 0x61, 0x5b, 0x5f, 0x01, 0x2d, 0x4d, 0xa5, 0x5c, 0x91, 0x7f, 0x4f, 0x01, 0xda, 0xc5,
+ 0xd4, 0xea, 0x1a, 0xb8, 0xe7, 0xd1, 0xf1, 0xd7, 0x83, 0x85, 0x33, 0xc2, 0x45, 0x71, 0x43, 0x4a,
+ 0x86, 0x6c, 0x31, 0x9f, 0xef, 0x78, 0xc4, 0x0a, 0x7d, 0x9e, 0x37, 0xd0, 0x12, 0x14, 0x5c, 0xaf,
+ 0x49, 0xcd, 0x33, 0xbf, 0x36, 0x23, 0xa2, 0x9f, 0xeb, 0x9d, 0x9a, 0x67, 0x3e, 0xaa, 0x41, 0x81,
+ 0xda, 0x3d, 0xec, 0x0d, 0x68, 0x6d, 0xf6, 0x76, 0xee, 0xc1, 0xac, 0x11, 0x34, 0xd9, 0x10, 0xdf,
+ 0xef, 0x36, 0xcf, 0xf1, 0x65, 0x2d, 0x2f, 0x34, 0xf8, 0x7e, 0xf7, 0x39, 0xbe, 0x44, 0x6b, 0x50,
+ 0x3e, 0x77, 0xbd, 0x0b, 0xb7, 0xd9, 0xf5, 0x58, 0x34, 0x2d, 0x70, 0x26, 0x70, 0xd2, 0x3e, 0xa3,
+ 0xa0, 0x65, 0x28, 0xba, 0x5e, 0x53, 0x9c, 0xbc, 0x12, 0xd7, 0x56, 0x70, 0xbd, 0x13, 0xd6, 0x44,
+ 0x4f, 0x60, 0x4e, 0xd8, 0xd9, 0xec, 0x9b, 0xc4, 0xec, 0xf9, 0x35, 0xe0, 0x53, 0xbe, 0xaa, 0xa6,
+ 0xcc, 0x57, 0xa7, 0x22, 0x3a, 0x9d, 0xf0, 0x3e, 0xe8, 0x11, 0x20, 0xab, 0x8b, 0xad, 0x73, 0x6e,
+ 0x7f, 0xd3, 0xea, 0x9a, 0xee, 0x19, 0x6e, 0xd7, 0xca, 0x5c, 0x72, 0x95, 0x73, 0xd8, 0x54, 0xb6,
+ 0x04, 0xfd, 0xd9, 0x4c, 0xb1, 0x58, 0x2d, 0xe9, 0x4f, 0x61, 0x21, 0xb6, 0xdc, 0xd2, 0x57, 0xee,
+ 0x40, 0x25, 0x26, 0x44, 0x5c, 0x05, 0x65, 0xaa, 0xc6, 0xb3, 0x8b, 0x69, 0x8b, 0x07, 0x11, 0xb5,
+ 0xfc, 0xe3, 0x7b, 0xaf, 0x06, 0xb5, 0xa4, 0x50, 0xe9, 0x1a, 0xff, 0x9a, 0x82, 0xf9, 0x3d, 0x4c,
+ 0x37, 0x89, 0xd5, 0xb5, 0xdf, 0x4c, 0xc0, 0x33, 0x6e, 0x42, 0xc9, 0xe2, 0x47, 0xbe, 0x69, 0xb7,
+ 0xa5, 0x73, 0x14, 0x05, 0xe1, 0xa0, 0xcd, 0xdc, 0xa6, 0x4f, 0x70, 0xc7, 0x7e, 0xcb, 0xfd, 0xa3,
+ 0x64, 0xc8, 0x16, 0x7a, 0x0a, 0xf9, 0x8e, 0x47, 0x7a, 0x26, 0xe5, 0xfe, 0x71, 0x75, 0xe3, 0x76,
+ 0xa0, 0x2a, 0x61, 0xd9, 0xfa, 0x2e, 0xef, 0x67, 0xc8, 0xfe, 0xec, 0xf8, 0xf5, 0x4d, 0xda, 0xe5,
+ 0xee, 0x53, 0x31, 0xf8, 0x37, 0xf3, 0x2a, 0xfc, 0xd6, 0x72, 0x06, 0x6d, 0x5c, 0xcb, 0xdf, 0x9e,
+ 0x7e, 0x50, 0x31, 0x82, 0x26, 0x5a, 0x05, 0xc0, 0x8e, 0xdd, 0x66, 0xfb, 0x4f, 0xbb, 0xdc, 0x77,
+ 0x8a, 0x46, 0x89, 0x53, 0x4e, 0xd8, 0xc0, 0x87, 0x30, 0x6f, 0xbb, 0xbc, 0x67, 0xd3, 0xe9, 0xf8,
+ 0xcd, 0x96, 0xe3, 0xb5, 0xfc, 0x5a, 0x91, 0xf7, 0xba, 0x26, 0x19, 0x2f, 0x3a, 0x7e, 0x9d, 0x91,
+ 0xf5, 0x27, 0x90, 0x17, 0xa6, 0xa0, 0x02, 0x4c, 0xbf, 0x3a, 0x38, 0xa9, 0x5e, 0x61, 0x1f, 0xa7,
+ 0x9b, 0x46, 0x35, 0x87, 0x00, 0xf2, 0xa7, 0x9b, 0x46, 0x73, 0xef, 0x55, 0x75, 0x0a, 0x95, 0xa1,
+ 0xc0, 0xbe, 0xeb, 0xaf, 0x36, 0xaa, 0xd3, 0xfa, 0x03, 0x40, 0xd1, 0x19, 0xa9, 0x10, 0xd2, 0x36,
+ 0xa9, 0xc9, 0x97, 0xb9, 0x62, 0xf0, 0x6f, 0xe6, 0x07, 0xfb, 0xa6, 0xff, 0xc2, 0xb3, 0x4c, 0xa7,
+ 0x4e, 0x4c, 0xd7, 0xea, 0x4e, 0x20, 0x80, 0xe8, 0x1f, 0x43, 0x2d, 0x29, 0x54, 0x1a, 0xb1, 0x08,
+ 0xb3, 0x6f, 0x4c, 0x67, 0x80, 0xa5, 0x53, 0x8a, 0x86, 0xfe, 0x6d, 0x0e, 0x6a, 0xdc, 0x93, 0x1b,
+ 0xde, 0x80, 0x58, 0x58, 0x8c, 0x1a, 0xdf, 0x49, 0x7e, 0x06, 0xf3, 0x3e, 0x17, 0xd8, 0x8c, 0x08,
+ 0x98, 0xca, 0x12, 0x60, 0x54, 0x45, 0x67, 0x23, 0x76, 0x01, 0x4b, 0x01, 0x2d, 0x6e, 0x12, 0xf7,
+ 0xa7, 0x8a, 0x51, 0xf1, 0x23, 0x66, 0xb2, 0xdd, 0xa6, 0x26, 0x39, 0xc3, 0xb4, 0x49, 0x70, 0x87,
+ 0x7b, 0x56, 0xc5, 0x28, 0x09, 0x8a, 0x81, 0x3b, 0xfa, 0x13, 0x58, 0x4e, 0x99, 0x9a, 0x7a, 0xaf,
+ 0x11, 0xec, 0x0f, 0x1c, 0x1a, 0xbc, 0xd7, 0x44, 0x4b, 0xdf, 0x83, 0xf2, 0xae, 0x3f, 0x89, 0xc7,
+ 0xca, 0x3d, 0xa8, 0x08, 0x41, 0x6a, 0xfd, 0x31, 0x21, 0x1e, 0x91, 0x5e, 0x20, 0x1a, 0xfa, 0x5f,
+ 0x73, 0x70, 0x8d, 0x5f, 0xa3, 0x06, 0xee, 0x8c, 0xbf, 0xec, 0x55, 0x98, 0x66, 0x2b, 0x21, 0xee,
+ 0x0e, 0xf6, 0x19, 0xbb, 0x52, 0xa6, 0xe3, 0x57, 0x0a, 0x8b, 0x56, 0x9e, 0xd3, 0x6e, 0x86, 0x7c,
+ 0xb1, 0x80, 0x65, 0xcf, 0x69, 0x1b, 0x41, 0x97, 0x30, 0xdc, 0xcf, 0x46, 0xc2, 0xfd, 0xb3, 0x99,
+ 0x62, 0xbe, 0x5a, 0xd0, 0x6b, 0x50, 0x55, 0x96, 0x8b, 0x49, 0x3e, 0x9b, 0x29, 0xe6, 0xaa, 0x53,
+ 0xba, 0x0b, 0x8b, 0xbb, 0xb6, 0xdb, 0x3e, 0xc4, 0xe4, 0x0c, 0xd7, 0x4d, 0x7f, 0x02, 0x41, 0x67,
+ 0x05, 0x4a, 0x81, 0x99, 0x7e, 0x6d, 0x8a, 0x9f, 0x79, 0x45, 0xd0, 0x3f, 0x84, 0xeb, 0x43, 0xfa,
+ 0xd4, 0xc1, 0x6b, 0x99, 0xbe, 0x70, 0xf9, 0x92, 0xc1, 0xbf, 0xf5, 0xaf, 0x73, 0x30, 0x2f, 0x82,
+ 0xe5, 0xae, 0x47, 0xce, 0xff, 0xff, 0xae, 0xce, 0x5e, 0xb5, 0x51, 0x7b, 0xc2, 0x47, 0xfc, 0xf2,
+ 0x81, 0x6f, 0x60, 0x66, 0xf2, 0x81, 0x7b, 0x42, 0xbc, 0x33, 0x82, 0x7d, 0x7f, 0x22, 0xd1, 0x9b,
+ 0x70, 0xa1, 0x91, 0xe8, 0x2d, 0x08, 0x07, 0x6d, 0xfd, 0xa7, 0xa0, 0xa5, 0xe9, 0x94, 0x8b, 0xb9,
+ 0x06, 0x65, 0xdb, 0x6d, 0xf6, 0x25, 0x59, 0x1e, 0x1b, 0xb0, 0xc3, 0x8e, 0xc2, 0xe4, 0xc6, 0xeb,
+ 0x81, 0xe9, 0x77, 0x27, 0x6c, 0xb2, 0xcf, 0x85, 0x46, 0x4c, 0x16, 0x84, 0xc0, 0xe4, 0xa4, 0xce,
+ 0x77, 0x35, 0xd9, 0x81, 0x5b, 0xc3, 0x17, 0xe7, 0x2e, 0xf1, 0x7a, 0x2f, 0x8d, 0x17, 0x13, 0x39,
+ 0x8c, 0x03, 0xe2, 0x48, 0x8b, 0xd9, 0xa7, 0x7e, 0x07, 0xd6, 0x32, 0xb5, 0xc9, 0x6d, 0x3f, 0x86,
+ 0x05, 0xd1, 0xa5, 0x3e, 0x70, 0xdb, 0xce, 0x04, 0x1e, 0xb6, 0x0f, 0x61, 0x31, 0x2e, 0x70, 0xc4,
+ 0x9d, 0xf4, 0x02, 0xaa, 0x7b, 0x98, 0x6e, 0x79, 0x6e, 0xc7, 0x3e, 0x1b, 0x5f, 0xf3, 0xfb, 0xfc,
+ 0xdd, 0x11, 0x48, 0x1b, 0xa1, 0xf6, 0xeb, 0x29, 0xa8, 0x36, 0x26, 0xa6, 0x17, 0x3d, 0x85, 0x02,
+ 0x76, 0x29, 0xb1, 0xb1, 0x88, 0x14, 0xe5, 0x8d, 0x5b, 0xc1, 0xb0, 0x61, 0x25, 0xeb, 0x3b, 0x2e,
+ 0x25, 0x97, 0x46, 0xd0, 0x5d, 0xfb, 0x4d, 0x0e, 0x66, 0x39, 0x89, 0xed, 0x1d, 0x7b, 0x99, 0x8a,
+ 0xb8, 0xc1, 0x3e, 0xd1, 0x2a, 0x94, 0xf8, 0x8d, 0xd9, 0xf4, 0x29, 0x11, 0x7b, 0xba, 0x7f, 0xc5,
+ 0x28, 0x72, 0x52, 0x83, 0x12, 0x74, 0x07, 0xca, 0x82, 0x6d, 0xbb, 0xf4, 0xc9, 0x06, 0x0f, 0xb5,
+ 0xb3, 0xfb, 0x57, 0x0c, 0xe0, 0xc4, 0x03, 0x46, 0x43, 0x6b, 0x20, 0x5a, 0xcd, 0x96, 0xe7, 0x39,
+ 0xe2, 0x9d, 0xbc, 0x7f, 0xc5, 0x10, 0x52, 0xeb, 0x9e, 0xe7, 0xd4, 0x0b, 0xf2, 0x86, 0xd6, 0x17,
+ 0x60, 0xbe, 0x31, 0xbc, 0x72, 0xba, 0x05, 0x0b, 0xdb, 0xd8, 0xc1, 0x2c, 0xe1, 0x9a, 0xcc, 0x3a,
+ 0x21, 0x98, 0x39, 0xc7, 0x97, 0x62, 0x91, 0x4a, 0x06, 0xff, 0xd6, 0x6f, 0xc0, 0x62, 0x5c, 0x89,
+ 0x54, 0x6e, 0xc3, 0xb2, 0x81, 0x7d, 0xea, 0x11, 0xbc, 0x35, 0xf0, 0xa9, 0xd7, 0xdb, 0xf7, 0xbc,
+ 0x73, 0x7f, 0x22, 0x26, 0x70, 0x6f, 0x98, 0x8a, 0x78, 0xc3, 0x0a, 0x68, 0x69, 0xaa, 0xa4, 0x21,
+ 0xa7, 0x50, 0xab, 0x9b, 0xd6, 0xf9, 0xa0, 0x3f, 0x49, 0x3b, 0xf4, 0xc7, 0xb0, 0x9c, 0x22, 0x75,
+ 0x84, 0xcb, 0xbe, 0x86, 0x3b, 0x69, 0x27, 0x79, 0x42, 0x87, 0x36, 0x75, 0x5d, 0xee, 0x81, 0x3e,
+ 0x4a, 0xa5, 0x5c, 0x9f, 0x23, 0x40, 0xec, 0x2a, 0x7c, 0x61, 0x5b, 0xd8, 0x9d, 0xc0, 0xc5, 0xab,
+ 0x6f, 0xc1, 0x42, 0x4c, 0x9e, 0x5c, 0x93, 0x47, 0x80, 0x1c, 0x41, 0x6a, 0xfa, 0x5d, 0x8f, 0xd0,
+ 0xa6, 0x6b, 0xf6, 0x82, 0x6b, 0xb6, 0x2a, 0x39, 0x0d, 0xc6, 0x38, 0x32, 0x7b, 0x7c, 0xd3, 0xf6,
+ 0x30, 0x3d, 0x70, 0x3b, 0xde, 0xe6, 0xe4, 0xb2, 0x65, 0xfd, 0x27, 0xb0, 0x9c, 0x22, 0x55, 0x1a,
+ 0x78, 0x0b, 0x40, 0xa5, 0xc9, 0x72, 0xeb, 0x22, 0x14, 0x66, 0xd2, 0x96, 0xe9, 0x58, 0x03, 0xc7,
+ 0xa4, 0x78, 0x8b, 0xe5, 0x78, 0xfe, 0xa0, 0x37, 0xbe, 0x49, 0x3f, 0x84, 0xe5, 0x14, 0xa9, 0xd2,
+ 0x24, 0x0d, 0x8a, 0x96, 0xa4, 0xc9, 0x95, 0x0a, 0xdb, 0x6c, 0xdb, 0xf6, 0x30, 0x6d, 0xb8, 0x66,
+ 0xdf, 0xef, 0x7a, 0xe3, 0xe3, 0x57, 0xfa, 0x07, 0xb0, 0x10, 0x93, 0x37, 0xc2, 0x95, 0xbf, 0xc9,
+ 0xc1, 0xdd, 0x34, 0xc7, 0x9a, 0x98, 0x31, 0x2c, 0x61, 0xef, 0x52, 0xda, 0x6f, 0xaa, 0xdb, 0xb0,
+ 0xc0, 0xda, 0x2f, 0x89, 0xc3, 0xee, 0x76, 0xce, 0x32, 0x07, 0xb4, 0x2b, 0x53, 0x46, 0xde, 0x77,
+ 0x73, 0x40, 0xbb, 0xfa, 0x7d, 0xb8, 0x37, 0xda, 0x30, 0xe9, 0xf3, 0x7f, 0xc8, 0xc1, 0xe2, 0x1e,
+ 0xa6, 0x86, 0x79, 0x21, 0x92, 0x6c, 0x7f, 0x22, 0xf8, 0x5f, 0x87, 0x78, 0xbd, 0x66, 0x0c, 0x8e,
+ 0x29, 0x19, 0x15, 0x46, 0x0c, 0x1f, 0xc7, 0x6b, 0x50, 0xa6, 0x5e, 0x33, 0xf6, 0xbc, 0x2e, 0x19,
+ 0x40, 0xbd, 0xa0, 0x83, 0xfe, 0xb7, 0x19, 0xb8, 0x3e, 0x64, 0x98, 0xdc, 0x88, 0x7d, 0x28, 0x13,
+ 0xf3, 0x42, 0xe2, 0x04, 0xcc, 0x3f, 0xd9, 0x3d, 0xf5, 0x7e, 0x24, 0x29, 0x4e, 0x8e, 0x59, 0x0f,
+ 0x49, 0x06, 0x90, 0x90, 0xab, 0x7d, 0x3b, 0x0d, 0xa5, 0x90, 0x83, 0x96, 0xa0, 0xc0, 0x92, 0x5a,
+ 0xf6, 0x52, 0x12, 0x2e, 0x96, 0x67, 0xcd, 0x83, 0x76, 0x88, 0x62, 0x4d, 0x29, 0x14, 0x0b, 0xad,
+ 0x42, 0xd1, 0xc5, 0x17, 0x22, 0x55, 0xe6, 0xc6, 0xd7, 0xa7, 0x6a, 0x39, 0xa3, 0xe0, 0xe2, 0x0b,
+ 0x9e, 0x2c, 0xaf, 0x42, 0x91, 0xa5, 0x07, 0x9c, 0x3d, 0xa3, 0xd8, 0x9e, 0xd3, 0xe6, 0xec, 0x63,
+ 0x28, 0x79, 0x7d, 0x4c, 0x4c, 0xca, 0xe6, 0x3e, 0xcb, 0xb3, 0xfa, 0x4f, 0xde, 0x71, 0x02, 0xeb,
+ 0xc7, 0xc1, 0x40, 0x43, 0xc9, 0x60, 0x6b, 0xce, 0xd6, 0x44, 0x09, 0x15, 0xb8, 0x50, 0x85, 0x98,
+ 0x17, 0x61, 0x7f, 0xe6, 0x4b, 0xcc, 0xa8, 0x9e, 0xd7, 0xc6, 0x3c, 0xbd, 0x9f, 0xe5, 0x06, 0x1d,
+ 0x7a, 0x6d, 0xcc, 0x71, 0x21, 0x7c, 0x21, 0x58, 0x45, 0xc1, 0x72, 0xf1, 0x05, 0x67, 0xdd, 0x83,
+ 0xab, 0xc1, 0x4c, 0x9b, 0xad, 0x4b, 0x16, 0x11, 0x4a, 0x22, 0x9d, 0x94, 0x73, 0xad, 0x33, 0x1a,
+ 0xeb, 0x15, 0x4c, 0x58, 0xf6, 0x02, 0xd1, 0x4b, 0x4e, 0x99, 0xf7, 0xd2, 0x6d, 0x28, 0x29, 0x73,
+ 0xca, 0x50, 0x78, 0x79, 0xf4, 0xfc, 0xe8, 0xf8, 0xb3, 0xa3, 0xea, 0x15, 0x54, 0x82, 0xd9, 0xcd,
+ 0xed, 0xed, 0x9d, 0x6d, 0x01, 0x10, 0x6c, 0x1d, 0x9f, 0x1c, 0xec, 0x6c, 0x0b, 0x80, 0x60, 0x7b,
+ 0xe7, 0xc5, 0xce, 0xe9, 0xce, 0x76, 0x75, 0x1a, 0x55, 0xa0, 0x78, 0x78, 0xbc, 0x7d, 0xb0, 0xcb,
+ 0x58, 0x33, 0x8c, 0x65, 0xec, 0x1c, 0x6d, 0x1e, 0xee, 0x6c, 0x57, 0x67, 0x51, 0x15, 0x2a, 0xa7,
+ 0x9f, 0x9f, 0xec, 0x34, 0xb7, 0xf6, 0x37, 0x8f, 0xf6, 0x76, 0xb6, 0xab, 0x79, 0xfd, 0xf7, 0x39,
+ 0xa8, 0x35, 0xb0, 0x49, 0xac, 0xee, 0xae, 0xed, 0x60, 0xbf, 0x7e, 0xc9, 0xa2, 0xe9, 0xf8, 0xce,
+ 0xbd, 0x08, 0xb3, 0xaf, 0x07, 0x58, 0x66, 0x29, 0x25, 0x43, 0x34, 0x82, 0xdc, 0x71, 0x5a, 0xe5,
+ 0x8e, 0x37, 0x20, 0xdf, 0xb1, 0x1d, 0x8a, 0x89, 0xd8, 0x7e, 0x43, 0xb6, 0xf4, 0x4f, 0x60, 0x39,
+ 0xc5, 0x2a, 0x95, 0xe6, 0x76, 0x18, 0x99, 0xfb, 0x74, 0xc5, 0x10, 0x0d, 0xfd, 0x2f, 0x39, 0xb8,
+ 0x19, 0x1b, 0xb3, 0xe5, 0xb9, 0x14, 0xbb, 0xf4, 0xfb, 0x9b, 0xcc, 0x07, 0x50, 0xb5, 0xba, 0x03,
+ 0xf7, 0x1c, 0xb3, 0x84, 0x57, 0xd8, 0x2a, 0xb1, 0xca, 0x6b, 0x92, 0x1e, 0xc6, 0x93, 0x4b, 0x58,
+ 0x49, 0xb7, 0x55, 0x4e, 0xb1, 0x06, 0x85, 0x9e, 0x49, 0xad, 0x6e, 0x38, 0xc9, 0xa0, 0x89, 0x56,
+ 0x01, 0xf8, 0x67, 0x33, 0x72, 0x7b, 0x97, 0x38, 0x65, 0xdb, 0xa4, 0x26, 0xba, 0x0d, 0x15, 0xec,
+ 0xb6, 0x9b, 0x5e, 0xa7, 0xc9, 0x69, 0x12, 0x43, 0x05, 0xec, 0xb6, 0x8f, 0x3b, 0x87, 0x8c, 0xa2,
+ 0xff, 0x36, 0x07, 0x79, 0x81, 0x29, 0x06, 0xe9, 0x43, 0x2e, 0x4c, 0x1f, 0xd0, 0x8f, 0x61, 0x39,
+ 0x0c, 0x96, 0x1e, 0xb1, 0xbf, 0xe2, 0x2e, 0xd8, 0xec, 0x62, 0xb3, 0x8d, 0x89, 0x8c, 0x3e, 0x4b,
+ 0x41, 0xf0, 0x0c, 0xf9, 0xfb, 0x9c, 0x8d, 0xde, 0x83, 0xab, 0x3d, 0x9b, 0x10, 0x8f, 0x34, 0x09,
+ 0xee, 0xf4, 0xcc, 0xbe, 0x5f, 0x9b, 0xe1, 0xcf, 0xbe, 0x39, 0x41, 0x35, 0x04, 0xf1, 0xd9, 0x4c,
+ 0x71, 0xaa, 0x3a, 0x6d, 0xcc, 0xb0, 0xdb, 0x5c, 0xff, 0x1c, 0x56, 0xf7, 0x30, 0x3d, 0x6e, 0xfd,
+ 0x02, 0x5b, 0x74, 0xdb, 0x26, 0xd8, 0x9a, 0x1c, 0xda, 0xfe, 0x03, 0xb8, 0x95, 0x25, 0x7a, 0x04,
+ 0xea, 0xfe, 0xa7, 0x1c, 0x2c, 0x6e, 0x39, 0x9e, 0x8b, 0xd9, 0x2d, 0x70, 0xe2, 0x79, 0x13, 0xa8,
+ 0x6d, 0xdd, 0x87, 0x99, 0x3e, 0x7b, 0x8d, 0x0f, 0xe5, 0xeb, 0xc2, 0x32, 0xae, 0x82, 0xf3, 0xd1,
+ 0xfd, 0x10, 0x0e, 0x9f, 0x4e, 0x45, 0x94, 0x25, 0x57, 0x5f, 0x82, 0xeb, 0x43, 0x16, 0x4a, 0x9f,
+ 0xfa, 0x7b, 0x0e, 0x56, 0x62, 0x9c, 0x03, 0x97, 0x62, 0xe2, 0x9a, 0xdf, 0xe3, 0x1c, 0x52, 0x81,
+ 0x8a, 0xe9, 0xff, 0x01, 0xa8, 0x58, 0x83, 0xd5, 0x8c, 0x29, 0xc8, 0x49, 0xf2, 0xa2, 0x6b, 0xcf,
+ 0x7b, 0x33, 0x69, 0x6c, 0x3b, 0x29, 0x54, 0x2a, 0x7c, 0xcb, 0x14, 0xba, 0x3c, 0xfc, 0x4c, 0x4c,
+ 0x21, 0xbf, 0x87, 0xb0, 0x63, 0x52, 0xfb, 0x8d, 0x84, 0x91, 0xe5, 0xdd, 0x1f, 0x10, 0xd9, 0x55,
+ 0x20, 0xac, 0x1a, 0xd6, 0x2c, 0xad, 0xfa, 0x75, 0x8e, 0xa5, 0x30, 0x7d, 0xc7, 0xb6, 0x26, 0x0b,
+ 0xf3, 0xa3, 0x87, 0x90, 0x17, 0x9b, 0x32, 0x02, 0x5f, 0x92, 0x3d, 0xf4, 0x55, 0xb8, 0x99, 0x6a,
+ 0x83, 0xb4, 0xf1, 0x25, 0x2c, 0x1f, 0xf7, 0xa9, 0xdd, 0xe3, 0x67, 0x6e, 0x72, 0x9b, 0xb5, 0x02,
+ 0x5a, 0x9a, 0x58, 0xa1, 0x74, 0xe3, 0xcf, 0x6b, 0xbc, 0x32, 0x1d, 0x54, 0xd9, 0x44, 0x49, 0x1f,
+ 0x7d, 0x01, 0xd5, 0xe1, 0xaa, 0x3a, 0x5a, 0x4b, 0x6a, 0x8b, 0x15, 0xf1, 0xb5, 0xdb, 0xd9, 0x1d,
+ 0xe4, 0x0c, 0xf3, 0xff, 0xf9, 0xe6, 0xc1, 0x54, 0x71, 0x0a, 0x7d, 0x19, 0x54, 0xc3, 0x23, 0xa5,
+ 0x72, 0x14, 0x1d, 0x9e, 0x5a, 0x9b, 0xd7, 0xee, 0x8c, 0xe8, 0x11, 0xd3, 0x90, 0x43, 0xcf, 0x01,
+ 0x54, 0xed, 0x1b, 0x2d, 0xc7, 0x07, 0x46, 0x6a, 0xf0, 0x9a, 0x96, 0xc6, 0x4a, 0x0a, 0x53, 0x35,
+ 0x6e, 0x25, 0x2c, 0x51, 0x46, 0x57, 0xc2, 0x52, 0x4a, 0xe2, 0x81, 0xb0, 0xcf, 0xe0, 0x6a, 0xbc,
+ 0x16, 0x8d, 0x56, 0xc3, 0x27, 0x5a, 0x5a, 0xc5, 0x5c, 0xbb, 0x95, 0xc5, 0x1e, 0x12, 0xfc, 0x85,
+ 0xc4, 0x7e, 0x23, 0xc5, 0x5f, 0xb5, 0x67, 0x19, 0x15, 0x67, 0xb5, 0x67, 0x99, 0x75, 0xe3, 0x88,
+ 0xdd, 0xf1, 0x6a, 0xac, 0xb2, 0x3b, 0xb5, 0xf0, 0xab, 0xec, 0x4e, 0x2f, 0xe2, 0x86, 0xce, 0x60,
+ 0x01, 0x4a, 0x56, 0x51, 0x51, 0xb8, 0xd7, 0x99, 0x45, 0x5d, 0x4d, 0x1f, 0xd5, 0x65, 0xc8, 0xfa,
+ 0x23, 0x28, 0x47, 0x8a, 0x83, 0x28, 0xdc, 0xa8, 0x64, 0x81, 0x56, 0xbb, 0x99, 0xca, 0x4b, 0x2e,
+ 0xf6, 0x70, 0x1e, 0xa4, 0x16, 0x3b, 0xa3, 0x98, 0xa8, 0x16, 0x3b, 0xb3, 0x30, 0x18, 0x88, 0x3f,
+ 0x04, 0x50, 0x35, 0x2b, 0xe5, 0x71, 0x89, 0xca, 0x9c, 0xf2, 0xb8, 0x64, 0x89, 0x2b, 0x58, 0xe0,
+ 0x8f, 0xb9, 0xb5, 0xc3, 0x35, 0x28, 0x65, 0x6d, 0x46, 0xc9, 0x4b, 0x59, 0x9b, 0x55, 0xbe, 0x8a,
+ 0x1e, 0xe7, 0x44, 0x51, 0x47, 0x1d, 0xe7, 0xac, 0x52, 0x96, 0x3a, 0xce, 0x99, 0x15, 0xa1, 0x70,
+ 0x3d, 0x7e, 0x04, 0x33, 0xbb, 0xbe, 0x75, 0x8e, 0x16, 0xc2, 0x21, 0xaa, 0x1e, 0xa4, 0x2d, 0xc6,
+ 0x89, 0x43, 0xc6, 0xed, 0x40, 0x31, 0x28, 0x89, 0xa0, 0xa5, 0x98, 0xb7, 0xab, 0xf2, 0x8e, 0x56,
+ 0x4b, 0x32, 0x86, 0x2c, 0x38, 0x85, 0xb9, 0x58, 0x3d, 0x03, 0xad, 0x84, 0x5a, 0x53, 0xca, 0x2a,
+ 0xda, 0x6a, 0x06, 0x77, 0xc8, 0xb8, 0xe7, 0x00, 0xaa, 0xce, 0xa0, 0xf6, 0x39, 0x51, 0x0b, 0x51,
+ 0xfb, 0x9c, 0x52, 0x96, 0x08, 0x4c, 0xb4, 0x00, 0x25, 0x4b, 0x05, 0xea, 0x20, 0x65, 0x96, 0x2e,
+ 0xd4, 0x41, 0xca, 0xae, 0x34, 0x44, 0x4f, 0x6b, 0x12, 0xdc, 0x8f, 0x2a, 0xc9, 0x28, 0x36, 0x44,
+ 0x95, 0x64, 0xd5, 0x06, 0x42, 0x25, 0x24, 0x59, 0x90, 0x97, 0xa0, 0x3c, 0xba, 0x9f, 0x75, 0x86,
+ 0xe2, 0x35, 0x02, 0xed, 0xfd, 0xef, 0xec, 0x37, 0xb4, 0x7a, 0x0d, 0xa8, 0x44, 0x41, 0x79, 0x74,
+ 0x33, 0x2e, 0x20, 0x06, 0x23, 0x6a, 0x2b, 0xe9, 0xcc, 0xc4, 0xc1, 0xfb, 0x25, 0x68, 0xd9, 0x00,
+ 0x21, 0xfa, 0x60, 0x94, 0x8d, 0x71, 0x85, 0x0f, 0xdf, 0xa5, 0x6b, 0x7c, 0x46, 0x0f, 0x72, 0xe8,
+ 0x19, 0x94, 0x42, 0xb8, 0x1f, 0xd5, 0x22, 0xa1, 0x22, 0x86, 0x57, 0x6b, 0xcb, 0x29, 0x9c, 0xc4,
+ 0x54, 0xf6, 0xa1, 0xd4, 0x48, 0xca, 0x6a, 0x64, 0xca, 0x6a, 0x64, 0xc8, 0xca, 0xa1, 0x4f, 0xa1,
+ 0x12, 0x05, 0xb4, 0xd5, 0x4a, 0xa7, 0x60, 0xe9, 0x6a, 0xa5, 0x53, 0x31, 0xf0, 0x68, 0x78, 0x57,
+ 0x90, 0x68, 0x24, 0xbc, 0x27, 0x70, 0xd7, 0x48, 0x78, 0x4f, 0x62, 0xa8, 0xa1, 0x03, 0xb6, 0x78,
+ 0x9d, 0x24, 0x8e, 0x63, 0xa2, 0xe8, 0x0f, 0x12, 0xa9, 0xc0, 0xa9, 0x8a, 0x68, 0x99, 0x20, 0x68,
+ 0x64, 0x41, 0xbf, 0x84, 0xf9, 0x04, 0x30, 0xa9, 0x74, 0x64, 0x21, 0xa1, 0x4a, 0x47, 0x26, 0xaa,
+ 0x19, 0xce, 0xa2, 0x0e, 0x05, 0xf9, 0x9b, 0x16, 0xba, 0x11, 0x8e, 0x8a, 0xfd, 0x03, 0xa6, 0x2d,
+ 0x25, 0xe8, 0x43, 0x2b, 0x7b, 0x02, 0xe5, 0x08, 0x6a, 0x89, 0xa2, 0xf7, 0xcd, 0x10, 0x1a, 0xa9,
+ 0x56, 0x36, 0x05, 0xe6, 0x8c, 0xcc, 0xfb, 0x57, 0x2c, 0xed, 0x1a, 0x81, 0x21, 0xa2, 0x0f, 0x47,
+ 0xf9, 0xfa, 0xb0, 0xd2, 0x47, 0xef, 0xd6, 0x79, 0x68, 0x56, 0x3f, 0x87, 0xb9, 0x18, 0x1e, 0xa6,
+ 0xa2, 0x79, 0x1a, 0x68, 0xa9, 0xa2, 0x79, 0x2a, 0x88, 0x16, 0x99, 0xdb, 0x39, 0x2c, 0xa6, 0xc1,
+ 0x14, 0xe8, 0xae, 0x3a, 0x15, 0x99, 0x80, 0x8b, 0x76, 0x6f, 0x74, 0xa7, 0x84, 0xb2, 0x16, 0xcc,
+ 0x27, 0x30, 0x1f, 0xe5, 0x40, 0x59, 0x20, 0x95, 0x72, 0xa0, 0x4c, 0xc0, 0x28, 0xa2, 0x03, 0x03,
+ 0x4a, 0x56, 0x7e, 0x50, 0xe4, 0x21, 0x9e, 0x51, 0x80, 0x52, 0xe1, 0x7e, 0x44, 0xe1, 0x48, 0x05,
+ 0xaa, 0x16, 0xcc, 0x27, 0x8a, 0x3d, 0x6a, 0x2a, 0x59, 0xd5, 0x25, 0x35, 0x95, 0xcc, 0x4a, 0x51,
+ 0x64, 0x2a, 0x1e, 0xdc, 0x48, 0x07, 0x38, 0xd0, 0x7b, 0x91, 0xed, 0xcd, 0xc6, 0x56, 0xb4, 0xfb,
+ 0xdf, 0xd5, 0x6d, 0xe8, 0xf8, 0x9d, 0xc2, 0x5c, 0x2c, 0x37, 0x57, 0x4e, 0x96, 0x86, 0x98, 0x28,
+ 0x27, 0x4b, 0x47, 0x2b, 0x02, 0xd7, 0x75, 0x86, 0xe0, 0x8c, 0x20, 0xe3, 0x47, 0xf7, 0x52, 0xc7,
+ 0x0f, 0x61, 0x1a, 0xda, 0x7b, 0xdf, 0xd1, 0x2b, 0xf9, 0xce, 0x1d, 0xce, 0xf4, 0xa3, 0x89, 0x60,
+ 0x2a, 0xb0, 0x10, 0x4d, 0x04, 0x33, 0x40, 0x82, 0x98, 0xf8, 0x78, 0xca, 0x1e, 0x15, 0x9f, 0x0a,
+ 0x23, 0x44, 0xc5, 0x67, 0x64, 0xfb, 0x81, 0xf8, 0x0e, 0x2c, 0xa4, 0x24, 0xdc, 0x28, 0xe2, 0x9b,
+ 0x59, 0x88, 0x80, 0x76, 0x77, 0x64, 0x9f, 0xe4, 0xcb, 0x2b, 0x99, 0x62, 0xab, 0x53, 0x92, 0x99,
+ 0xd5, 0xab, 0x53, 0x92, 0x9d, 0xa1, 0x07, 0x4a, 0xea, 0x1f, 0xbf, 0x62, 0x9d, 0x1d, 0xb3, 0xb5,
+ 0x6e, 0x79, 0xbd, 0xc7, 0xe2, 0xf3, 0x23, 0x8f, 0x9c, 0x3d, 0x16, 0x22, 0x1e, 0xf3, 0x9f, 0xec,
+ 0x1f, 0x9f, 0x79, 0xb2, 0xdd, 0x6f, 0xb5, 0xf2, 0x9c, 0xf4, 0xe4, 0xbf, 0x01, 0x00, 0x00, 0xff,
+ 0xff, 0x99, 0x62, 0x61, 0x2b, 0xb5, 0x2f, 0x00, 0x00,
}
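The byte literal above is the gzip-compressed wire form of this file's FileDescriptorProto, so regenerating the .proto rewrites the whole blob rather than individual lines. A minimal sketch of how such a blob can be decoded for inspection, assuming the google.golang.org/protobuf module is available; rawDesc stands in for the generated fileDescriptor variable:

package inspectpb

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
	"log"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

// dumpDescriptor decompresses a generated descriptor blob and lists the services it declares.
func dumpDescriptor(rawDesc []byte) {
	zr, err := gzip.NewReader(bytes.NewReader(rawDesc))
	if err != nil {
		log.Fatal(err)
	}
	raw, err := io.ReadAll(zr)
	if err != nil {
		log.Fatal(err)
	}

	// The decompressed bytes are a serialized descriptorpb.FileDescriptorProto.
	fd := &descriptorpb.FileDescriptorProto{}
	if err := proto.Unmarshal(raw, fd); err != nil {
		log.Fatal(err)
	}

	fmt.Println("file:", fd.GetName())
	for _, svc := range fd.GetService() {
		fmt.Println("service:", svc.GetName())
		for _, method := range svc.GetMethod() {
			fmt.Printf("  rpc %s(%s) returns (%s)\n", method.GetName(), method.GetInputType(), method.GetOutputType())
		}
	}
}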
// Reference imports to suppress errors if they are not otherwise used.
@@ -4293,6 +4418,9 @@ type RepositoryServiceClient interface {
CreateRepositoryFromURL(ctx context.Context, in *CreateRepositoryFromURLRequest, opts ...grpc.CallOption) (*CreateRepositoryFromURLResponse, error)
CreateBundle(ctx context.Context, in *CreateBundleRequest, opts ...grpc.CallOption) (RepositoryService_CreateBundleClient, error)
CreateRepositoryFromBundle(ctx context.Context, opts ...grpc.CallOption) (RepositoryService_CreateRepositoryFromBundleClient, error)
+ // GetConfig reads the target repository's gitconfig and streams its contents
+ // back. Returns a NotFound error in case no gitconfig was found.
+ GetConfig(ctx context.Context, in *GetConfigRequest, opts ...grpc.CallOption) (RepositoryService_GetConfigClient, error)
SetConfig(ctx context.Context, in *SetConfigRequest, opts ...grpc.CallOption) (*SetConfigResponse, error)
DeleteConfig(ctx context.Context, in *DeleteConfigRequest, opts ...grpc.CallOption) (*DeleteConfigResponse, error)
FindLicense(ctx context.Context, in *FindLicenseRequest, opts ...grpc.CallOption) (*FindLicenseResponse, error)
@@ -4595,6 +4723,38 @@ func (x *repositoryServiceCreateRepositoryFromBundleClient) CloseAndRecv() (*Cre
return m, nil
}
+func (c *repositoryServiceClient) GetConfig(ctx context.Context, in *GetConfigRequest, opts ...grpc.CallOption) (RepositoryService_GetConfigClient, error) {
+ stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[3], "/gitaly.RepositoryService/GetConfig", opts...)
+ if err != nil {
+ return nil, err
+ }
+ x := &repositoryServiceGetConfigClient{stream}
+ if err := x.ClientStream.SendMsg(in); err != nil {
+ return nil, err
+ }
+ if err := x.ClientStream.CloseSend(); err != nil {
+ return nil, err
+ }
+ return x, nil
+}
+
+type RepositoryService_GetConfigClient interface {
+ Recv() (*GetConfigResponse, error)
+ grpc.ClientStream
+}
+
+type repositoryServiceGetConfigClient struct {
+ grpc.ClientStream
+}
+
+func (x *repositoryServiceGetConfigClient) Recv() (*GetConfigResponse, error) {
+ m := new(GetConfigResponse)
+ if err := x.ClientStream.RecvMsg(m); err != nil {
+ return nil, err
+ }
+ return m, nil
+}
+
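A hedged sketch of the calling convention for the new server-streaming RPC: the caller drains the stream and concatenates the chunks into the full gitconfig. The GetConfigRequest.Repository and GetConfigResponse.Data field names follow Gitaly's usual chunked-streaming shape but are assumptions here, as is the import path:

package gitalyexample

import (
	"context"
	"io"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb" // import path assumed
)

// fetchGitconfig reads the streamed gitconfig of repo into a single byte slice.
func fetchGitconfig(ctx context.Context, client gitalypb.RepositoryServiceClient, repo *gitalypb.Repository) ([]byte, error) {
	// Repository (request) and Data (response) field names are assumed.
	stream, err := client.GetConfig(ctx, &gitalypb.GetConfigRequest{Repository: repo})
	if err != nil {
		return nil, err
	}

	var config []byte
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			// The server closed the stream: the whole gitconfig has been received.
			return config, nil
		}
		if err != nil {
			// Per the method comment, a NotFound status means the repository has no gitconfig.
			return nil, err
		}
		config = append(config, resp.GetData()...)
	}
}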
func (c *repositoryServiceClient) SetConfig(ctx context.Context, in *SetConfigRequest, opts ...grpc.CallOption) (*SetConfigResponse, error) {
out := new(SetConfigResponse)
err := c.cc.Invoke(ctx, "/gitaly.RepositoryService/SetConfig", in, out, opts...)
@@ -4623,7 +4783,7 @@ func (c *repositoryServiceClient) FindLicense(ctx context.Context, in *FindLicen
}
func (c *repositoryServiceClient) GetInfoAttributes(ctx context.Context, in *GetInfoAttributesRequest, opts ...grpc.CallOption) (RepositoryService_GetInfoAttributesClient, error) {
- stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[3], "/gitaly.RepositoryService/GetInfoAttributes", opts...)
+ stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[4], "/gitaly.RepositoryService/GetInfoAttributes", opts...)
if err != nil {
return nil, err
}
@@ -4673,7 +4833,7 @@ func (c *repositoryServiceClient) Cleanup(ctx context.Context, in *CleanupReques
}
func (c *repositoryServiceClient) GetSnapshot(ctx context.Context, in *GetSnapshotRequest, opts ...grpc.CallOption) (RepositoryService_GetSnapshotClient, error) {
- stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[4], "/gitaly.RepositoryService/GetSnapshot", opts...)
+ stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[5], "/gitaly.RepositoryService/GetSnapshot", opts...)
if err != nil {
return nil, err
}
@@ -4714,7 +4874,7 @@ func (c *repositoryServiceClient) CreateRepositoryFromSnapshot(ctx context.Conte
}
func (c *repositoryServiceClient) GetRawChanges(ctx context.Context, in *GetRawChangesRequest, opts ...grpc.CallOption) (RepositoryService_GetRawChangesClient, error) {
- stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[5], "/gitaly.RepositoryService/GetRawChanges", opts...)
+ stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[6], "/gitaly.RepositoryService/GetRawChanges", opts...)
if err != nil {
return nil, err
}
@@ -4746,7 +4906,7 @@ func (x *repositoryServiceGetRawChangesClient) Recv() (*GetRawChangesResponse, e
}
func (c *repositoryServiceClient) SearchFilesByContent(ctx context.Context, in *SearchFilesByContentRequest, opts ...grpc.CallOption) (RepositoryService_SearchFilesByContentClient, error) {
- stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[6], "/gitaly.RepositoryService/SearchFilesByContent", opts...)
+ stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[7], "/gitaly.RepositoryService/SearchFilesByContent", opts...)
if err != nil {
return nil, err
}
@@ -4778,7 +4938,7 @@ func (x *repositoryServiceSearchFilesByContentClient) Recv() (*SearchFilesByCont
}
func (c *repositoryServiceClient) SearchFilesByName(ctx context.Context, in *SearchFilesByNameRequest, opts ...grpc.CallOption) (RepositoryService_SearchFilesByNameClient, error) {
- stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[7], "/gitaly.RepositoryService/SearchFilesByName", opts...)
+ stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[8], "/gitaly.RepositoryService/SearchFilesByName", opts...)
if err != nil {
return nil, err
}
@@ -4810,7 +4970,7 @@ func (x *repositoryServiceSearchFilesByNameClient) Recv() (*SearchFilesByNameRes
}
func (c *repositoryServiceClient) RestoreCustomHooks(ctx context.Context, opts ...grpc.CallOption) (RepositoryService_RestoreCustomHooksClient, error) {
- stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[8], "/gitaly.RepositoryService/RestoreCustomHooks", opts...)
+ stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[9], "/gitaly.RepositoryService/RestoreCustomHooks", opts...)
if err != nil {
return nil, err
}
@@ -4844,7 +5004,7 @@ func (x *repositoryServiceRestoreCustomHooksClient) CloseAndRecv() (*RestoreCust
}
func (c *repositoryServiceClient) BackupCustomHooks(ctx context.Context, in *BackupCustomHooksRequest, opts ...grpc.CallOption) (RepositoryService_BackupCustomHooksClient, error) {
- stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[9], "/gitaly.RepositoryService/BackupCustomHooks", opts...)
+ stream, err := c.cc.NewStream(ctx, &_RepositoryService_serviceDesc.Streams[10], "/gitaly.RepositoryService/BackupCustomHooks", opts...)
if err != nil {
return nil, err
}
@@ -4966,6 +5126,9 @@ type RepositoryServiceServer interface {
CreateRepositoryFromURL(context.Context, *CreateRepositoryFromURLRequest) (*CreateRepositoryFromURLResponse, error)
CreateBundle(*CreateBundleRequest, RepositoryService_CreateBundleServer) error
CreateRepositoryFromBundle(RepositoryService_CreateRepositoryFromBundleServer) error
+ // GetConfig reads the target repository's gitconfig and streams its contents
+ // back. Returns a NotFound error in case no gitconfig was found.
+ GetConfig(*GetConfigRequest, RepositoryService_GetConfigServer) error
SetConfig(context.Context, *SetConfigRequest) (*SetConfigResponse, error)
DeleteConfig(context.Context, *DeleteConfigRequest) (*DeleteConfigResponse, error)
FindLicense(context.Context, *FindLicenseRequest) (*FindLicenseResponse, error)
@@ -5061,6 +5224,9 @@ func (*UnimplementedRepositoryServiceServer) CreateBundle(req *CreateBundleReque
func (*UnimplementedRepositoryServiceServer) CreateRepositoryFromBundle(srv RepositoryService_CreateRepositoryFromBundleServer) error {
return status.Errorf(codes.Unimplemented, "method CreateRepositoryFromBundle not implemented")
}
+func (*UnimplementedRepositoryServiceServer) GetConfig(req *GetConfigRequest, srv RepositoryService_GetConfigServer) error {
+ return status.Errorf(codes.Unimplemented, "method GetConfig not implemented")
+}
func (*UnimplementedRepositoryServiceServer) SetConfig(ctx context.Context, req *SetConfigRequest) (*SetConfigResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method SetConfig not implemented")
}
@@ -5536,6 +5702,27 @@ func (x *repositoryServiceCreateRepositoryFromBundleServer) Recv() (*CreateRepos
return m, nil
}
+func _RepositoryService_GetConfig_Handler(srv interface{}, stream grpc.ServerStream) error {
+ m := new(GetConfigRequest)
+ if err := stream.RecvMsg(m); err != nil {
+ return err
+ }
+ return srv.(RepositoryServiceServer).GetConfig(m, &repositoryServiceGetConfigServer{stream})
+}
+
+type RepositoryService_GetConfigServer interface {
+ Send(*GetConfigResponse) error
+ grpc.ServerStream
+}
+
+type repositoryServiceGetConfigServer struct {
+ grpc.ServerStream
+}
+
+func (x *repositoryServiceGetConfigServer) Send(m *GetConfigResponse) error {
+ return x.ServerStream.SendMsg(m)
+}
+
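On the server side, a minimal sketch of what an implementation of the generated interface could look like: it opens the repository's gitconfig and streams it back in fixed-size chunks, returning NotFound when the file is absent, as the method comment requires. The GetConfigRequest.Repository and GetConfigResponse.Data fields, the storage-root lookup and the import path are assumptions, not Gitaly's actual implementation:

package gitalyexample

import (
	"io"
	"os"
	"path/filepath"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb" // import path assumed
)

type configServer struct {
	gitalypb.UnimplementedRepositoryServiceServer
	storageRoot string // hypothetical base directory of the storage
}

func (s *configServer) GetConfig(req *gitalypb.GetConfigRequest, stream gitalypb.RepositoryService_GetConfigServer) error {
	// Hypothetical path resolution: the repository's relative path below the storage root.
	configPath := filepath.Join(s.storageRoot, req.GetRepository().GetRelativePath(), "config")

	file, err := os.Open(configPath)
	if os.IsNotExist(err) {
		// No gitconfig present: report NotFound as documented.
		return status.Errorf(codes.NotFound, "opening gitconfig: %v", err)
	} else if err != nil {
		return err
	}
	defer file.Close()

	buf := make([]byte, 128*1024)
	for {
		n, readErr := file.Read(buf)
		if n > 0 {
			// Data field name assumed.
			if err := stream.Send(&gitalypb.GetConfigResponse{Data: buf[:n]}); err != nil {
				return err
			}
		}
		if readErr == io.EOF {
			return nil
		}
		if readErr != nil {
			return readErr
		}
	}
}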
func _RepositoryService_SetConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(SetConfigRequest)
if err := dec(in); err != nil {
@@ -6072,6 +6259,11 @@ var _RepositoryService_serviceDesc = grpc.ServiceDesc{
ClientStreams: true,
},
{
+ StreamName: "GetConfig",
+ Handler: _RepositoryService_GetConfig_Handler,
+ ServerStreams: true,
+ },
+ {
StreamName: "GetInfoAttributes",
Handler: _RepositoryService_GetInfoAttributes_Handler,
ServerStreams: true,
diff --git a/proto/go/gitalypb/wiki.pb.go b/proto/go/gitalypb/wiki.pb.go
index da2d12c75..fa62071b4 100644
--- a/proto/go/gitalypb/wiki.pb.go
+++ b/proto/go/gitalypb/wiki.pb.go
@@ -46,7 +46,7 @@ func (x WikiGetAllPagesRequest_SortBy) String() string {
}
func (WikiGetAllPagesRequest_SortBy) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{15, 0}
+ return fileDescriptor_5c56f90469cec0af, []int{11, 0}
}
type WikiListPagesRequest_SortBy int32
@@ -71,7 +71,7 @@ func (x WikiListPagesRequest_SortBy) String() string {
}
func (WikiListPagesRequest_SortBy) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{17, 0}
+ return fileDescriptor_5c56f90469cec0af, []int{13, 0}
}
type WikiCommitDetails struct {
@@ -289,108 +289,6 @@ func (m *WikiPage) GetRawData() []byte {
return nil
}
-type WikiGetPageVersionsRequest struct {
- Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
- PagePath []byte `protobuf:"bytes,2,opt,name=page_path,json=pagePath,proto3" json:"page_path,omitempty"`
- Page int32 `protobuf:"varint,3,opt,name=page,proto3" json:"page,omitempty"`
- PerPage int32 `protobuf:"varint,4,opt,name=per_page,json=perPage,proto3" json:"per_page,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *WikiGetPageVersionsRequest) Reset() { *m = WikiGetPageVersionsRequest{} }
-func (m *WikiGetPageVersionsRequest) String() string { return proto.CompactTextString(m) }
-func (*WikiGetPageVersionsRequest) ProtoMessage() {}
-func (*WikiGetPageVersionsRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{3}
-}
-
-func (m *WikiGetPageVersionsRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_WikiGetPageVersionsRequest.Unmarshal(m, b)
-}
-func (m *WikiGetPageVersionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_WikiGetPageVersionsRequest.Marshal(b, m, deterministic)
-}
-func (m *WikiGetPageVersionsRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_WikiGetPageVersionsRequest.Merge(m, src)
-}
-func (m *WikiGetPageVersionsRequest) XXX_Size() int {
- return xxx_messageInfo_WikiGetPageVersionsRequest.Size(m)
-}
-func (m *WikiGetPageVersionsRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_WikiGetPageVersionsRequest.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_WikiGetPageVersionsRequest proto.InternalMessageInfo
-
-func (m *WikiGetPageVersionsRequest) GetRepository() *Repository {
- if m != nil {
- return m.Repository
- }
- return nil
-}
-
-func (m *WikiGetPageVersionsRequest) GetPagePath() []byte {
- if m != nil {
- return m.PagePath
- }
- return nil
-}
-
-func (m *WikiGetPageVersionsRequest) GetPage() int32 {
- if m != nil {
- return m.Page
- }
- return 0
-}
-
-func (m *WikiGetPageVersionsRequest) GetPerPage() int32 {
- if m != nil {
- return m.PerPage
- }
- return 0
-}
-
-type WikiGetPageVersionsResponse struct {
- Versions []*WikiPageVersion `protobuf:"bytes,1,rep,name=versions,proto3" json:"versions,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *WikiGetPageVersionsResponse) Reset() { *m = WikiGetPageVersionsResponse{} }
-func (m *WikiGetPageVersionsResponse) String() string { return proto.CompactTextString(m) }
-func (*WikiGetPageVersionsResponse) ProtoMessage() {}
-func (*WikiGetPageVersionsResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{4}
-}
-
-func (m *WikiGetPageVersionsResponse) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_WikiGetPageVersionsResponse.Unmarshal(m, b)
-}
-func (m *WikiGetPageVersionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_WikiGetPageVersionsResponse.Marshal(b, m, deterministic)
-}
-func (m *WikiGetPageVersionsResponse) XXX_Merge(src proto.Message) {
- xxx_messageInfo_WikiGetPageVersionsResponse.Merge(m, src)
-}
-func (m *WikiGetPageVersionsResponse) XXX_Size() int {
- return xxx_messageInfo_WikiGetPageVersionsResponse.Size(m)
-}
-func (m *WikiGetPageVersionsResponse) XXX_DiscardUnknown() {
- xxx_messageInfo_WikiGetPageVersionsResponse.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_WikiGetPageVersionsResponse proto.InternalMessageInfo
-
-func (m *WikiGetPageVersionsResponse) GetVersions() []*WikiPageVersion {
- if m != nil {
- return m.Versions
- }
- return nil
-}
-
// This message is sent in a stream because the 'content' field may be large.
type WikiWritePageRequest struct {
// These following fields are only present in the first message.
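Given that comment, a hedged sketch of how a caller splits a large page across the client stream: the first request carries the metadata fields, every later request carries only another slice of the content, and CloseAndRecv finishes the write. Only the Content field name is referenced and it is an assumption here, as is the import path:

package gitalyexample

import (
	"context"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb" // import path assumed
)

// writeWikiPage sends one metadata-bearing request followed by content-only chunks.
func writeWikiPage(ctx context.Context, client gitalypb.WikiServiceClient, first *gitalypb.WikiWritePageRequest, rest []byte) (*gitalypb.WikiWritePageResponse, error) {
	stream, err := client.WikiWritePage(ctx)
	if err != nil {
		return nil, err
	}

	// First message: repository, page name, commit details and the first chunk of content.
	if err := stream.Send(first); err != nil {
		return nil, err
	}

	// Later messages: only further slices of the page content (Content field name assumed).
	const chunkSize = 128 * 1024
	for off := 0; off < len(rest); off += chunkSize {
		end := off + chunkSize
		if end > len(rest) {
			end = len(rest)
		}
		if err := stream.Send(&gitalypb.WikiWritePageRequest{Content: rest[off:end]}); err != nil {
			return nil, err
		}
	}

	return stream.CloseAndRecv()
}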
@@ -409,7 +307,7 @@ func (m *WikiWritePageRequest) Reset() { *m = WikiWritePageRequest{} }
func (m *WikiWritePageRequest) String() string { return proto.CompactTextString(m) }
func (*WikiWritePageRequest) ProtoMessage() {}
func (*WikiWritePageRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{5}
+ return fileDescriptor_5c56f90469cec0af, []int{3}
}
func (m *WikiWritePageRequest) XXX_Unmarshal(b []byte) error {
@@ -476,7 +374,7 @@ func (m *WikiWritePageResponse) Reset() { *m = WikiWritePageResponse{} }
func (m *WikiWritePageResponse) String() string { return proto.CompactTextString(m) }
func (*WikiWritePageResponse) ProtoMessage() {}
func (*WikiWritePageResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{6}
+ return fileDescriptor_5c56f90469cec0af, []int{4}
}
func (m *WikiWritePageResponse) XXX_Unmarshal(b []byte) error {
@@ -522,7 +420,7 @@ func (m *WikiUpdatePageRequest) Reset() { *m = WikiUpdatePageRequest{} }
func (m *WikiUpdatePageRequest) String() string { return proto.CompactTextString(m) }
func (*WikiUpdatePageRequest) ProtoMessage() {}
func (*WikiUpdatePageRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{7}
+ return fileDescriptor_5c56f90469cec0af, []int{5}
}
func (m *WikiUpdatePageRequest) XXX_Unmarshal(b []byte) error {
@@ -596,7 +494,7 @@ func (m *WikiUpdatePageResponse) Reset() { *m = WikiUpdatePageResponse{}
func (m *WikiUpdatePageResponse) String() string { return proto.CompactTextString(m) }
func (*WikiUpdatePageResponse) ProtoMessage() {}
func (*WikiUpdatePageResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{8}
+ return fileDescriptor_5c56f90469cec0af, []int{6}
}
func (m *WikiUpdatePageResponse) XXX_Unmarshal(b []byte) error {
@@ -637,7 +535,7 @@ func (m *WikiDeletePageRequest) Reset() { *m = WikiDeletePageRequest{} }
func (m *WikiDeletePageRequest) String() string { return proto.CompactTextString(m) }
func (*WikiDeletePageRequest) ProtoMessage() {}
func (*WikiDeletePageRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{9}
+ return fileDescriptor_5c56f90469cec0af, []int{7}
}
func (m *WikiDeletePageRequest) XXX_Unmarshal(b []byte) error {
@@ -689,7 +587,7 @@ func (m *WikiDeletePageResponse) Reset() { *m = WikiDeletePageResponse{}
func (m *WikiDeletePageResponse) String() string { return proto.CompactTextString(m) }
func (*WikiDeletePageResponse) ProtoMessage() {}
func (*WikiDeletePageResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{10}
+ return fileDescriptor_5c56f90469cec0af, []int{8}
}
func (m *WikiDeletePageResponse) XXX_Unmarshal(b []byte) error {
@@ -724,7 +622,7 @@ func (m *WikiFindPageRequest) Reset() { *m = WikiFindPageRequest{} }
func (m *WikiFindPageRequest) String() string { return proto.CompactTextString(m) }
func (*WikiFindPageRequest) ProtoMessage() {}
func (*WikiFindPageRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{11}
+ return fileDescriptor_5c56f90469cec0af, []int{9}
}
func (m *WikiFindPageRequest) XXX_Unmarshal(b []byte) error {
@@ -786,7 +684,7 @@ func (m *WikiFindPageResponse) Reset() { *m = WikiFindPageResponse{} }
func (m *WikiFindPageResponse) String() string { return proto.CompactTextString(m) }
func (*WikiFindPageResponse) ProtoMessage() {}
func (*WikiFindPageResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{12}
+ return fileDescriptor_5c56f90469cec0af, []int{10}
}
func (m *WikiFindPageResponse) XXX_Unmarshal(b []byte) error {
@@ -814,126 +712,6 @@ func (m *WikiFindPageResponse) GetPage() *WikiPage {
return nil
}
-type WikiFindFileRequest struct {
- Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
- Name []byte `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
- // Optional: revision
- Revision []byte `protobuf:"bytes,3,opt,name=revision,proto3" json:"revision,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *WikiFindFileRequest) Reset() { *m = WikiFindFileRequest{} }
-func (m *WikiFindFileRequest) String() string { return proto.CompactTextString(m) }
-func (*WikiFindFileRequest) ProtoMessage() {}
-func (*WikiFindFileRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{13}
-}
-
-func (m *WikiFindFileRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_WikiFindFileRequest.Unmarshal(m, b)
-}
-func (m *WikiFindFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_WikiFindFileRequest.Marshal(b, m, deterministic)
-}
-func (m *WikiFindFileRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_WikiFindFileRequest.Merge(m, src)
-}
-func (m *WikiFindFileRequest) XXX_Size() int {
- return xxx_messageInfo_WikiFindFileRequest.Size(m)
-}
-func (m *WikiFindFileRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_WikiFindFileRequest.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_WikiFindFileRequest proto.InternalMessageInfo
-
-func (m *WikiFindFileRequest) GetRepository() *Repository {
- if m != nil {
- return m.Repository
- }
- return nil
-}
-
-func (m *WikiFindFileRequest) GetName() []byte {
- if m != nil {
- return m.Name
- }
- return nil
-}
-
-func (m *WikiFindFileRequest) GetRevision() []byte {
- if m != nil {
- return m.Revision
- }
- return nil
-}
-
-type WikiFindFileResponse struct {
- // If 'name' is empty, the file was not found.
- Name []byte `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
- MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"`
- RawData []byte `protobuf:"bytes,3,opt,name=raw_data,json=rawData,proto3" json:"raw_data,omitempty"`
- Path []byte `protobuf:"bytes,4,opt,name=path,proto3" json:"path,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *WikiFindFileResponse) Reset() { *m = WikiFindFileResponse{} }
-func (m *WikiFindFileResponse) String() string { return proto.CompactTextString(m) }
-func (*WikiFindFileResponse) ProtoMessage() {}
-func (*WikiFindFileResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{14}
-}
-
-func (m *WikiFindFileResponse) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_WikiFindFileResponse.Unmarshal(m, b)
-}
-func (m *WikiFindFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_WikiFindFileResponse.Marshal(b, m, deterministic)
-}
-func (m *WikiFindFileResponse) XXX_Merge(src proto.Message) {
- xxx_messageInfo_WikiFindFileResponse.Merge(m, src)
-}
-func (m *WikiFindFileResponse) XXX_Size() int {
- return xxx_messageInfo_WikiFindFileResponse.Size(m)
-}
-func (m *WikiFindFileResponse) XXX_DiscardUnknown() {
- xxx_messageInfo_WikiFindFileResponse.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_WikiFindFileResponse proto.InternalMessageInfo
-
-func (m *WikiFindFileResponse) GetName() []byte {
- if m != nil {
- return m.Name
- }
- return nil
-}
-
-func (m *WikiFindFileResponse) GetMimeType() string {
- if m != nil {
- return m.MimeType
- }
- return ""
-}
-
-func (m *WikiFindFileResponse) GetRawData() []byte {
- if m != nil {
- return m.RawData
- }
- return nil
-}
-
-func (m *WikiFindFileResponse) GetPath() []byte {
- if m != nil {
- return m.Path
- }
- return nil
-}
-
type WikiGetAllPagesRequest struct {
Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
// Passing 0 means no limit is applied
@@ -949,7 +727,7 @@ func (m *WikiGetAllPagesRequest) Reset() { *m = WikiGetAllPagesRequest{}
func (m *WikiGetAllPagesRequest) String() string { return proto.CompactTextString(m) }
func (*WikiGetAllPagesRequest) ProtoMessage() {}
func (*WikiGetAllPagesRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{15}
+ return fileDescriptor_5c56f90469cec0af, []int{11}
}
func (m *WikiGetAllPagesRequest) XXX_Unmarshal(b []byte) error {
@@ -1012,7 +790,7 @@ func (m *WikiGetAllPagesResponse) Reset() { *m = WikiGetAllPagesResponse
func (m *WikiGetAllPagesResponse) String() string { return proto.CompactTextString(m) }
func (*WikiGetAllPagesResponse) ProtoMessage() {}
func (*WikiGetAllPagesResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{16}
+ return fileDescriptor_5c56f90469cec0af, []int{12}
}
func (m *WikiGetAllPagesResponse) XXX_Unmarshal(b []byte) error {
@@ -1063,7 +841,7 @@ func (m *WikiListPagesRequest) Reset() { *m = WikiListPagesRequest{} }
func (m *WikiListPagesRequest) String() string { return proto.CompactTextString(m) }
func (*WikiListPagesRequest) ProtoMessage() {}
func (*WikiListPagesRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{17}
+ return fileDescriptor_5c56f90469cec0af, []int{13}
}
func (m *WikiListPagesRequest) XXX_Unmarshal(b []byte) error {
@@ -1131,7 +909,7 @@ func (m *WikiListPagesResponse) Reset() { *m = WikiListPagesResponse{} }
func (m *WikiListPagesResponse) String() string { return proto.CompactTextString(m) }
func (*WikiListPagesResponse) ProtoMessage() {}
func (*WikiListPagesResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_5c56f90469cec0af, []int{18}
+ return fileDescriptor_5c56f90469cec0af, []int{14}
}
func (m *WikiListPagesResponse) XXX_Unmarshal(b []byte) error {
@@ -1165,8 +943,6 @@ func init() {
proto.RegisterType((*WikiCommitDetails)(nil), "gitaly.WikiCommitDetails")
proto.RegisterType((*WikiPageVersion)(nil), "gitaly.WikiPageVersion")
proto.RegisterType((*WikiPage)(nil), "gitaly.WikiPage")
- proto.RegisterType((*WikiGetPageVersionsRequest)(nil), "gitaly.WikiGetPageVersionsRequest")
- proto.RegisterType((*WikiGetPageVersionsResponse)(nil), "gitaly.WikiGetPageVersionsResponse")
proto.RegisterType((*WikiWritePageRequest)(nil), "gitaly.WikiWritePageRequest")
proto.RegisterType((*WikiWritePageResponse)(nil), "gitaly.WikiWritePageResponse")
proto.RegisterType((*WikiUpdatePageRequest)(nil), "gitaly.WikiUpdatePageRequest")
@@ -1175,8 +951,6 @@ func init() {
proto.RegisterType((*WikiDeletePageResponse)(nil), "gitaly.WikiDeletePageResponse")
proto.RegisterType((*WikiFindPageRequest)(nil), "gitaly.WikiFindPageRequest")
proto.RegisterType((*WikiFindPageResponse)(nil), "gitaly.WikiFindPageResponse")
- proto.RegisterType((*WikiFindFileRequest)(nil), "gitaly.WikiFindFileRequest")
- proto.RegisterType((*WikiFindFileResponse)(nil), "gitaly.WikiFindFileResponse")
proto.RegisterType((*WikiGetAllPagesRequest)(nil), "gitaly.WikiGetAllPagesRequest")
proto.RegisterType((*WikiGetAllPagesResponse)(nil), "gitaly.WikiGetAllPagesResponse")
proto.RegisterType((*WikiListPagesRequest)(nil), "gitaly.WikiListPagesRequest")
@@ -1186,75 +960,66 @@ func init() {
func init() { proto.RegisterFile("wiki.proto", fileDescriptor_5c56f90469cec0af) }
var fileDescriptor_5c56f90469cec0af = []byte{
- // 1085 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x57, 0x4f, 0x6f, 0x1b, 0x45,
- 0x14, 0x67, 0x1d, 0xff, 0x59, 0xbf, 0x34, 0x6e, 0x3a, 0x94, 0x66, 0xeb, 0x84, 0x10, 0x6d, 0xa8,
- 0x08, 0x12, 0x38, 0x25, 0x3d, 0x00, 0x12, 0x48, 0x4d, 0x9a, 0x34, 0xaa, 0x54, 0x41, 0xb5, 0x31,
- 0x8d, 0x40, 0x48, 0xab, 0xc9, 0xee, 0xc4, 0x1e, 0x75, 0xff, 0x31, 0x3b, 0x4e, 0xe4, 0x13, 0x07,
- 0xce, 0x9c, 0xe1, 0x86, 0xc4, 0x27, 0xe0, 0x53, 0xf0, 0x19, 0xb8, 0x73, 0xe2, 0xc4, 0x15, 0x71,
- 0x42, 0x33, 0xb3, 0xf6, 0xce, 0xee, 0xda, 0x86, 0x40, 0x40, 0xdc, 0xf6, 0xbd, 0x37, 0xf3, 0xe6,
- 0xfd, 0x7e, 0xef, 0x9f, 0x0d, 0x70, 0x49, 0x5f, 0xd0, 0x5e, 0xc2, 0x62, 0x1e, 0xa3, 0xe6, 0x80,
- 0x72, 0x1c, 0x8c, 0xbb, 0x10, 0xd0, 0x88, 0x2b, 0x5d, 0xf7, 0x46, 0x3a, 0xc4, 0x8c, 0xf8, 0x4a,
- 0xb2, 0xbf, 0x36, 0xe0, 0xd6, 0x29, 0x7d, 0x41, 0x1f, 0xc5, 0x61, 0x48, 0xf9, 0x21, 0xe1, 0x98,
- 0x06, 0x29, 0x42, 0x50, 0x8f, 0x70, 0x48, 0x2c, 0x63, 0xcb, 0xd8, 0xb9, 0xe1, 0xc8, 0x6f, 0x74,
- 0x1b, 0x1a, 0x24, 0xc4, 0x34, 0xb0, 0x6a, 0x52, 0xa9, 0x04, 0x64, 0x41, 0x2b, 0x24, 0x69, 0x8a,
- 0x07, 0xc4, 0x5a, 0x92, 0xfa, 0x89, 0x88, 0xd6, 0xa0, 0x35, 0x4a, 0x09, 0x73, 0xa9, 0x6f, 0xd5,
- 0xb7, 0x8c, 0x9d, 0x86, 0xd3, 0x14, 0xe2, 0x13, 0x1f, 0xad, 0x43, 0x5b, 0x1a, 0xe4, 0x0b, 0x0d,
- 0x79, 0xc9, 0x14, 0x8a, 0x8f, 0x70, 0x48, 0xec, 0x3e, 0xdc, 0x14, 0xe1, 0x3c, 0xc3, 0x03, 0xf2,
- 0x9c, 0xb0, 0x94, 0xc6, 0x11, 0x7a, 0x13, 0x9a, 0x9e, 0x8c, 0x4e, 0x86, 0xb3, 0xbc, 0x77, 0xab,
- 0xa7, 0x50, 0xf5, 0x8e, 0x29, 0x57, 0x61, 0x3b, 0xd9, 0x01, 0x74, 0x07, 0x9a, 0xe7, 0x31, 0x0b,
- 0x31, 0x97, 0x41, 0xb6, 0x9d, 0x4c, 0xb2, 0x7f, 0x31, 0xc0, 0x9c, 0xb8, 0x45, 0xef, 0x40, 0xeb,
- 0x42, 0xb9, 0xce, 0x1c, 0xae, 0x4d, 0x1c, 0x96, 0x5e, 0x76, 0x26, 0xe7, 0xe6, 0xf9, 0x15, 0x9c,
- 0x70, 0xca, 0x83, 0x09, 0x76, 0x25, 0xa0, 0xbb, 0x60, 0x8e, 0x58, 0xe0, 0x26, 0x98, 0x0f, 0x25,
- 0xf4, 0xb6, 0xd3, 0x1a, 0xb1, 0xe0, 0x19, 0xe6, 0x43, 0x41, 0xac, 0x54, 0x2b, 0xd8, 0xf2, 0x7b,
- 0x4a, 0x76, 0x53, 0x23, 0x7b, 0x13, 0x60, 0x48, 0x53, 0x1e, 0x33, 0xea, 0xe1, 0xc0, 0x6a, 0x6d,
- 0x19, 0x3b, 0xa6, 0xa3, 0x69, 0xc4, 0x13, 0x0c, 0x5f, 0xba, 0x3e, 0xe6, 0xd8, 0x32, 0x15, 0xef,
- 0x0c, 0x5f, 0x1e, 0x62, 0x8e, 0xed, 0xef, 0x0d, 0xe8, 0x0a, 0x20, 0xc7, 0x84, 0x6b, 0x58, 0x52,
- 0x87, 0x7c, 0x31, 0x22, 0x29, 0x47, 0xef, 0x01, 0x30, 0x92, 0xc4, 0x29, 0xe5, 0x31, 0x1b, 0x67,
- 0x04, 0xa0, 0x09, 0x01, 0xce, 0xd4, 0x72, 0x50, 0xff, 0xf6, 0xc7, 0xb7, 0x0c, 0x47, 0x3b, 0x2b,
- 0xf2, 0x96, 0xe0, 0x01, 0x51, 0xb8, 0x54, 0x11, 0x98, 0x42, 0x91, 0x03, 0xcb, 0x8a, 0xa0, 0xe1,
- 0xc8, 0x6f, 0x11, 0x64, 0x42, 0x98, 0x2b, 0xf5, 0xaa, 0x04, 0x5a, 0x09, 0x61, 0x22, 0x28, 0xdb,
- 0x81, 0xf5, 0x99, 0x31, 0xa6, 0x49, 0x1c, 0xa5, 0x04, 0x3d, 0x00, 0x33, 0xa3, 0x3e, 0xb5, 0x8c,
- 0xad, 0xa5, 0x45, 0x39, 0x9a, 0x1e, 0xb4, 0x7f, 0x32, 0xe0, 0xb6, 0xb0, 0x9e, 0x32, 0xca, 0x89,
- 0x38, 0xf2, 0xcf, 0x21, 0x4f, 0x52, 0x53, 0xd3, 0x52, 0x93, 0xd7, 0xc2, 0x52, 0xa1, 0x16, 0x1e,
- 0x42, 0x47, 0x55, 0xa1, 0xeb, 0xab, 0x2e, 0x92, 0x98, 0x97, 0xf7, 0xee, 0xea, 0x91, 0x17, 0xda,
- 0xcc, 0x59, 0xf1, 0x0a, 0x5d, 0x67, 0x41, 0xcb, 0x8b, 0x23, 0x4e, 0x22, 0x9e, 0xd5, 0xc7, 0x44,
- 0xb4, 0x1f, 0xc2, 0x2b, 0x25, 0x64, 0x19, 0x51, 0x6f, 0xc0, 0x4d, 0x7f, 0x94, 0x04, 0xd4, 0xc3,
- 0x9c, 0xb8, 0x84, 0xb1, 0x98, 0x65, 0x3d, 0xdb, 0x99, 0xaa, 0x8f, 0x84, 0xd6, 0xfe, 0xcd, 0x50,
- 0x2e, 0x3e, 0x49, 0x7c, 0x7c, 0x5d, 0xec, 0x2c, 0x2c, 0x88, 0xd9, 0xad, 0x91, 0x93, 0x57, 0xff,
- 0x13, 0xf2, 0x1a, 0x7f, 0x9f, 0xbc, 0x66, 0x91, 0xbc, 0x1e, 0xdc, 0x29, 0x23, 0xcf, 0xd8, 0x13,
- 0x23, 0x4d, 0xe3, 0x4c, 0x09, 0xf6, 0x0f, 0x19, 0x55, 0x87, 0x24, 0x20, 0xff, 0x09, 0x55, 0x55,
- 0xf0, 0x4b, 0x57, 0x03, 0x6f, 0x5b, 0x0a, 0xa2, 0x1e, 0xb1, 0x82, 0x68, 0x7f, 0x67, 0xc0, 0xcb,
- 0xc2, 0xf4, 0x98, 0x46, 0xfe, 0xf5, 0x40, 0x99, 0x26, 0xb6, 0xa6, 0x27, 0xb6, 0x0b, 0x26, 0x23,
- 0x17, 0x54, 0x4e, 0x55, 0x95, 0xf1, 0xa9, 0x8c, 0x36, 0xa0, 0xed, 0x53, 0x46, 0x3c, 0xf9, 0x54,
- 0x5d, 0x1a, 0x73, 0x85, 0xfd, 0x81, 0xea, 0xda, 0x3c, 0xc0, 0x2c, 0x39, 0xaf, 0x67, 0x13, 0x45,
- 0xc5, 0xb6, 0x5a, 0xee, 0x7f, 0x35, 0x63, 0xec, 0x2f, 0x73, 0x78, 0x8f, 0x69, 0xf0, 0x2f, 0xb5,
- 0xfc, 0x02, 0x70, 0xf6, 0x45, 0x1e, 0xbe, 0x0a, 0x20, 0x0b, 0x7f, 0xd6, 0x0a, 0x5d, 0x87, 0x76,
- 0x48, 0x43, 0xe2, 0xf2, 0x71, 0x42, 0xb2, 0x4d, 0x62, 0x0a, 0x45, 0x7f, 0x9c, 0x90, 0xc2, 0x48,
- 0x5f, 0x2a, 0x8c, 0xf4, 0xe9, 0xd6, 0xa8, 0xe7, 0x5b, 0xc3, 0xfe, 0xd5, 0x50, 0x39, 0x3f, 0x26,
- 0x7c, 0x3f, 0x08, 0x04, 0x23, 0xe9, 0xb5, 0xe4, 0x36, 0xa0, 0x62, 0xd3, 0x8a, 0xe0, 0x56, 0x1c,
- 0x25, 0xa0, 0x7b, 0xd0, 0x51, 0xe9, 0xa2, 0x71, 0xe4, 0xfa, 0x24, 0xf5, 0x64, 0x7c, 0xa6, 0xb3,
- 0x32, 0xd5, 0x1e, 0x92, 0xd4, 0x43, 0xef, 0x43, 0x3d, 0x8d, 0x99, 0xea, 0xec, 0xce, 0xde, 0x3d,
- 0x3d, 0x61, 0xd5, 0x20, 0x7b, 0x27, 0x31, 0xe3, 0x07, 0x63, 0x47, 0x5e, 0xb1, 0xb7, 0xa1, 0xa9,
- 0x64, 0xd4, 0x86, 0x46, 0xff, 0x49, 0xff, 0xe9, 0xd1, 0xea, 0x4b, 0xa8, 0x03, 0xf0, 0xc8, 0x39,
- 0xda, 0xef, 0x1f, 0x1d, 0xba, 0xfb, 0xfd, 0x55, 0xc3, 0x76, 0x61, 0xad, 0xe2, 0xeb, 0x2a, 0xb5,
- 0x82, 0x36, 0x61, 0x99, 0x44, 0xbe, 0x1b, 0x9f, 0xab, 0x95, 0x54, 0x93, 0x20, 0xda, 0x24, 0xf2,
- 0x3f, 0x3e, 0x97, 0x4b, 0xe9, 0xab, 0x9a, 0xca, 0xe5, 0x53, 0x9a, 0xf2, 0xff, 0x03, 0xa1, 0xef,
- 0x16, 0x08, 0xdd, 0xd6, 0x51, 0x95, 0x43, 0x2c, 0xd0, 0x29, 0xa6, 0x6c, 0x7c, 0x7e, 0x9e, 0x12,
- 0xb5, 0x47, 0x56, 0x9c, 0x4c, 0xfa, 0x6b, 0x34, 0x7f, 0xa8, 0xa6, 0x9f, 0xf6, 0xc2, 0x55, 0x48,
- 0xde, 0xfb, 0xb9, 0x01, 0xcb, 0x42, 0x75, 0x42, 0xd8, 0x05, 0xf5, 0x08, 0x1a, 0xaa, 0x06, 0x2d,
- 0x6d, 0x7a, 0x64, 0x97, 0xca, 0x63, 0xc6, 0x4f, 0x95, 0xee, 0xf6, 0xc2, 0x33, 0xd9, 0x80, 0x6b,
- 0xfe, 0xfe, 0xcd, 0x4e, 0xcd, 0xac, 0xdd, 0x37, 0xd0, 0x73, 0x58, 0x29, 0x2c, 0x49, 0xb4, 0xa1,
- 0xdf, 0x2f, 0xff, 0x2a, 0xe8, 0xbe, 0x3a, 0xc7, 0x5a, 0xf0, 0x6b, 0xec, 0x18, 0xe8, 0x53, 0xe8,
- 0x14, 0xf7, 0x07, 0x2a, 0x5c, 0xad, 0x6c, 0xd4, 0xee, 0xe6, 0x3c, 0x73, 0xc5, 0xf5, 0xa9, 0x72,
- 0x9d, 0xcf, 0xed, 0xa2, 0xeb, 0xca, 0x06, 0x2a, 0xba, 0x9e, 0x31, 0xee, 0x33, 0xd7, 0xe8, 0x04,
- 0x6e, 0xe8, 0x43, 0x15, 0xad, 0xeb, 0xf7, 0x4a, 0xbb, 0xa0, 0xbb, 0x31, 0xdb, 0x58, 0x21, 0x58,
- 0x73, 0x2a, 0x46, 0x5d, 0xd5, 0xa9, 0x36, 0x81, 0xab, 0x4e, 0xf5, 0xe9, 0xa8, 0x39, 0xfd, 0x5c,
- 0xfd, 0xe0, 0xd7, 0xba, 0x1a, 0x6d, 0x2e, 0x1e, 0x1d, 0xdd, 0xd7, 0xe6, 0xda, 0xe7, 0xd5, 0xc4,
- 0xb4, 0x98, 0x8b, 0x35, 0x51, 0xee, 0xa2, 0x62, 0x4d, 0x54, 0x3a, 0x20, 0xf7, 0x7b, 0x70, 0xff,
- 0x33, 0x71, 0x32, 0xc0, 0x67, 0x3d, 0x2f, 0x0e, 0x77, 0xd5, 0xe7, 0xdb, 0x31, 0x1b, 0xec, 0xaa,
- 0xfb, 0xbb, 0xf2, 0xcf, 0xd5, 0xee, 0x20, 0xce, 0xe4, 0xe4, 0xec, 0xac, 0x29, 0x55, 0x0f, 0xfe,
- 0x08, 0x00, 0x00, 0xff, 0xff, 0x82, 0x6a, 0x90, 0x7b, 0x9f, 0x0d, 0x00, 0x00,
+ // 941 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0x4f, 0x6f, 0x1b, 0x45,
+ 0x14, 0x67, 0x1d, 0x7b, 0xbd, 0x7e, 0x89, 0xdd, 0x74, 0x28, 0xcd, 0xd6, 0x09, 0x21, 0xda, 0x52,
+ 0x61, 0x24, 0x70, 0x4a, 0x38, 0x00, 0x12, 0x48, 0x4d, 0xea, 0x50, 0x55, 0xaa, 0xa0, 0x9a, 0x98,
+ 0x46, 0x20, 0xa4, 0xd5, 0x64, 0x77, 0xec, 0x8c, 0xb2, 0xde, 0x31, 0x33, 0xe3, 0x44, 0x39, 0x73,
+ 0xe6, 0x0c, 0x37, 0x2e, 0x7c, 0x00, 0x3e, 0x05, 0x9f, 0x81, 0xaf, 0xc0, 0x89, 0x2b, 0xe2, 0x84,
+ 0x66, 0x66, 0x6d, 0xef, 0x7a, 0x63, 0x20, 0x28, 0x42, 0xbd, 0xed, 0x7b, 0x6f, 0xe6, 0x37, 0xef,
+ 0xf7, 0x7b, 0x7f, 0x6c, 0x80, 0x0b, 0x76, 0xc6, 0xba, 0x63, 0xc1, 0x15, 0x47, 0xee, 0x90, 0x29,
+ 0x92, 0x5c, 0xb6, 0x21, 0x61, 0xa9, 0xb2, 0xbe, 0xf6, 0x9a, 0x3c, 0x25, 0x82, 0xc6, 0xd6, 0x0a,
+ 0xbe, 0x73, 0xe0, 0xf6, 0x31, 0x3b, 0x63, 0x8f, 0xf9, 0x68, 0xc4, 0x54, 0x8f, 0x2a, 0xc2, 0x12,
+ 0x89, 0x10, 0x54, 0x53, 0x32, 0xa2, 0xbe, 0xb3, 0xe3, 0x74, 0xd6, 0xb0, 0xf9, 0x46, 0x77, 0xa0,
+ 0x46, 0x47, 0x84, 0x25, 0x7e, 0xc5, 0x38, 0xad, 0x81, 0x7c, 0xa8, 0x8f, 0xa8, 0x94, 0x64, 0x48,
+ 0xfd, 0x15, 0xe3, 0x9f, 0x9a, 0x68, 0x03, 0xea, 0x13, 0x49, 0x45, 0xc8, 0x62, 0xbf, 0xba, 0xe3,
+ 0x74, 0x6a, 0xd8, 0xd5, 0xe6, 0xd3, 0x18, 0x6d, 0x42, 0xc3, 0x04, 0xcc, 0x0b, 0x35, 0x73, 0xc9,
+ 0xd3, 0x8e, 0xcf, 0xc8, 0x88, 0x06, 0x7d, 0xb8, 0xa5, 0xd3, 0x79, 0x4e, 0x86, 0xf4, 0x05, 0x15,
+ 0x92, 0xf1, 0x14, 0xbd, 0x0d, 0x6e, 0x64, 0xb2, 0x33, 0xe9, 0xac, 0xee, 0xdd, 0xee, 0x5a, 0x56,
+ 0xdd, 0x27, 0x4c, 0xd9, 0xb4, 0x71, 0x76, 0x00, 0xdd, 0x05, 0x77, 0xc0, 0xc5, 0x88, 0x28, 0x93,
+ 0x64, 0x03, 0x67, 0x56, 0xf0, 0x9b, 0x03, 0xde, 0x14, 0x16, 0xbd, 0x07, 0xf5, 0x73, 0x0b, 0x9d,
+ 0x01, 0x6e, 0x4c, 0x01, 0x17, 0x5e, 0xc6, 0xd3, 0x73, 0xcb, 0x70, 0xb5, 0x26, 0x8a, 0xa9, 0x64,
+ 0xca, 0xdd, 0x1a, 0xe8, 0x1e, 0x78, 0x13, 0x91, 0x84, 0x63, 0xa2, 0x4e, 0x0d, 0xf5, 0x06, 0xae,
+ 0x4f, 0x44, 0xf2, 0x9c, 0xa8, 0x53, 0x2d, 0xac, 0x71, 0x5b, 0xda, 0xe6, 0x7b, 0x26, 0xb6, 0x9b,
+ 0x13, 0x7b, 0x1b, 0xe0, 0x94, 0x49, 0xc5, 0x05, 0x8b, 0x48, 0xe2, 0xd7, 0x77, 0x9c, 0x8e, 0x87,
+ 0x73, 0x1e, 0xfd, 0x84, 0x20, 0x17, 0x61, 0x4c, 0x14, 0xf1, 0x3d, 0xab, 0xbb, 0x20, 0x17, 0x3d,
+ 0xa2, 0x48, 0xf0, 0xab, 0x03, 0x77, 0x34, 0x91, 0x63, 0xc1, 0x14, 0xd5, 0x6c, 0x30, 0xfd, 0x66,
+ 0x42, 0xa5, 0x42, 0x1f, 0x02, 0x08, 0x3a, 0xe6, 0x92, 0x29, 0x2e, 0x2e, 0x33, 0xea, 0x68, 0x4a,
+ 0x1d, 0xcf, 0x22, 0x07, 0xd5, 0x1f, 0x7e, 0x79, 0xc7, 0xc1, 0xb9, 0xb3, 0xb3, 0x0c, 0x2b, 0xb9,
+ 0x0c, 0xe7, 0x92, 0xac, 0x14, 0x24, 0x79, 0x04, 0x2d, 0x5b, 0x8c, 0x30, 0xb6, 0xcd, 0x64, 0x24,
+ 0x58, 0xdd, 0xbb, 0x97, 0x17, 0xb9, 0xd0, 0x6d, 0xb8, 0x19, 0x15, 0x9a, 0xcf, 0x87, 0x7a, 0xc4,
+ 0x53, 0x45, 0x53, 0x95, 0xc9, 0x34, 0x35, 0x83, 0x47, 0xf0, 0xda, 0x02, 0x33, 0x39, 0xe6, 0xa9,
+ 0xa4, 0xe8, 0x2d, 0xb8, 0x15, 0x4f, 0xc6, 0x09, 0x8b, 0x88, 0xa2, 0x21, 0x15, 0x82, 0x8b, 0xac,
+ 0x75, 0x5b, 0x33, 0xf7, 0xa1, 0xf6, 0x06, 0x7f, 0x38, 0x16, 0xe2, 0x8b, 0x71, 0x4c, 0x6e, 0x4a,
+ 0x9d, 0x4d, 0x68, 0x8c, 0xc9, 0x90, 0xda, 0x7a, 0x5b, 0x89, 0x3c, 0xed, 0x30, 0x05, 0xbf, 0xba,
+ 0x43, 0xe6, 0xe2, 0x55, 0xff, 0x41, 0xbc, 0xda, 0x7f, 0x17, 0xcf, 0x2d, 0x8a, 0xd7, 0x85, 0xbb,
+ 0x8b, 0xcc, 0x33, 0xf5, 0xf4, 0x64, 0xe7, 0x34, 0xb3, 0x46, 0xf0, 0x73, 0x26, 0x55, 0x8f, 0x26,
+ 0xf4, 0x7f, 0x91, 0xaa, 0x4c, 0x7e, 0xe5, 0x7a, 0xe4, 0x03, 0xdf, 0x52, 0xcc, 0x67, 0x6c, 0x29,
+ 0x06, 0x3f, 0x3a, 0xf0, 0xaa, 0x0e, 0x7d, 0xca, 0xd2, 0xf8, 0x66, 0xa8, 0xcc, 0x0a, 0x5b, 0xc9,
+ 0x17, 0xb6, 0x0d, 0x9e, 0xa0, 0xe7, 0xcc, 0x2c, 0x17, 0x5b, 0xf1, 0x99, 0x8d, 0xb6, 0xa0, 0x11,
+ 0x33, 0x41, 0x23, 0xf3, 0x54, 0xd5, 0x04, 0xe7, 0x8e, 0xe0, 0x63, 0x3b, 0xb5, 0xf3, 0x04, 0xb3,
+ 0xe2, 0xbc, 0xa9, 0x37, 0xc6, 0x90, 0x66, 0xb9, 0xad, 0x2f, 0xae, 0x2a, 0x6c, 0xa2, 0xc1, 0xef,
+ 0x8e, 0xa5, 0xfe, 0x84, 0xaa, 0xfd, 0x24, 0xd1, 0x01, 0x79, 0x23, 0x14, 0x13, 0xa6, 0xf7, 0xae,
+ 0xa6, 0xd8, 0xc4, 0xd6, 0x40, 0x0f, 0xa0, 0x65, 0xb3, 0x66, 0x3c, 0x0d, 0x63, 0x2a, 0x23, 0x43,
+ 0xd4, 0xc3, 0xcd, 0x99, 0xb7, 0x47, 0x65, 0x84, 0x3e, 0x82, 0xaa, 0xe4, 0xc2, 0x36, 0x78, 0x6b,
+ 0xef, 0x41, 0x3e, 0xef, 0x72, 0x92, 0xdd, 0x23, 0x2e, 0xd4, 0xc1, 0x25, 0x36, 0x57, 0x82, 0xfb,
+ 0xe0, 0x5a, 0x1b, 0x35, 0xa0, 0xd6, 0x7f, 0xda, 0x7f, 0x76, 0xb8, 0xfe, 0x0a, 0x6a, 0x01, 0x3c,
+ 0xc6, 0x87, 0xfb, 0xfd, 0xc3, 0x5e, 0xb8, 0xdf, 0x5f, 0x77, 0x82, 0x10, 0x36, 0x4a, 0x58, 0xd7,
+ 0x91, 0x0c, 0x6d, 0xc3, 0x2a, 0x4d, 0xe3, 0x90, 0x0f, 0x42, 0x73, 0xb8, 0x62, 0x48, 0x34, 0x68,
+ 0x1a, 0x7f, 0x3e, 0xd0, 0xa7, 0x82, 0x6f, 0x2b, 0xb6, 0x22, 0xcf, 0x98, 0x54, 0x2f, 0x83, 0xa0,
+ 0x1f, 0x14, 0x04, 0xbd, 0x9f, 0x67, 0xb5, 0x98, 0x62, 0x41, 0x4e, 0xbd, 0x6c, 0xf8, 0x60, 0x20,
+ 0xa9, 0x5d, 0xa7, 0x4d, 0x9c, 0x59, 0xff, 0x4e, 0xe6, 0x4f, 0xec, 0x12, 0xc8, 0xbd, 0x70, 0x1d,
+ 0x91, 0xf7, 0x7e, 0xaa, 0xc2, 0xaa, 0x76, 0x1d, 0x51, 0x71, 0xce, 0x22, 0x8a, 0x5e, 0x40, 0xb3,
+ 0xb0, 0xc1, 0xd1, 0x56, 0xfe, 0xe2, 0xe2, 0x4f, 0x56, 0xfb, 0xf5, 0x25, 0xd1, 0x6c, 0xaa, 0xdd,
+ 0x3f, 0xbf, 0xef, 0x54, 0x3c, 0xa7, 0xe3, 0xa0, 0x2f, 0xa1, 0x55, 0x5c, 0x6e, 0xa8, 0x70, 0xb5,
+ 0xb4, 0xee, 0xdb, 0xdb, 0xcb, 0xc2, 0x25, 0xe8, 0x63, 0x0b, 0x3d, 0x5f, 0x2a, 0x45, 0xe8, 0xd2,
+ 0x7a, 0x2c, 0x42, 0x5f, 0xb1, 0x8b, 0x32, 0x68, 0x74, 0x04, 0x6b, 0xf9, 0x89, 0x47, 0x9b, 0xf9,
+ 0x7b, 0x0b, 0x8b, 0xaa, 0xbd, 0x75, 0x75, 0xb0, 0x00, 0x59, 0x79, 0xe8, 0xa0, 0xaf, 0xed, 0xff,
+ 0xa7, 0xdc, 0x58, 0xa0, 0xed, 0xbf, 0x9f, 0xbd, 0xf6, 0x1b, 0x4b, 0xe3, 0x25, 0xf4, 0xac, 0x7c,
+ 0xb3, 0x6e, 0x28, 0x96, 0x6f, 0xb1, 0x0d, 0x8b, 0xe5, 0x2b, 0xb5, 0xd0, 0x1c, 0xf7, 0xe0, 0xe1,
+ 0x57, 0xfa, 0x64, 0x42, 0x4e, 0xba, 0x11, 0x1f, 0xed, 0xda, 0xcf, 0x77, 0xb9, 0x18, 0xee, 0xda,
+ 0xfb, 0xbb, 0xe6, 0xbf, 0xea, 0xee, 0x90, 0x67, 0xf6, 0xf8, 0xe4, 0xc4, 0x35, 0xae, 0xf7, 0xff,
+ 0x0a, 0x00, 0x00, 0xff, 0xff, 0x96, 0xc5, 0xef, 0x98, 0xee, 0x0a, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
@@ -1269,13 +1034,11 @@ const _ = grpc.SupportPackageIsVersion4
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type WikiServiceClient interface {
- WikiGetPageVersions(ctx context.Context, in *WikiGetPageVersionsRequest, opts ...grpc.CallOption) (WikiService_WikiGetPageVersionsClient, error)
WikiWritePage(ctx context.Context, opts ...grpc.CallOption) (WikiService_WikiWritePageClient, error)
WikiUpdatePage(ctx context.Context, opts ...grpc.CallOption) (WikiService_WikiUpdatePageClient, error)
WikiDeletePage(ctx context.Context, in *WikiDeletePageRequest, opts ...grpc.CallOption) (*WikiDeletePageResponse, error)
// WikiFindPage returns a stream because the page's raw_data field may be arbitrarily large.
WikiFindPage(ctx context.Context, in *WikiFindPageRequest, opts ...grpc.CallOption) (WikiService_WikiFindPageClient, error)
- WikiFindFile(ctx context.Context, in *WikiFindFileRequest, opts ...grpc.CallOption) (WikiService_WikiFindFileClient, error)
WikiGetAllPages(ctx context.Context, in *WikiGetAllPagesRequest, opts ...grpc.CallOption) (WikiService_WikiGetAllPagesClient, error)
WikiListPages(ctx context.Context, in *WikiListPagesRequest, opts ...grpc.CallOption) (WikiService_WikiListPagesClient, error)
}
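As the WikiFindPage comment notes, the response arrives as a stream because the page's raw_data may be arbitrarily large. A hedged sketch of draining that stream, using the WikiFindPageResponse.GetPage and WikiPage.RawData accessors defined earlier in this file; the exact chunking convention (metadata in the first message, raw_data continued in later ones) and the import path are assumptions:

package gitalyexample

import (
	"context"
	"io"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb" // import path assumed
)

// findWikiPage reassembles a streamed wiki page into a single WikiPage value.
func findWikiPage(ctx context.Context, client gitalypb.WikiServiceClient, req *gitalypb.WikiFindPageRequest) (*gitalypb.WikiPage, error) {
	stream, err := client.WikiFindPage(ctx, req)
	if err != nil {
		return nil, err
	}

	var page *gitalypb.WikiPage
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			// A nil result means no page was found.
			return page, nil
		}
		if err != nil {
			return nil, err
		}

		if page == nil {
			// Assumed convention: the first message carries the page metadata and first raw_data chunk.
			page = resp.GetPage()
		} else if chunk := resp.GetPage(); chunk != nil {
			// Assumed convention: later messages only append further raw_data chunks.
			page.RawData = append(page.RawData, chunk.GetRawData()...)
		}
	}
}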
@@ -1288,40 +1051,8 @@ func NewWikiServiceClient(cc *grpc.ClientConn) WikiServiceClient {
return &wikiServiceClient{cc}
}
-func (c *wikiServiceClient) WikiGetPageVersions(ctx context.Context, in *WikiGetPageVersionsRequest, opts ...grpc.CallOption) (WikiService_WikiGetPageVersionsClient, error) {
- stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[0], "/gitaly.WikiService/WikiGetPageVersions", opts...)
- if err != nil {
- return nil, err
- }
- x := &wikiServiceWikiGetPageVersionsClient{stream}
- if err := x.ClientStream.SendMsg(in); err != nil {
- return nil, err
- }
- if err := x.ClientStream.CloseSend(); err != nil {
- return nil, err
- }
- return x, nil
-}
-
-type WikiService_WikiGetPageVersionsClient interface {
- Recv() (*WikiGetPageVersionsResponse, error)
- grpc.ClientStream
-}
-
-type wikiServiceWikiGetPageVersionsClient struct {
- grpc.ClientStream
-}
-
-func (x *wikiServiceWikiGetPageVersionsClient) Recv() (*WikiGetPageVersionsResponse, error) {
- m := new(WikiGetPageVersionsResponse)
- if err := x.ClientStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
func (c *wikiServiceClient) WikiWritePage(ctx context.Context, opts ...grpc.CallOption) (WikiService_WikiWritePageClient, error) {
- stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[1], "/gitaly.WikiService/WikiWritePage", opts...)
+ stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[0], "/gitaly.WikiService/WikiWritePage", opts...)
if err != nil {
return nil, err
}
@@ -1355,7 +1086,7 @@ func (x *wikiServiceWikiWritePageClient) CloseAndRecv() (*WikiWritePageResponse,
}
func (c *wikiServiceClient) WikiUpdatePage(ctx context.Context, opts ...grpc.CallOption) (WikiService_WikiUpdatePageClient, error) {
- stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[2], "/gitaly.WikiService/WikiUpdatePage", opts...)
+ stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[1], "/gitaly.WikiService/WikiUpdatePage", opts...)
if err != nil {
return nil, err
}
@@ -1398,7 +1129,7 @@ func (c *wikiServiceClient) WikiDeletePage(ctx context.Context, in *WikiDeletePa
}
func (c *wikiServiceClient) WikiFindPage(ctx context.Context, in *WikiFindPageRequest, opts ...grpc.CallOption) (WikiService_WikiFindPageClient, error) {
- stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[3], "/gitaly.WikiService/WikiFindPage", opts...)
+ stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[2], "/gitaly.WikiService/WikiFindPage", opts...)
if err != nil {
return nil, err
}
@@ -1429,40 +1160,8 @@ func (x *wikiServiceWikiFindPageClient) Recv() (*WikiFindPageResponse, error) {
return m, nil
}
-func (c *wikiServiceClient) WikiFindFile(ctx context.Context, in *WikiFindFileRequest, opts ...grpc.CallOption) (WikiService_WikiFindFileClient, error) {
- stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[4], "/gitaly.WikiService/WikiFindFile", opts...)
- if err != nil {
- return nil, err
- }
- x := &wikiServiceWikiFindFileClient{stream}
- if err := x.ClientStream.SendMsg(in); err != nil {
- return nil, err
- }
- if err := x.ClientStream.CloseSend(); err != nil {
- return nil, err
- }
- return x, nil
-}
-
-type WikiService_WikiFindFileClient interface {
- Recv() (*WikiFindFileResponse, error)
- grpc.ClientStream
-}
-
-type wikiServiceWikiFindFileClient struct {
- grpc.ClientStream
-}
-
-func (x *wikiServiceWikiFindFileClient) Recv() (*WikiFindFileResponse, error) {
- m := new(WikiFindFileResponse)
- if err := x.ClientStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
func (c *wikiServiceClient) WikiGetAllPages(ctx context.Context, in *WikiGetAllPagesRequest, opts ...grpc.CallOption) (WikiService_WikiGetAllPagesClient, error) {
- stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[5], "/gitaly.WikiService/WikiGetAllPages", opts...)
+ stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[3], "/gitaly.WikiService/WikiGetAllPages", opts...)
if err != nil {
return nil, err
}
@@ -1494,7 +1193,7 @@ func (x *wikiServiceWikiGetAllPagesClient) Recv() (*WikiGetAllPagesResponse, err
}
func (c *wikiServiceClient) WikiListPages(ctx context.Context, in *WikiListPagesRequest, opts ...grpc.CallOption) (WikiService_WikiListPagesClient, error) {
- stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[6], "/gitaly.WikiService/WikiListPages", opts...)
+ stream, err := c.cc.NewStream(ctx, &_WikiService_serviceDesc.Streams[4], "/gitaly.WikiService/WikiListPages", opts...)
if err != nil {
return nil, err
}
@@ -1527,13 +1226,11 @@ func (x *wikiServiceWikiListPagesClient) Recv() (*WikiListPagesResponse, error)
// WikiServiceServer is the server API for WikiService service.
type WikiServiceServer interface {
- WikiGetPageVersions(*WikiGetPageVersionsRequest, WikiService_WikiGetPageVersionsServer) error
WikiWritePage(WikiService_WikiWritePageServer) error
WikiUpdatePage(WikiService_WikiUpdatePageServer) error
WikiDeletePage(context.Context, *WikiDeletePageRequest) (*WikiDeletePageResponse, error)
// WikiFindPage returns a stream because the page's raw_data field may be arbitrarily large.
WikiFindPage(*WikiFindPageRequest, WikiService_WikiFindPageServer) error
- WikiFindFile(*WikiFindFileRequest, WikiService_WikiFindFileServer) error
WikiGetAllPages(*WikiGetAllPagesRequest, WikiService_WikiGetAllPagesServer) error
WikiListPages(*WikiListPagesRequest, WikiService_WikiListPagesServer) error
}
@@ -1542,9 +1239,6 @@ type WikiServiceServer interface {
type UnimplementedWikiServiceServer struct {
}
-func (*UnimplementedWikiServiceServer) WikiGetPageVersions(req *WikiGetPageVersionsRequest, srv WikiService_WikiGetPageVersionsServer) error {
- return status.Errorf(codes.Unimplemented, "method WikiGetPageVersions not implemented")
-}
func (*UnimplementedWikiServiceServer) WikiWritePage(srv WikiService_WikiWritePageServer) error {
return status.Errorf(codes.Unimplemented, "method WikiWritePage not implemented")
}
@@ -1557,9 +1251,6 @@ func (*UnimplementedWikiServiceServer) WikiDeletePage(ctx context.Context, req *
func (*UnimplementedWikiServiceServer) WikiFindPage(req *WikiFindPageRequest, srv WikiService_WikiFindPageServer) error {
return status.Errorf(codes.Unimplemented, "method WikiFindPage not implemented")
}
-func (*UnimplementedWikiServiceServer) WikiFindFile(req *WikiFindFileRequest, srv WikiService_WikiFindFileServer) error {
- return status.Errorf(codes.Unimplemented, "method WikiFindFile not implemented")
-}
func (*UnimplementedWikiServiceServer) WikiGetAllPages(req *WikiGetAllPagesRequest, srv WikiService_WikiGetAllPagesServer) error {
return status.Errorf(codes.Unimplemented, "method WikiGetAllPages not implemented")
}
@@ -1571,27 +1262,6 @@ func RegisterWikiServiceServer(s *grpc.Server, srv WikiServiceServer) {
s.RegisterService(&_WikiService_serviceDesc, srv)
}
-func _WikiService_WikiGetPageVersions_Handler(srv interface{}, stream grpc.ServerStream) error {
- m := new(WikiGetPageVersionsRequest)
- if err := stream.RecvMsg(m); err != nil {
- return err
- }
- return srv.(WikiServiceServer).WikiGetPageVersions(m, &wikiServiceWikiGetPageVersionsServer{stream})
-}
-
-type WikiService_WikiGetPageVersionsServer interface {
- Send(*WikiGetPageVersionsResponse) error
- grpc.ServerStream
-}
-
-type wikiServiceWikiGetPageVersionsServer struct {
- grpc.ServerStream
-}
-
-func (x *wikiServiceWikiGetPageVersionsServer) Send(m *WikiGetPageVersionsResponse) error {
- return x.ServerStream.SendMsg(m)
-}
-
func _WikiService_WikiWritePage_Handler(srv interface{}, stream grpc.ServerStream) error {
return srv.(WikiServiceServer).WikiWritePage(&wikiServiceWikiWritePageServer{stream})
}
@@ -1683,27 +1353,6 @@ func (x *wikiServiceWikiFindPageServer) Send(m *WikiFindPageResponse) error {
return x.ServerStream.SendMsg(m)
}
-func _WikiService_WikiFindFile_Handler(srv interface{}, stream grpc.ServerStream) error {
- m := new(WikiFindFileRequest)
- if err := stream.RecvMsg(m); err != nil {
- return err
- }
- return srv.(WikiServiceServer).WikiFindFile(m, &wikiServiceWikiFindFileServer{stream})
-}
-
-type WikiService_WikiFindFileServer interface {
- Send(*WikiFindFileResponse) error
- grpc.ServerStream
-}
-
-type wikiServiceWikiFindFileServer struct {
- grpc.ServerStream
-}
-
-func (x *wikiServiceWikiFindFileServer) Send(m *WikiFindFileResponse) error {
- return x.ServerStream.SendMsg(m)
-}
-
func _WikiService_WikiGetAllPages_Handler(srv interface{}, stream grpc.ServerStream) error {
m := new(WikiGetAllPagesRequest)
if err := stream.RecvMsg(m); err != nil {
@@ -1757,11 +1406,6 @@ var _WikiService_serviceDesc = grpc.ServiceDesc{
},
Streams: []grpc.StreamDesc{
{
- StreamName: "WikiGetPageVersions",
- Handler: _WikiService_WikiGetPageVersions_Handler,
- ServerStreams: true,
- },
- {
StreamName: "WikiWritePage",
Handler: _WikiService_WikiWritePage_Handler,
ClientStreams: true,
@@ -1777,11 +1421,6 @@ var _WikiService_serviceDesc = grpc.ServiceDesc{
ServerStreams: true,
},
{
- StreamName: "WikiFindFile",
- Handler: _WikiService_WikiFindFile_Handler,
- ServerStreams: true,
- },
- {
StreamName: "WikiGetAllPages",
Handler: _WikiService_WikiGetAllPages_Handler,
ServerStreams: true,
diff --git a/proto/remote.proto b/proto/remote.proto
index c228e3880..d427accef 100644
--- a/proto/remote.proto
+++ b/proto/remote.proto
@@ -40,6 +40,12 @@ service RemoteService {
scope_level: STORAGE
};
}
+
+ // FindRemoteRootRef tries to find the root reference of a remote
+ // repository. The root reference is the default branch as pointed to by
+ // the remote's HEAD reference. Returns an InvalidArgument error if the
+ // specified remote does not exist and a NotFound error in case no HEAD
+ // branch was found.
rpc FindRemoteRootRef(FindRemoteRootRefRequest) returns (FindRemoteRootRefResponse) {
option (op_type) = {
op: ACCESSOR
@@ -119,12 +125,28 @@ message FindRemoteRepositoryResponse {
bool exists = 1;
}
+// FindRemoteRootRefRequest represents a request for the FindRemoteRootRef RPC.
message FindRemoteRootRefRequest {
+ // Repository is the repository in which the request shall be executed. If
+ // a remote name is given, then this is the repository in which the remote
+ // will be looked up.
Repository repository = 1 [(target_repository)=true];
- string remote = 2;
+ // Remote is the name of the remote whose root reference shall be
+ // looked up. The remote must have been created before this call. This
+ // parameter is deprecated in favor of `RemoteUrl`, see
+ // https://gitlab.com/gitlab-org/gitaly/-/issues/1773.
+ string remote = 2 [deprecated=true];
+ // RemoteUrl specifies the URL of the remote repository to fetch from.
+ string remote_url = 3;
+ // HttpAuthorizationHeader is the HTTP header which should be added to the
+ // request in order to authenticate against the repository.
+ string http_authorization_header = 4;
}
+// FindRemoteRootRefResponse represents the response for the FindRemoteRootRef
+// request.
message FindRemoteRootRefResponse {
+ // Ref is the name of the remote root reference.
string ref = 1;
}
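To illustrate the documented behaviour, here is a minimal Go client sketch of calling FindRemoteRootRef through the generated gitalypb bindings; the server address, repository coordinates and remote URL are hypothetical placeholders and not part of this change.

package main

import (
	"context"
	"fmt"
	"log"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
	"google.golang.org/grpc"
)

func main() {
	// Hypothetical Gitaly address; real deployments use the configured listen address.
	conn, err := grpc.Dial("localhost:9999", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := gitalypb.NewRemoteServiceClient(conn)

	// RemoteUrl is preferred over the deprecated Remote name parameter.
	resp, err := client.FindRemoteRootRef(context.Background(), &gitalypb.FindRemoteRootRefRequest{
		Repository: &gitalypb.Repository{StorageName: "default", RelativePath: "example.git"},
		RemoteUrl:  "https://example.com/example.git",
	})
	if err != nil {
		// InvalidArgument if the remote does not exist, NotFound if no HEAD branch was found.
		log.Fatal(err)
	}
	fmt.Println("root ref:", resp.GetRef())
}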
diff --git a/proto/repository-service.proto b/proto/repository-service.proto
index e5ca6bf37..6d6d580df 100644
--- a/proto/repository-service.proto
+++ b/proto/repository-service.proto
@@ -81,7 +81,7 @@ service RepositoryService {
}
rpc Fsck(FsckRequest) returns (FsckResponse) {
option (op_type) = {
- op: MUTATOR
+ op: ACCESSOR
};
}
rpc WriteRef(WriteRefRequest) returns (WriteRefResponse) {
@@ -125,16 +125,27 @@ service RepositoryService {
op: MUTATOR
};
}
+
+ // GetConfig reads the target repository's gitconfig and streams its contents
+ // back. Returns a NotFound error in case no gitconfig was found.
+ rpc GetConfig(GetConfigRequest) returns (stream GetConfigResponse) {
+ option (op_type) = {
+ op: ACCESSOR
+ };
+ }
+
rpc SetConfig(SetConfigRequest) returns (SetConfigResponse) {
option (op_type) = {
op: MUTATOR
};
}
+
rpc DeleteConfig(DeleteConfigRequest) returns (DeleteConfigResponse) {
option (op_type) = {
op: MUTATOR
};
}
+
rpc FindLicense(FindLicenseRequest) returns (FindLicenseResponse) {
option (op_type) = {
op: ACCESSOR
@@ -271,7 +282,15 @@ message GarbageCollectRequest {
message GarbageCollectResponse {}
message WriteCommitGraphRequest {
+ enum SplitStrategy {
+ // SizeMultiple uses the '--split --size-multiple=4' strategy to create/update the commit graph.
+ // https://git-scm.com/docs/git-commit-graph#Documentation/git-commit-graph.txt-emwriteem
+ // It is the default, so there is no need to explicitly set it in the request.
+ SizeMultiple = 0;
+ }
Repository repository = 1 [(target_repository)=true];
+ // SplitStrategy is the strategy used to create/update the commit graph.
+ SplitStrategy splitStrategy = 2;
}
message WriteCommitGraphResponse {}
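A hypothetical Go snippet showing how the new SplitStrategy field might be set when calling WriteCommitGraph via the generated bindings; the function and variable names are illustrative only, not part of this change.

package example

import (
	"context"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

// writeSplitCommitGraph requests a commit-graph update using the SizeMultiple
// split strategy. Since SizeMultiple is the zero value, omitting SplitStrategy
// from the request has the same effect.
func writeSplitCommitGraph(ctx context.Context, client gitalypb.RepositoryServiceClient, repo *gitalypb.Repository) error {
	_, err := client.WriteCommitGraph(ctx, &gitalypb.WriteCommitGraphRequest{
		Repository:    repo,
		SplitStrategy: gitalypb.WriteCommitGraphRequest_SizeMultiple,
	})
	return err
}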
@@ -472,6 +491,19 @@ message CreateBundleResponse {
bytes data = 1;
}
+// GetConfigRequest is a request for the GetConfig RPC.
+message GetConfigRequest {
+ // Repository is the repository from which the configuration should be read.
+ Repository repository = 1 [(target_repository)=true];
+}
+
+// GetConfigResponse is a response for the GetConfig RPC.
+message GetConfigResponse {
+ // Data contains the contents of the gitconfig.
+ bytes data = 1;
+}
+
message SetConfigRequest {
Repository repository = 1 [(target_repository)=true];
message Entry {
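As a rough, hypothetical sketch of consuming the new server-streaming GetConfig RPC from Go (the package, function name and setup are illustrative, not part of this change):

package example

import (
	"context"
	"io"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

// fetchGitconfig reads the target repository's gitconfig via the GetConfig RPC
// and concatenates the streamed chunks into a single byte slice.
func fetchGitconfig(ctx context.Context, client gitalypb.RepositoryServiceClient, repo *gitalypb.Repository) ([]byte, error) {
	stream, err := client.GetConfig(ctx, &gitalypb.GetConfigRequest{Repository: repo})
	if err != nil {
		return nil, err
	}

	var config []byte
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			return config, nil
		}
		if err != nil {
			// GetConfig returns a NotFound error if the repository has no gitconfig.
			return nil, err
		}
		config = append(config, resp.GetData()...)
	}
}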
diff --git a/proto/wiki.proto b/proto/wiki.proto
index 011c228ec..c23584338 100644
--- a/proto/wiki.proto
+++ b/proto/wiki.proto
@@ -8,11 +8,6 @@ package gitaly;
option go_package = "gitlab.com/gitlab-org/gitaly/proto/go/gitalypb";
service WikiService {
- rpc WikiGetPageVersions(WikiGetPageVersionsRequest) returns (stream WikiGetPageVersionsResponse) {
- option (op_type) = {
- op: ACCESSOR
- };
- }
rpc WikiWritePage(stream WikiWritePageRequest) returns (WikiWritePageResponse) {
option (op_type) = {
op: MUTATOR
@@ -34,11 +29,6 @@ service WikiService {
op: ACCESSOR
};
}
- rpc WikiFindFile(WikiFindFileRequest) returns (stream WikiFindFileResponse) {
- option (op_type) = {
- op: ACCESSOR
- };
- }
rpc WikiGetAllPages(WikiGetAllPagesRequest) returns (stream WikiGetAllPagesResponse) {
option (op_type) = {
op: ACCESSOR
@@ -80,18 +70,6 @@ message WikiPage {
bytes raw_data = 8;
}
-message WikiGetPageVersionsRequest {
- Repository repository = 1 [(target_repository)=true];
- bytes page_path = 2;
-
- int32 page = 3;
- int32 per_page = 4;
-}
-
-message WikiGetPageVersionsResponse {
- repeated WikiPageVersion versions = 1;
-}
-
// This message is sent in a stream because the 'content' field may be large.
message WikiWritePageRequest {
// These following fields are only present in the first message.
@@ -144,21 +122,6 @@ message WikiFindPageResponse {
WikiPage page = 1;
}
-message WikiFindFileRequest {
- Repository repository = 1 [(target_repository)=true];
- bytes name = 2;
- // Optional: revision
- bytes revision = 3;
-}
-
-message WikiFindFileResponse {
- // If 'name' is empty, the file was not found.
- bytes name = 1;
- string mime_type = 2;
- bytes raw_data = 3;
- bytes path = 4;
-}
-
message WikiGetAllPagesRequest {
Repository repository = 1 [(target_repository)=true];
// Passing 0 means no limit is applied
diff --git a/ruby/lib/gitaly_server/operations_service.rb b/ruby/lib/gitaly_server/operations_service.rb
index 839f69b5c..f8f9b2e24 100644
--- a/ruby/lib/gitaly_server/operations_service.rb
+++ b/ruby/lib/gitaly_server/operations_service.rb
@@ -20,22 +20,6 @@ module GitalyServer
Gitaly::UserUpdateBranchResponse.new(pre_receive_error: set_utf8!(ex.message))
end
- def user_ff_branch(request, call)
- repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
- user = Gitlab::Git::User.from_gitaly(request.user)
-
- result = repo.ff_merge(user, request.commit_id, request.branch)
- branch_update = branch_update_result(result)
-
- Gitaly::UserFFBranchResponse.new(branch_update: branch_update)
- rescue Gitlab::Git::CommitError => e
- raise GRPC::FailedPrecondition.new(e.to_s)
- rescue ArgumentError => e
- raise GRPC::InvalidArgument.new(e.to_s)
- rescue Gitlab::Git::PreReceiveError => e
- Gitaly::UserFFBranchResponse.new(pre_receive_error: set_utf8!(e.message))
- end
-
def user_cherry_pick(request, call)
repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
user = Gitlab::Git::User.from_gitaly(request.user)
diff --git a/ruby/lib/gitaly_server/wiki_service.rb b/ruby/lib/gitaly_server/wiki_service.rb
index 57c934988..b92cd34ff 100644
--- a/ruby/lib/gitaly_server/wiki_service.rb
+++ b/ruby/lib/gitaly_server/wiki_service.rb
@@ -97,65 +97,6 @@ module GitalyServer
end
end
- def wiki_find_file(request, call)
- repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
- wiki = Gitlab::Git::Wiki.new(repo)
-
- file = wiki.file(set_utf8!(request.name), request.revision.presence)
-
- unless file
- return Enumerator.new do |y|
- y.yield Gitaly::WikiFindFileResponse.new
- end
- end
-
- response = Gitaly::WikiFindFileResponse.new(
- name: file.name.b,
- mime_type: file.mime_type,
- path: file.path.b
- )
-
- Enumerator.new do |y|
- y.yield response
-
- io = StringIO.new(file.raw_data)
- while chunk = io.read(Gitlab.config.git.write_buffer_size)
- y.yield Gitaly::WikiFindFileResponse.new(raw_data: chunk)
- end
- end
- end
-
- def wiki_get_page_versions(request, call)
- repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
- wiki = Gollum::Wiki.new(repo.path)
- path = set_utf8!(request.page_path)
-
- page = wiki.paged(Gollum::Page.canonicalize_filename(path), File.split(path).first)
-
- unless page
- return Enumerator.new do |y|
- y.yield Gitaly::WikiGetPageVersionsResponse.new(versions: [])
- end
- end
-
- Enumerator.new do |y|
- page.versions(per_page: request.per_page, page: request.page).each_slice(20) do |slice|
- versions =
- slice.map do |commit|
- gollum_page = wiki.page(page.title, commit.id)
- obj = repo.rugged.rev_parse(commit.id)
-
- Gitaly::WikiPageVersion.new(
- commit: gitaly_commit_from_rugged(obj),
- format: gollum_page&.format.to_s
- )
- end
-
- y.yield Gitaly::WikiGetPageVersionsResponse.new(versions: versions)
- end
- end
- end
-
def wiki_update_page(call)
repo = wiki = title = format = page_path = commit_details = nil
content = ""
diff --git a/ruby/lib/gitlab/git/hooks_service.rb b/ruby/lib/gitlab/git/hooks_service.rb
index 67ac4c3fb..baccaa5c0 100644
--- a/ruby/lib/gitlab/git/hooks_service.rb
+++ b/ruby/lib/gitlab/git/hooks_service.rb
@@ -20,6 +20,9 @@ module Gitlab
end
yield(self).tap do
+ status, message = run_hook('reference-transaction')
+ Gitlab::GitLogger.error("reference-transaction committed hook: #{message}") unless status
+
status, message = run_hook('post-receive')
Gitlab::GitLogger.error("post-receive hook: #{message}") unless status
diff --git a/ruby/lib/gitlab/git/repository.rb b/ruby/lib/gitlab/git/repository.rb
index 657432021..382f0d0cb 100644
--- a/ruby/lib/gitlab/git/repository.rb
+++ b/ruby/lib/gitlab/git/repository.rb
@@ -209,16 +209,6 @@ module Gitlab
OperationService.new(user, self).update_branch(branch_name, newrev, oldrev, push_options: push_options, transaction: transaction)
end
- def ff_merge(user, source_sha, target_branch)
- OperationService.new(user, self).with_branch(target_branch) do |our_commit|
- raise ArgumentError, 'Invalid merge target' unless our_commit
-
- source_sha
- end
- rescue Rugged::ReferenceError, InvalidRef
- raise ArgumentError, 'Invalid merge source'
- end
-
# rubocop:disable Metrics/ParameterLists
def revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:, dry_run: false, timestamp: nil)
OperationService.new(user, self).with_branch(
diff --git a/ruby/lib/gitlab/git/wiki.rb b/ruby/lib/gitlab/git/wiki.rb
index 07f3ba52d..445feb0a2 100644
--- a/ruby/lib/gitlab/git/wiki.rb
+++ b/ruby/lib/gitlab/git/wiki.rb
@@ -47,10 +47,6 @@ module Gitlab
gollum_find_page(title: title, version: version, dir: dir)
end
- def file(name, version)
- gollum_find_file(name, version)
- end
-
def count_page_versions(page_path)
@repository.count_commits(ref: 'HEAD', path: page_path)
end
@@ -175,14 +171,6 @@ module Gitlab
new_page(gollum_page)
end
- def gollum_find_file(name, version)
- version ||= self.class.default_ref
- gollum_file = gollum_wiki.file(name, version)
- return unless gollum_file
-
- Gitlab::Git::WikiFile.new(gollum_file)
- end
-
def gollum_get_all_pages(limit: nil, sort: nil, direction_desc: false)
gollum_wiki.pages(
limit: limit, sort: sort, direction_desc: direction_desc
diff --git a/ruby/lib/gitlab/git/wiki_file.rb b/ruby/lib/gitlab/git/wiki_file.rb
deleted file mode 100644
index 84335aca4..000000000
--- a/ruby/lib/gitlab/git/wiki_file.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-module Gitlab
- module Git
- class WikiFile
- attr_reader :mime_type, :raw_data, :name, :path
-
- # This class is meant to be serializable so that it can be constructed
- # by Gitaly and sent over the network to GitLab.
- #
- # Because Gollum::File is not serializable we must get all the data from
- # 'gollum_file' during initialization, and NOT store it in an instance
- # variable.
- def initialize(gollum_file)
- @mime_type = gollum_file.mime_type
- @raw_data = gollum_file.raw_data
- @name = gollum_file.name
- @path = gollum_file.path
- end
- end
- end
-end
diff --git a/ruby/proto/gitaly/blob_pb.rb b/ruby/proto/gitaly/blob_pb.rb
index e6e6b1ed6..77d5a22f5 100644
--- a/ruby/proto/gitaly/blob_pb.rb
+++ b/ruby/proto/gitaly/blob_pb.rb
@@ -53,22 +53,6 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
add_message "gitaly.GetLFSPointersResponse" do
repeated :lfs_pointers, :message, 1, "gitaly.LFSPointer"
end
- add_message "gitaly.GetNewLFSPointersRequest" do
- optional :repository, :message, 1, "gitaly.Repository"
- optional :revision, :bytes, 2
- optional :limit, :int32, 3
- optional :not_in_all, :bool, 4
- repeated :not_in_refs, :bytes, 5
- end
- add_message "gitaly.GetNewLFSPointersResponse" do
- repeated :lfs_pointers, :message, 1, "gitaly.LFSPointer"
- end
- add_message "gitaly.GetAllLFSPointersRequest" do
- optional :repository, :message, 1, "gitaly.Repository"
- end
- add_message "gitaly.GetAllLFSPointersResponse" do
- repeated :lfs_pointers, :message, 1, "gitaly.LFSPointer"
- end
add_message "gitaly.ListLFSPointersRequest" do
optional :repository, :message, 1, "gitaly.Repository"
repeated :revisions, :string, 2
@@ -97,10 +81,6 @@ module Gitaly
NewBlobObject = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.NewBlobObject").msgclass
GetLFSPointersRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GetLFSPointersRequest").msgclass
GetLFSPointersResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GetLFSPointersResponse").msgclass
- GetNewLFSPointersRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GetNewLFSPointersRequest").msgclass
- GetNewLFSPointersResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GetNewLFSPointersResponse").msgclass
- GetAllLFSPointersRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GetAllLFSPointersRequest").msgclass
- GetAllLFSPointersResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GetAllLFSPointersResponse").msgclass
ListLFSPointersRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.ListLFSPointersRequest").msgclass
ListLFSPointersResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.ListLFSPointersResponse").msgclass
ListAllLFSPointersRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.ListAllLFSPointersRequest").msgclass
diff --git a/ruby/proto/gitaly/blob_services_pb.rb b/ruby/proto/gitaly/blob_services_pb.rb
index 21a3bc6e5..1048b21da 100644
--- a/ruby/proto/gitaly/blob_services_pb.rb
+++ b/ruby/proto/gitaly/blob_services_pb.rb
@@ -22,21 +22,7 @@ module Gitaly
# GetLFSPointers retrieves LFS pointers from a given set of object IDs.
# This RPC filters all requested objects and only returns those which refer
# to a valid LFS pointer.
- #
- # Deprecated in favor of `ListLFSPointers`, passing object IDs as revisions.
rpc :GetLFSPointers, Gitaly::GetLFSPointersRequest, stream(Gitaly::GetLFSPointersResponse)
- # GetNewLFSPointers retrieves LFS pointers for a limited subset of the
- # commit graph. It will return all LFS pointers which are reachable by the
- # provided revision, but not reachable by any of the limiting references.
- #
- # Deprecated in favor of `ListLFSPointers`. `NotInAll` can be replaced with
- # `REVISION` `--not` `--all`, while `NotInRefs` can be replaced with
- # `REVISION` `--not` `NotInRevs...`.
- rpc :GetNewLFSPointers, Gitaly::GetNewLFSPointersRequest, stream(Gitaly::GetNewLFSPointersResponse)
- # GetAllLFSPointers retrieves all LFS pointers of the given repository.
- #
- # Deprecated in favor of `ListLFSPointers`, passing `--all` as revision.
- rpc :GetAllLFSPointers, Gitaly::GetAllLFSPointersRequest, stream(Gitaly::GetAllLFSPointersResponse)
# ListLFSPointers retrieves LFS pointers reachable from a given set of
# revisions by doing a graph walk. This includes both normal revisions like
# an object ID or branch, but also the pseudo-revisions "--all" and "--not"
@@ -44,9 +30,8 @@ module Gitaly
# transitively reference any LFS pointers are ignored. It is not valid to
# pass revisions which do not resolve to an existing object.
rpc :ListLFSPointers, Gitaly::ListLFSPointersRequest, stream(Gitaly::ListLFSPointersResponse)
- # ListAllLFSPointers retrieves all LFS pointers in the repository. In
- # contrast to `GetAllLFSPointers`, this RPC also includes LFS pointers which
- # are not reachable by any reference.
+ # ListAllLFSPointers retrieves all LFS pointers in the repository, including
+ # those not reachable by any reference.
rpc :ListAllLFSPointers, Gitaly::ListAllLFSPointersRequest, stream(Gitaly::ListAllLFSPointersResponse)
end
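For context, a hedged Go sketch of driving ListLFSPointers with a pseudo-revision as described in the comments above; the package, function name and revision arguments are illustrative assumptions.

package example

import (
	"context"
	"io"

	"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
)

// listAllReachableLFSPointers walks the whole commit graph ("--all") and
// collects every LFS pointer reachable from any reference.
func listAllReachableLFSPointers(ctx context.Context, client gitalypb.BlobServiceClient, repo *gitalypb.Repository) ([]*gitalypb.LFSPointer, error) {
	stream, err := client.ListLFSPointers(ctx, &gitalypb.ListLFSPointersRequest{
		Repository: repo,
		// Pseudo-revisions are accepted alongside normal revisions; "--not" can be
		// used to exclude ranges, e.g. []string{"my-branch", "--not", "master"}.
		Revisions: []string{"--all"},
	})
	if err != nil {
		return nil, err
	}

	var pointers []*gitalypb.LFSPointer
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			return pointers, nil
		}
		if err != nil {
			return nil, err
		}
		pointers = append(pointers, resp.GetLfsPointers()...)
	}
}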
diff --git a/ruby/proto/gitaly/remote_pb.rb b/ruby/proto/gitaly/remote_pb.rb
index bd5859965..722b34b0c 100644
--- a/ruby/proto/gitaly/remote_pb.rb
+++ b/ruby/proto/gitaly/remote_pb.rb
@@ -50,6 +50,8 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
add_message "gitaly.FindRemoteRootRefRequest" do
optional :repository, :message, 1, "gitaly.Repository"
optional :remote, :string, 2
+ optional :remote_url, :string, 3
+ optional :http_authorization_header, :string, 4
end
add_message "gitaly.FindRemoteRootRefResponse" do
optional :ref, :string, 1
diff --git a/ruby/proto/gitaly/remote_services_pb.rb b/ruby/proto/gitaly/remote_services_pb.rb
index 93959d0db..90c4abc25 100644
--- a/ruby/proto/gitaly/remote_services_pb.rb
+++ b/ruby/proto/gitaly/remote_services_pb.rb
@@ -24,6 +24,11 @@ module Gitaly
# the patterns specified in the requests.
rpc :UpdateRemoteMirror, stream(Gitaly::UpdateRemoteMirrorRequest), Gitaly::UpdateRemoteMirrorResponse
rpc :FindRemoteRepository, Gitaly::FindRemoteRepositoryRequest, Gitaly::FindRemoteRepositoryResponse
+ # FindRemoteRootRef tries to find the root reference of a remote
+ # repository. The root reference is the default branch as pointed to by
+ # the remote's HEAD reference. Returns an InvalidArgument error if the
+ # specified remote does not exist and a NotFound error in case no HEAD
+ # branch was found.
rpc :FindRemoteRootRef, Gitaly::FindRemoteRootRefRequest, Gitaly::FindRemoteRootRefResponse
end
diff --git a/ruby/proto/gitaly/repository-service_pb.rb b/ruby/proto/gitaly/repository-service_pb.rb
index 37ed5cfc7..09e3de5c6 100644
--- a/ruby/proto/gitaly/repository-service_pb.rb
+++ b/ruby/proto/gitaly/repository-service_pb.rb
@@ -38,6 +38,10 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
end
add_message "gitaly.WriteCommitGraphRequest" do
optional :repository, :message, 1, "gitaly.Repository"
+ optional :splitStrategy, :enum, 2, "gitaly.WriteCommitGraphRequest.SplitStrategy"
+ end
+ add_enum "gitaly.WriteCommitGraphRequest.SplitStrategy" do
+ value :SizeMultiple, 0
end
add_message "gitaly.WriteCommitGraphResponse" do
end
@@ -166,6 +170,12 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
add_message "gitaly.CreateBundleResponse" do
optional :data, :bytes, 1
end
+ add_message "gitaly.GetConfigRequest" do
+ optional :repository, :message, 1, "gitaly.Repository"
+ end
+ add_message "gitaly.GetConfigResponse" do
+ optional :data, :bytes, 1
+ end
add_message "gitaly.SetConfigRequest" do
optional :repository, :message, 1, "gitaly.Repository"
repeated :entries, :message, 2, "gitaly.SetConfigRequest.Entry"
@@ -346,6 +356,7 @@ module Gitaly
GarbageCollectRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GarbageCollectRequest").msgclass
GarbageCollectResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GarbageCollectResponse").msgclass
WriteCommitGraphRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WriteCommitGraphRequest").msgclass
+ WriteCommitGraphRequest::SplitStrategy = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WriteCommitGraphRequest.SplitStrategy").enummodule
WriteCommitGraphResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WriteCommitGraphResponse").msgclass
CleanupRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.CleanupRequest").msgclass
CleanupResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.CleanupResponse").msgclass
@@ -380,6 +391,8 @@ module Gitaly
CreateRepositoryFromURLResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.CreateRepositoryFromURLResponse").msgclass
CreateBundleRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.CreateBundleRequest").msgclass
CreateBundleResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.CreateBundleResponse").msgclass
+ GetConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GetConfigRequest").msgclass
+ GetConfigResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.GetConfigResponse").msgclass
SetConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.SetConfigRequest").msgclass
SetConfigRequest::Entry = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.SetConfigRequest.Entry").msgclass
SetConfigResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.SetConfigResponse").msgclass
diff --git a/ruby/proto/gitaly/repository-service_services_pb.rb b/ruby/proto/gitaly/repository-service_services_pb.rb
index 47d1b49a7..71120a29d 100644
--- a/ruby/proto/gitaly/repository-service_services_pb.rb
+++ b/ruby/proto/gitaly/repository-service_services_pb.rb
@@ -40,6 +40,9 @@ module Gitaly
rpc :CreateRepositoryFromURL, Gitaly::CreateRepositoryFromURLRequest, Gitaly::CreateRepositoryFromURLResponse
rpc :CreateBundle, Gitaly::CreateBundleRequest, stream(Gitaly::CreateBundleResponse)
rpc :CreateRepositoryFromBundle, stream(Gitaly::CreateRepositoryFromBundleRequest), Gitaly::CreateRepositoryFromBundleResponse
+ # GetConfig reads the target repository's gitconfig and streams its contents
+ # back. Returns a NotFound error in case no gitconfig was found.
+ rpc :GetConfig, Gitaly::GetConfigRequest, stream(Gitaly::GetConfigResponse)
rpc :SetConfig, Gitaly::SetConfigRequest, Gitaly::SetConfigResponse
rpc :DeleteConfig, Gitaly::DeleteConfigRequest, Gitaly::DeleteConfigResponse
rpc :FindLicense, Gitaly::FindLicenseRequest, Gitaly::FindLicenseResponse
diff --git a/ruby/proto/gitaly/version.rb b/ruby/proto/gitaly/version.rb
index bc1b9d103..cafe9038d 100644
--- a/ruby/proto/gitaly/version.rb
+++ b/ruby/proto/gitaly/version.rb
@@ -2,5 +2,5 @@
# (https://gitlab.com/gitlab-org/release-tools/), and should not be
# modified.
module Gitaly
- VERSION = '13.11.0-rc1'
+ VERSION = '13.12.0-rc1'
end
diff --git a/ruby/proto/gitaly/wiki_pb.rb b/ruby/proto/gitaly/wiki_pb.rb
index 145edb038..b655a5299 100644
--- a/ruby/proto/gitaly/wiki_pb.rb
+++ b/ruby/proto/gitaly/wiki_pb.rb
@@ -28,15 +28,6 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
optional :historical, :bool, 7
optional :raw_data, :bytes, 8
end
- add_message "gitaly.WikiGetPageVersionsRequest" do
- optional :repository, :message, 1, "gitaly.Repository"
- optional :page_path, :bytes, 2
- optional :page, :int32, 3
- optional :per_page, :int32, 4
- end
- add_message "gitaly.WikiGetPageVersionsResponse" do
- repeated :versions, :message, 1, "gitaly.WikiPageVersion"
- end
add_message "gitaly.WikiWritePageRequest" do
optional :repository, :message, 1, "gitaly.Repository"
optional :name, :bytes, 2
@@ -74,17 +65,6 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
add_message "gitaly.WikiFindPageResponse" do
optional :page, :message, 1, "gitaly.WikiPage"
end
- add_message "gitaly.WikiFindFileRequest" do
- optional :repository, :message, 1, "gitaly.Repository"
- optional :name, :bytes, 2
- optional :revision, :bytes, 3
- end
- add_message "gitaly.WikiFindFileResponse" do
- optional :name, :bytes, 1
- optional :mime_type, :string, 2
- optional :raw_data, :bytes, 3
- optional :path, :bytes, 4
- end
add_message "gitaly.WikiGetAllPagesRequest" do
optional :repository, :message, 1, "gitaly.Repository"
optional :limit, :uint32, 2
@@ -120,8 +100,6 @@ module Gitaly
WikiCommitDetails = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiCommitDetails").msgclass
WikiPageVersion = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiPageVersion").msgclass
WikiPage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiPage").msgclass
- WikiGetPageVersionsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiGetPageVersionsRequest").msgclass
- WikiGetPageVersionsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiGetPageVersionsResponse").msgclass
WikiWritePageRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiWritePageRequest").msgclass
WikiWritePageResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiWritePageResponse").msgclass
WikiUpdatePageRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiUpdatePageRequest").msgclass
@@ -130,8 +108,6 @@ module Gitaly
WikiDeletePageResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiDeletePageResponse").msgclass
WikiFindPageRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiFindPageRequest").msgclass
WikiFindPageResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiFindPageResponse").msgclass
- WikiFindFileRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiFindFileRequest").msgclass
- WikiFindFileResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiFindFileResponse").msgclass
WikiGetAllPagesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiGetAllPagesRequest").msgclass
WikiGetAllPagesRequest::SortBy = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiGetAllPagesRequest.SortBy").enummodule
WikiGetAllPagesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.WikiGetAllPagesResponse").msgclass
diff --git a/ruby/proto/gitaly/wiki_services_pb.rb b/ruby/proto/gitaly/wiki_services_pb.rb
index 3ff43eb69..d1cd1ceaa 100644
--- a/ruby/proto/gitaly/wiki_services_pb.rb
+++ b/ruby/proto/gitaly/wiki_services_pb.rb
@@ -14,13 +14,11 @@ module Gitaly
self.unmarshal_class_method = :decode
self.service_name = 'gitaly.WikiService'
- rpc :WikiGetPageVersions, Gitaly::WikiGetPageVersionsRequest, stream(Gitaly::WikiGetPageVersionsResponse)
rpc :WikiWritePage, stream(Gitaly::WikiWritePageRequest), Gitaly::WikiWritePageResponse
rpc :WikiUpdatePage, stream(Gitaly::WikiUpdatePageRequest), Gitaly::WikiUpdatePageResponse
rpc :WikiDeletePage, Gitaly::WikiDeletePageRequest, Gitaly::WikiDeletePageResponse
# WikiFindPage returns a stream because the page's raw_data field may be arbitrarily large.
rpc :WikiFindPage, Gitaly::WikiFindPageRequest, stream(Gitaly::WikiFindPageResponse)
- rpc :WikiFindFile, Gitaly::WikiFindFileRequest, stream(Gitaly::WikiFindFileResponse)
rpc :WikiGetAllPages, Gitaly::WikiGetAllPagesRequest, stream(Gitaly::WikiGetAllPagesResponse)
rpc :WikiListPages, Gitaly::WikiListPagesRequest, stream(Gitaly::WikiListPagesResponse)
end
diff --git a/ruby/spec/lib/gitlab/git/repository_spec.rb b/ruby/spec/lib/gitlab/git/repository_spec.rb
index ff2e84564..70899d4c7 100644
--- a/ruby/spec/lib/gitlab/git/repository_spec.rb
+++ b/ruby/spec/lib/gitlab/git/repository_spec.rb
@@ -304,53 +304,6 @@ describe Gitlab::Git::Repository do # rubocop:disable Metrics/BlockLength
end
end
- describe '#ff_merge' do
- let(:repository) { mutable_repository }
- let(:branch_head) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
- let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
- let(:target_branch) { 'test-ff-target-branch' }
-
- before do
- create_branch(repository, target_branch, branch_head)
- end
-
- subject { repository.ff_merge(user, source_sha, target_branch) }
-
- it 'performs a ff_merge' do
- expect(subject.newrev).to eq(source_sha)
- expect(subject.repo_created).to be(false)
- expect(subject.branch_created).to be(false)
-
- expect(repository.commit(target_branch).id).to eq(source_sha)
- end
-
- context 'with a non-existing target branch' do
- subject { repository.ff_merge(user, source_sha, 'this-isnt-real') }
-
- it 'throws an ArgumentError' do
- expect { subject }.to raise_error(ArgumentError)
- end
- end
-
- context 'with a non-existing source commit' do
- let(:source_sha) { 'f001' }
-
- it 'throws an ArgumentError' do
- expect { subject }.to raise_error(ArgumentError)
- end
- end
-
- context 'when the source sha is not a descendant of the branch head' do
- let(:source_sha) { '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863' }
-
- it "doesn't perform the ff_merge" do
- expect { subject }.to raise_error(Gitlab::Git::CommitError)
-
- expect(repository.commit(target_branch).id).to eq(branch_head)
- end
- end
- end
-
describe 'remotes' do
let(:repository) { mutable_repository }
let(:remote_name) { 'my-remote' }