gitlab.com/gitlab-org/gitaly.git
-rw-r--r--  .gitlab-ci.yml | 5
-rw-r--r--  cmd/gitaly-git2go/commit/commit.go | 3
-rw-r--r--  internal/backup/backup_test.go | 40
-rw-r--r--  internal/git/command_description.go | 3
-rw-r--r--  internal/git/gittest/repository_suite.go | 50
-rw-r--r--  internal/git/localrepo/remote_extra_test.go | 45
-rw-r--r--  internal/git/localrepo/repo_test.go | 17
-rw-r--r--  internal/git/remoterepo/repository_test.go | 28
-rw-r--r--  internal/git2go/resolve_conflicts.go | 8
-rw-r--r--  internal/gitaly/server/server.go | 2
-rw-r--r--  internal/gitaly/service/objectpool/alternates_test.go | 12
-rw-r--r--  internal/gitaly/service/objectpool/create_test.go | 27
-rw-r--r--  internal/gitaly/service/objectpool/fetch_into_object_pool_test.go | 45
-rw-r--r--  internal/gitaly/service/objectpool/get_test.go | 10
-rw-r--r--  internal/gitaly/service/objectpool/link_test.go | 30
-rw-r--r--  internal/gitaly/service/objectpool/reduplicate_test.go | 2
-rw-r--r--  internal/gitaly/service/objectpool/testhelper_test.go | 26
-rw-r--r--  internal/gitaly/service/repository/cleanup.go | 35
-rw-r--r--  internal/gitaly/service/repository/cleanup_test.go | 29
-rw-r--r--  internal/gitaly/service/repository/optimize.go | 404
-rw-r--r--  internal/gitaly/service/repository/optimize_test.go | 557
-rw-r--r--  internal/gitaly/service/repository/prune_unreachable_objects.go | 44
-rw-r--r--  internal/gitaly/service/repository/prune_unreachable_objects_test.go | 134
-rw-r--r--  internal/gitaly/service/repository/repack.go | 43
-rw-r--r--  internal/log/log.go | 44
-rw-r--r--  internal/log/log_test.go | 70
-rw-r--r--  internal/middleware/limithandler/concurrency_limiter.go | 9
-rw-r--r--  internal/middleware/limithandler/concurrency_limiter_test.go | 43
-rw-r--r--  internal/middleware/limithandler/middleware.go | 14
-rw-r--r--  internal/middleware/limithandler/middleware_test.go | 97
-rw-r--r--  internal/middleware/limithandler/monitor.go | 12
-rw-r--r--  internal/praefect/coordinator.go | 44
-rw-r--r--  internal/praefect/coordinator_test.go | 4
-rw-r--r--  internal/praefect/datastore/datastore.go | 2
-rw-r--r--  internal/praefect/protoregistry/protoregistry_test.go | 114
-rw-r--r--  internal/praefect/replicator.go | 21
-rw-r--r--  internal/praefect/replicator_test.go | 37
-rw-r--r--  internal/praefect/router.go | 2
-rw-r--r--  internal/praefect/router_node_manager.go | 11
-rw-r--r--  internal/praefect/router_per_repository.go | 45
-rw-r--r--  internal/praefect/router_per_repository_test.go | 63
-rw-r--r--  proto/go/gitalypb/repository-service.pb.go | 893
-rw-r--r--  proto/go/gitalypb/repository-service_grpc.pb.go | 58
-rw-r--r--  proto/repository-service.proto | 29
-rw-r--r--  ruby/Gemfile | 2
-rw-r--r--  ruby/Gemfile.lock | 20
-rw-r--r--  ruby/proto/gitaly/repository-service_pb.rb | 7
-rw-r--r--  ruby/proto/gitaly/repository-service_services_pb.rb | 12
48 files changed, 2557 insertions(+), 695 deletions(-)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 818fc3bd4..449656fec 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -4,7 +4,7 @@ stages:
- qa
default:
- image: registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-${RUBY_VERSION}-golang-${GO_VERSION}-git-2.31
+ image: registry.gitlab.com/gitlab-org/gitlab-build-images:debian-${DEBIAN_VERSION}-ruby-${RUBY_VERSION}-golang-${GO_VERSION}-git-2.33-postgresql-11
tags:
- gitlab-org
@@ -12,6 +12,7 @@ variables:
DOCKER_DRIVER: overlay2
SAST_DISABLE_DIND: "true"
SAST_DEFAULT_ANALYZERS: "gosec"
+ DEBIAN_VERSION: "bullseye"
# We use Gitaly's Git version by default.
GIT_VERSION: "default"
GO_VERSION: "1.17"
@@ -45,7 +46,7 @@ include:
key:
files:
- ruby/Gemfile.lock
- prefix: ruby-${RUBY_VERSION}
+ prefix: debian-${DEBIAN_VERSION}-ruby-${RUBY_VERSION}
paths:
- .ruby
policy: pull
diff --git a/cmd/gitaly-git2go/commit/commit.go b/cmd/gitaly-git2go/commit/commit.go
index 0a3ad965a..58bb1a51f 100644
--- a/cmd/gitaly-git2go/commit/commit.go
+++ b/cmd/gitaly-git2go/commit/commit.go
@@ -25,7 +25,8 @@ func Run(ctx context.Context, stdin io.Reader, stdout io.Writer) error {
commitID, err := commit(ctx, params)
return gob.NewEncoder(stdout).Encode(git2go.Result{
CommitID: commitID,
- Error: git2go.SerializableError(err),
+ Error: git2go.SerializableError(err), // Set both fields for backwards compatibility.
+ Err: git2go.SerializableError(err),
})
}
diff --git a/internal/backup/backup_test.go b/internal/backup/backup_test.go
index 0536c5449..ee9c44ee6 100644
--- a/internal/backup/backup_test.go
+++ b/internal/backup/backup_test.go
@@ -31,7 +31,9 @@ func TestManager_Create(t *testing.T) {
cfg := testcfg.Build(t)
- gitalyAddr := testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll)
+ cfg.SocketPath = testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll, testserver.WithDisableMetadataForceCreation())
+
+ ctx := testhelper.Context(t)
for _, tc := range []struct {
desc string
@@ -43,8 +45,9 @@ func TestManager_Create(t *testing.T) {
{
desc: "no hooks",
setup: func(t testing.TB) *gitalypb.Repository {
- noHooksRepo, _ := gittest.CloneRepo(t, cfg, cfg.Storages[0], gittest.CloneRepoOpts{
+ noHooksRepo, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
RelativePath: "no-hooks",
+ Seed: gittest.SeedGitLabTest,
})
return noHooksRepo
},
@@ -54,8 +57,9 @@ func TestManager_Create(t *testing.T) {
{
desc: "hooks",
setup: func(t testing.TB) *gitalypb.Repository {
- hooksRepo, hooksRepoPath := gittest.CloneRepo(t, cfg, cfg.Storages[0], gittest.CloneRepoOpts{
+ hooksRepo, hooksRepoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
RelativePath: "hooks",
+ Seed: gittest.SeedGitLabTest,
})
require.NoError(t, os.Mkdir(filepath.Join(hooksRepoPath, "custom_hooks"), os.ModePerm))
require.NoError(t, os.WriteFile(filepath.Join(hooksRepoPath, "custom_hooks/pre-commit.sample"), []byte("Some hooks"), os.ModePerm))
@@ -67,7 +71,7 @@ func TestManager_Create(t *testing.T) {
{
desc: "empty repo",
setup: func(t testing.TB) *gitalypb.Repository {
- emptyRepo, _ := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ emptyRepo, _ := gittest.CreateRepository(ctx, t, cfg)
return emptyRepo
},
createsBundle: false,
@@ -77,7 +81,7 @@ func TestManager_Create(t *testing.T) {
{
desc: "nonexistent repo",
setup: func(t testing.TB) *gitalypb.Repository {
- emptyRepo, _ := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ emptyRepo, _ := gittest.CreateRepository(ctx, t, cfg)
nonexistentRepo := proto.Clone(emptyRepo).(*gitalypb.Repository)
nonexistentRepo.RelativePath = "nonexistent"
return nonexistentRepo
@@ -94,7 +98,6 @@ func TestManager_Create(t *testing.T) {
refsPath := filepath.Join(path, repo.RelativePath, backupID, "001.refs")
bundlePath := filepath.Join(path, repo.RelativePath, backupID, "001.bundle")
customHooksPath := filepath.Join(path, repo.RelativePath, backupID, "001.custom_hooks.tar")
- ctx := testhelper.Context(t)
pool := client.NewPool()
defer testhelper.MustClose(t, pool)
@@ -105,7 +108,7 @@ func TestManager_Create(t *testing.T) {
fsBackup := NewManager(sink, locator, pool, backupID)
err = fsBackup.Create(ctx, &CreateRequest{
- Server: storage.ServerInfo{Address: gitalyAddr, Token: cfg.Auth.Token},
+ Server: storage.ServerInfo{Address: cfg.SocketPath, Token: cfg.Auth.Token},
Repository: repo,
})
if tc.err == nil {
@@ -152,7 +155,8 @@ func TestManager_Create_incremental(t *testing.T) {
cfg := testcfg.Build(t)
- gitalyAddr := testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll)
+ cfg.SocketPath = testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll, testserver.WithDisableMetadataForceCreation())
+ ctx := testhelper.Context(t)
for _, tc := range []struct {
desc string
@@ -163,7 +167,10 @@ func TestManager_Create_incremental(t *testing.T) {
{
desc: "no previous backup",
setup: func(t testing.TB, backupRoot string) *gitalypb.Repository {
- repo, _ := gittest.CloneRepo(t, cfg, cfg.Storages[0], gittest.CloneRepoOpts{RelativePath: "repo"})
+ repo, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ RelativePath: "repo",
+ Seed: gittest.SeedGitLabTest,
+ })
return repo
},
expectedIncrement: "001",
@@ -171,7 +178,10 @@ func TestManager_Create_incremental(t *testing.T) {
{
desc: "previous backup, no updates",
setup: func(t testing.TB, backupRoot string) *gitalypb.Repository {
- repo, repoPath := gittest.CloneRepo(t, cfg, cfg.Storages[0], gittest.CloneRepoOpts{RelativePath: "repo"})
+ repo, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ RelativePath: "repo",
+ Seed: gittest.SeedGitLabTest,
+ })
backupRepoPath := filepath.Join(backupRoot, repo.RelativePath)
backupPath := filepath.Join(backupRepoPath, backupID)
@@ -194,7 +204,10 @@ func TestManager_Create_incremental(t *testing.T) {
{
desc: "previous backup, updates",
setup: func(t testing.TB, backupRoot string) *gitalypb.Repository {
- repo, repoPath := gittest.CloneRepo(t, cfg, cfg.Storages[0], gittest.CloneRepoOpts{RelativePath: "repo"})
+ repo, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ RelativePath: "repo",
+ Seed: gittest.SeedGitLabTest,
+ })
backupRepoPath := filepath.Join(backupRoot, repo.RelativePath)
backupPath := filepath.Join(backupRepoPath, backupID)
@@ -224,7 +237,6 @@ func TestManager_Create_incremental(t *testing.T) {
repoPath := filepath.Join(cfg.Storages[0].Path, repo.RelativePath)
refsPath := filepath.Join(path, repo.RelativePath, backupID, tc.expectedIncrement+".refs")
bundlePath := filepath.Join(path, repo.RelativePath, backupID, tc.expectedIncrement+".bundle")
- ctx := testhelper.Context(t)
pool := client.NewPool()
defer testhelper.MustClose(t, pool)
@@ -235,7 +247,7 @@ func TestManager_Create_incremental(t *testing.T) {
fsBackup := NewManager(sink, locator, pool, backupID)
err = fsBackup.Create(ctx, &CreateRequest{
- Server: storage.ServerInfo{Address: gitalyAddr, Token: cfg.Auth.Token},
+ Server: storage.ServerInfo{Address: cfg.SocketPath, Token: cfg.Auth.Token},
Repository: repo,
Incremental: true,
})
@@ -262,7 +274,7 @@ func TestManager_Restore(t *testing.T) {
cfg := testcfg.Build(t)
testcfg.BuildGitalyHooks(t, cfg)
- gitalyAddr := testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll)
+ gitalyAddr := testserver.RunGitalyServer(t, cfg, nil, setup.RegisterAll, testserver.WithDisableMetadataForceCreation())
ctx := testhelper.Context(t)
testManagerRestore(t, ctx, cfg, gitalyAddr)
diff --git a/internal/git/command_description.go b/internal/git/command_description.go
index d4d5ec82d..a1a33c037 100644
--- a/internal/git/command_description.go
+++ b/internal/git/command_description.go
@@ -176,6 +176,9 @@ var commandDescriptions = map[string]commandDescription{
"pack-objects": {
flags: scNoRefUpdates | scGeneratesPackfiles,
},
+ "prune": {
+ flags: scNoRefUpdates,
+ },
"push": {
flags: scNoRefUpdates,
opts: []GlobalOption{
diff --git a/internal/git/gittest/repository_suite.go b/internal/git/gittest/repository_suite.go
index 4a0776964..9fe27eaaa 100644
--- a/internal/git/gittest/repository_suite.go
+++ b/internal/git/gittest/repository_suite.go
@@ -1,6 +1,7 @@
package gittest
import (
+ "context"
"testing"
"github.com/stretchr/testify/require"
@@ -8,14 +9,17 @@ import (
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
- "gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
+// GetRepositoryFunc is used to get a clean test repository for the different implementations of the
+// Repository interface in the common test suite TestRepository.
+type GetRepositoryFunc func(ctx context.Context, t testing.TB, seeded bool) (git.Repository, string)
+
// TestRepository tests an implementation of Repository.
-func TestRepository(t *testing.T, cfg config.Cfg, getRepository func(testing.TB, *gitalypb.Repository) git.Repository) {
+func TestRepository(t *testing.T, cfg config.Cfg, getRepository GetRepositoryFunc) {
for _, tc := range []struct {
desc string
- test func(*testing.T, config.Cfg, func(testing.TB, *gitalypb.Repository) git.Repository)
+ test func(*testing.T, config.Cfg, GetRepositoryFunc)
}{
{
desc: "ResolveRevision",
@@ -36,11 +40,9 @@ func TestRepository(t *testing.T, cfg config.Cfg, getRepository func(testing.TB,
}
}
-func testRepositoryResolveRevision(t *testing.T, cfg config.Cfg, getRepository func(testing.TB, *gitalypb.Repository) git.Repository) {
+func testRepositoryResolveRevision(t *testing.T, cfg config.Cfg, getRepository GetRepositoryFunc) {
ctx := testhelper.Context(t)
- pbRepo, _ := CloneRepo(t, cfg, cfg.Storages[0])
-
for _, tc := range []struct {
desc string
revision string
@@ -76,7 +78,8 @@ func testRepositoryResolveRevision(t *testing.T, cfg config.Cfg, getRepository f
},
} {
t.Run(tc.desc, func(t *testing.T) {
- oid, err := getRepository(t, pbRepo).ResolveRevision(ctx, git.Revision(tc.revision))
+ repo, _ := getRepository(ctx, t, true)
+ oid, err := repo.ResolveRevision(ctx, git.Revision(tc.revision))
if tc.expected == "" {
require.Equal(t, err, git.ErrReferenceNotFound)
return
@@ -88,12 +91,10 @@ func testRepositoryResolveRevision(t *testing.T, cfg config.Cfg, getRepository f
}
}
-func testRepositoryHasBranches(t *testing.T, cfg config.Cfg, getRepository func(testing.TB, *gitalypb.Repository) git.Repository) {
+func testRepositoryHasBranches(t *testing.T, cfg config.Cfg, getRepository GetRepositoryFunc) {
ctx := testhelper.Context(t)
- pbRepo, repoPath := InitRepo(t, cfg, cfg.Storages[0])
-
- repo := getRepository(t, pbRepo)
+ repo, repoPath := getRepository(ctx, t, false)
emptyCommit := text.ChompBytes(Exec(t, cfg, "-C", repoPath, "commit-tree", git.EmptyTreeOID.String()))
@@ -110,8 +111,9 @@ func testRepositoryHasBranches(t *testing.T, cfg config.Cfg, getRepository func(
require.True(t, hasBranches)
}
-func testRepositoryGetDefaultBranch(t *testing.T, cfg config.Cfg, getRepository func(testing.TB, *gitalypb.Repository) git.Repository) {
+func testRepositoryGetDefaultBranch(t *testing.T, cfg config.Cfg, getRepository GetRepositoryFunc) {
const testOID = "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"
+ ctx := testhelper.Context(t)
for _, tc := range []struct {
desc string
@@ -121,8 +123,7 @@ func testRepositoryGetDefaultBranch(t *testing.T, cfg config.Cfg, getRepository
{
desc: "default ref",
repo: func(t *testing.T) git.Repository {
- repoProto, repoPath := InitRepo(t, cfg, cfg.Storages[0])
- repo := getRepository(t, repoProto)
+ repo, repoPath := getRepository(ctx, t, false)
oid := WriteCommit(t, cfg, repoPath, WithParents(), WithBranch("apple"))
WriteCommit(t, cfg, repoPath, WithParents(oid), WithBranch("main"))
return repo
@@ -132,8 +133,7 @@ func testRepositoryGetDefaultBranch(t *testing.T, cfg config.Cfg, getRepository
{
desc: "legacy default ref",
repo: func(t *testing.T) git.Repository {
- repoProto, repoPath := InitRepo(t, cfg, cfg.Storages[0])
- repo := getRepository(t, repoProto)
+ repo, repoPath := getRepository(ctx, t, false)
oid := WriteCommit(t, cfg, repoPath, WithParents(), WithBranch("apple"))
WriteCommit(t, cfg, repoPath, WithParents(oid), WithBranch("master"))
return repo
@@ -143,16 +143,14 @@ func testRepositoryGetDefaultBranch(t *testing.T, cfg config.Cfg, getRepository
{
desc: "no branches",
repo: func(t *testing.T) git.Repository {
- repoProto, _ := InitRepo(t, cfg, cfg.Storages[0])
- repo := getRepository(t, repoProto)
+ repo, _ := getRepository(ctx, t, false)
return repo
},
},
{
desc: "one branch",
repo: func(t *testing.T) git.Repository {
- repoProto, repoPath := InitRepo(t, cfg, cfg.Storages[0])
- repo := getRepository(t, repoProto)
+ repo, repoPath := getRepository(ctx, t, false)
WriteCommit(t, cfg, repoPath, WithParents(), WithBranch("apple"))
return repo
},
@@ -161,8 +159,7 @@ func testRepositoryGetDefaultBranch(t *testing.T, cfg config.Cfg, getRepository
{
desc: "no default branches",
repo: func(t *testing.T) git.Repository {
- repoProto, repoPath := InitRepo(t, cfg, cfg.Storages[0])
- repo := getRepository(t, repoProto)
+ repo, repoPath := getRepository(ctx, t, false)
oid := WriteCommit(t, cfg, repoPath, WithParents(), WithBranch("apple"))
WriteCommit(t, cfg, repoPath, WithParents(oid), WithBranch("banana"))
return repo
@@ -172,16 +169,15 @@ func testRepositoryGetDefaultBranch(t *testing.T, cfg config.Cfg, getRepository
{
desc: "test repo default",
repo: func(t *testing.T) git.Repository {
- repoProto, _ := CloneRepo(t, cfg, cfg.Storages[0])
- return getRepository(t, repoProto)
+ repo, _ := getRepository(ctx, t, true)
+ return repo
},
expectedName: git.LegacyDefaultRef,
},
{
desc: "test repo HEAD set",
repo: func(t *testing.T) git.Repository {
- repoProto, repoPath := CloneRepo(t, cfg, cfg.Storages[0])
- repo := getRepository(t, repoProto)
+ repo, repoPath := getRepository(ctx, t, true)
Exec(t, cfg, "-C", repoPath, "update-ref", "refs/heads/feature", testOID)
Exec(t, cfg, "-C", repoPath, "symbolic-ref", "HEAD", "refs/heads/feature")
return repo
@@ -190,8 +186,6 @@ func testRepositoryGetDefaultBranch(t *testing.T, cfg config.Cfg, getRepository
},
} {
t.Run(tc.desc, func(t *testing.T) {
- ctx := testhelper.Context(t)
-
name, err := tc.repo(t).GetDefaultBranch(ctx)
require.NoError(t, err)
require.Equal(t, tc.expectedName, name)
diff --git a/internal/git/localrepo/remote_extra_test.go b/internal/git/localrepo/remote_extra_test.go
index 1603a896d..4157fc491 100644
--- a/internal/git/localrepo/remote_extra_test.go
+++ b/internal/git/localrepo/remote_extra_test.go
@@ -11,6 +11,7 @@ import (
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/hook"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/ssh"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
@@ -22,7 +23,7 @@ import (
func TestRepo_FetchInternal(t *testing.T) {
ctx := testhelper.Context(t)
- cfg, remoteRepoProto, _ := testcfg.BuildWithRepo(t)
+ cfg := testcfg.Build(t)
gitCmdFactory, readGitProtocol := gittest.NewProtocolDetectingCommandFactory(ctx, t, cfg)
cfg.SocketPath = testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
@@ -36,7 +37,21 @@ func TestRepo_FetchInternal(t *testing.T) {
deps.GetGitCmdFactory(),
deps.GetPackObjectsCache(),
))
- }, testserver.WithGitCommandFactory(gitCmdFactory))
+ gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
+ deps.GetCfg(),
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetConnsPool(),
+ deps.GetGit2goExecutor(),
+ ))
+ }, testserver.WithGitCommandFactory(gitCmdFactory), testserver.WithDisableMetadataForceCreation())
+
+ remoteRepoProto, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
remoteRepo := localrepo.NewTestRepo(t, cfg, remoteRepoProto)
testcfg.BuildGitalySSH(t, cfg)
@@ -54,7 +69,7 @@ func TestRepo_FetchInternal(t *testing.T) {
t.Run("refspec with tag", func(t *testing.T) {
ctx := testhelper.MergeIncomingMetadata(ctx, testcfg.GitalyServersMetadataFromCfg(t, cfg))
- repoProto, repoPath := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
gittest.Exec(t, cfg, "-C", repoPath, "config", "fetch.writeCommitGraph", "true")
@@ -86,7 +101,7 @@ func TestRepo_FetchInternal(t *testing.T) {
t.Run("refspec without tags", func(t *testing.T) {
ctx := testhelper.MergeIncomingMetadata(ctx, testcfg.GitalyServersMetadataFromCfg(t, cfg))
- repoProto, _ := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
require.NoError(t, repo.FetchInternal(
@@ -104,7 +119,7 @@ func TestRepo_FetchInternal(t *testing.T) {
t.Run("object ID", func(t *testing.T) {
ctx := testhelper.MergeIncomingMetadata(ctx, testcfg.GitalyServersMetadataFromCfg(t, cfg))
- repoProto, _ := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
require.NoError(t, repo.FetchInternal(
@@ -120,7 +135,7 @@ func TestRepo_FetchInternal(t *testing.T) {
t.Run("nonexistent revision", func(t *testing.T) {
ctx := testhelper.MergeIncomingMetadata(ctx, testcfg.GitalyServersMetadataFromCfg(t, cfg))
- repoProto, _ := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
var stderr bytes.Buffer
@@ -136,7 +151,7 @@ func TestRepo_FetchInternal(t *testing.T) {
t.Run("with env", func(t *testing.T) {
ctx := testhelper.MergeIncomingMetadata(ctx, testcfg.GitalyServersMetadataFromCfg(t, cfg))
- repoProto, _ := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
var stderr bytes.Buffer
@@ -151,7 +166,7 @@ func TestRepo_FetchInternal(t *testing.T) {
t.Run("with disabled transactions", func(t *testing.T) {
ctx := testhelper.MergeIncomingMetadata(ctx, testcfg.GitalyServersMetadataFromCfg(t, cfg))
- repoProto, _ := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
var stderr bytes.Buffer
@@ -166,7 +181,7 @@ func TestRepo_FetchInternal(t *testing.T) {
t.Run("invalid remote repo", func(t *testing.T) {
ctx := testhelper.MergeIncomingMetadata(ctx, testcfg.GitalyServersMetadataFromCfg(t, cfg))
- repoProto, _ := gittest.InitRepo(t, cfg, cfg.Storages[0])
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
err := repo.FetchInternal(ctx, &gitalypb.Repository{
@@ -175,13 +190,21 @@ func TestRepo_FetchInternal(t *testing.T) {
}, []string{"refs/does/not/exist"}, localrepo.FetchOpts{})
require.Error(t, err)
require.IsType(t, err, localrepo.ErrFetchFailed{})
- require.Contains(t, err.Error(), "GetRepoPath: not a git repository")
+
+ expectedMsg := "GetRepoPath: not a git repository"
+ if testhelper.IsPraefectEnabled() {
+ expectedMsg = `repository \"default\"/\"does/not/exist\" not found`
+ }
+
+ require.Contains(t, err.Error(), expectedMsg)
})
t.Run("pruning", func(t *testing.T) {
ctx := testhelper.MergeIncomingMetadata(ctx, testcfg.GitalyServersMetadataFromCfg(t, cfg))
- repoProto, _ := gittest.CloneRepo(t, cfg, cfg.Storages[0])
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
repo := localrepo.NewTestRepo(t, cfg, repoProto)
// Create a local reference. Given that it doesn't exist on the remote side, it
diff --git a/internal/git/localrepo/repo_test.go b/internal/git/localrepo/repo_test.go
index fb733e31c..356ce2ce5 100644
--- a/internal/git/localrepo/repo_test.go
+++ b/internal/git/localrepo/repo_test.go
@@ -1,6 +1,7 @@
package localrepo
import (
+ "context"
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
@@ -14,11 +15,23 @@ import (
func TestRepo(t *testing.T) {
cfg := testcfg.Build(t)
- gittest.TestRepository(t, cfg, func(t testing.TB, pbRepo *gitalypb.Repository) git.Repository {
+ gittest.TestRepository(t, cfg, func(ctx context.Context, t testing.TB, seeded bool) (git.Repository, string) {
t.Helper()
+
+ var (
+ pbRepo *gitalypb.Repository
+ repoPath string
+ )
+
+ if seeded {
+ pbRepo, repoPath = gittest.CloneRepo(t, cfg, cfg.Storages[0])
+ } else {
+ pbRepo, repoPath = gittest.InitRepo(t, cfg, cfg.Storages[0])
+ }
+
gitCmdFactory := gittest.NewCommandFactory(t, cfg)
catfileCache := catfile.NewCache(cfg)
t.Cleanup(catfileCache.Stop)
- return New(config.NewLocator(cfg), gitCmdFactory, catfileCache, pbRepo)
+ return New(config.NewLocator(cfg), gitCmdFactory, catfileCache, pbRepo), repoPath
})
}
diff --git a/internal/git/remoterepo/repository_test.go b/internal/git/remoterepo/repository_test.go
index d61403e45..2c4b6279f 100644
--- a/internal/git/remoterepo/repository_test.go
+++ b/internal/git/remoterepo/repository_test.go
@@ -1,6 +1,7 @@
package remoterepo_test
import (
+ "context"
"testing"
"github.com/stretchr/testify/require"
@@ -14,7 +15,6 @@ import (
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/storage"
"gitlab.com/gitlab-org/gitaly/v14/internal/metadata"
- "gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
@@ -24,7 +24,7 @@ import (
func TestRepository(t *testing.T) {
cfg := testcfg.Build(t)
- serverSocketPath := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
+ cfg.SocketPath = testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
deps.GetCfg(),
deps.GetRubyServer(),
@@ -47,20 +47,28 @@ func TestRepository(t *testing.T) {
deps.GetTxManager(),
deps.GetCatfileCache(),
))
- })
- ctx := testhelper.Context(t)
-
- ctx, err := storage.InjectGitalyServers(ctx, "default", serverSocketPath, cfg.Auth.Token)
- require.NoError(t, err)
+ }, testserver.WithDisableMetadataForceCreation())
pool := client.NewPool()
defer pool.Close()
- gittest.TestRepository(t, cfg, func(t testing.TB, pbRepo *gitalypb.Repository) git.Repository {
+ gittest.TestRepository(t, cfg, func(ctx context.Context, t testing.TB, seeded bool) (git.Repository, string) {
t.Helper()
- r, err := remoterepo.New(metadata.OutgoingToIncoming(ctx), pbRepo, pool)
+ seed := ""
+ if seeded {
+ seed = gittest.SeedGitLabTest
+ }
+
+ ctx, err := storage.InjectGitalyServers(ctx, "default", cfg.SocketPath, cfg.Auth.Token)
+ require.NoError(t, err)
+
+ pbRepo, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: seed,
+ })
+
+ repo, err := remoterepo.New(metadata.OutgoingToIncoming(ctx), pbRepo, pool)
require.NoError(t, err)
- return r
+ return repo, repoPath
})
}
diff --git a/internal/git2go/resolve_conflicts.go b/internal/git2go/resolve_conflicts.go
index 5be31e96a..ae2545531 100644
--- a/internal/git2go/resolve_conflicts.go
+++ b/internal/git2go/resolve_conflicts.go
@@ -20,6 +20,10 @@ type ResolveCommand struct {
// ResolveResult returns information about the successful merge and resolution
type ResolveResult struct {
MergeResult
+
+ // Err is set if an error occurred. Err must exist on all gob serialized
+ // results so that any error can be returned.
+ Err error
}
// Resolve will attempt merging and resolving conflicts for the provided request
@@ -43,5 +47,9 @@ func (b *Executor) Resolve(ctx context.Context, repo repository.GitRepo, r Resol
return ResolveResult{}, fmt.Errorf("resolve: %w", err)
}
+ if response.Err != nil {
+ return ResolveResult{}, response.Err
+ }
+
return response, nil
}
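
Both cmd/gitaly-git2go/commit/commit.go and internal/git2go/resolve_conflicts.go follow the same gob convention after this change: the writer populates the legacy Error field alongside the new Err field, and the reader returns response.Err when it is set. The standalone Go sketch below (the types are hypothetical stand-ins, not the actual git2go ones) illustrates why setting both fields keeps old decoders, which only know about Error, working against the same stream:

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
)

// serializableError stands in for git2go's gob-friendly error wrapper.
type serializableError struct{ Message string }

func (e serializableError) Error() string { return e.Message }

type result struct {
	CommitID string
	Error    error // legacy field, kept for backwards compatibility
	Err      error // new field expected on all gob-serialized results
}

func main() {
	// Interface-typed fields require the concrete type to be registered with gob.
	gob.Register(serializableError{})

	wrapped := serializableError{Message: "merge conflict"}
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(result{Error: wrapped, Err: wrapped}); err != nil {
		panic(err)
	}

	var decoded result
	if err := gob.NewDecoder(&buf).Decode(&decoded); err != nil {
		panic(err)
	}

	// New-style handling mirrors Resolve(): prefer Err when it is set.
	if decoded.Err != nil {
		fmt.Println("error:", decoded.Err)
		return
	}
	fmt.Println("commit:", decoded.CommitID)
}
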
diff --git a/internal/gitaly/server/server.go b/internal/gitaly/server/server.go
index bc34b49d8..2304c0b8f 100644
--- a/internal/gitaly/server/server.go
+++ b/internal/gitaly/server/server.go
@@ -110,6 +110,7 @@ func New(
grpcmwlogrus.StreamServerInterceptor(logrusEntry,
grpcmwlogrus.WithTimestampFormat(gitalylog.LogTimestampFormat),
logMsgProducer,
+ gitalylog.DeciderOption(),
),
gitalylog.StreamLogDataCatcherServerInterceptor(),
sentryhandler.StreamLogHandler,
@@ -131,6 +132,7 @@ func New(
grpcmwlogrus.UnaryServerInterceptor(logrusEntry,
grpcmwlogrus.WithTimestampFormat(gitalylog.LogTimestampFormat),
logMsgProducer,
+ gitalylog.DeciderOption(),
),
gitalylog.UnaryLogDataCatcherServerInterceptor(),
sentryhandler.UnaryLogHandler,
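
The new gitalylog.DeciderOption() entries in both interceptor chains let the logging middleware skip log lines for selected RPCs. Its implementation is added in internal/log (not shown in this excerpt); as a rough, hedged sketch, a decider option built on go-grpc-middleware's logrus integration generally looks like the following, where the callback decides per RPC whether a line is emitted at all (the health-check filter is purely an assumed example, not Gitaly's actual policy):

package logsketch

import (
	grpcmwlogrus "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus"
)

// deciderOption returns an interceptor option whose callback decides, per RPC,
// whether the logging interceptor should emit a log line.
func deciderOption() grpcmwlogrus.Option {
	return grpcmwlogrus.WithDecider(func(fullMethodName string, err error) bool {
		// Suppress routine health checks, log everything else.
		return fullMethodName != "/grpc.health.v1.Health/Check"
	})
}
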
diff --git a/internal/gitaly/service/objectpool/alternates_test.go b/internal/gitaly/service/objectpool/alternates_test.go
index 4ba93937d..85ce42bbd 100644
--- a/internal/gitaly/service/objectpool/alternates_test.go
+++ b/internal/gitaly/service/objectpool/alternates_test.go
@@ -14,8 +14,8 @@ import (
)
func TestDisconnectGitAlternates(t *testing.T) {
- cfg, repoProto, repoPath, _, client := setup(t)
ctx := testhelper.Context(t)
+ cfg, repoProto, repoPath, _, client := setup(ctx, t)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
gitCmdFactory := gittest.NewCommandFactory(t, cfg)
@@ -53,9 +53,9 @@ func TestDisconnectGitAlternates(t *testing.T) {
}
func TestDisconnectGitAlternatesNoAlternates(t *testing.T) {
- cfg, repoProto, repoPath, _, client := setup(t)
- repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx := testhelper.Context(t)
+ cfg, repoProto, repoPath, _, client := setup(ctx, t)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
altPath, err := repo.InfoAlternatesPath()
require.NoError(t, err, "find info/alternates")
@@ -68,8 +68,8 @@ func TestDisconnectGitAlternatesNoAlternates(t *testing.T) {
}
func TestDisconnectGitAlternatesUnexpectedAlternates(t *testing.T) {
- cfg, _, _, _, client := setup(t)
ctx := testhelper.Context(t)
+ cfg, _, _, _, client := setup(ctx, t)
testCases := []struct {
desc string
@@ -100,9 +100,9 @@ func TestDisconnectGitAlternatesUnexpectedAlternates(t *testing.T) {
}
func TestRemoveAlternatesIfOk(t *testing.T) {
- cfg, repoProto, repoPath, _, _ := setup(t)
- repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx := testhelper.Context(t)
+ cfg, repoProto, repoPath, _, _ := setup(ctx, t)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
altPath, err := repo.InfoAlternatesPath()
require.NoError(t, err, "find info/alternates")
diff --git a/internal/gitaly/service/objectpool/create_test.go b/internal/gitaly/service/objectpool/create_test.go
index dd14ab48b..63ae86ebf 100644
--- a/internal/gitaly/service/objectpool/create_test.go
+++ b/internal/gitaly/service/objectpool/create_test.go
@@ -1,6 +1,7 @@
package objectpool
import (
+ "fmt"
"path/filepath"
"strings"
"testing"
@@ -9,14 +10,16 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/praefect/commonerr"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
func TestCreate(t *testing.T) {
- cfg, repo, _, _, client := setup(t)
ctx := testhelper.Context(t)
+ cfg, repo, _, _, client := setup(ctx, t)
pool := initObjectPool(t, cfg, cfg.Storages[0])
@@ -48,8 +51,8 @@ func TestCreate(t *testing.T) {
}
func TestUnsuccessfulCreate(t *testing.T) {
- cfg, repo, _, _, client := setup(t, testserver.WithDisablePraefect())
ctx := testhelper.Context(t)
+ cfg, repo, _, _, client := setup(ctx, t, testserver.WithDisablePraefect())
storageName := repo.GetStorageName()
pool := initObjectPool(t, cfg, cfg.Storages[0])
@@ -137,13 +140,18 @@ func TestUnsuccessfulCreate(t *testing.T) {
}
func TestDelete(t *testing.T) {
- cfg, repoProto, _, _, client := setup(t)
ctx := testhelper.Context(t)
+ cfg, repoProto, _, _, client := setup(ctx, t)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
pool := initObjectPool(t, cfg, cfg.Storages[0])
+ _, err := client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
+ ObjectPool: pool.ToProto(),
+ Origin: repoProto,
+ })
+ require.NoError(t, err)
+
validPoolPath := pool.GetRelativePath()
- require.NoError(t, pool.Create(ctx, repo))
for _, tc := range []struct {
desc string
@@ -196,7 +204,16 @@ func TestDelete(t *testing.T) {
RelativePath: tc.relativePath,
},
}})
- testhelper.RequireGrpcError(t, tc.error, err)
+
+ expectedErr := tc.error
+ if tc.error == errInvalidPoolDir && testhelper.IsPraefectEnabled() {
+ expectedErr = helper.ErrNotFound(fmt.Errorf(
+ "mutator call: route repository mutator: get repository id: %w",
+ commonerr.NewRepositoryNotFoundError(repo.GetStorageName(), tc.relativePath),
+ ))
+ }
+
+ testhelper.RequireGrpcError(t, expectedErr, err)
})
}
}
diff --git a/internal/gitaly/service/objectpool/fetch_into_object_pool_test.go b/internal/gitaly/service/objectpool/fetch_into_object_pool_test.go
index 13367f3d0..3b6ed2430 100644
--- a/internal/gitaly/service/objectpool/fetch_into_object_pool_test.go
+++ b/internal/gitaly/service/objectpool/fetch_into_object_pool_test.go
@@ -27,12 +27,17 @@ import (
)
func TestFetchIntoObjectPool_Success(t *testing.T) {
- cfg, repo, repoPath, locator, client := setup(t)
ctx := testhelper.Context(t)
+ cfg, repo, repoPath, locator, client := setup(ctx, t)
repoCommit := gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch(t.Name()))
pool := initObjectPool(t, cfg, cfg.Storages[0])
+ _, err := client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
+ ObjectPool: pool.ToProto(),
+ Origin: repo,
+ })
+ require.NoError(t, err)
req := &gitalypb.FetchIntoObjectPoolRequest{
ObjectPool: pool.ToProto(),
@@ -40,7 +45,7 @@ func TestFetchIntoObjectPool_Success(t *testing.T) {
Repack: true,
}
- _, err := client.FetchIntoObjectPool(ctx, req)
+ _, err = client.FetchIntoObjectPool(ctx, req)
require.NoError(t, err)
require.True(t, pool.IsValid(), "ensure underlying repository is valid")
@@ -77,19 +82,28 @@ func TestFetchIntoObjectPool_Success(t *testing.T) {
}
func TestFetchIntoObjectPool_hooks(t *testing.T) {
- cfg, repo, _ := testcfg.BuildWithRepo(t)
+ cfg := testcfg.Build(t)
gitCmdFactory := gittest.NewCommandFactory(t, cfg, git.WithHooksPath(testhelper.TempDir(t)))
- addr := runObjectPoolServer(t, cfg, config.NewLocator(cfg), testhelper.NewDiscardingLogger(t), testserver.WithGitCommandFactory(gitCmdFactory))
+ cfg.SocketPath = runObjectPoolServer(t, cfg, config.NewLocator(cfg), testhelper.NewDiscardingLogger(t), testserver.WithGitCommandFactory(gitCmdFactory))
- conn, err := grpc.Dial(addr, grpc.WithInsecure())
+ ctx := testhelper.Context(t)
+ repo, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
+
+ conn, err := grpc.Dial(cfg.SocketPath, grpc.WithInsecure())
require.NoError(t, err)
defer testhelper.MustClose(t, conn)
client := gitalypb.NewObjectPoolServiceClient(conn)
- ctx := testhelper.Context(t)
pool := initObjectPool(t, cfg, cfg.Storages[0])
+ _, err = client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
+ ObjectPool: pool.ToProto(),
+ Origin: repo,
+ })
+ require.NoError(t, err)
// Set up a custom reference-transaction hook which simply exits failure. This asserts that
// the RPC doesn't invoke any reference-transaction.
@@ -106,23 +120,32 @@ func TestFetchIntoObjectPool_hooks(t *testing.T) {
}
func TestFetchIntoObjectPool_CollectLogStatistics(t *testing.T) {
- cfg, repo, _ := testcfg.BuildWithRepo(t)
+ cfg := testcfg.Build(t)
testcfg.BuildGitalyHooks(t, cfg)
locator := config.NewLocator(cfg)
logger, hook := test.NewNullLogger()
- serverSocketPath := runObjectPoolServer(t, cfg, locator, logger)
+ cfg.SocketPath = runObjectPoolServer(t, cfg, locator, logger)
- conn, err := grpc.Dial(serverSocketPath, grpc.WithInsecure())
+ ctx := testhelper.Context(t)
+ ctx = ctxlogrus.ToContext(ctx, log.WithField("test", "logging"))
+ repo, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
+
+ conn, err := grpc.Dial(cfg.SocketPath, grpc.WithInsecure())
require.NoError(t, err)
t.Cleanup(func() { testhelper.MustClose(t, conn) })
client := gitalypb.NewObjectPoolServiceClient(conn)
- ctx := testhelper.Context(t)
- ctx = ctxlogrus.ToContext(ctx, log.WithField("test", "logging"))
pool := initObjectPool(t, cfg, cfg.Storages[0])
+ _, err = client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
+ ObjectPool: pool.ToProto(),
+ Origin: repo,
+ })
+ require.NoError(t, err)
req := &gitalypb.FetchIntoObjectPoolRequest{
ObjectPool: pool.ToProto(),
diff --git a/internal/gitaly/service/objectpool/get_test.go b/internal/gitaly/service/objectpool/get_test.go
index 69bc446c3..41764461f 100644
--- a/internal/gitaly/service/objectpool/get_test.go
+++ b/internal/gitaly/service/objectpool/get_test.go
@@ -12,13 +12,13 @@ import (
)
func TestGetObjectPoolSuccess(t *testing.T) {
- cfg, repoProto, _, _, client := setup(t)
+ poolCtx := testhelper.Context(t)
+ cfg, repoProto, _, _, client := setup(poolCtx, t)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
pool := initObjectPool(t, cfg, cfg.Storages[0])
relativePoolPath := pool.GetRelativePath()
- poolCtx := testhelper.Context(t)
require.NoError(t, pool.Create(poolCtx, repo))
require.NoError(t, pool.Link(poolCtx, repo))
@@ -36,8 +36,8 @@ func TestGetObjectPoolSuccess(t *testing.T) {
}
func TestGetObjectPoolNoFile(t *testing.T) {
- _, repoo, _, _, client := setup(t)
ctx := testhelper.Context(t)
+ _, repoo, _, _, client := setup(ctx, t)
resp, err := client.GetObjectPool(ctx, &gitalypb.GetObjectPoolRequest{
Repository: repoo,
@@ -48,12 +48,12 @@ func TestGetObjectPoolNoFile(t *testing.T) {
}
func TestGetObjectPoolBadFile(t *testing.T) {
- _, repo, repoPath, _, client := setup(t)
+ ctx := testhelper.Context(t)
+ _, repo, repoPath, _, client := setup(ctx, t)
alternatesFilePath := filepath.Join(repoPath, "objects", "info", "alternates")
require.NoError(t, os.MkdirAll(filepath.Dir(alternatesFilePath), 0o755))
require.NoError(t, os.WriteFile(alternatesFilePath, []byte("not-a-directory"), 0o644))
- ctx := testhelper.Context(t)
resp, err := client.GetObjectPool(ctx, &gitalypb.GetObjectPoolRequest{
Repository: repo,
diff --git a/internal/gitaly/service/objectpool/link_test.go b/internal/gitaly/service/objectpool/link_test.go
index cfe46dc1c..a1a7fb154 100644
--- a/internal/gitaly/service/objectpool/link_test.go
+++ b/internal/gitaly/service/objectpool/link_test.go
@@ -18,8 +18,8 @@ import (
)
func TestLink(t *testing.T) {
- cfg, repo, _, _, client := setup(t, testserver.WithDisablePraefect())
ctx := testhelper.Context(t)
+ cfg, repo, _, _, client := setup(ctx, t, testserver.WithDisablePraefect())
localRepo := localrepo.NewTestRepo(t, cfg, repo)
@@ -84,19 +84,22 @@ func TestLink(t *testing.T) {
}
func TestLinkIdempotent(t *testing.T) {
- cfg, repoProto, _, _, client := setup(t)
ctx := testhelper.Context(t)
- repo := localrepo.NewTestRepo(t, cfg, repoProto)
+ cfg, repoProto, _, _, client := setup(ctx, t)
pool := initObjectPool(t, cfg, cfg.Storages[0])
- require.NoError(t, pool.Create(ctx, repo))
+ _, err := client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
+ ObjectPool: pool.ToProto(),
+ Origin: repoProto,
+ })
+ require.NoError(t, err)
request := &gitalypb.LinkRepositoryToObjectPoolRequest{
Repository: repoProto,
ObjectPool: pool.ToProto(),
}
- _, err := client.LinkRepositoryToObjectPool(ctx, request)
+ _, err = client.LinkRepositoryToObjectPool(ctx, request)
require.NoError(t, err)
_, err = client.LinkRepositoryToObjectPool(ctx, request)
@@ -104,8 +107,8 @@ func TestLinkIdempotent(t *testing.T) {
}
func TestLinkNoClobber(t *testing.T) {
- cfg, repoProto, repoPath, _, client := setup(t)
ctx := testhelper.Context(t)
+ cfg, repoProto, repoPath, _, client := setup(ctx, t)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
pool := initObjectPool(t, cfg, cfg.Storages[0])
@@ -130,18 +133,27 @@ func TestLinkNoClobber(t *testing.T) {
}
func TestLinkNoPool(t *testing.T) {
- cfg, repo, _, locator, client := setup(t)
ctx := testhelper.Context(t)
+ cfg, repo, _, locator, client := setup(ctx, t)
pool := initObjectPool(t, cfg, cfg.Storages[0])
- require.NoError(t, pool.Remove(ctx))
+ _, err := client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
+ ObjectPool: pool.ToProto(),
+ Origin: repo,
+ })
+ require.NoError(t, err)
+
+ _, err = client.DeleteObjectPool(ctx, &gitalypb.DeleteObjectPoolRequest{
+ ObjectPool: pool.ToProto(),
+ })
+ require.NoError(t, err)
request := &gitalypb.LinkRepositoryToObjectPoolRequest{
Repository: repo,
ObjectPool: pool.ToProto(),
}
- _, err := client.LinkRepositoryToObjectPool(ctx, request)
+ _, err = client.LinkRepositoryToObjectPool(ctx, request)
require.NoError(t, err)
poolRepoPath, err := locator.GetRepoPath(pool)
diff --git a/internal/gitaly/service/objectpool/reduplicate_test.go b/internal/gitaly/service/objectpool/reduplicate_test.go
index 1fd77fdb4..76a7406d8 100644
--- a/internal/gitaly/service/objectpool/reduplicate_test.go
+++ b/internal/gitaly/service/objectpool/reduplicate_test.go
@@ -13,8 +13,8 @@ import (
)
func TestReduplicate(t *testing.T) {
- cfg, repoProto, repoPath, _, client := setup(t)
ctx := testhelper.Context(t)
+ cfg, repoProto, repoPath, _, client := setup(ctx, t)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
gitCmdFactory := gittest.NewCommandFactory(t, cfg)
diff --git a/internal/gitaly/service/objectpool/testhelper_test.go b/internal/gitaly/service/objectpool/testhelper_test.go
index 351251a90..d6b1dcd4f 100644
--- a/internal/gitaly/service/objectpool/testhelper_test.go
+++ b/internal/gitaly/service/objectpool/testhelper_test.go
@@ -1,6 +1,7 @@
package objectpool
import (
+ "context"
"os"
"path/filepath"
"testing"
@@ -14,6 +15,7 @@ import (
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service"
hookservice "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/hook"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/storage"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
@@ -27,24 +29,30 @@ func TestMain(m *testing.M) {
testhelper.Run(m)
}
-func setup(t *testing.T, opts ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, storage.Locator, gitalypb.ObjectPoolServiceClient) {
+func setup(ctx context.Context, t *testing.T, opts ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, storage.Locator, gitalypb.ObjectPoolServiceClient) {
t.Helper()
- cfg, repo, repoPath := testcfg.BuildWithRepo(t)
+ cfg := testcfg.Build(t)
testcfg.BuildGitalyHooks(t, cfg)
locator := config.NewLocator(cfg)
- addr := runObjectPoolServer(t, cfg, locator, testhelper.NewDiscardingLogger(t), opts...)
+ cfg.SocketPath = runObjectPoolServer(t, cfg, locator, testhelper.NewDiscardingLogger(t), opts...)
- conn, err := grpc.Dial(addr, grpc.WithInsecure())
+ conn, err := grpc.Dial(cfg.SocketPath, grpc.WithInsecure())
require.NoError(t, err)
t.Cleanup(func() { testhelper.MustClose(t, conn) })
+ repo, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
+
return cfg, repo, repoPath, locator, gitalypb.NewObjectPoolServiceClient(conn)
}
func runObjectPoolServer(t *testing.T, cfg config.Cfg, locator storage.Locator, logger *logrus.Logger, opts ...testserver.GitalyServerOpt) string {
+ opts = append(opts, testserver.WithDisableMetadataForceCreation())
+
return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
gitalypb.RegisterObjectPoolServiceServer(srv, NewServer(
deps.GetLocator(),
@@ -57,6 +65,16 @@ func runObjectPoolServer(t *testing.T, cfg config.Cfg, locator storage.Locator,
deps.GetGitCmdFactory(),
deps.GetPackObjectsCache(),
))
+ gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
+ cfg,
+ deps.GetRubyServer(),
+ deps.GetLocator(),
+ deps.GetTxManager(),
+ deps.GetGitCmdFactory(),
+ deps.GetCatfileCache(),
+ deps.GetConnsPool(),
+ deps.GetGit2goExecutor(),
+ ))
}, append(opts, testserver.WithLocator(locator), testserver.WithLogger(logger))...)
}
diff --git a/internal/gitaly/service/repository/cleanup.go b/internal/gitaly/service/repository/cleanup.go
index 3bb71faa0..8755c053d 100644
--- a/internal/gitaly/service/repository/cleanup.go
+++ b/internal/gitaly/service/repository/cleanup.go
@@ -127,6 +127,41 @@ func isExitWithCode(err error, code int) bool {
}
func cleanDisconnectedWorktrees(ctx context.Context, repo *localrepo.Repo) error {
+ repoPath, err := repo.Path()
+ if err != nil {
+ return err
+ }
+
+ // Spawning a command is expensive. We thus try to avoid the overhead by first
+ // determining if there could possibly be any work to be done by git-worktree(1). We do so
+ // by reading the directory in which worktrees are stored, and if it's empty then we know
+ // that there aren't any worktrees in the first place.
+ worktreeEntries, err := os.ReadDir(filepath.Join(repoPath, "worktrees"))
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ return nil
+ }
+ }
+
+ hasWorktrees := false
+ for _, worktreeEntry := range worktreeEntries {
+ if !worktreeEntry.IsDir() {
+ continue
+ }
+
+ if worktreeEntry.Name() == "." || worktreeEntry.Name() == ".." {
+ continue
+ }
+
+ hasWorktrees = true
+ break
+ }
+
+ // There are no worktrees, so let's avoid spawning the Git command.
+ if !hasWorktrees {
+ return nil
+ }
+
return repo.ExecAndWait(ctx, git.SubSubCmd{
Name: "worktree",
Action: "prune",
diff --git a/internal/gitaly/service/repository/cleanup_test.go b/internal/gitaly/service/repository/cleanup_test.go
index 215a57b11..9975a22ab 100644
--- a/internal/gitaly/service/repository/cleanup_test.go
+++ b/internal/gitaly/service/repository/cleanup_test.go
@@ -8,8 +8,10 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
@@ -141,6 +143,33 @@ func TestCleanupDisconnectedWorktrees(t *testing.T) {
gittest.AddWorktree(t, cfg, repoPath, worktreePath)
}
+func TestCleanupDisconnectedWorktrees_doesNothingWithoutWorktrees(t *testing.T) {
+ t.Parallel()
+
+ ctx := testhelper.Context(t)
+ cfg, repoProto, repoPath, _ := setupRepositoryService(ctx, t)
+ worktreePath := filepath.Join(testhelper.TempDir(t), "worktree")
+
+ failingGitCmdFactory := gittest.NewInterceptingCommandFactory(ctx, t, cfg, func(git.ExecutionEnvironment) string {
+ return `#!/usr/bin/env bash
+ exit 15
+ `
+ })
+
+ repo := localrepo.New(config.NewLocator(cfg), failingGitCmdFactory, nil, repoProto)
+
+ // If this command did spawn git-worktree(1) we'd see an error. It doesn't though because it
+ // detects that there aren't any worktrees at all.
+ require.NoError(t, cleanDisconnectedWorktrees(ctx, repo))
+
+ gittest.AddWorktree(t, cfg, repoPath, worktreePath)
+
+ // We have now added a worktree, so it should detect that there are worktrees and thus
+ // spawn the Git command. We thus expect the error code we inject via the failing Git
+ // command factory.
+ require.EqualError(t, cleanDisconnectedWorktrees(ctx, repo), "exit status 15")
+}
+
func TestRemoveWorktree(t *testing.T) {
t.Parallel()
diff --git a/internal/gitaly/service/repository/optimize.go b/internal/gitaly/service/repository/optimize.go
index 4bfbfb386..b13566036 100644
--- a/internal/gitaly/service/repository/optimize.go
+++ b/internal/gitaly/service/repository/optimize.go
@@ -1,10 +1,18 @@
package repository
import (
+ "bytes"
"context"
+ "errors"
"fmt"
+ "io/fs"
+ "math"
"os"
+ "path/filepath"
+ "strings"
+ "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/housekeeping"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/stats"
@@ -12,86 +20,404 @@ import (
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
+const (
+ // looseObjectLimit is the limit of loose objects we accept both when doing incremental
+ // repacks and when pruning objects.
+ looseObjectLimit = 1024
+)
+
+func (s *server) OptimizeRepository(ctx context.Context, in *gitalypb.OptimizeRepositoryRequest) (*gitalypb.OptimizeRepositoryResponse, error) {
+ if err := s.validateOptimizeRepositoryRequest(in); err != nil {
+ return nil, err
+ }
+
+ repo := s.localrepo(in.GetRepository())
+
+ if err := s.optimizeRepository(ctx, repo); err != nil {
+ return nil, helper.ErrInternal(err)
+ }
+
+ return &gitalypb.OptimizeRepositoryResponse{}, nil
+}
+
+func (s *server) validateOptimizeRepositoryRequest(in *gitalypb.OptimizeRepositoryRequest) error {
+ if in.GetRepository() == nil {
+ return helper.ErrInvalidArgumentf("empty repository")
+ }
+
+ _, err := s.locator.GetRepoPath(in.GetRepository())
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (s *server) optimizeRepository(ctx context.Context, repo *localrepo.Repo) error {
+ optimizations := struct {
+ PackedObjects bool `json:"packed_objects"`
+ PrunedObjects bool `json:"pruned_objects"`
+ PackedRefs bool `json:"packed_refs"`
+ }{}
+ defer func() {
+ ctxlogrus.Extract(ctx).WithField("optimizations", optimizations).Info("optimized repository")
+ }()
+
+ if err := housekeeping.Perform(ctx, repo, s.txManager); err != nil {
+ return fmt.Errorf("could not execute houskeeping: %w", err)
+ }
+
+ if err := cleanupWorktrees(ctx, repo); err != nil {
+ return fmt.Errorf("could not clean up worktrees: %w", err)
+ }
+
+ didRepack, err := repackIfNeeded(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("could not repack: %w", err)
+ }
+ optimizations.PackedObjects = didRepack
+
+ didPrune, err := pruneIfNeeded(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("could not prune: %w", err)
+ }
+ optimizations.PrunedObjects = didPrune
+
+ didPackRefs, err := packRefsIfNeeded(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("could not pack refs: %w", err)
+ }
+ optimizations.PackedRefs = didPackRefs
+
+ return nil
+}
+
// repackIfNeeded uses a set of heuristics to determine whether the repository needs a
// full repack and, if so, repacks it.
-func (s *server) repackIfNeeded(ctx context.Context, repo *localrepo.Repo, repoProto *gitalypb.Repository) error {
+func repackIfNeeded(ctx context.Context, repo *localrepo.Repo) (bool, error) {
+ repackNeeded, cfg, err := needsRepacking(repo)
+ if err != nil {
+ return false, fmt.Errorf("determining whether repo needs repack: %w", err)
+ }
+
+ if !repackNeeded {
+ return false, nil
+ }
+
+ if err := repack(ctx, repo, cfg); err != nil {
+ return false, err
+ }
+
+ return true, nil
+}
+
+func needsRepacking(repo *localrepo.Repo) (bool, repackCommandConfig, error) {
repoPath, err := repo.Path()
if err != nil {
- return err
+ return false, repackCommandConfig{}, fmt.Errorf("getting repository path: %w", err)
+ }
+
+ altFile, err := repo.InfoAlternatesPath()
+ if err != nil {
+ return false, repackCommandConfig{}, helper.ErrInternal(err)
+ }
+
+ hasAlternate := true
+ if _, err := os.Stat(altFile); os.IsNotExist(err) {
+ hasAlternate = false
}
hasBitmap, err := stats.HasBitmap(repoPath)
if err != nil {
- return helper.ErrInternal(err)
+ return false, repackCommandConfig{}, fmt.Errorf("checking for bitmap: %w", err)
+ }
+
+ // Bitmaps are used to efficiently determine transitive reachability of objects from a
+ // set of commits. They are an essential part of the puzzle required to serve fetches
+ // efficiently, as we'd otherwise need to traverse the object graph every time to find
+ // which objects we have to send. We thus repack the repository with bitmaps enabled in
+ // case they're missing.
+ //
+ // There is one exception: repositories which are connected to an object pool must not have
+ // a bitmap on their own. We do not yet use multi-pack indices, and in that case Git can
+ // only use one bitmap. We already generate this bitmap in the pool, so members of it
+ // shouldn't have another bitmap on their own.
+ if !hasBitmap && !hasAlternate {
+ return true, repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: true,
+ }, nil
}
missingBloomFilters, err := stats.IsMissingBloomFilters(repoPath)
if err != nil {
- return helper.ErrInternal(err)
+ return false, repackCommandConfig{}, fmt.Errorf("checking for bloom filters: %w", err)
}
- if hasBitmap && !missingBloomFilters {
- return nil
+ // Bloom filters are part of the commit-graph and allow us to efficiently determine which
+ // paths have been modified in a given commit without having to look into the object
+ // database. In the past we didn't compute bloom filters at all, so we want to rewrite the
+ // whole commit-graph to generate them.
+ //
+ // Note that we'll eventually want to move out commit-graph generation from repacking. When
+ // that happens we should update the commit-graph either if it's missing, when bloom filters
+ // are missing or when packfiles have been updated.
+ if missingBloomFilters {
+ return true, repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: !hasAlternate,
+ }, nil
}
- altFile, err := repo.InfoAlternatesPath()
+ largestPackfileSize, packfileCount, err := packfileSizeAndCount(repo)
if err != nil {
- return helper.ErrInternal(err)
+ return false, repackCommandConfig{}, fmt.Errorf("checking largest packfile size: %w", err)
}
- // Repositories with alternates should never have a bitmap, as Git will otherwise complain about
- // multiple bitmaps being present in parent and alternate repository.
- // In case of an error it still tries it is best to optimise the repository.
- createBitMap := false
- if _, err := os.Stat(altFile); os.IsNotExist(err) {
- createBitMap = true
+ // Whenever we do an incremental repack we create a new packfile, and as a result Git may
+ // have to look into every one of the packfiles to find objects. This is less efficient the
+ // more packfiles we have, but we cannot repack the whole repository every time either given
+ // that this may take a lot of time.
+ //
+ // Instead, we determine whether the repository has "too many" packfiles. "Too many" is
+ // relative though: for small repositories it's fine to do full repacks regularly, but for
+ // large repositories we need to be more careful. We thus use a heuristic of "repository
+ // largeness": we take the biggest packfile that exists, and then the maximum allowed number
+ // of packfiles is `log(largestpackfile_size_in_mb) / log(1.3)`. This gives the following
+ // allowed number of packfiles:
+ //
+ // - No packfile: 5 packfiles. This is a special case.
+ // - 10MB packfile: 8 packfiles.
+ // - 100MB packfile: 17 packfiles.
+ // - 500MB packfile: 23 packfiles.
+ // - 1GB packfile: 26 packfiles.
+ // - 5GB packfile: 32 packfiles.
+ // - 10GB packfile: 35 packfiles.
+ // - 100GB packfile: 43 packfiles.
+ //
+ // The goal is to have a comparatively quick ramp-up of allowed packfiles as the repository
+ // size grows, but then slow down such that we're effectively capped and don't end up with
+ // an excessive amount of packfiles.
+ //
+ // This is a heuristic and thus imperfect by necessity. We may tune it as we gain experience
+ // with the way it behaves.
+ if int64(math.Max(5, math.Log(float64(largestPackfileSize))/math.Log(1.3))) < packfileCount {
+ return true, repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: !hasAlternate,
+ }, nil
}
- if _, err = s.RepackFull(ctx, &gitalypb.RepackFullRequest{
- Repository: repoProto,
- CreateBitmap: createBitMap,
- }); err != nil {
- return err
+ looseObjectCount, err := estimateLooseObjectCount(repo)
+ if err != nil {
+ return false, repackCommandConfig{}, fmt.Errorf("estimating loose object count: %w", err)
}
- return nil
+ // Most Git commands do not write packfiles directly, but instead write loose objects into
+ // the object database. So while we now know that there ain't too many packfiles, we still
+ // need to check whether we have too many objects.
+ //
+ // In this case it doesn't make a lot of sense to scale incremental repacks with the repo's
+ // size: we only pack loose objects, so the time to pack them doesn't scale with repository
+ // size but with the number of loose objects we have. git-gc(1) uses a threshold of 6700
+ // loose objects to start an incremental repack, but one needs to keep in mind that Git
+ // typically has defaults which are better suited for the client-side instead of the
+ // server-side in most commands.
+ //
+ // In our case we typically want to ensure that our repositories are much better packed than
+ // it is necessary on the client side. We thus take a much stricter limit of 1024 objects.
+ if looseObjectCount > looseObjectLimit {
+ return true, repackCommandConfig{
+ fullRepack: false,
+ writeBitmap: false,
+ }, nil
+ }
+
+ return false, repackCommandConfig{}, nil
}
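
The packfile heuristic documented above is easy to sanity-check in isolation. The self-contained snippet below (an illustration, not part of the change) evaluates max(5, log(largest_packfile_in_MB)/log(1.3)) exactly as needsRepacking does and reproduces the table from the comment:

package main

import (
	"fmt"
	"math"
)

// allowedPackfiles mirrors the threshold used by needsRepacking above.
func allowedPackfiles(largestPackfileMB int64) int64 {
	return int64(math.Max(5, math.Log(float64(largestPackfileMB))/math.Log(1.3)))
}

func main() {
	for _, sizeMB := range []int64{0, 10, 100, 500, 1000, 5000, 10000, 100000} {
		// Prints 5, 8, 17, 23, 26, 32, 35 and 43 allowed packfiles respectively.
		fmt.Printf("%6d MB largest packfile -> %2d packfiles allowed\n", sizeMB, allowedPackfiles(sizeMB))
	}
}
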
-func (s *server) optimizeRepository(ctx context.Context, repository *gitalypb.Repository) error {
- repo := s.localrepo(repository)
+func packfileSizeAndCount(repo *localrepo.Repo) (int64, int64, error) {
+ repoPath, err := repo.Path()
+ if err != nil {
+ return 0, 0, fmt.Errorf("getting repository path: %w", err)
+ }
- if err := s.repackIfNeeded(ctx, repo, repository); err != nil {
- return fmt.Errorf("could not repack: %w", err)
+ entries, err := os.ReadDir(filepath.Join(repoPath, "objects/pack"))
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ return 0, 0, nil
+ }
+
+ return 0, 0, err
}
- if err := housekeeping.Perform(ctx, repo, s.txManager); err != nil {
- return fmt.Errorf("could not execute houskeeping: %w", err)
+ largestSize := int64(0)
+ count := int64(0)
+
+ for _, entry := range entries {
+ if !strings.HasSuffix(entry.Name(), ".pack") {
+ continue
+ }
+
+ entryInfo, err := entry.Info()
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ continue
+ }
+
+ return 0, 0, fmt.Errorf("getting packfile info: %w", err)
+ }
+
+ if entryInfo.Size() > largestSize {
+ largestSize = entryInfo.Size()
+ }
+
+ count++
}
- return nil
+ return largestSize / 1024 / 1024, count, nil
}
-func (s *server) OptimizeRepository(ctx context.Context, in *gitalypb.OptimizeRepositoryRequest) (*gitalypb.OptimizeRepositoryResponse, error) {
- if err := s.validateOptimizeRepositoryRequest(in); err != nil {
- return nil, err
+// estimateLooseObjectCount estimates the number of loose objects in the repository. Due to the
+// object name being derived via a cryptographic hash we know that in the general case, objects are
+// evenly distributed across their sharding directories. We can thus estimate the number of loose
+// objects by opening a single sharding directory and counting its entries.
+func estimateLooseObjectCount(repo *localrepo.Repo) (int64, error) {
+ repoPath, err := repo.Path()
+ if err != nil {
+ return 0, fmt.Errorf("getting repository path: %w", err)
}
- if err := s.optimizeRepository(ctx, in.GetRepository()); err != nil {
- return nil, helper.ErrInternal(err)
+ // We use the same sharding directory as git-gc(1) does for its estimation.
+ entries, err := os.ReadDir(filepath.Join(repoPath, "objects/17"))
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ return 0, nil
+ }
+
+ return 0, fmt.Errorf("reading loose object shard: %w", err)
}
- return &gitalypb.OptimizeRepositoryResponse{}, nil
+ looseObjects := int64(0)
+ for _, entry := range entries {
+ if strings.LastIndexAny(entry.Name(), "0123456789abcdef") != len(entry.Name())-1 {
+ continue
+ }
+
+ looseObjects++
+ }
+
+ // Scale up found loose objects by the number of sharding directories.
+ return looseObjects * 256, nil
}
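
To make the scaling concrete: assuming the uniform-distribution argument above holds, 4 entries in objects/17 estimate to 4 * 256 = 1024 loose objects, which is exactly the 1024-object limit mentioned above, while a fifth entry raises the estimate to 1280 and thus over the limit. This is the boundary exercised by the "boundary" and "exceeding boundary" test cases further down.
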
-func (s *server) validateOptimizeRepositoryRequest(in *gitalypb.OptimizeRepositoryRequest) error {
- if in.GetRepository() == nil {
- return helper.ErrInvalidArgumentf("empty repository")
+// pruneIfNeeded removes objects from the repository which are either unreachable or which are
+// already part of a packfile. We use a grace period of two weeks.
+func pruneIfNeeded(ctx context.Context, repo *localrepo.Repo) (bool, error) {
+ // Pool repositories must never prune any objects, or otherwise we may corrupt members of
+ // that pool if they still refer to that object.
+ if strings.HasPrefix(repo.GetRelativePath(), "@pools") {
+ return false, nil
}
- _, err := s.locator.GetRepoPath(in.GetRepository())
+ looseObjectCount, err := estimateLooseObjectCount(repo)
if err != nil {
- return err
+ return false, fmt.Errorf("estimating loose object count: %w", err)
}
- return nil
+ // We again use the same limit here as we do when doing an incremental repack. This is done
+ // intentionally: if we determine that there are too many loose objects and try to repack, but
+ // all of those loose objects are in fact unreachable, then we'd still have the same number
+ // of unreachable objects after the incremental repack. We'd thus try to repack every single
+ // time.
+ //
+ // Using the same limit here doesn't quite fix this case: the unreachable objects would only
+ // be pruned after a grace period of two weeks. But at least we know that we will eventually
+ // prune those unreachable objects, at which point we won't try to do another incremental
+ // repack.
+ if looseObjectCount <= looseObjectLimit {
+ return false, nil
+ }
+
+ if err := repo.ExecAndWait(ctx, git.SubCmd{
+ Name: "prune",
+ }); err != nil {
+ return false, fmt.Errorf("pruning objects: %w", err)
+ }
+
+ return true, nil
+}
+
+func packRefsIfNeeded(ctx context.Context, repo *localrepo.Repo) (bool, error) {
+ repoPath, err := repo.Path()
+ if err != nil {
+ return false, fmt.Errorf("getting repository path: %w", err)
+ }
+ refsPath := filepath.Join(repoPath, "refs")
+
+ looseRefs := int64(0)
+ if err := filepath.WalkDir(refsPath, func(path string, entry fs.DirEntry, err error) error {
+ if err != nil {
+ return err
+ }
+
+ if !entry.IsDir() {
+ looseRefs++
+ }
+
+ return nil
+ }); err != nil {
+ return false, fmt.Errorf("counting loose refs: %w", err)
+ }
+
+ // If there aren't any loose refs then there is nothing we need to do.
+ if looseRefs == 0 {
+ return false, nil
+ }
+
+ packedRefsSize := int64(0)
+ if stat, err := os.Stat(filepath.Join(repoPath, "packed-refs")); err != nil {
+ if !errors.Is(err, os.ErrNotExist) {
+ return false, fmt.Errorf("getting packed-refs size: %w", err)
+ }
+ } else {
+ packedRefsSize = stat.Size()
+ }
+
+ // Packing loose references into the packed-refs file scales with the number of references
+ // we're about to write. We thus decide whether we repack refs by weighing the current size
+ // of the packed-refs file against the number of loose references. This is done such that we
+ // do not repack too often on repositories with a huge number of references, where we can
+ // expect a lot of churn in the number of references.
+ //
+ // As a heuristic, we repack if the number of loose references in the repository exceeds
+ // `log(packed_refs_size_in_bytes/100)/log(1.15)`, which scales as following (number of refs
+ // is estimated with 100 bytes per reference):
+ //
+ // - 1kB ~ 10 packed refs: 16 refs
+ // - 10kB ~ 100 packed refs: 33 refs
+ // - 100kB ~ 1k packed refs: 49 refs
+ // - 1MB ~ 10k packed refs: 66 refs
+ // - 10MB ~ 100k packed refs: 82 refs
+ // - 100MB ~ 1m packed refs: 99 refs
+ //
+ // We thus allow roughly 16 additional loose refs per factor of ten of packed refs.
+ //
+ // This heuristic may likely need tweaking in the future, but should serve as a good first
+ // iteration.
+ if int64(math.Max(16, math.Log(float64(packedRefsSize)/100)/math.Log(1.15))) > looseRefs {
+ return false, nil
+ }
+
+ var stderr bytes.Buffer
+ if err := repo.ExecAndWait(ctx, git.SubCmd{
+ Name: "pack-refs",
+ Flags: []git.Option{
+ git.Flag{Name: "--all"},
+ },
+ }, git.WithStderr(&stderr)); err != nil {
+ return false, fmt.Errorf("packing refs: %w, stderr: %q", err, stderr.String())
+ }
+
+ return true, nil
}
diff --git a/internal/gitaly/service/repository/optimize_test.go b/internal/gitaly/service/repository/optimize_test.go
index c35ffa924..f7e4152e0 100644
--- a/internal/gitaly/service/repository/optimize_test.go
+++ b/internal/gitaly/service/repository/optimize_test.go
@@ -2,6 +2,8 @@ package repository
import (
"bytes"
+ "fmt"
+ "io"
"os"
"path/filepath"
"testing"
@@ -10,6 +12,7 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/stats"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
@@ -186,3 +189,557 @@ func TestOptimizeRepositoryValidation(t *testing.T) {
_, err := client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{Repository: repo})
require.NoError(t, err)
}
+
+type infiniteReader struct{}
+
+func (r infiniteReader) Read(b []byte) (int, error) {
+ for i := range b {
+ b[i] = '\000'
+ }
+ return len(b), nil
+}
+
+func TestNeedsRepacking(t *testing.T) {
+ t.Parallel()
+
+ ctx := testhelper.Context(t)
+ cfg, _ := setupRepositoryServiceWithoutRepo(t)
+
+ for _, tc := range []struct {
+ desc string
+ setup func(t *testing.T) *gitalypb.Repository
+ expectedErr error
+ expectedNeeded bool
+ expectedConfig repackCommandConfig
+ }{
+ {
+ desc: "empty repo",
+ setup: func(t *testing.T) *gitalypb.Repository {
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg)
+ return repoProto
+ },
+ // This is a bug: if the repo is empty then we wouldn't ever generate a
+ // packfile, but we determine a repack is needed because it's missing a
+ // bitmap. It's a rather benign bug though: git-repack(1) will exit
+ // immediately because it knows that there's nothing to repack.
+ expectedNeeded: true,
+ expectedConfig: repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: true,
+ },
+ },
+ {
+ desc: "missing bitmap",
+ setup: func(t *testing.T) *gitalypb.Repository {
+ repoProto, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
+ return repoProto
+ },
+ expectedNeeded: true,
+ expectedConfig: repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: true,
+ },
+ },
+ {
+ desc: "missing bitmap with alternate",
+ setup: func(t *testing.T) *gitalypb.Repository {
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
+
+ // Create the alternates file. If it exists, then we shouldn't try
+ // to generate a bitmap.
+ require.NoError(t, os.WriteFile(filepath.Join(repoPath, "objects", "info", "alternates"), nil, 0o755))
+
+ return repoProto
+ },
+ expectedNeeded: true,
+ expectedConfig: repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: false,
+ },
+ },
+ {
+ desc: "missing commit-graph",
+ setup: func(t *testing.T) *gitalypb.Repository {
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
+
+ gittest.Exec(t, cfg, "-C", repoPath, "repack", "-Ad", "--write-bitmap-index")
+
+ return repoProto
+ },
+ expectedNeeded: true,
+ expectedConfig: repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: true,
+ },
+ },
+ {
+ desc: "commit-graph without bloom filters",
+ setup: func(t *testing.T) *gitalypb.Repository {
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
+
+ gittest.Exec(t, cfg, "-C", repoPath, "repack", "-Ad", "--write-bitmap-index")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit-graph", "write")
+
+ return repoProto
+ },
+ expectedNeeded: true,
+ expectedConfig: repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: true,
+ },
+ },
+ {
+ desc: "no repack needed",
+ setup: func(t *testing.T) *gitalypb.Repository {
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ Seed: gittest.SeedGitLabTest,
+ })
+
+ gittest.Exec(t, cfg, "-C", repoPath, "repack", "-Ad", "--write-bitmap-index")
+ gittest.Exec(t, cfg, "-C", repoPath, "commit-graph", "write", "--changed-paths", "--split")
+
+ return repoProto
+ },
+ expectedNeeded: false,
+ },
+ } {
+ tc := tc
+ t.Run(tc.desc, func(t *testing.T) {
+ t.Parallel()
+
+ repoProto := tc.setup(t)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+
+ repackNeeded, repackCfg, err := needsRepacking(repo)
+ require.Equal(t, tc.expectedErr, err)
+ require.Equal(t, tc.expectedNeeded, repackNeeded)
+ require.Equal(t, tc.expectedConfig, repackCfg)
+ })
+ }
+
+ const megaByte = 1024 * 1024
+
+ for _, tc := range []struct {
+ packfileSize int64
+ requiredPackfiles int
+ }{
+ {
+ packfileSize: 1,
+ requiredPackfiles: 5,
+ },
+ {
+ packfileSize: 5 * megaByte,
+ requiredPackfiles: 6,
+ },
+ {
+ packfileSize: 10 * megaByte,
+ requiredPackfiles: 8,
+ },
+ {
+ packfileSize: 50 * megaByte,
+ requiredPackfiles: 14,
+ },
+ {
+ packfileSize: 100 * megaByte,
+ requiredPackfiles: 17,
+ },
+ {
+ packfileSize: 500 * megaByte,
+ requiredPackfiles: 23,
+ },
+ {
+ packfileSize: 1000 * megaByte,
+ requiredPackfiles: 26,
+ },
+ // Let's not go any further than this, as we'd otherwise be thrashing the temporary directory.
+ } {
+ t.Run(fmt.Sprintf("packfile with %d bytes", tc.packfileSize), func(t *testing.T) {
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+ packDir := filepath.Join(repoPath, "objects", "pack")
+
+ // Emulate the existence of a bitmap and a commit-graph with bloom filters.
+ // We explicitly don't want to generate them via Git commands as they would
+ // require us to already have objects in the repository, and we want to be
+ // in full control over all objects and packfiles in the repo.
+ require.NoError(t, os.WriteFile(filepath.Join(packDir, "something.bitmap"), nil, 0o644))
+ commitGraphChainPath := filepath.Join(repoPath, stats.CommitGraphChainRelPath)
+ require.NoError(t, os.MkdirAll(filepath.Dir(commitGraphChainPath), 0o755))
+ require.NoError(t, os.WriteFile(commitGraphChainPath, nil, 0o644))
+
+ // We first create a single big packfile which is used to determine the
+ // boundary of when we repack.
+ bigPackfile, err := os.OpenFile(filepath.Join(packDir, "big.pack"), os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0o644)
+ require.NoError(t, err)
+ defer testhelper.MustClose(t, bigPackfile)
+ _, err = io.Copy(bigPackfile, io.LimitReader(infiniteReader{}, tc.packfileSize))
+ require.NoError(t, err)
+
+ // And then we create one less packfile than we need to hit the boundary.
+ // This is done to assert that we indeed don't repack before hitting the
+ // boundary.
+ for i := 0; i < tc.requiredPackfiles-1; i++ {
+ additionalPackfile, err := os.Create(filepath.Join(packDir, fmt.Sprintf("%d.pack", i)))
+ require.NoError(t, err)
+ testhelper.MustClose(t, additionalPackfile)
+ }
+
+ repackNeeded, _, err := needsRepacking(repo)
+ require.NoError(t, err)
+ require.False(t, repackNeeded)
+
+ // Now we create the additional packfile that causes us to hit the boundary.
+ // We should thus see that we want to repack now.
+ lastPackfile, err := os.Create(filepath.Join(packDir, "last.pack"))
+ require.NoError(t, err)
+ testhelper.MustClose(t, lastPackfile)
+
+ repackNeeded, repackCfg, err := needsRepacking(repo)
+ require.NoError(t, err)
+ require.True(t, repackNeeded)
+ require.Equal(t, repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: true,
+ }, repackCfg)
+ })
+ }
+
+ for _, tc := range []struct {
+ desc string
+ looseObjects []string
+ expectedRepack bool
+ }{
+ {
+ desc: "no objects",
+ looseObjects: nil,
+ expectedRepack: false,
+ },
+ {
+ desc: "object not in 17 shard",
+ looseObjects: []string{
+ filepath.Join("ab/12345"),
+ },
+ expectedRepack: false,
+ },
+ {
+ desc: "object in 17 shard",
+ looseObjects: []string{
+ filepath.Join("17/12345"),
+ },
+ expectedRepack: false,
+ },
+ {
+ desc: "objects in different shards",
+ looseObjects: []string{
+ filepath.Join("ab/12345"),
+ filepath.Join("cd/12345"),
+ filepath.Join("12/12345"),
+ filepath.Join("17/12345"),
+ },
+ expectedRepack: false,
+ },
+ {
+ desc: "boundary",
+ looseObjects: []string{
+ filepath.Join("17/1"),
+ filepath.Join("17/2"),
+ filepath.Join("17/3"),
+ filepath.Join("17/4"),
+ },
+ expectedRepack: false,
+ },
+ {
+ desc: "exceeding boundary should cause repack",
+ looseObjects: []string{
+ filepath.Join("17/1"),
+ filepath.Join("17/2"),
+ filepath.Join("17/3"),
+ filepath.Join("17/4"),
+ filepath.Join("17/5"),
+ },
+ expectedRepack: true,
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+
+ // Emulate the existence of a bitmap and a commit-graph with bloom filters.
+ // We explicitly don't want to generate them via Git commands as they would
+ // require us to already have objects in the repository, and we want to be
+ // in full control over all objects and packfiles in the repo.
+ require.NoError(t, os.WriteFile(filepath.Join(repoPath, "objects", "pack", "something.bitmap"), nil, 0o644))
+ commitGraphChainPath := filepath.Join(repoPath, stats.CommitGraphChainRelPath)
+ require.NoError(t, os.MkdirAll(filepath.Dir(commitGraphChainPath), 0o755))
+ require.NoError(t, os.WriteFile(commitGraphChainPath, nil, 0o644))
+
+ for _, looseObjectPath := range tc.looseObjects {
+ looseObjectPath := filepath.Join(repoPath, "objects", looseObjectPath)
+ require.NoError(t, os.MkdirAll(filepath.Dir(looseObjectPath), 0o755))
+
+ looseObjectFile, err := os.Create(looseObjectPath)
+ require.NoError(t, err)
+ testhelper.MustClose(t, looseObjectFile)
+ }
+
+ repackNeeded, repackCfg, err := needsRepacking(repo)
+ require.NoError(t, err)
+ require.Equal(t, tc.expectedRepack, repackNeeded)
+ if tc.expectedRepack {
+ require.Equal(t, repackCommandConfig{
+ fullRepack: false,
+ writeBitmap: false,
+ }, repackCfg)
+ }
+ })
+ }
+}
+
+func TestPruneIfNeeded(t *testing.T) {
+ t.Parallel()
+
+ ctx := testhelper.Context(t)
+ cfg, _ := setupRepositoryServiceWithoutRepo(t)
+
+ for _, tc := range []struct {
+ desc string
+ isPool bool
+ looseObjects []string
+ expectedPrune bool
+ }{
+ {
+ desc: "no objects",
+ looseObjects: nil,
+ expectedPrune: false,
+ },
+ {
+ desc: "object not in 17 shard",
+ looseObjects: []string{
+ filepath.Join("ab/12345"),
+ },
+ expectedPrune: false,
+ },
+ {
+ desc: "object in 17 shard",
+ looseObjects: []string{
+ filepath.Join("17/12345"),
+ },
+ expectedPrune: false,
+ },
+ {
+ desc: "objects in different shards",
+ looseObjects: []string{
+ filepath.Join("ab/12345"),
+ filepath.Join("cd/12345"),
+ filepath.Join("12/12345"),
+ filepath.Join("17/12345"),
+ },
+ expectedPrune: false,
+ },
+ {
+ desc: "boundary",
+ looseObjects: []string{
+ filepath.Join("17/1"),
+ filepath.Join("17/2"),
+ filepath.Join("17/3"),
+ filepath.Join("17/4"),
+ },
+ expectedPrune: false,
+ },
+ {
+ desc: "exceeding boundary should cause repack",
+ looseObjects: []string{
+ filepath.Join("17/1"),
+ filepath.Join("17/2"),
+ filepath.Join("17/3"),
+ filepath.Join("17/4"),
+ filepath.Join("17/5"),
+ },
+ expectedPrune: true,
+ },
+ {
+ desc: "exceeding boundary on pool",
+ isPool: true,
+ looseObjects: []string{
+ filepath.Join("17/1"),
+ filepath.Join("17/2"),
+ filepath.Join("17/3"),
+ filepath.Join("17/4"),
+ filepath.Join("17/5"),
+ },
+ expectedPrune: false,
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ relativePath := gittest.NewRepositoryName(t, true)
+ if tc.isPool {
+ relativePath = gittest.NewObjectPoolName(t)
+ }
+
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
+ RelativePath: relativePath,
+ })
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+
+ for _, looseObjectPath := range tc.looseObjects {
+ looseObjectPath := filepath.Join(repoPath, "objects", looseObjectPath)
+ require.NoError(t, os.MkdirAll(filepath.Dir(looseObjectPath), 0o755))
+
+ looseObjectFile, err := os.Create(looseObjectPath)
+ require.NoError(t, err)
+ testhelper.MustClose(t, looseObjectFile)
+ }
+
+ didPrune, err := pruneIfNeeded(ctx, repo)
+ require.Equal(t, tc.expectedPrune, didPrune)
+ require.NoError(t, err)
+ })
+ }
+}
+
+func TestPackRefsIfNeeded(t *testing.T) {
+ t.Parallel()
+
+ ctx := testhelper.Context(t)
+ cfg, _ := setupRepositoryServiceWithoutRepo(t)
+
+ const kiloByte = 1024
+
+ for _, tc := range []struct {
+ packedRefsSize int64
+ requiredRefs int
+ }{
+ {
+ packedRefsSize: 1,
+ requiredRefs: 16,
+ },
+ {
+ packedRefsSize: 1 * kiloByte,
+ requiredRefs: 16,
+ },
+ {
+ packedRefsSize: 10 * kiloByte,
+ requiredRefs: 33,
+ },
+ {
+ packedRefsSize: 100 * kiloByte,
+ requiredRefs: 49,
+ },
+ {
+ packedRefsSize: 1000 * kiloByte,
+ requiredRefs: 66,
+ },
+ {
+ packedRefsSize: 10000 * kiloByte,
+ requiredRefs: 82,
+ },
+ {
+ packedRefsSize: 100000 * kiloByte,
+ requiredRefs: 99,
+ },
+ } {
+ t.Run(fmt.Sprintf("packed-refs with %d bytes", tc.packedRefsSize), func(t *testing.T) {
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+
+ // Write an empty commit such that we can create valid refs.
+ commitID := gittest.WriteCommit(t, cfg, repoPath, gittest.WithParents())
+ looseRefContent := []byte(commitID.String() + "\n")
+
+ // We first create a packed-refs file of the given size, which is used to
+ // determine the boundary of when we pack refs. We need to write a valid
+ // packed-refs file as git-pack-refs(1) would otherwise choke later on, so we
+ // write the file such that every line is a separate ref of exactly 128 bytes
+ // in length (a divisor of 1024), referring to the commit we created above.
+ packedRefs, err := os.OpenFile(filepath.Join(repoPath, "packed-refs"), os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0o644)
+ require.NoError(t, err)
+ defer testhelper.MustClose(t, packedRefs)
+ for i := int64(0); i < tc.packedRefsSize/128; i++ {
+ packedRefLine := fmt.Sprintf("%s refs/something/this-line-is-padded-to-exactly-128-bytes-%030d\n", commitID.String(), i)
+ require.Len(t, packedRefLine, 128)
+ _, err := packedRefs.WriteString(packedRefLine)
+ require.NoError(t, err)
+ }
+ require.NoError(t, packedRefs.Sync())
+
+ // And then we create one less loose ref than we need to hit the boundary.
+ // This is done to assert that we indeed don't repack before hitting the
+ // boundary.
+ for i := 0; i < tc.requiredRefs-1; i++ {
+ looseRefPath := filepath.Join(repoPath, "refs", "heads", fmt.Sprintf("branch-%d", i))
+ require.NoError(t, os.WriteFile(looseRefPath, looseRefContent, 0o644))
+ }
+
+ didRepack, err := packRefsIfNeeded(ctx, repo)
+ require.NoError(t, err)
+ require.False(t, didRepack)
+
+ // Now we create the additional loose ref that causes us to hit the
+ // boundary. We should thus see that we want to repack now.
+ looseRefPath := filepath.Join(repoPath, "refs", "heads", "last-branch")
+ require.NoError(t, os.WriteFile(looseRefPath, looseRefContent, 0o644))
+
+ didRepack, err = packRefsIfNeeded(ctx, repo)
+ require.NoError(t, err)
+ require.True(t, didRepack)
+ })
+ }
+}
+
+func TestEstimateLooseObjectCount(t *testing.T) {
+ t.Parallel()
+
+ ctx := testhelper.Context(t)
+ cfg, _ := setupRepositoryServiceWithoutRepo(t)
+ repoProto, repoPath := gittest.CreateRepository(ctx, t, cfg)
+ repo := localrepo.NewTestRepo(t, cfg, repoProto)
+
+ t.Run("empty repository", func(t *testing.T) {
+ looseObjects, err := estimateLooseObjectCount(repo)
+ require.NoError(t, err)
+ require.Zero(t, looseObjects)
+ })
+
+ t.Run("object in different shard", func(t *testing.T) {
+ differentShard := filepath.Join(repoPath, "objects", "a0")
+ require.NoError(t, os.MkdirAll(differentShard, 0o755))
+
+ object, err := os.Create(filepath.Join(differentShard, "123456"))
+ require.NoError(t, err)
+ testhelper.MustClose(t, object)
+
+ looseObjects, err := estimateLooseObjectCount(repo)
+ require.NoError(t, err)
+ require.Zero(t, looseObjects)
+ })
+
+ t.Run("object in estimation shard", func(t *testing.T) {
+ estimationShard := filepath.Join(repoPath, "objects", "17")
+ require.NoError(t, os.MkdirAll(estimationShard, 0o755))
+
+ object, err := os.Create(filepath.Join(estimationShard, "123456"))
+ require.NoError(t, err)
+ testhelper.MustClose(t, object)
+
+ looseObjects, err := estimateLooseObjectCount(repo)
+ require.NoError(t, err)
+ require.Equal(t, int64(256), looseObjects)
+
+ // Create a second object in there.
+ object, err = os.Create(filepath.Join(estimationShard, "654321"))
+ require.NoError(t, err)
+ testhelper.MustClose(t, object)
+
+ looseObjects, err = estimateLooseObjectCount(repo)
+ require.NoError(t, err)
+ require.Equal(t, int64(512), looseObjects)
+ })
+}
diff --git a/internal/gitaly/service/repository/prune_unreachable_objects.go b/internal/gitaly/service/repository/prune_unreachable_objects.go
new file mode 100644
index 000000000..4cacb04d9
--- /dev/null
+++ b/internal/gitaly/service/repository/prune_unreachable_objects.go
@@ -0,0 +1,44 @@
+package repository
+
+import (
+ "context"
+
+ "gitlab.com/gitlab-org/gitaly/v14/internal/git"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/git/stats"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
+)
+
+// PruneUnreachableObjects prunes objects which aren't reachable from any of the repository's references. To
+// ensure that concurrently running commands do not reference those objects anymore when we execute
+// the prune we enforce a grace-period: objects will only be pruned if they haven't been accessed
+// for at least 30 minutes.
+func (s *server) PruneUnreachableObjects(
+ ctx context.Context,
+ request *gitalypb.PruneUnreachableObjectsRequest,
+) (*gitalypb.PruneUnreachableObjectsResponse, error) {
+ if request.GetRepository() == nil {
+ return nil, helper.ErrInvalidArgumentf("missing repository")
+ }
+
+ repo := s.localrepo(request.GetRepository())
+
+ // Verify that the repository exists on-disk such that we can return a proper gRPC code in
+ // case it doesn't.
+ if _, err := repo.Path(); err != nil {
+ return nil, err
+ }
+
+ if err := repo.ExecAndWait(ctx, git.SubCmd{
+ Name: "prune",
+ Flags: []git.Option{
+ git.ValueFlag{Name: "--expire", Value: "30.minutes.ago"},
+ },
+ }); err != nil {
+ return nil, helper.ErrInternalf("pruning objects: %w", err)
+ }
+
+ stats.LogObjectsInfo(ctx, repo)
+
+ return &gitalypb.PruneUnreachableObjectsResponse{}, nil
+}
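
For completeness, a minimal sketch of how a client would call the new RPC. The helper name pruneUnreachable and the pre-established connection and repository message are assumptions; the client and request types come from the generated gitalypb code touched by this change.

    import (
        "context"
        "fmt"

        "google.golang.org/grpc"

        "gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
    )

    // pruneUnreachable is a hypothetical helper wrapping the new RPC. It assumes the
    // caller already has a gRPC connection to Gitaly and a repository message.
    func pruneUnreachable(ctx context.Context, conn *grpc.ClientConn, repo *gitalypb.Repository) error {
        client := gitalypb.NewRepositoryServiceClient(conn)
        if _, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
            Repository: repo,
        }); err != nil {
            return fmt.Errorf("pruning unreachable objects: %w", err)
        }
        return nil
    }
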
diff --git a/internal/gitaly/service/repository/prune_unreachable_objects_test.go b/internal/gitaly/service/repository/prune_unreachable_objects_test.go
new file mode 100644
index 000000000..3d1d6c1f8
--- /dev/null
+++ b/internal/gitaly/service/repository/prune_unreachable_objects_test.go
@@ -0,0 +1,134 @@
+package repository
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/git"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/helper"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
+ "gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
+)
+
+func TestPruneUnreachableObjects(t *testing.T) {
+ ctx := testhelper.Context(t)
+
+ cfg, client := setupRepositoryServiceWithoutRepo(t)
+
+ setObjectTime := func(t *testing.T, repoPath string, objectID git.ObjectID, when time.Time) {
+ looseObjectPath := filepath.Join(repoPath, "objects", objectID.String()[:2], objectID.String()[2:])
+ require.NoError(t, os.Chtimes(looseObjectPath, when, when))
+ }
+
+ t.Run("missing repository", func(t *testing.T) {
+ _, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{})
+ if testhelper.IsPraefectEnabled() {
+ testhelper.RequireGrpcError(t, helper.ErrInvalidArgumentf("repo scoped: empty Repository"), err)
+ } else {
+ testhelper.RequireGrpcError(t, helper.ErrInvalidArgumentf("missing repository"), err)
+ }
+ })
+
+ t.Run("relative path points to removed repository", func(t *testing.T) {
+ repo, repoPath := gittest.CreateRepository(ctx, t, cfg)
+ require.NoError(t, os.RemoveAll(repoPath))
+
+ _, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
+ Repository: repo,
+ })
+ testhelper.RequireGrpcError(t, helper.ErrNotFoundf("GetRepoPath: not a git repository: %q", repoPath), err)
+ })
+
+ t.Run("empty repository", func(t *testing.T) {
+ repo, _ := gittest.CreateRepository(ctx, t, cfg)
+
+ _, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
+ Repository: repo,
+ })
+ require.NoError(t, err)
+ })
+
+ t.Run("repository with reachable objects", func(t *testing.T) {
+ repo, repoPath := gittest.CreateRepository(ctx, t, cfg)
+
+ // Create the commit and a branch pointing to it to make it reachable.
+ commitID := gittest.WriteCommit(t, cfg, repoPath, gittest.WithParents(), gittest.WithBranch("branch"))
+
+ _, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
+ Repository: repo,
+ })
+ require.NoError(t, err)
+
+ // Verify we can still read the commit.
+ gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "--verify", commitID.String()+"^{commit}")
+ })
+
+ t.Run("repository with recent unreachable objects", func(t *testing.T) {
+ repo, repoPath := gittest.CreateRepository(ctx, t, cfg)
+
+ // Create the commit, but don't create a reference pointing to it.
+ commitID := gittest.WriteCommit(t, cfg, repoPath, gittest.WithParents())
+ // Set the object time to something that's close to 30 minutes, but gives us enough
+ // room to not cause flakes.
+ setObjectTime(t, repoPath, commitID, time.Now().Add(-28*time.Minute))
+
+ _, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
+ Repository: repo,
+ })
+ require.NoError(t, err)
+
+ // Verify we can still read the commit. The commit isn't older than 30 minutes, so
+ // it shouldn't be pruned.
+ gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "--verify", commitID.String()+"^{commit}")
+ })
+
+ t.Run("repository with old unreachable objects", func(t *testing.T) {
+ repo, repoPath := gittest.CreateRepository(ctx, t, cfg)
+
+ // Create the commit, but don't create a reference pointing to it.
+ commitID := gittest.WriteCommit(t, cfg, repoPath, gittest.WithParents())
+ setObjectTime(t, repoPath, commitID, time.Now().Add(-31*time.Minute))
+
+ _, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
+ Repository: repo,
+ })
+ require.NoError(t, err)
+
+ cmd := gittest.NewCommand(t, cfg, "-C", repoPath, "rev-parse", "--verify", commitID.String()+"^{commit}")
+ output, err := cmd.CombinedOutput()
+ require.Error(t, err)
+ require.Equal(t, "fatal: Needed a single revision\n", string(output))
+ })
+
+ t.Run("repository with mixed objects", func(t *testing.T) {
+ repo, repoPath := gittest.CreateRepository(ctx, t, cfg)
+
+ reachableOldCommit := gittest.WriteCommit(t, cfg, repoPath, gittest.WithParents(), gittest.WithMessage("a"), gittest.WithBranch("branch"))
+ setObjectTime(t, repoPath, reachableOldCommit, time.Now().Add(-31*time.Minute))
+
+ unreachableRecentCommit := gittest.WriteCommit(t, cfg, repoPath, gittest.WithMessage("b"), gittest.WithParents())
+ setObjectTime(t, repoPath, unreachableRecentCommit, time.Now().Add(-28*time.Minute))
+
+ unreachableOldCommit := gittest.WriteCommit(t, cfg, repoPath, gittest.WithMessage("c"), gittest.WithParents())
+ setObjectTime(t, repoPath, unreachableOldCommit, time.Now().Add(-31*time.Minute))
+
+ _, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
+ Repository: repo,
+ })
+ require.NoError(t, err)
+
+ // The reachable old and unreachable recent commits should still exist.
+ gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "--verify", reachableOldCommit.String()+"^{commit}")
+ gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "--verify", unreachableRecentCommit.String()+"^{commit}")
+
+ // But the unreachable old commit should have been pruned.
+ cmd := gittest.NewCommand(t, cfg, "-C", repoPath, "rev-parse", "--verify", unreachableOldCommit.String()+"^{commit}")
+ output, err := cmd.CombinedOutput()
+ require.Error(t, err)
+ require.Equal(t, "fatal: Needed a single revision\n", string(output))
+ })
+}
diff --git a/internal/gitaly/service/repository/repack.go b/internal/gitaly/service/repository/repack.go
index 280fb6186..a3f428650 100644
--- a/internal/gitaly/service/repository/repack.go
+++ b/internal/gitaly/service/repository/repack.go
@@ -36,31 +36,52 @@ func log2Threads(numCPUs int) git.ValueFlag {
func (s *server) RepackFull(ctx context.Context, in *gitalypb.RepackFullRequest) (*gitalypb.RepackFullResponse, error) {
repo := s.localrepo(in.GetRepository())
- options := []git.Option{
- git.Flag{Name: "-A"},
- git.Flag{Name: "--pack-kept-objects"},
- git.Flag{Name: "-l"},
- log2Threads(runtime.NumCPU()),
+ cfg := repackCommandConfig{
+ fullRepack: true,
+ writeBitmap: in.GetCreateBitmap(),
}
- if err := repack(ctx, repo, in.GetCreateBitmap(), options...); err != nil {
+
+ if err := repack(ctx, repo, cfg); err != nil {
return nil, helper.ErrInternal(err)
}
+
return &gitalypb.RepackFullResponse{}, nil
}
func (s *server) RepackIncremental(ctx context.Context, in *gitalypb.RepackIncrementalRequest) (*gitalypb.RepackIncrementalResponse, error) {
repo := s.localrepo(in.GetRepository())
- if err := repack(ctx, repo, false); err != nil {
- return nil, helper.ErrInternal(err)
+ cfg := repackCommandConfig{
+ fullRepack: false,
+ writeBitmap: false,
+ }
+
+ if err := repack(ctx, repo, cfg); err != nil {
+ return nil, err
}
+
return &gitalypb.RepackIncrementalResponse{}, nil
}
-func repack(ctx context.Context, repo *localrepo.Repo, bitmap bool, args ...git.Option) error {
+type repackCommandConfig struct {
+ fullRepack bool
+ writeBitmap bool
+}
+
+func repack(ctx context.Context, repo *localrepo.Repo, cfg repackCommandConfig) error {
+ var options []git.Option
+ if cfg.fullRepack {
+ options = append(options,
+ git.Flag{Name: "-A"},
+ git.Flag{Name: "--pack-kept-objects"},
+ git.Flag{Name: "-l"},
+ log2Threads(runtime.NumCPU()),
+ )
+ }
+
if err := repo.ExecAndWait(ctx, git.SubCmd{
Name: "repack",
- Flags: append([]git.Option{git.Flag{Name: "-d"}}, args...),
- }, git.WithConfig(repackConfig(ctx, bitmap)...)); err != nil {
+ Flags: append([]git.Option{git.Flag{Name: "-d"}}, options...),
+ }, git.WithConfig(repackConfig(ctx, cfg.writeBitmap)...)); err != nil {
return err
}
diff --git a/internal/log/log.go b/internal/log/log.go
index 5c4e5f6db..b987283b2 100644
--- a/internal/log/log.go
+++ b/internal/log/log.go
@@ -3,7 +3,9 @@ package log
import (
"context"
"os"
+ "regexp"
+ grpcmwlogging "github.com/grpc-ecosystem/go-grpc-middleware/logging"
grpcmwlogrus "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"github.com/sirupsen/logrus"
@@ -91,6 +93,48 @@ func Configure(loggers []*logrus.Logger, format string, level string) {
}
}
+// DeciderOption returns an Option to support log filtering.
+// If the "GITALY_LOG_REQUEST_METHOD_ALLOW_PATTERN" environment variable is set, the logger only keeps log entries whose "fullMethodName" matches it; this takes precedence over the deny pattern.
+// If the "GITALY_LOG_REQUEST_METHOD_DENY_PATTERN" environment variable is set, the logger filters out log entries whose "fullMethodName" matches it.
+// Errored requests are always logged, regardless of these patterns.
+// If neither environment variable is set, there is no additional filtering.
+func DeciderOption() grpcmwlogrus.Option {
+ matcher := methodNameMatcherFromEnv()
+
+ if matcher == nil {
+ return grpcmwlogrus.WithDecider(grpcmwlogging.DefaultDeciderMethod)
+ }
+
+ decider := func(fullMethodName string, err error) bool {
+ if err != nil {
+ return true
+ }
+ return matcher(fullMethodName)
+ }
+
+ return grpcmwlogrus.WithDecider(decider)
+}
+
+func methodNameMatcherFromEnv() func(string) bool {
+ if pattern := os.Getenv("GITALY_LOG_REQUEST_METHOD_ALLOW_PATTERN"); pattern != "" {
+ methodRegex := regexp.MustCompile(pattern)
+
+ return func(fullMethodName string) bool {
+ return methodRegex.MatchString(fullMethodName)
+ }
+ }
+
+ if pattern := os.Getenv("GITALY_LOG_REQUEST_METHOD_DENY_PATTERN"); pattern != "" {
+ methodRegex := regexp.MustCompile(pattern)
+
+ return func(fullMethodName string) bool {
+ return !methodRegex.MatchString(fullMethodName)
+ }
+ }
+
+ return nil
+}
+
func mapGrpcLogLevel(level logrus.Level) logrus.Level {
// Honor grpc-go's debug settings: https://github.com/grpc/grpc-go#how-to-turn-on-logging
logLevel := os.Getenv("GRPC_GO_LOG_SEVERITY_LEVEL")
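
A rough sketch of wiring the new decider into a gRPC server follows; only the environment variable names, DeciderOption, and the go-grpc-middleware interceptor constructor are taken from this change, the rest of the setup is assumed.

    import (
        "os"

        grpcmwlogrus "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus"
        "github.com/sirupsen/logrus"
        "google.golang.org/grpc"

        gitalylog "gitlab.com/gitlab-org/gitaly/v14/internal/log"
    )

    // newServerWithFilteredLogging is a hypothetical example: it drops log entries for
    // health checks while keeping all other requests and every errored request.
    func newServerWithFilteredLogging() *grpc.Server {
        os.Setenv("GITALY_LOG_REQUEST_METHOD_DENY_PATTERN", "^/grpc.health.v1.Health/Check$")

        interceptor := grpcmwlogrus.UnaryServerInterceptor(
            logrus.NewEntry(logrus.StandardLogger()),
            gitalylog.DeciderOption(),
        )
        return grpc.NewServer(grpc.UnaryInterceptor(interceptor))
    }
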
diff --git a/internal/log/log_test.go b/internal/log/log_test.go
index 93f4b1325..a6b32b93b 100644
--- a/internal/log/log_test.go
+++ b/internal/log/log_test.go
@@ -445,3 +445,73 @@ func TestStreamLogDataCatcherServerInterceptor(t *testing.T) {
func createContext() context.Context {
return context.Background()
}
+
+func TestLogDeciderOption_logByRegexpMatch(t *testing.T) {
+ methodNames := []string{
+ "/grpc.health.v1.Health/Check",
+ "/gitaly.SmartHTTPService/InfoRefsUploadPack",
+ "/gitaly.SmartHTTPService/PostUploadPackWithSidechannel",
+ }
+ for _, tc := range []struct {
+ desc string
+ skip string
+ only string
+ shouldLogMethods []string
+ }{
+ {
+ desc: "default setting",
+ skip: "",
+ only: "",
+ shouldLogMethods: []string{"Check", "InfoRefsUploadPack", "PostUploadPackWithSidechannel"},
+ },
+ {
+ desc: "only log Check",
+ skip: "",
+ only: "^/grpc.health.v1.Health/Check$",
+ shouldLogMethods: []string{"Check"},
+ },
+ {
+ desc: "skip log Check",
+ skip: "^/grpc.health.v1.Health/Check$",
+ only: "",
+ shouldLogMethods: []string{"InfoRefsUploadPack", "PostUploadPackWithSidechannel"},
+ },
+ {
+ // If condition 'only' exists, ignore condition 'skip'
+ desc: "only log Check and ignore skip setting",
+ skip: "^/grpc.health.v1.Health/Check$",
+ only: "^/grpc.health.v1.Health/Check$",
+ shouldLogMethods: []string{"Check"},
+ },
+ } {
+ t.Run(tc.desc, func(t *testing.T) {
+ require.NoError(t, os.Setenv("GITALY_LOG_REQUEST_METHOD_DENY_PATTERN", tc.skip))
+ defer func() { require.NoError(t, os.Unsetenv("GITALY_LOG_REQUEST_METHOD_DENY_PATTERN")) }()
+ require.NoError(t, os.Setenv("GITALY_LOG_REQUEST_METHOD_ALLOW_PATTERN", tc.only))
+ defer func() { require.NoError(t, os.Unsetenv("GITALY_LOG_REQUEST_METHOD_ALLOW_PATTERN")) }()
+
+ logger, hook := test.NewNullLogger()
+ interceptor := grpcmwlogrus.UnaryServerInterceptor(logrus.NewEntry(logger), DeciderOption())
+
+ ctx := createContext()
+ for _, methodName := range methodNames {
+ _, err := interceptor(
+ ctx,
+ nil,
+ &grpc.UnaryServerInfo{FullMethod: methodName},
+ func(ctx context.Context, req interface{}) (interface{}, error) {
+ return nil, nil
+ },
+ )
+ require.NoError(t, err)
+ }
+
+ entries := hook.AllEntries()
+ require.Len(t, entries, len(tc.shouldLogMethods))
+ for idx, entry := range entries {
+ require.Equal(t, entry.Message, "finished unary call with code OK")
+ require.Equal(t, entry.Data["grpc.method"], tc.shouldLogMethods[idx])
+ }
+ })
+ }
+}
diff --git a/internal/middleware/limithandler/concurrency_limiter.go b/internal/middleware/limithandler/concurrency_limiter.go
index 92e5f965d..9dbf87b75 100644
--- a/internal/middleware/limithandler/concurrency_limiter.go
+++ b/internal/middleware/limithandler/concurrency_limiter.go
@@ -15,6 +15,9 @@ import (
// concurrency queue.
var ErrMaxQueueTime = errors.New("maximum time in concurrency queue reached")
+// ErrMaxQueueSize indicates the concurrency queue has reached its maximum size
+var ErrMaxQueueSize = errors.New("maximum queue size reached")
+
// LimitedFunc represents a function that will be limited
type LimitedFunc func() (resp interface{}, err error)
@@ -117,7 +120,8 @@ func (c *ConcurrencyLimiter) queueInc(ctx context.Context) error {
if featureflag.ConcurrencyQueueEnforceMax.IsEnabled(ctx) &&
c.queuedLimit > 0 &&
c.queued >= c.queuedLimit {
- return errors.New("maximum queue size reached")
+ c.monitor.Dropped(ctx, "max_size")
+ return ErrMaxQueueSize
}
c.queued++
@@ -159,6 +163,9 @@ func (c *ConcurrencyLimiter) Limit(ctx context.Context, lockKey string, f Limite
c.monitor.Dequeued(ctx)
if err != nil {
+ if errors.Is(err, ErrMaxQueueTime) {
+ c.monitor.Dropped(ctx, "max_time")
+ }
return nil, err
}
defer sem.release()
diff --git a/internal/middleware/limithandler/concurrency_limiter_test.go b/internal/middleware/limithandler/concurrency_limiter_test.go
index e581b17e2..bbeda4d76 100644
--- a/internal/middleware/limithandler/concurrency_limiter_test.go
+++ b/internal/middleware/limithandler/concurrency_limiter_test.go
@@ -16,12 +16,14 @@ import (
type counter struct {
sync.Mutex
- max int
- current int
- queued int
- dequeued int
- enter int
- exit int
+ max int
+ current int
+ queued int
+ dequeued int
+ enter int
+ exit int
+ droppedSize int
+ droppedTime int
}
func (c *counter) up() {
@@ -71,6 +73,15 @@ func (c *counter) Exit(ctx context.Context) {
c.exit++
}
+func (c *counter) Dropped(ctx context.Context, reason string) {
+ switch reason {
+ case "max_time":
+ c.droppedTime++
+ case "max_size":
+ c.droppedSize++
+ }
+}
+
func TestLimiter(t *testing.T) {
t.Parallel()
@@ -253,9 +264,9 @@ func TestConcurrencyLimiter_queueLimit(t *testing.T) {
)
monitorCh := make(chan struct{})
- gauge := &blockingQueueCounter{queuedCh: monitorCh}
+ monitor := &blockingQueueCounter{queuedCh: monitorCh}
ch := make(chan struct{})
- limiter := NewLimiter(1, queueLimit, nil, gauge)
+ limiter := NewLimiter(1, queueLimit, nil, monitor)
// occupied with one live request that takes a long time to complete
go func() {
@@ -303,13 +314,14 @@ func TestConcurrencyLimiter_queueLimit(t *testing.T) {
err := <-errChan
assert.Error(t, err)
assert.Equal(t, "maximum queue size reached", err.Error())
+ assert.Equal(t, monitor.droppedSize, 1)
} else {
<-monitorCh
assert.Equal(t, int64(queueLimit+1), limiter.queued)
+ assert.Equal(t, monitor.droppedSize, 0)
}
close(ch)
-
wg.Wait()
})
}
@@ -341,14 +353,15 @@ func TestLimitConcurrency_queueWaitTime(t *testing.T) {
ticker := helper.NewManualTicker()
dequeuedCh := make(chan struct{})
- gauge := &blockingDequeueCounter{dequeuedCh: dequeuedCh}
+ monitor := &blockingDequeueCounter{dequeuedCh: dequeuedCh}
+
limiter := NewLimiter(
1,
0,
func() helper.Ticker {
return ticker
},
- gauge,
+ monitor,
)
ch := make(chan struct{})
@@ -379,7 +392,7 @@ func TestLimitConcurrency_queueWaitTime(t *testing.T) {
err := <-errChan
assert.Equal(t, ErrMaxQueueTime, err)
-
+ assert.Equal(t, monitor.droppedTime, 1)
close(ch)
wg.Wait()
})
@@ -394,14 +407,15 @@ func TestLimitConcurrency_queueWaitTime(t *testing.T) {
ticker := helper.NewManualTicker()
dequeuedCh := make(chan struct{})
- gauge := &blockingDequeueCounter{dequeuedCh: dequeuedCh}
+ monitor := &blockingDequeueCounter{dequeuedCh: dequeuedCh}
+
limiter := NewLimiter(
1,
0,
func() helper.Ticker {
return ticker
},
- gauge,
+ monitor,
)
ch := make(chan struct{})
@@ -430,5 +444,6 @@ func TestLimitConcurrency_queueWaitTime(t *testing.T) {
err := <-errChan
assert.NoError(t, err)
+ assert.Equal(t, monitor.droppedTime, 0)
})
}
diff --git a/internal/middleware/limithandler/middleware.go b/internal/middleware/limithandler/middleware.go
index 347ceccf8..ac33ff4b1 100644
--- a/internal/middleware/limithandler/middleware.go
+++ b/internal/middleware/limithandler/middleware.go
@@ -38,6 +38,7 @@ type LimiterMiddleware struct {
acquiringSecondsMetric *prometheus.HistogramVec
inProgressMetric *prometheus.GaugeVec
queuedMetric *prometheus.GaugeVec
+ requestsDroppedMetric *prometheus.CounterVec
}
// New creates a new rate limiter
@@ -73,6 +74,18 @@ func New(cfg config.Cfg, getLockKey GetLockKey) *LimiterMiddleware {
},
[]string{"system", "grpc_service", "grpc_method"},
),
+ requestsDroppedMetric: prometheus.NewCounterVec(
+ prometheus.CounterOpts{
+ Name: "gitaly_requests_dropped_total",
+ Help: "Number of requests dropped from the queue",
+ },
+ []string{
+ "system",
+ "grpc_service",
+ "grpc_method",
+ "reason",
+ },
+ ),
}
middleware.methodLimiters = createLimiterConfig(middleware, cfg)
return middleware
@@ -88,6 +101,7 @@ func (c *LimiterMiddleware) Collect(metrics chan<- prometheus.Metric) {
c.acquiringSecondsMetric.Collect(metrics)
c.inProgressMetric.Collect(metrics)
c.queuedMetric.Collect(metrics)
+ c.requestsDroppedMetric.Collect(metrics)
}
// UnaryInterceptor returns a Unary Interceptor
diff --git a/internal/middleware/limithandler/middleware_test.go b/internal/middleware/limithandler/middleware_test.go
index cfd7380e0..0c63127f9 100644
--- a/internal/middleware/limithandler/middleware_test.go
+++ b/internal/middleware/limithandler/middleware_test.go
@@ -1,15 +1,18 @@
package limithandler_test
import (
+ "bytes"
"context"
"net"
"sync"
"testing"
"time"
+ promtest "github.com/prometheus/client_golang/prometheus/testutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
+ "gitlab.com/gitlab-org/gitaly/v14/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/v14/internal/middleware/limithandler"
pb "gitlab.com/gitlab-org/gitaly/v14/internal/middleware/limithandler/testdata"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
@@ -216,7 +219,99 @@ func TestStreamLimitHandler(t *testing.T) {
}
}
-func runServer(t *testing.T, s *server, opt ...grpc.ServerOption) (*grpc.Server, string) {
+type queueTestServer struct {
+ server
+ reqArrivedCh chan struct{}
+}
+
+func (q *queueTestServer) Unary(ctx context.Context, in *pb.UnaryRequest) (*pb.UnaryResponse, error) {
+ q.registerRequest()
+
+ q.reqArrivedCh <- struct{}{} // We need a way to know when a request got to the middleware
+ <-q.blockCh // Block to ensure concurrency
+
+ return &pb.UnaryResponse{Ok: true}, nil
+}
+
+func TestLimitHandlerMetrics(t *testing.T) {
+ s := &queueTestServer{reqArrivedCh: make(chan struct{})}
+ s.blockCh = make(chan struct{})
+
+ methodName := "/test.limithandler.Test/Unary"
+ cfg := config.Cfg{
+ Concurrency: []config.Concurrency{
+ {RPC: methodName, MaxPerRepo: 1, MaxQueueSize: 1},
+ },
+ }
+
+ lh := limithandler.New(cfg, fixedLockKey)
+ interceptor := lh.UnaryInterceptor()
+ srv, serverSocketPath := runServer(t, s, grpc.UnaryInterceptor(interceptor))
+ defer srv.Stop()
+
+ client, conn := newClient(t, serverSocketPath)
+ defer conn.Close()
+
+ ctx := featureflag.IncomingCtxWithFeatureFlag(
+ testhelper.Context(t),
+ featureflag.ConcurrencyQueueEnforceMax,
+ true,
+ )
+
+ go func() {
+ _, err := client.Unary(ctx, &pb.UnaryRequest{})
+ require.NoError(t, err)
+ }()
+ // wait until the first request is being processed. After this, requests will be queued
+ <-s.reqArrivedCh
+
+ respCh := make(chan *pb.UnaryResponse)
+ errChan := make(chan error)
+ // out of ten requests, the first one will be queued and the other 9 will return with
+ // an error
+ for i := 0; i < 10; i++ {
+ go func() {
+ resp, err := client.Unary(ctx, &pb.UnaryRequest{})
+ if err != nil {
+ errChan <- err
+ } else {
+ respCh <- resp
+ }
+ }()
+ }
+
+ var errs int
+ for err := range errChan {
+ testhelper.RequireGrpcError(t, limithandler.ErrMaxQueueSize, err)
+ errs++
+ if errs == 9 {
+ break
+ }
+ }
+
+ expectedMetrics := `# HELP gitaly_rate_limiting_in_progress Gauge of number of concurrent in-progress calls
+# TYPE gitaly_rate_limiting_in_progress gauge
+gitaly_rate_limiting_in_progress{grpc_method="ReplicateRepository",grpc_service="gitaly.RepositoryService",system="gitaly"} 0
+gitaly_rate_limiting_in_progress{grpc_method="Unary",grpc_service="test.limithandler.Test",system="gitaly"} 1
+# HELP gitaly_rate_limiting_queued Gauge of number of queued calls
+# TYPE gitaly_rate_limiting_queued gauge
+gitaly_rate_limiting_queued{grpc_method="ReplicateRepository",grpc_service="gitaly.RepositoryService",system="gitaly"} 0
+gitaly_rate_limiting_queued{grpc_method="Unary",grpc_service="test.limithandler.Test",system="gitaly"} 1
+# HELP gitaly_requests_dropped_total Number of requests dropped from the queue
+# TYPE gitaly_requests_dropped_total counter
+gitaly_requests_dropped_total{grpc_method="Unary",grpc_service="test.limithandler.Test",reason="max_size",system="gitaly"} 9
+`
+ assert.NoError(t, promtest.CollectAndCompare(lh, bytes.NewBufferString(expectedMetrics),
+ "gitaly_rate_limiting_queued",
+ "gitaly_requests_dropped_total",
+ "gitaly_rate_limiting_in_progress"))
+
+ close(s.blockCh)
+ <-s.reqArrivedCh
+ <-respCh
+}
+
+func runServer(t *testing.T, s pb.TestServer, opt ...grpc.ServerOption) (*grpc.Server, string) {
serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
grpcServer := grpc.NewServer(opt...)
pb.RegisterTestServer(grpcServer, s)
diff --git a/internal/middleware/limithandler/monitor.go b/internal/middleware/limithandler/monitor.go
index 40c3869dc..f77014b9d 100644
--- a/internal/middleware/limithandler/monitor.go
+++ b/internal/middleware/limithandler/monitor.go
@@ -17,6 +17,7 @@ type ConcurrencyMonitor interface {
Dequeued(ctx context.Context)
Enter(ctx context.Context, acquireTime time.Duration)
Exit(ctx context.Context)
+ Dropped(ctx context.Context, message string)
}
type nullConcurrencyMonitor struct{}
@@ -25,11 +26,13 @@ func (c *nullConcurrencyMonitor) Queued(ctx context.Context)
func (c *nullConcurrencyMonitor) Dequeued(ctx context.Context) {}
func (c *nullConcurrencyMonitor) Enter(ctx context.Context, acquireTime time.Duration) {}
func (c *nullConcurrencyMonitor) Exit(ctx context.Context) {}
+func (c *nullConcurrencyMonitor) Dropped(ctx context.Context, reason string) {}
type promMonitor struct {
queuedMetric prometheus.Gauge
inProgressMetric prometheus.Gauge
acquiringSecondsMetric prometheus.Observer
+ requestsDroppedMetric *prometheus.CounterVec
}
// newPromMonitor creates a new ConcurrencyMonitor that tracks limiter
@@ -41,6 +44,11 @@ func newPromMonitor(lh *LimiterMiddleware, system string, fullMethod string) Con
lh.queuedMetric.WithLabelValues(system, serviceName, methodName),
lh.inProgressMetric.WithLabelValues(system, serviceName, methodName),
lh.acquiringSecondsMetric.WithLabelValues(system, serviceName, methodName),
+ lh.requestsDroppedMetric.MustCurryWith(prometheus.Labels{
+ "system": system,
+ "grpc_service": serviceName,
+ "grpc_method": methodName,
+ }),
}
}
@@ -67,6 +75,10 @@ func (c *promMonitor) Exit(ctx context.Context) {
c.inProgressMetric.Dec()
}
+func (c *promMonitor) Dropped(ctx context.Context, reason string) {
+ c.requestsDroppedMetric.WithLabelValues(reason).Inc()
+}
+
func splitMethodName(fullMethodName string) (string, string) {
fullMethodName = strings.TrimPrefix(fullMethodName, "/") // remove leading slash
if i := strings.Index(fullMethodName, "/"); i >= 0 {
diff --git a/internal/praefect/coordinator.go b/internal/praefect/coordinator.go
index 0e691ccca..4270c8e82 100644
--- a/internal/praefect/coordinator.go
+++ b/internal/praefect/coordinator.go
@@ -95,15 +95,16 @@ var transactionRPCs = map[string]transactionsCondition{
// target repository, this shouldn't ever have any user-visible impact given that they're
// purely optimizations of the on-disk state. These RPCs are thus treated specially and
// shouldn't ever cause a repository generation bump.
- "/gitaly.RefService/PackRefs": transactionsDisabled,
- "/gitaly.RepositoryService/Cleanup": transactionsDisabled,
- "/gitaly.RepositoryService/GarbageCollect": transactionsDisabled,
- "/gitaly.RepositoryService/MidxRepack": transactionsDisabled,
- "/gitaly.RepositoryService/OptimizeRepository": transactionsDisabled,
- "/gitaly.RepositoryService/RepackFull": transactionsDisabled,
- "/gitaly.RepositoryService/RepackIncremental": transactionsDisabled,
- "/gitaly.RepositoryService/RestoreCustomHooks": transactionsDisabled,
- "/gitaly.RepositoryService/WriteCommitGraph": transactionsDisabled,
+ "/gitaly.RefService/PackRefs": transactionsDisabled,
+ "/gitaly.RepositoryService/Cleanup": transactionsDisabled,
+ "/gitaly.RepositoryService/GarbageCollect": transactionsDisabled,
+ "/gitaly.RepositoryService/MidxRepack": transactionsDisabled,
+ "/gitaly.RepositoryService/OptimizeRepository": transactionsDisabled,
+ "/gitaly.RepositoryService/PruneUnreachableObjects": transactionsDisabled,
+ "/gitaly.RepositoryService/RepackFull": transactionsDisabled,
+ "/gitaly.RepositoryService/RepackIncremental": transactionsDisabled,
+ "/gitaly.RepositoryService/RestoreCustomHooks": transactionsDisabled,
+ "/gitaly.RepositoryService/WriteCommitGraph": transactionsDisabled,
// These shouldn't ever use transactions for the sake of not creating cyclic dependencies.
"/gitaly.RefTransaction/StopTransaction": transactionsDisabled,
@@ -165,7 +166,8 @@ func getReplicationDetails(methodName string, m proto.Message) (datastore.Change
switch methodName {
case "/gitaly.RepositoryService/RemoveRepository":
return datastore.DeleteRepo, nil, nil
- case "/gitaly.RepositoryService/CreateFork",
+ case "/gitaly.ObjectPoolService/CreateObjectPool",
+ "/gitaly.RepositoryService/CreateFork",
"/gitaly.RepositoryService/CreateRepository",
"/gitaly.RepositoryService/CreateRepositoryFromBundle",
"/gitaly.RepositoryService/CreateRepositoryFromSnapshot",
@@ -225,6 +227,12 @@ func getReplicationDetails(methodName string, m proto.Message) (datastore.Change
return "", nil, fmt.Errorf("protocol changed: for method %q expected message type '%T', got '%T'", methodName, req, m)
}
return datastore.OptimizeRepository, nil, nil
+ case "/gitaly.RepositoryService/PruneUnreachableObjects":
+ req, ok := m.(*gitalypb.PruneUnreachableObjectsRequest)
+ if !ok {
+ return "", nil, fmt.Errorf("protocol changed: for method %q expected message type '%T', got '%T'", methodName, req, m)
+ }
+ return datastore.PruneUnreachableObjects, nil, nil
case "/gitaly.RefService/PackRefs":
req, ok := m.(*gitalypb.PackRefsRequest)
if !ok {
@@ -475,21 +483,21 @@ func (c *Coordinator) mutatorStreamParameters(ctx context.Context, call grpcCall
return nil, fmt.Errorf("mutator call: replication details: %w", err)
}
+ var additionalRepoRelativePath string
+ if additionalRepo, ok, err := call.methodInfo.AdditionalRepo(call.msg); err != nil {
+ return nil, helper.ErrInvalidArgument(err)
+ } else if ok {
+ additionalRepoRelativePath = additionalRepo.GetRelativePath()
+ }
+
var route RepositoryMutatorRoute
switch change {
case datastore.CreateRepo:
- route, err = c.router.RouteRepositoryCreation(ctx, virtualStorage, targetRepo.RelativePath)
+ route, err = c.router.RouteRepositoryCreation(ctx, virtualStorage, targetRepo.RelativePath, additionalRepoRelativePath)
if err != nil {
return nil, fmt.Errorf("route repository creation: %w", err)
}
default:
- var additionalRepoRelativePath string
- if additionalRepo, ok, err := call.methodInfo.AdditionalRepo(call.msg); err != nil {
- return nil, helper.ErrInvalidArgument(err)
- } else if ok {
- additionalRepoRelativePath = additionalRepo.GetRelativePath()
- }
-
route, err = c.router.RouteRepositoryMutator(ctx, virtualStorage, targetRepo.RelativePath, additionalRepoRelativePath)
if err != nil {
if errors.Is(err, ErrRepositoryReadOnly) {
diff --git a/internal/praefect/coordinator_test.go b/internal/praefect/coordinator_test.go
index 732671acc..b42506abf 100644
--- a/internal/praefect/coordinator_test.go
+++ b/internal/praefect/coordinator_test.go
@@ -213,7 +213,7 @@ func TestStreamDirectorMutator(t *testing.T) {
protoregistry.GitalyProtoPreregistered,
)
- frame, err := proto.Marshal(&gitalypb.CreateObjectPoolRequest{
+ frame, err := proto.Marshal(&gitalypb.FetchIntoObjectPoolRequest{
Origin: &targetRepo,
ObjectPool: &gitalypb.ObjectPool{Repository: &targetRepo},
})
@@ -221,7 +221,7 @@ func TestStreamDirectorMutator(t *testing.T) {
require.NoError(t, err)
- fullMethod := "/gitaly.ObjectPoolService/CreateObjectPool"
+ fullMethod := "/gitaly.ObjectPoolService/FetchIntoObjectPool"
peeker := &mockPeeker{frame}
streamParams, err := coordinator.StreamDirector(correlation.ContextWithCorrelation(ctx, "my-correlation-id"), fullMethod, peeker)
diff --git a/internal/praefect/datastore/datastore.go b/internal/praefect/datastore/datastore.go
index 2766c9574..2e7497e54 100644
--- a/internal/praefect/datastore/datastore.go
+++ b/internal/praefect/datastore/datastore.go
@@ -63,6 +63,8 @@ const (
MidxRepack = ChangeType("midx_repack")
// OptimizeRepository is when replication optimizes a repository
OptimizeRepository = ChangeType("optimize_repository")
+ // PruneUnreachableObjects is when replication prunes unreachable objects in a repository
+ PruneUnreachableObjects = ChangeType("prune_unreachable_objects")
)
func (ct ChangeType) String() string {
diff --git a/internal/praefect/protoregistry/protoregistry_test.go b/internal/praefect/protoregistry/protoregistry_test.go
index 744870af1..28818ba74 100644
--- a/internal/praefect/protoregistry/protoregistry_test.go
+++ b/internal/praefect/protoregistry/protoregistry_test.go
@@ -22,133 +22,135 @@ func TestNewProtoRegistry(t *testing.T) {
},
"CommitService": {
"CommitIsAncestor": protoregistry.OpAccessor,
- "TreeEntry": protoregistry.OpAccessor,
+ "CommitLanguages": protoregistry.OpAccessor,
+ "CommitStats": protoregistry.OpAccessor,
+ "CommitsByMessage": protoregistry.OpAccessor,
"CountCommits": protoregistry.OpAccessor,
"CountDivergingCommits": protoregistry.OpAccessor,
- "GetTreeEntries": protoregistry.OpAccessor,
- "ListFiles": protoregistry.OpAccessor,
- "FindCommit": protoregistry.OpAccessor,
- "CommitStats": protoregistry.OpAccessor,
+ "FilterShasWithSignatures": protoregistry.OpAccessor,
"FindAllCommits": protoregistry.OpAccessor,
+ "FindCommit": protoregistry.OpAccessor,
"FindCommits": protoregistry.OpAccessor,
- "CommitLanguages": protoregistry.OpAccessor,
- "RawBlame": protoregistry.OpAccessor,
+ "GetTreeEntries": protoregistry.OpAccessor,
"LastCommitForPath": protoregistry.OpAccessor,
- "ListLastCommitsForTree": protoregistry.OpAccessor,
- "CommitsByMessage": protoregistry.OpAccessor,
"ListCommitsByOid": protoregistry.OpAccessor,
- "FilterShasWithSignatures": protoregistry.OpAccessor,
+ "ListFiles": protoregistry.OpAccessor,
+ "ListLastCommitsForTree": protoregistry.OpAccessor,
+ "RawBlame": protoregistry.OpAccessor,
+ "TreeEntry": protoregistry.OpAccessor,
},
"ConflictsService": {
"ListConflictFiles": protoregistry.OpAccessor,
"ResolveConflicts": protoregistry.OpMutator,
},
"DiffService": {
- "CommitDiff": protoregistry.OpAccessor,
"CommitDelta": protoregistry.OpAccessor,
+ "CommitDiff": protoregistry.OpAccessor,
+ "DiffStats": protoregistry.OpAccessor,
"RawDiff": protoregistry.OpAccessor,
"RawPatch": protoregistry.OpAccessor,
- "DiffStats": protoregistry.OpAccessor,
},
"NamespaceService": {
"AddNamespace": protoregistry.OpMutator,
+ "NamespaceExists": protoregistry.OpAccessor,
"RemoveNamespace": protoregistry.OpMutator,
"RenameNamespace": protoregistry.OpMutator,
- "NamespaceExists": protoregistry.OpAccessor,
},
"ObjectPoolService": {
"CreateObjectPool": protoregistry.OpMutator,
"DeleteObjectPool": protoregistry.OpMutator,
+ "DisconnectGitAlternates": protoregistry.OpMutator,
"LinkRepositoryToObjectPool": protoregistry.OpMutator,
"ReduplicateRepository": protoregistry.OpMutator,
- "DisconnectGitAlternates": protoregistry.OpMutator,
},
"OperationService": {
+ "UserApplyPatch": protoregistry.OpMutator,
+ "UserCherryPick": protoregistry.OpMutator,
+ "UserCommitFiles": protoregistry.OpMutator,
"UserCreateBranch": protoregistry.OpMutator,
- "UserUpdateBranch": protoregistry.OpMutator,
- "UserDeleteBranch": protoregistry.OpMutator,
"UserCreateTag": protoregistry.OpMutator,
+ "UserDeleteBranch": protoregistry.OpMutator,
"UserDeleteTag": protoregistry.OpMutator,
- "UserMergeToRef": protoregistry.OpMutator,
- "UserMergeBranch": protoregistry.OpMutator,
"UserFFBranch": protoregistry.OpMutator,
- "UserCherryPick": protoregistry.OpMutator,
- "UserCommitFiles": protoregistry.OpMutator,
+ "UserMergeBranch": protoregistry.OpMutator,
+ "UserMergeToRef": protoregistry.OpMutator,
"UserRevert": protoregistry.OpMutator,
"UserSquash": protoregistry.OpMutator,
- "UserApplyPatch": protoregistry.OpMutator,
+ "UserUpdateBranch": protoregistry.OpMutator,
"UserUpdateSubmodule": protoregistry.OpMutator,
},
"RefService": {
- "FindDefaultBranchName": protoregistry.OpAccessor,
+ "DeleteRefs": protoregistry.OpMutator,
"FindAllBranchNames": protoregistry.OpAccessor,
- "FindAllTagNames": protoregistry.OpAccessor,
- "FindLocalBranches": protoregistry.OpAccessor,
"FindAllBranches": protoregistry.OpAccessor,
- "FindAllTags": protoregistry.OpAccessor,
"FindAllRemoteBranches": protoregistry.OpAccessor,
- "RefExists": protoregistry.OpAccessor,
+ "FindAllTagNames": protoregistry.OpAccessor,
+ "FindAllTags": protoregistry.OpAccessor,
"FindBranch": protoregistry.OpAccessor,
- "DeleteRefs": protoregistry.OpMutator,
+ "FindDefaultBranchName": protoregistry.OpAccessor,
+ "FindLocalBranches": protoregistry.OpAccessor,
+ "GetTagMessages": protoregistry.OpAccessor,
"ListBranchNamesContainingCommit": protoregistry.OpAccessor,
"ListTagNamesContainingCommit": protoregistry.OpAccessor,
- "GetTagMessages": protoregistry.OpAccessor,
"PackRefs": protoregistry.OpMutator,
+ "RefExists": protoregistry.OpAccessor,
},
"RemoteService": {
- "UpdateRemoteMirror": protoregistry.OpAccessor,
"FindRemoteRepository": protoregistry.OpAccessor,
"FindRemoteRootRef": protoregistry.OpAccessor,
+ "UpdateRemoteMirror": protoregistry.OpAccessor,
},
"RepositoryService": {
- "RepositoryExists": protoregistry.OpAccessor,
- "RepackIncremental": protoregistry.OpMutator,
- "RepackFull": protoregistry.OpMutator,
- "GarbageCollect": protoregistry.OpMutator,
- "RepositorySize": protoregistry.OpAccessor,
"ApplyGitattributes": protoregistry.OpMutator,
- "FetchRemote": protoregistry.OpMutator,
- "FetchBundle": protoregistry.OpMutator,
- "CreateRepository": protoregistry.OpMutator,
- "GetArchive": protoregistry.OpAccessor,
- "HasLocalBranches": protoregistry.OpAccessor,
- "FetchSourceBranch": protoregistry.OpMutator,
- "Fsck": protoregistry.OpAccessor,
- "WriteRef": protoregistry.OpMutator,
- "FindMergeBase": protoregistry.OpAccessor,
- "CreateFork": protoregistry.OpMutator,
- "CreateRepositoryFromURL": protoregistry.OpMutator,
+ "BackupCustomHooks": protoregistry.OpAccessor,
+ "CalculateChecksum": protoregistry.OpAccessor,
+ "Cleanup": protoregistry.OpMutator,
"CreateBundle": protoregistry.OpAccessor,
+ "CreateFork": protoregistry.OpMutator,
+ "CreateRepository": protoregistry.OpMutator,
"CreateRepositoryFromBundle": protoregistry.OpMutator,
+ "CreateRepositoryFromSnapshot": protoregistry.OpMutator,
+ "CreateRepositoryFromURL": protoregistry.OpMutator,
+ "FetchBundle": protoregistry.OpMutator,
+ "FetchRemote": protoregistry.OpMutator,
+ "FetchSourceBranch": protoregistry.OpMutator,
"FindLicense": protoregistry.OpAccessor,
+ "FindMergeBase": protoregistry.OpAccessor,
+ "Fsck": protoregistry.OpAccessor,
+ "GarbageCollect": protoregistry.OpMutator,
+ "GetArchive": protoregistry.OpAccessor,
"GetInfoAttributes": protoregistry.OpAccessor,
- "CalculateChecksum": protoregistry.OpAccessor,
- "Cleanup": protoregistry.OpMutator,
- "GetSnapshot": protoregistry.OpAccessor,
- "CreateRepositoryFromSnapshot": protoregistry.OpMutator,
"GetRawChanges": protoregistry.OpAccessor,
+ "GetSnapshot": protoregistry.OpAccessor,
+ "HasLocalBranches": protoregistry.OpAccessor,
+ "OptimizeRepository": protoregistry.OpMutator,
+ "PruneUnreachableObjects": protoregistry.OpMutator,
+ "RepackFull": protoregistry.OpMutator,
+ "RepackIncremental": protoregistry.OpMutator,
+ "RepositoryExists": protoregistry.OpAccessor,
+ "RepositorySize": protoregistry.OpAccessor,
+ "RestoreCustomHooks": protoregistry.OpMutator,
"SearchFilesByContent": protoregistry.OpAccessor,
"SearchFilesByName": protoregistry.OpAccessor,
- "RestoreCustomHooks": protoregistry.OpMutator,
- "BackupCustomHooks": protoregistry.OpAccessor,
+ "WriteRef": protoregistry.OpMutator,
},
"SmartHTTPService": {
- "InfoRefsUploadPack": protoregistry.OpAccessor,
"InfoRefsReceivePack": protoregistry.OpAccessor,
- "PostUploadPack": protoregistry.OpAccessor,
+ "InfoRefsUploadPack": protoregistry.OpAccessor,
"PostReceivePack": protoregistry.OpMutator,
+ "PostUploadPack": protoregistry.OpAccessor,
},
"SSHService": {
- "SSHUploadPack": protoregistry.OpAccessor,
"SSHReceivePack": protoregistry.OpMutator,
"SSHUploadArchive": protoregistry.OpAccessor,
+ "SSHUploadPack": protoregistry.OpAccessor,
},
"WikiService": {
- "WikiWritePage": protoregistry.OpMutator,
- "WikiUpdatePage": protoregistry.OpMutator,
"WikiFindPage": protoregistry.OpAccessor,
"WikiGetAllPages": protoregistry.OpAccessor,
"WikiListPages": protoregistry.OpAccessor,
+ "WikiUpdatePage": protoregistry.OpMutator,
+ "WikiWritePage": protoregistry.OpMutator,
},
}
diff --git a/internal/praefect/replicator.go b/internal/praefect/replicator.go
index a6e51c329..1bfc33745 100644
--- a/internal/praefect/replicator.go
+++ b/internal/praefect/replicator.go
@@ -46,6 +46,8 @@ type Replicator interface {
MidxRepack(ctx context.Context, event datastore.ReplicationEvent, target *grpc.ClientConn) error
// OptimizeRepository will optimize the target repository
OptimizeRepository(ctx context.Context, event datastore.ReplicationEvent, target *grpc.ClientConn) error
+ // PruneUnreachableObjects prunes unreachable objects from the target repository
+ PruneUnreachableObjects(ctx context.Context, event datastore.ReplicationEvent, target *grpc.ClientConn) error
}
type defaultReplicator struct {
@@ -360,6 +362,23 @@ func (dr defaultReplicator) OptimizeRepository(ctx context.Context, event datast
return nil
}
+func (dr defaultReplicator) PruneUnreachableObjects(ctx context.Context, event datastore.ReplicationEvent, targetCC *grpc.ClientConn) error {
+ targetRepo := &gitalypb.Repository{
+ StorageName: event.Job.TargetNodeStorage,
+ RelativePath: event.Job.ReplicaPath,
+ }
+
+ repoSvcClient := gitalypb.NewRepositoryServiceClient(targetCC)
+
+ if _, err := repoSvcClient.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
+ Repository: targetRepo,
+ }); err != nil {
+ return err
+ }
+
+ return nil
+}
+
func (dr defaultReplicator) RepackFull(ctx context.Context, event datastore.ReplicationEvent, targetCC *grpc.ClientConn) error {
targetRepo := &gitalypb.Repository{
StorageName: event.Job.TargetNodeStorage,
@@ -840,6 +859,8 @@ func (r ReplMgr) processReplicationEvent(ctx context.Context, event datastore.Re
err = r.replicator.MidxRepack(ctx, event, targetCC)
case datastore.OptimizeRepository:
err = r.replicator.OptimizeRepository(ctx, event, targetCC)
+ case datastore.PruneUnreachableObjects:
+ err = r.replicator.PruneUnreachableObjects(ctx, event, targetCC)
default:
err = fmt.Errorf("unknown replication change type encountered: %q", event.Job.Change)
}
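
For reference, the new replication path boils down to a single unary RPC against the target Gitaly node. Below is a minimal, hedged sketch of driving that RPC with a plain gitalypb client; the module import path assumes the v14 layout, and the address, storage name, and relative path are placeholders rather than values taken from this change (auth credentials are also omitted):

package main

import (
	"context"
	"log"

	"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
	"google.golang.org/grpc"
)

func main() {
	// Placeholder address; inside Praefect the connection is handed over by the node manager.
	conn, err := grpc.Dial("gitaly.example.internal:8075", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial gitaly: %v", err)
	}
	defer conn.Close()

	client := gitalypb.NewRepositoryServiceClient(conn)

	// Mirrors defaultReplicator.PruneUnreachableObjects: the request only carries the
	// target repository, and the response message is empty.
	_, err = client.PruneUnreachableObjects(context.Background(), &gitalypb.PruneUnreachableObjectsRequest{
		Repository: &gitalypb.Repository{
			StorageName:  "default",                   // placeholder storage name
			RelativePath: "@hashed/xx/yy/example.git", // placeholder replica path
		},
	})
	if err != nil {
		log.Fatalf("prune unreachable objects: %v", err)
	}
}
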
diff --git a/internal/praefect/replicator_test.go b/internal/praefect/replicator_test.go
index 4a409615d..3a6cad4fa 100644
--- a/internal/praefect/replicator_test.go
+++ b/internal/praefect/replicator_test.go
@@ -400,6 +400,9 @@ func TestReplicator_PropagateReplicationJob(t *testing.T) {
_, err = repositoryClient.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{Repository: repository})
require.NoError(t, err)
+ _, err = repositoryClient.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{Repository: repository})
+ require.NoError(t, err)
+
_, err = refClient.PackRefs(ctx, &gitalypb.PackRefsRequest{
Repository: repository,
})
@@ -431,6 +434,9 @@ func TestReplicator_PropagateReplicationJob(t *testing.T) {
expectedPrimaryOptimizeRepository := &gitalypb.OptimizeRepositoryRequest{
Repository: primaryRepository,
}
+ expectedPruneUnreachableObjects := &gitalypb.PruneUnreachableObjectsRequest{
+ Repository: primaryRepository,
+ }
expectedPrimaryPackRefs := &gitalypb.PackRefsRequest{
Repository: primaryRepository,
}
@@ -445,6 +451,7 @@ func TestReplicator_PropagateReplicationJob(t *testing.T) {
waitForRequest(t, primaryServer.writeCommitGraphChan, expectedPrimaryWriteCommitGraph, 5*time.Second)
waitForRequest(t, primaryServer.midxRepackChan, expectedPrimaryMidxRepack, 5*time.Second)
waitForRequest(t, primaryServer.optimizeRepositoryChan, expectedPrimaryOptimizeRepository, 5*time.Second)
+ waitForRequest(t, primaryServer.pruneUnreachableObjectsChan, expectedPruneUnreachableObjects, 5*time.Second)
waitForRequest(t, primaryServer.packRefsChan, expectedPrimaryPackRefs, 5*time.Second)
secondaryRepository := &gitalypb.Repository{StorageName: secondaryStorage, RelativePath: repositoryRelativePath}
@@ -470,6 +477,9 @@ func TestReplicator_PropagateReplicationJob(t *testing.T) {
expectedSecondaryOptimizeRepository := expectedPrimaryOptimizeRepository
expectedSecondaryOptimizeRepository.Repository = secondaryRepository
+ expectedSecondaryPruneUnreachableObjects := expectedPruneUnreachableObjects
+ expectedSecondaryPruneUnreachableObjects.Repository = secondaryRepository
+
expectedSecondaryPackRefs := expectedPrimaryPackRefs
expectedSecondaryPackRefs.Repository = secondaryRepository
@@ -481,6 +491,7 @@ func TestReplicator_PropagateReplicationJob(t *testing.T) {
waitForRequest(t, secondaryServer.writeCommitGraphChan, expectedSecondaryWriteCommitGraph, 5*time.Second)
waitForRequest(t, secondaryServer.midxRepackChan, expectedSecondaryMidxRepack, 5*time.Second)
waitForRequest(t, secondaryServer.optimizeRepositoryChan, expectedSecondaryOptimizeRepository, 5*time.Second)
+ waitForRequest(t, secondaryServer.pruneUnreachableObjectsChan, expectedSecondaryPruneUnreachableObjects, 5*time.Second)
waitForRequest(t, secondaryServer.packRefsChan, expectedSecondaryPackRefs, 5*time.Second)
wg.Wait()
cancel()
@@ -488,7 +499,7 @@ func TestReplicator_PropagateReplicationJob(t *testing.T) {
}
type mockServer struct {
- gcChan, repackFullChan, repackIncrChan, cleanupChan, writeCommitGraphChan, midxRepackChan, optimizeRepositoryChan, packRefsChan chan proto.Message
+ gcChan, repackFullChan, repackIncrChan, cleanupChan, writeCommitGraphChan, midxRepackChan, optimizeRepositoryChan, pruneUnreachableObjectsChan, packRefsChan chan proto.Message
gitalypb.UnimplementedRepositoryServiceServer
gitalypb.UnimplementedRefServiceServer
@@ -496,14 +507,15 @@ type mockServer struct {
func newMockRepositoryServer() *mockServer {
return &mockServer{
- gcChan: make(chan proto.Message),
- repackFullChan: make(chan proto.Message),
- repackIncrChan: make(chan proto.Message),
- cleanupChan: make(chan proto.Message),
- writeCommitGraphChan: make(chan proto.Message),
- midxRepackChan: make(chan proto.Message),
- optimizeRepositoryChan: make(chan proto.Message),
- packRefsChan: make(chan proto.Message),
+ gcChan: make(chan proto.Message),
+ repackFullChan: make(chan proto.Message),
+ repackIncrChan: make(chan proto.Message),
+ cleanupChan: make(chan proto.Message),
+ writeCommitGraphChan: make(chan proto.Message),
+ midxRepackChan: make(chan proto.Message),
+ optimizeRepositoryChan: make(chan proto.Message),
+ pruneUnreachableObjectsChan: make(chan proto.Message),
+ packRefsChan: make(chan proto.Message),
}
}
@@ -556,6 +568,13 @@ func (m *mockServer) OptimizeRepository(ctx context.Context, in *gitalypb.Optimi
return &gitalypb.OptimizeRepositoryResponse{}, nil
}
+func (m *mockServer) PruneUnreachableObjects(ctx context.Context, in *gitalypb.PruneUnreachableObjectsRequest) (*gitalypb.PruneUnreachableObjectsResponse, error) {
+ go func() {
+ m.pruneUnreachableObjectsChan <- in
+ }()
+ return &gitalypb.PruneUnreachableObjectsResponse{}, nil
+}
+
func (m *mockServer) PackRefs(ctx context.Context, in *gitalypb.PackRefsRequest) (*gitalypb.PackRefsResponse, error) {
go func() {
m.packRefsChan <- in
diff --git a/internal/praefect/router.go b/internal/praefect/router.go
index 8c3ec9a5c..1b6f19387 100644
--- a/internal/praefect/router.go
+++ b/internal/praefect/router.go
@@ -65,5 +65,5 @@ type Router interface {
RouteRepositoryMutator(ctx context.Context, virtualStorage, relativePath, additionalRepoRelativePath string) (RepositoryMutatorRoute, error)
	// RouteRepositoryCreation returns the primary and secondaries that should handle the repository creation
// request. It is up to the caller to store the assignments and primary information after finishing the RPC.
- RouteRepositoryCreation(ctx context.Context, virtualStorage, relativePath string) (RepositoryMutatorRoute, error)
+ RouteRepositoryCreation(ctx context.Context, virtualStorage, relativePath, additionalRepoRelativePath string) (RepositoryMutatorRoute, error)
}
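
Any out-of-tree implementation of the Router interface has to grow the extra parameter as well. The fragment below is a hedged illustration only: singleNodeRouter and its primary field are hypothetical, and the praefect package types (RepositoryMutatorRoute, RouterNode) are assumed to be in scope.

// RouteRepositoryCreation on a hypothetical single-node router: it simply echoes the
// relative paths back, since there is nothing to resolve. Illustrative only.
func (r *singleNodeRouter) RouteRepositoryCreation(ctx context.Context, virtualStorage, relativePath, additionalRepoRelativePath string) (RepositoryMutatorRoute, error) {
	return RepositoryMutatorRoute{
		ReplicaPath:           relativePath,
		AdditionalReplicaPath: additionalRepoRelativePath,
		Primary:               r.primary, // hypothetical field holding the only RouterNode
	}, nil
}
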
diff --git a/internal/praefect/router_node_manager.go b/internal/praefect/router_node_manager.go
index 7793c4d6d..9cceaea0a 100644
--- a/internal/praefect/router_node_manager.go
+++ b/internal/praefect/router_node_manager.go
@@ -125,7 +125,7 @@ func (r *nodeManagerRouter) RouteRepositoryMutator(ctx context.Context, virtualS
// RouteRepositoryCreation includes healthy secondaries in the transaction and sets the unhealthy secondaries as
// replication targets. The virtual storage's primary acts as the primary for every repository.
-func (r *nodeManagerRouter) RouteRepositoryCreation(ctx context.Context, virtualStorage, relativePath string) (RepositoryMutatorRoute, error) {
+func (r *nodeManagerRouter) RouteRepositoryCreation(ctx context.Context, virtualStorage, relativePath, additionalRepoRelativePath string) (RepositoryMutatorRoute, error) {
shard, err := r.mgr.GetShard(ctx, virtualStorage)
if err != nil {
return RepositoryMutatorRoute{}, fmt.Errorf("get shard: %w", err)
@@ -144,9 +144,10 @@ func (r *nodeManagerRouter) RouteRepositoryCreation(ctx context.Context, virtual
}
return RepositoryMutatorRoute{
- Primary: toRouterNode(shard.Primary),
- ReplicaPath: relativePath,
- Secondaries: secondaries,
- ReplicationTargets: replicationTargets,
+ Primary: toRouterNode(shard.Primary),
+ ReplicaPath: relativePath,
+ AdditionalReplicaPath: additionalRepoRelativePath,
+ Secondaries: secondaries,
+ ReplicationTargets: replicationTargets,
}, nil
}
diff --git a/internal/praefect/router_per_repository.go b/internal/praefect/router_per_repository.go
index f6d3f89ed..bd3a37f66 100644
--- a/internal/praefect/router_per_repository.go
+++ b/internal/praefect/router_per_repository.go
@@ -190,6 +190,19 @@ func (r *PerRepositoryRouter) RouteRepositoryAccessor(ctx context.Context, virtu
}, nil
}
+func (r *PerRepositoryRouter) resolveAdditionalReplicaPath(ctx context.Context, virtualStorage, additionalRelativePath string) (string, error) {
+ if additionalRelativePath == "" {
+ return "", nil
+ }
+
+ additionalRepositoryID, err := r.rs.GetRepositoryID(ctx, virtualStorage, additionalRelativePath)
+ if err != nil {
+ return "", fmt.Errorf("get additional repository id: %w", err)
+ }
+
+ return r.rs.GetReplicaPath(ctx, additionalRepositoryID)
+}
+
//nolint: revive,stylecheck // This is unintentionally missing documentation.
func (r *PerRepositoryRouter) RouteRepositoryMutator(ctx context.Context, virtualStorage, relativePath, additionalRelativePath string) (RepositoryMutatorRoute, error) {
healthyNodes, err := r.healthyNodes(virtualStorage)
@@ -202,17 +215,9 @@ func (r *PerRepositoryRouter) RouteRepositoryMutator(ctx context.Context, virtua
return RepositoryMutatorRoute{}, fmt.Errorf("get repository id: %w", err)
}
- var additionalReplicaPath string
- if additionalRelativePath != "" {
- additionalRepositoryID, err := r.rs.GetRepositoryID(ctx, virtualStorage, additionalRelativePath)
- if err != nil {
- return RepositoryMutatorRoute{}, fmt.Errorf("get additional repository id: %w", err)
- }
-
- additionalReplicaPath, err = r.rs.GetReplicaPath(ctx, additionalRepositoryID)
- if err != nil {
- return RepositoryMutatorRoute{}, fmt.Errorf("get additional repository replica path: %w", err)
- }
+ additionalReplicaPath, err := r.resolveAdditionalReplicaPath(ctx, virtualStorage, additionalRelativePath)
+ if err != nil {
+ return RepositoryMutatorRoute{}, fmt.Errorf("resolve additional replica path: %w", err)
}
primary, err := r.pg.GetPrimary(ctx, virtualStorage, repositoryID)
@@ -280,7 +285,12 @@ func (r *PerRepositoryRouter) RouteRepositoryMutator(ctx context.Context, virtua
// RouteRepositoryCreation picks a random healthy node to act as the primary node and selects the secondary nodes
// if assignments are enabled. Healthy secondaries take part in the transaction; unhealthy secondaries are set as
// replication targets.
-func (r *PerRepositoryRouter) RouteRepositoryCreation(ctx context.Context, virtualStorage, relativePath string) (RepositoryMutatorRoute, error) {
+func (r *PerRepositoryRouter) RouteRepositoryCreation(ctx context.Context, virtualStorage, relativePath, additionalRelativePath string) (RepositoryMutatorRoute, error) {
+ additionalReplicaPath, err := r.resolveAdditionalReplicaPath(ctx, virtualStorage, additionalRelativePath)
+ if err != nil {
+ return RepositoryMutatorRoute{}, fmt.Errorf("resolve additional replica path: %w", err)
+ }
+
healthyNodes, err := r.healthyNodes(virtualStorage)
if err != nil {
return RepositoryMutatorRoute{}, err
@@ -348,10 +358,11 @@ func (r *PerRepositoryRouter) RouteRepositoryCreation(ctx context.Context, virtu
}
return RepositoryMutatorRoute{
- RepositoryID: id,
- ReplicaPath: relativePath,
- Primary: primary,
- Secondaries: secondaries,
- ReplicationTargets: replicationTargets,
+ RepositoryID: id,
+ ReplicaPath: relativePath,
+ AdditionalReplicaPath: additionalReplicaPath,
+ Primary: primary,
+ Secondaries: secondaries,
+ ReplicationTargets: replicationTargets,
}, nil
}
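
On the calling side, the additional repository is the one the coordinator extracts from the request, e.g. the fork source of a CreateFork call. A hedged usage fragment, assumed to live inside the praefect package with a Router instance named router; the storage and path literals are placeholders:

// Route a CreateFork-style request: the target repository is being created, while the
// fork source already exists and only needs its replica path resolved up front.
route, err := router.RouteRepositoryCreation(ctx,
	"default",                  // virtual storage (placeholder)
	"@hashed/ab/cd/fork.git",   // repository to be created (placeholder)
	"@hashed/ef/gh/source.git", // additional repository, e.g. the fork source (placeholder)
)
if err != nil {
	return fmt.Errorf("route repository creation: %w", err)
}
// route.ReplicaPath is where the new repository will live on the selected nodes;
// route.AdditionalReplicaPath is the resolved on-disk path of the additional repository.
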
diff --git a/internal/praefect/router_per_repository_test.go b/internal/praefect/router_per_repository_test.go
index b98d94100..dfc68552d 100644
--- a/internal/praefect/router_per_repository_test.go
+++ b/internal/praefect/router_per_repository_test.go
@@ -473,19 +473,24 @@ func TestPerRepositoryRouter_RouteRepositoryCreation(t *testing.T) {
db := testdb.New(t)
- const relativePath = "relative-path"
+ const (
+ relativePath = "relative-path"
+ additionalRelativePath = "additional-relative-path"
+ additionalReplicaPath = "additional-replica-path"
+ )
for _, tc := range []struct {
- desc string
- virtualStorage string
- healthyNodes StaticHealthChecker
- replicationFactor int
- primaryCandidates int
- primaryPick int
- secondaryCandidates int
- repositoryExists bool
- matchRoute matcher
- error error
+ desc string
+ virtualStorage string
+ healthyNodes StaticHealthChecker
+ replicationFactor int
+ primaryCandidates int
+ primaryPick int
+ secondaryCandidates int
+ repositoryExists bool
+ additionalRelativePath string
+ matchRoute matcher
+ error error
}{
{
desc: "no healthy nodes",
@@ -499,17 +504,19 @@ func TestPerRepositoryRouter_RouteRepositoryCreation(t *testing.T) {
error: nodes.ErrVirtualStorageNotExist,
},
{
- desc: "no healthy secondaries",
- virtualStorage: "virtual-storage-1",
- healthyNodes: StaticHealthChecker{"virtual-storage-1": {"primary"}},
- primaryCandidates: 1,
- primaryPick: 0,
+ desc: "no healthy secondaries",
+ virtualStorage: "virtual-storage-1",
+ healthyNodes: StaticHealthChecker{"virtual-storage-1": {"primary"}},
+ primaryCandidates: 1,
+ primaryPick: 0,
+ additionalRelativePath: additionalRelativePath,
matchRoute: requireOneOf(
RepositoryMutatorRoute{
- RepositoryID: 1,
- ReplicaPath: relativePath,
- Primary: RouterNode{Storage: "primary", Connection: primaryConn},
- ReplicationTargets: []string{"secondary-1", "secondary-2"},
+ RepositoryID: 1,
+ ReplicaPath: relativePath,
+ AdditionalReplicaPath: additionalReplicaPath,
+ Primary: RouterNode{Storage: "primary", Connection: primaryConn},
+ ReplicationTargets: []string{"secondary-1", "secondary-2"},
},
),
},
@@ -614,6 +621,18 @@ func TestPerRepositoryRouter_RouteRepositoryCreation(t *testing.T) {
repositoryExists: true,
error: fmt.Errorf("reserve repository id: %w", commonerr.ErrRepositoryAlreadyExists),
},
+ {
+ desc: "additional repository doesn't exist",
+ virtualStorage: "virtual-storage-1",
+ additionalRelativePath: "non-existent",
+ error: fmt.Errorf(
+ "resolve additional replica path: %w",
+ fmt.Errorf(
+ "get additional repository id: %w",
+ commonerr.NewRepositoryNotFoundError("virtual-storage-1", "non-existent"),
+ ),
+ ),
+ },
} {
t.Run(tc.desc, func(t *testing.T) {
ctx := testhelper.Context(t)
@@ -627,6 +646,8 @@ func TestPerRepositoryRouter_RouteRepositoryCreation(t *testing.T) {
)
}
+ require.NoError(t, rs.CreateRepository(ctx, 2, "virtual-storage-1", additionalRelativePath, additionalReplicaPath, "primary", nil, nil, true, true))
+
route, err := NewPerRepositoryRouter(
Connections{
"virtual-storage-1": {
@@ -650,7 +671,7 @@ func TestPerRepositoryRouter_RouteRepositoryCreation(t *testing.T) {
nil,
rs,
map[string]int{"virtual-storage-1": tc.replicationFactor},
- ).RouteRepositoryCreation(ctx, tc.virtualStorage, relativePath)
+ ).RouteRepositoryCreation(ctx, tc.virtualStorage, relativePath, tc.additionalRelativePath)
if tc.error != nil {
require.Equal(t, tc.error, err)
return
diff --git a/proto/go/gitalypb/repository-service.pb.go b/proto/go/gitalypb/repository-service.pb.go
index 64d14bc42..eb8c8bd3f 100644
--- a/proto/go/gitalypb/repository-service.pb.go
+++ b/proto/go/gitalypb/repository-service.pb.go
@@ -4235,6 +4235,95 @@ func (*OptimizeRepositoryResponse) Descriptor() ([]byte, []int) {
return file_repository_service_proto_rawDescGZIP(), []int{78}
}
+// PruneUnreachableObjectsRequest is a request for the PruneUnreachableObjects
+// RPC call.
+type PruneUnreachableObjectsRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Repository *Repository `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"`
+}
+
+func (x *PruneUnreachableObjectsRequest) Reset() {
+ *x = PruneUnreachableObjectsRequest{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_repository_service_proto_msgTypes[79]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *PruneUnreachableObjectsRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*PruneUnreachableObjectsRequest) ProtoMessage() {}
+
+func (x *PruneUnreachableObjectsRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_repository_service_proto_msgTypes[79]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use PruneUnreachableObjectsRequest.ProtoReflect.Descriptor instead.
+func (*PruneUnreachableObjectsRequest) Descriptor() ([]byte, []int) {
+ return file_repository_service_proto_rawDescGZIP(), []int{79}
+}
+
+func (x *PruneUnreachableObjectsRequest) GetRepository() *Repository {
+ if x != nil {
+ return x.Repository
+ }
+ return nil
+}
+
+// PruneUnreachableObjectsResponse is a response for the
+// PruneUnreachableObjects RPC call.
+type PruneUnreachableObjectsResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+}
+
+func (x *PruneUnreachableObjectsResponse) Reset() {
+ *x = PruneUnreachableObjectsResponse{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_repository_service_proto_msgTypes[80]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *PruneUnreachableObjectsResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*PruneUnreachableObjectsResponse) ProtoMessage() {}
+
+func (x *PruneUnreachableObjectsResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_repository_service_proto_msgTypes[80]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use PruneUnreachableObjectsResponse.ProtoReflect.Descriptor instead.
+func (*PruneUnreachableObjectsResponse) Descriptor() ([]byte, []int) {
+ return file_repository_service_proto_rawDescGZIP(), []int{80}
+}
+
// SetFullPathRequest is a request for the SetFullPath RPC.
type SetFullPathRequest struct {
state protoimpl.MessageState
@@ -4250,7 +4339,7 @@ type SetFullPathRequest struct {
func (x *SetFullPathRequest) Reset() {
*x = SetFullPathRequest{}
if protoimpl.UnsafeEnabled {
- mi := &file_repository_service_proto_msgTypes[79]
+ mi := &file_repository_service_proto_msgTypes[81]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -4263,7 +4352,7 @@ func (x *SetFullPathRequest) String() string {
func (*SetFullPathRequest) ProtoMessage() {}
func (x *SetFullPathRequest) ProtoReflect() protoreflect.Message {
- mi := &file_repository_service_proto_msgTypes[79]
+ mi := &file_repository_service_proto_msgTypes[81]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -4276,7 +4365,7 @@ func (x *SetFullPathRequest) ProtoReflect() protoreflect.Message {
// Deprecated: Use SetFullPathRequest.ProtoReflect.Descriptor instead.
func (*SetFullPathRequest) Descriptor() ([]byte, []int) {
- return file_repository_service_proto_rawDescGZIP(), []int{79}
+ return file_repository_service_proto_rawDescGZIP(), []int{81}
}
func (x *SetFullPathRequest) GetRepository() *Repository {
@@ -4303,7 +4392,7 @@ type SetFullPathResponse struct {
func (x *SetFullPathResponse) Reset() {
*x = SetFullPathResponse{}
if protoimpl.UnsafeEnabled {
- mi := &file_repository_service_proto_msgTypes[80]
+ mi := &file_repository_service_proto_msgTypes[82]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -4316,7 +4405,7 @@ func (x *SetFullPathResponse) String() string {
func (*SetFullPathResponse) ProtoMessage() {}
func (x *SetFullPathResponse) ProtoReflect() protoreflect.Message {
- mi := &file_repository_service_proto_msgTypes[80]
+ mi := &file_repository_service_proto_msgTypes[82]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -4329,7 +4418,7 @@ func (x *SetFullPathResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use SetFullPathResponse.ProtoReflect.Descriptor instead.
func (*SetFullPathResponse) Descriptor() ([]byte, []int) {
- return file_repository_service_proto_rawDescGZIP(), []int{80}
+ return file_repository_service_proto_rawDescGZIP(), []int{82}
}
type GetRawChangesResponse_RawChange struct {
@@ -4351,7 +4440,7 @@ type GetRawChangesResponse_RawChange struct {
func (x *GetRawChangesResponse_RawChange) Reset() {
*x = GetRawChangesResponse_RawChange{}
if protoimpl.UnsafeEnabled {
- mi := &file_repository_service_proto_msgTypes[81]
+ mi := &file_repository_service_proto_msgTypes[83]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -4364,7 +4453,7 @@ func (x *GetRawChangesResponse_RawChange) String() string {
func (*GetRawChangesResponse_RawChange) ProtoMessage() {}
func (x *GetRawChangesResponse_RawChange) ProtoReflect() protoreflect.Message {
- mi := &file_repository_service_proto_msgTypes[81]
+ mi := &file_repository_service_proto_msgTypes[83]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -4897,252 +4986,267 @@ var file_repository_service_proto_rawDesc = []byte{
0x6f, 0x72, 0x79, 0x42, 0x04, 0x98, 0xc6, 0x2c, 0x01, 0x52, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x73,
0x69, 0x74, 0x6f, 0x72, 0x79, 0x22, 0x1c, 0x0a, 0x1a, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a,
0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x22, 0x62, 0x0a, 0x12, 0x53, 0x65, 0x74, 0x46, 0x75, 0x6c, 0x6c, 0x50, 0x61,
- 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x0a, 0x72, 0x65, 0x70,
- 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e,
- 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72,
- 0x79, 0x42, 0x04, 0x98, 0xc6, 0x2c, 0x01, 0x52, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74,
- 0x6f, 0x72, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x15, 0x0a, 0x13, 0x53, 0x65, 0x74, 0x46, 0x75,
- 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x96,
- 0x1d, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x65, 0x72,
- 0x76, 0x69, 0x63, 0x65, 0x12, 0x5d, 0x0a, 0x10, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f,
- 0x72, 0x79, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x12, 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
+ 0x6e, 0x73, 0x65, 0x22, 0x5a, 0x0a, 0x1e, 0x50, 0x72, 0x75, 0x6e, 0x65, 0x55, 0x6e, 0x72, 0x65,
+ 0x61, 0x63, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74,
+ 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, 0x69, 0x74, 0x61,
+ 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x42, 0x04, 0x98,
+ 0xc6, 0x2c, 0x01, 0x52, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x22,
+ 0x21, 0x0a, 0x1f, 0x50, 0x72, 0x75, 0x6e, 0x65, 0x55, 0x6e, 0x72, 0x65, 0x61, 0x63, 0x68, 0x61,
+ 0x62, 0x6c, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
+ 0x73, 0x65, 0x22, 0x62, 0x0a, 0x12, 0x53, 0x65, 0x74, 0x46, 0x75, 0x6c, 0x6c, 0x50, 0x61, 0x74,
+ 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x0a, 0x72, 0x65, 0x70, 0x6f,
+ 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67,
+ 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79,
+ 0x42, 0x04, 0x98, 0xc6, 0x2c, 0x01, 0x52, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f,
+ 0x72, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x15, 0x0a, 0x13, 0x53, 0x65, 0x74, 0x46, 0x75, 0x6c,
+ 0x6c, 0x50, 0x61, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x8a, 0x1e,
+ 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x65, 0x72, 0x76,
+ 0x69, 0x63, 0x65, 0x12, 0x5d, 0x0a, 0x10, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72,
+ 0x79, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x12, 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
+ 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x45, 0x78, 0x69, 0x73, 0x74,
+ 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
0x79, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x45, 0x78, 0x69, 0x73,
- 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61,
- 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x45, 0x78, 0x69,
- 0x73, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28,
- 0x02, 0x08, 0x02, 0x12, 0x60, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x49, 0x6e, 0x63,
- 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x6c, 0x12, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
- 0x79, 0x2e, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x49, 0x6e, 0x63, 0x72, 0x65, 0x6d, 0x65, 0x6e,
- 0x74, 0x61, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x67, 0x69, 0x74,
- 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x49, 0x6e, 0x63, 0x72, 0x65, 0x6d,
- 0x65, 0x6e, 0x74, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa,
- 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x4b, 0x0a, 0x0a, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x46,
- 0x75, 0x6c, 0x6c, 0x12, 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70,
- 0x61, 0x63, 0x6b, 0x46, 0x75, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a,
- 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x46, 0x75,
- 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02,
- 0x08, 0x01, 0x12, 0x4b, 0x0a, 0x0a, 0x4d, 0x69, 0x64, 0x78, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b,
- 0x12, 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x4d, 0x69, 0x64, 0x78, 0x52, 0x65,
- 0x70, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x67, 0x69,
- 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x4d, 0x69, 0x64, 0x78, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x52,
- 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12,
- 0x57, 0x0a, 0x0e, 0x47, 0x61, 0x72, 0x62, 0x61, 0x67, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63,
- 0x74, 0x12, 0x1d, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x61, 0x72, 0x62, 0x61,
- 0x67, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
- 0x1a, 0x1e, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x61, 0x72, 0x62, 0x61, 0x67,
- 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
- 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x5d, 0x0a, 0x10, 0x57, 0x72, 0x69, 0x74,
- 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1f, 0x2e, 0x67,
+ 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02,
+ 0x08, 0x02, 0x12, 0x60, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x49, 0x6e, 0x63, 0x72,
+ 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x6c, 0x12, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
+ 0x2e, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x49, 0x6e, 0x63, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74,
+ 0x61, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61,
+ 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x49, 0x6e, 0x63, 0x72, 0x65, 0x6d, 0x65,
+ 0x6e, 0x74, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97,
+ 0x28, 0x02, 0x08, 0x01, 0x12, 0x4b, 0x0a, 0x0a, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x46, 0x75,
+ 0x6c, 0x6c, 0x12, 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x61,
+ 0x63, 0x6b, 0x46, 0x75, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e,
+ 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x46, 0x75, 0x6c,
+ 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08,
+ 0x01, 0x12, 0x4b, 0x0a, 0x0a, 0x4d, 0x69, 0x64, 0x78, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x12,
+ 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x4d, 0x69, 0x64, 0x78, 0x52, 0x65, 0x70,
+ 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x67, 0x69, 0x74,
+ 0x61, 0x6c, 0x79, 0x2e, 0x4d, 0x69, 0x64, 0x78, 0x52, 0x65, 0x70, 0x61, 0x63, 0x6b, 0x52, 0x65,
+ 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x57,
+ 0x0a, 0x0e, 0x47, 0x61, 0x72, 0x62, 0x61, 0x67, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74,
+ 0x12, 0x1d, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x61, 0x72, 0x62, 0x61, 0x67,
+ 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x1e, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x61, 0x72, 0x62, 0x61, 0x67, 0x65,
+ 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
+ 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x5d, 0x0a, 0x10, 0x57, 0x72, 0x69, 0x74, 0x65,
+ 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1f, 0x2e, 0x67, 0x69,
+ 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74,
+ 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x67,
0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69,
- 0x74, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e,
- 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x43, 0x6f, 0x6d, 0x6d,
- 0x69, 0x74, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
- 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x57, 0x0a, 0x0e, 0x52, 0x65, 0x70, 0x6f, 0x73,
- 0x69, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x1d, 0x2e, 0x67, 0x69, 0x74, 0x61,
- 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x69, 0x7a,
- 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
+ 0x74, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06,
+ 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x57, 0x0a, 0x0e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69,
+ 0x74, 0x6f, 0x72, 0x79, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x1d, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
0x79, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x69, 0x7a, 0x65,
- 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02,
- 0x12, 0x63, 0x0a, 0x12, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x47, 0x69, 0x74, 0x61, 0x74, 0x74, 0x72,
- 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
- 0x41, 0x70, 0x70, 0x6c, 0x79, 0x47, 0x69, 0x74, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74,
- 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x67, 0x69, 0x74, 0x61,
- 0x6c, 0x79, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x47, 0x69, 0x74, 0x61, 0x74, 0x74, 0x72, 0x69,
- 0x62, 0x75, 0x74, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa,
- 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x4e, 0x0a, 0x0b, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65,
- 0x6d, 0x6f, 0x74, 0x65, 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x65,
- 0x74, 0x63, 0x68, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
- 0x1a, 0x1b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52,
- 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa,
- 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x5d, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52,
- 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
+ 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x69, 0x7a, 0x65, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12,
+ 0x63, 0x0a, 0x12, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x47, 0x69, 0x74, 0x61, 0x74, 0x74, 0x72, 0x69,
+ 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x41,
+ 0x70, 0x70, 0x6c, 0x79, 0x47, 0x69, 0x74, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
+ 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
+ 0x79, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x47, 0x69, 0x74, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62,
+ 0x75, 0x74, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97,
+ 0x28, 0x02, 0x08, 0x01, 0x12, 0x4e, 0x0a, 0x0b, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x6d,
+ 0x6f, 0x74, 0x65, 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x65, 0x74,
+ 0x63, 0x68, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x1b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65,
+ 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97,
+ 0x28, 0x02, 0x08, 0x01, 0x12, 0x5d, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65,
+ 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
+ 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f,
+ 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61,
0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74,
- 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74,
- 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69,
- 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97,
- 0x28, 0x02, 0x08, 0x01, 0x12, 0x4d, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x41, 0x72, 0x63, 0x68, 0x69,
- 0x76, 0x65, 0x12, 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x41,
- 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e,
- 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76,
- 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08,
- 0x02, 0x30, 0x01, 0x12, 0x5d, 0x0a, 0x10, 0x48, 0x61, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x42,
- 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x12, 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
+ 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28,
+ 0x02, 0x08, 0x01, 0x12, 0x4d, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76,
+ 0x65, 0x12, 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x72,
+ 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x67,
+ 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02,
+ 0x30, 0x01, 0x12, 0x5d, 0x0a, 0x10, 0x48, 0x61, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x42, 0x72,
+ 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x12, 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
+ 0x48, 0x61, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
0x2e, 0x48, 0x61, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65,
- 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
- 0x79, 0x2e, 0x48, 0x61, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68,
- 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02,
- 0x08, 0x02, 0x12, 0x60, 0x0a, 0x11, 0x46, 0x65, 0x74, 0x63, 0x68, 0x53, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
- 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x72, 0x61, 0x6e,
- 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61,
- 0x6c, 0x79, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x72,
- 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97,
- 0x28, 0x02, 0x08, 0x01, 0x12, 0x39, 0x0a, 0x04, 0x46, 0x73, 0x63, 0x6b, 0x12, 0x13, 0x2e, 0x67,
- 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x73, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x1a, 0x14, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x73, 0x63, 0x6b, 0x52,
- 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12,
- 0x45, 0x0a, 0x08, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x66, 0x12, 0x17, 0x2e, 0x67, 0x69,
- 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, 0x71,
- 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x57, 0x72,
- 0x69, 0x74, 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06,
- 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x54, 0x0a, 0x0d, 0x46, 0x69, 0x6e, 0x64, 0x4d, 0x65,
- 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x12, 0x1c, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
- 0x2e, 0x46, 0x69, 0x6e, 0x64, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46,
- 0x69, 0x6e, 0x64, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x52, 0x65, 0x73, 0x70,
- 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x4b, 0x0a, 0x0a,
- 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x46, 0x6f, 0x72, 0x6b, 0x12, 0x19, 0x2e, 0x67, 0x69, 0x74,
- 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43,
- 0x72, 0x65, 0x61, 0x74, 0x65, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x72, 0x0a, 0x17, 0x43, 0x72, 0x65,
+ 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08,
+ 0x02, 0x12, 0x60, 0x0a, 0x11, 0x46, 0x65, 0x74, 0x63, 0x68, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
+ 0x46, 0x65, 0x74, 0x63, 0x68, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63,
+ 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
+ 0x79, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x72, 0x61,
+ 0x6e, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28,
+ 0x02, 0x08, 0x01, 0x12, 0x39, 0x0a, 0x04, 0x46, 0x73, 0x63, 0x6b, 0x12, 0x13, 0x2e, 0x67, 0x69,
+ 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x73, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x1a, 0x14, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x73, 0x63, 0x6b, 0x52, 0x65,
+ 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x45,
+ 0x0a, 0x08, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x66, 0x12, 0x17, 0x2e, 0x67, 0x69, 0x74,
+ 0x61, 0x6c, 0x79, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, 0x71, 0x75,
+ 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x57, 0x72, 0x69,
+ 0x74, 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa,
+ 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x54, 0x0a, 0x0d, 0x46, 0x69, 0x6e, 0x64, 0x4d, 0x65, 0x72,
+ 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x12, 0x1c, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
+ 0x46, 0x69, 0x6e, 0x64, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x69,
+ 0x6e, 0x64, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x4b, 0x0a, 0x0a, 0x43,
+ 0x72, 0x65, 0x61, 0x74, 0x65, 0x46, 0x6f, 0x72, 0x6b, 0x12, 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61,
+ 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72,
+ 0x65, 0x61, 0x74, 0x65, 0x46, 0x6f, 0x72, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
+ 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x72, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61,
+ 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d,
+ 0x55, 0x52, 0x4c, 0x12, 0x26, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65,
0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f,
- 0x6d, 0x55, 0x52, 0x4c, 0x12, 0x26, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72,
- 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72,
- 0x6f, 0x6d, 0x55, 0x52, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x67,
- 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f,
- 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d, 0x55, 0x52, 0x4c, 0x52, 0x65, 0x73,
- 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x53, 0x0a,
- 0x0c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x1b, 0x2e,
- 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e,
- 0x64, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x67, 0x69, 0x74,
- 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65,
- 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02,
- 0x30, 0x01, 0x12, 0x76, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64,
- 0x6c, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x65, 0x66, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x26, 0x2e,
- 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e,
- 0x64, 0x6c, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x65, 0x66, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43,
- 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x52,
- 0x65, 0x66, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06,
- 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x28, 0x01, 0x30, 0x01, 0x12, 0x50, 0x0a, 0x0b, 0x46, 0x65,
- 0x74, 0x63, 0x68, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61,
- 0x6c, 0x79, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46,
- 0x65, 0x74, 0x63, 0x68, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x28, 0x01, 0x12, 0x7d, 0x0a, 0x1a,
- 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79,
- 0x46, 0x72, 0x6f, 0x6d, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x29, 0x2e, 0x67, 0x69, 0x74,
- 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69,
- 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43,
+ 0x6d, 0x55, 0x52, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x67, 0x69,
+ 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73,
+ 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d, 0x55, 0x52, 0x4c, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x53, 0x0a, 0x0c,
+ 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x1b, 0x2e, 0x67,
+ 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64,
+ 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x67, 0x69, 0x74, 0x61,
+ 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30,
+ 0x01, 0x12, 0x76, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64, 0x6c,
+ 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x65, 0x66, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x26, 0x2e, 0x67,
+ 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64,
+ 0x6c, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x65, 0x66, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72,
+ 0x65, 0x61, 0x74, 0x65, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x65,
+ 0x66, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa,
+ 0x97, 0x28, 0x02, 0x08, 0x02, 0x28, 0x01, 0x30, 0x01, 0x12, 0x50, 0x0a, 0x0b, 0x46, 0x65, 0x74,
+ 0x63, 0x68, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
+ 0x79, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x65,
+ 0x74, 0x63, 0x68, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x28, 0x01, 0x12, 0x7d, 0x0a, 0x1a, 0x43,
0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46,
- 0x72, 0x6f, 0x6d, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x28, 0x01, 0x12, 0x4a, 0x0a, 0x09, 0x47,
- 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
- 0x79, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x43,
- 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa,
- 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x4e, 0x0a, 0x0b, 0x46, 0x69, 0x6e, 0x64, 0x4c,
- 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
- 0x46, 0x69, 0x6e, 0x64, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x69, 0x6e, 0x64,
- 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
- 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x62, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x49, 0x6e,
- 0x66, 0x6f, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x20, 0x2e, 0x67,
- 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x74, 0x74,
- 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21,
- 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x41,
- 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x60, 0x0a, 0x11, 0x43,
- 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d,
- 0x12, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c,
- 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x61, 0x6c, 0x63,
- 0x75, 0x6c, 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d, 0x52, 0x65, 0x73,
- 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x42, 0x0a,
- 0x07, 0x43, 0x6c, 0x65, 0x61, 0x6e, 0x75, 0x70, 0x12, 0x16, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
- 0x79, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x6e, 0x75, 0x70, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
- 0x1a, 0x17, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x6e, 0x75,
- 0x70, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08,
- 0x01, 0x12, 0x50, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74,
- 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x6e, 0x61,
- 0x70, 0x73, 0x68, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x67,
- 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f,
- 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08,
- 0x02, 0x30, 0x01, 0x12, 0x81, 0x01, 0x0a, 0x1c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65,
- 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d, 0x53, 0x6e, 0x61, 0x70,
- 0x73, 0x68, 0x6f, 0x74, 0x12, 0x2b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72,
+ 0x72, 0x6f, 0x6d, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x29, 0x2e, 0x67, 0x69, 0x74, 0x61,
+ 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74,
+ 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72,
0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72,
- 0x6f, 0x6d, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x1a, 0x2c, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74,
- 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d, 0x53,
- 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
- 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x56, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x52, 0x61,
- 0x77, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x1c, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
- 0x79, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x77, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
- 0x47, 0x65, 0x74, 0x52, 0x61, 0x77, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x52, 0x65, 0x73,
+ 0x6f, 0x6d, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
+ 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x28, 0x01, 0x12, 0x4a, 0x0a, 0x09, 0x47, 0x65,
+ 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x18, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
+ 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
+ 0x74, 0x1a, 0x19, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f,
+ 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97,
+ 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x4e, 0x0a, 0x0b, 0x46, 0x69, 0x6e, 0x64, 0x4c, 0x69,
+ 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46,
+ 0x69, 0x6e, 0x64, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
+ 0x74, 0x1a, 0x1b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x46, 0x69, 0x6e, 0x64, 0x4c,
+ 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06,
+ 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x62, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66,
+ 0x6f, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x20, 0x2e, 0x67, 0x69,
+ 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x74, 0x74, 0x72,
+ 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e,
+ 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x74,
+ 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
+ 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x60, 0x0a, 0x11, 0x43, 0x61,
+ 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d, 0x12,
+ 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61,
+ 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
+ 0x74, 0x1a, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75,
+ 0x6c, 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x42, 0x0a, 0x07,
+ 0x43, 0x6c, 0x65, 0x61, 0x6e, 0x75, 0x70, 0x12, 0x16, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
+ 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x6e, 0x75, 0x70, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x17, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x6e, 0x75, 0x70,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01,
+ 0x12, 0x50, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x12,
+ 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70,
+ 0x73, 0x68, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x67, 0x69,
+ 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02,
+ 0x30, 0x01, 0x12, 0x81, 0x01, 0x0a, 0x1c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70,
+ 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d, 0x53, 0x6e, 0x61, 0x70, 0x73,
+ 0x68, 0x6f, 0x74, 0x12, 0x2b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65,
+ 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f,
+ 0x6d, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x1a, 0x2c, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65,
+ 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x46, 0x72, 0x6f, 0x6d, 0x53, 0x6e,
+ 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06,
+ 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x56, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x52, 0x61, 0x77,
+ 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x1c, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
+ 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x77, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47,
+ 0x65, 0x74, 0x52, 0x61, 0x77, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x6b,
+ 0x0a, 0x14, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x43,
+ 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x23, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
+ 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x43, 0x6f, 0x6e,
+ 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x67, 0x69,
+ 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73,
+ 0x42, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x62, 0x0a, 0x11, 0x53,
+ 0x65, 0x61, 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65,
+ 0x12, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68,
+ 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65,
+ 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x53, 0x65, 0x61, 0x72,
+ 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12,
- 0x6b, 0x0a, 0x14, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79,
- 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x23, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
- 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x43, 0x6f,
- 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x67,
- 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65,
- 0x73, 0x42, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x62, 0x0a, 0x11,
- 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x4e, 0x61, 0x6d,
- 0x65, 0x12, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63,
- 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75,
- 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x53, 0x65, 0x61,
- 0x72, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65,
- 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01,
- 0x12, 0x65, 0x0a, 0x12, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f,
- 0x6d, 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x12, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
- 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x48, 0x6f, 0x6f,
- 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x67, 0x69, 0x74, 0x61,
- 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d,
- 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa,
- 0x97, 0x28, 0x02, 0x08, 0x01, 0x28, 0x01, 0x12, 0x62, 0x0a, 0x11, 0x42, 0x61, 0x63, 0x6b, 0x75,
- 0x70, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x12, 0x20, 0x2e, 0x67,
- 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x42, 0x61, 0x63, 0x6b, 0x75, 0x70, 0x43, 0x75, 0x73, 0x74,
- 0x6f, 0x6d, 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21,
- 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x42, 0x61, 0x63, 0x6b, 0x75, 0x70, 0x43, 0x75,
- 0x73, 0x74, 0x6f, 0x6d, 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x6f, 0x0a, 0x16, 0x47,
- 0x65, 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72,
- 0x79, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x25, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47,
- 0x65, 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72,
- 0x79, 0x53, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x67,
- 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x44,
- 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x73, 0x70,
- 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x5d, 0x0a, 0x10,
- 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79,
- 0x12, 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65,
- 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76,
- 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x5d, 0x0a, 0x10, 0x52,
- 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12,
- 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x52,
+ 0x65, 0x0a, 0x12, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d,
+ 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x12, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52,
+ 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x48, 0x6f, 0x6f, 0x6b,
+ 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
+ 0x79, 0x2e, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x48,
+ 0x6f, 0x6f, 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97,
+ 0x28, 0x02, 0x08, 0x01, 0x28, 0x01, 0x12, 0x62, 0x0a, 0x11, 0x42, 0x61, 0x63, 0x6b, 0x75, 0x70,
+ 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x12, 0x20, 0x2e, 0x67, 0x69,
+ 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x42, 0x61, 0x63, 0x6b, 0x75, 0x70, 0x43, 0x75, 0x73, 0x74, 0x6f,
+ 0x6d, 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e,
+ 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x42, 0x61, 0x63, 0x6b, 0x75, 0x70, 0x43, 0x75, 0x73,
+ 0x74, 0x6f, 0x6d, 0x48, 0x6f, 0x6f, 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
+ 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x30, 0x01, 0x12, 0x6f, 0x0a, 0x16, 0x47, 0x65,
+ 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79,
+ 0x53, 0x69, 0x7a, 0x65, 0x12, 0x25, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65,
+ 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79,
+ 0x53, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x67, 0x69,
+ 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x44, 0x69,
+ 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x53, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x02, 0x12, 0x5d, 0x0a, 0x10, 0x52,
+ 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12,
+ 0x1f, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x52,
0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
- 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65,
+ 0x1a, 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65,
0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x66, 0x0a, 0x13, 0x52, 0x65,
+ 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x5d, 0x0a, 0x10, 0x52, 0x65,
+ 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1f,
+ 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65,
+ 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x20, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x52,
+ 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x66, 0x0a, 0x13, 0x52, 0x65, 0x70,
+ 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79,
+ 0x12, 0x22, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63,
+ 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65,
0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72,
- 0x79, 0x12, 0x22, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x69,
- 0x63, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x52,
- 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f,
- 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02,
- 0x08, 0x01, 0x12, 0x63, 0x0a, 0x12, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x65, 0x52, 0x65,
- 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c,
- 0x79, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69,
- 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x67, 0x69,
- 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x70,
- 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
- 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x4e, 0x0a, 0x0b, 0x53, 0x65, 0x74, 0x46, 0x75,
- 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e,
- 0x53, 0x65, 0x74, 0x46, 0x75, 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x53, 0x65, 0x74, 0x46,
- 0x75, 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
- 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x42, 0x34, 0x5a, 0x32, 0x67, 0x69, 0x74, 0x6c, 0x61,
- 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x2d, 0x6f, 0x72, 0x67,
- 0x2f, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2f, 0x76, 0x31, 0x34, 0x2f, 0x70, 0x72, 0x6f, 0x74,
- 0x6f, 0x2f, 0x67, 0x6f, 0x2f, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x70, 0x62, 0x62, 0x06, 0x70,
- 0x72, 0x6f, 0x74, 0x6f, 0x33,
+ 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08,
+ 0x01, 0x12, 0x63, 0x0a, 0x12, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x70,
+ 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x21, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79,
+ 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74,
+ 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x67, 0x69, 0x74,
+ 0x61, 0x6c, 0x79, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x70, 0x6f,
+ 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x06,
+ 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x72, 0x0a, 0x17, 0x50, 0x72, 0x75, 0x6e, 0x65, 0x55,
+ 0x6e, 0x72, 0x65, 0x61, 0x63, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74,
+ 0x73, 0x12, 0x26, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x50, 0x72, 0x75, 0x6e, 0x65,
+ 0x55, 0x6e, 0x72, 0x65, 0x61, 0x63, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63,
+ 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x67, 0x69, 0x74, 0x61,
+ 0x6c, 0x79, 0x2e, 0x50, 0x72, 0x75, 0x6e, 0x65, 0x55, 0x6e, 0x72, 0x65, 0x61, 0x63, 0x68, 0x61,
+ 0x62, 0x6c, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
+ 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x12, 0x4e, 0x0a, 0x0b, 0x53, 0x65,
+ 0x74, 0x46, 0x75, 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x2e, 0x67, 0x69, 0x74, 0x61,
+ 0x6c, 0x79, 0x2e, 0x53, 0x65, 0x74, 0x46, 0x75, 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2e, 0x53,
+ 0x65, 0x74, 0x46, 0x75, 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
+ 0x73, 0x65, 0x22, 0x06, 0xfa, 0x97, 0x28, 0x02, 0x08, 0x01, 0x42, 0x34, 0x5a, 0x32, 0x67, 0x69,
+ 0x74, 0x6c, 0x61, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x2d,
+ 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2f, 0x76, 0x31, 0x34, 0x2f, 0x70,
+ 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x2f, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x70, 0x62,
+ 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@@ -5158,7 +5262,7 @@ func file_repository_service_proto_rawDescGZIP() []byte {
}
var file_repository_service_proto_enumTypes = make([]protoimpl.EnumInfo, 3)
-var file_repository_service_proto_msgTypes = make([]protoimpl.MessageInfo, 82)
+var file_repository_service_proto_msgTypes = make([]protoimpl.MessageInfo, 84)
var file_repository_service_proto_goTypes = []interface{}{
(WriteCommitGraphRequest_SplitStrategy)(0), // 0: gitaly.WriteCommitGraphRequest.SplitStrategy
(GetArchiveRequest_Format)(0), // 1: gitaly.GetArchiveRequest.Format
@@ -5242,145 +5346,150 @@ var file_repository_service_proto_goTypes = []interface{}{
(*ReplicateRepositoryResponse)(nil), // 79: gitaly.ReplicateRepositoryResponse
(*OptimizeRepositoryRequest)(nil), // 80: gitaly.OptimizeRepositoryRequest
(*OptimizeRepositoryResponse)(nil), // 81: gitaly.OptimizeRepositoryResponse
- (*SetFullPathRequest)(nil), // 82: gitaly.SetFullPathRequest
- (*SetFullPathResponse)(nil), // 83: gitaly.SetFullPathResponse
- (*GetRawChangesResponse_RawChange)(nil), // 84: gitaly.GetRawChangesResponse.RawChange
- (*Repository)(nil), // 85: gitaly.Repository
+ (*PruneUnreachableObjectsRequest)(nil), // 82: gitaly.PruneUnreachableObjectsRequest
+ (*PruneUnreachableObjectsResponse)(nil), // 83: gitaly.PruneUnreachableObjectsResponse
+ (*SetFullPathRequest)(nil), // 84: gitaly.SetFullPathRequest
+ (*SetFullPathResponse)(nil), // 85: gitaly.SetFullPathResponse
+ (*GetRawChangesResponse_RawChange)(nil), // 86: gitaly.GetRawChangesResponse.RawChange
+ (*Repository)(nil), // 87: gitaly.Repository
}
var file_repository_service_proto_depIdxs = []int32{
- 85, // 0: gitaly.RepositoryExistsRequest.repository:type_name -> gitaly.Repository
- 85, // 1: gitaly.RepackIncrementalRequest.repository:type_name -> gitaly.Repository
- 85, // 2: gitaly.RepackFullRequest.repository:type_name -> gitaly.Repository
- 85, // 3: gitaly.MidxRepackRequest.repository:type_name -> gitaly.Repository
- 85, // 4: gitaly.GarbageCollectRequest.repository:type_name -> gitaly.Repository
- 85, // 5: gitaly.WriteCommitGraphRequest.repository:type_name -> gitaly.Repository
+ 87, // 0: gitaly.RepositoryExistsRequest.repository:type_name -> gitaly.Repository
+ 87, // 1: gitaly.RepackIncrementalRequest.repository:type_name -> gitaly.Repository
+ 87, // 2: gitaly.RepackFullRequest.repository:type_name -> gitaly.Repository
+ 87, // 3: gitaly.MidxRepackRequest.repository:type_name -> gitaly.Repository
+ 87, // 4: gitaly.GarbageCollectRequest.repository:type_name -> gitaly.Repository
+ 87, // 5: gitaly.WriteCommitGraphRequest.repository:type_name -> gitaly.Repository
0, // 6: gitaly.WriteCommitGraphRequest.splitStrategy:type_name -> gitaly.WriteCommitGraphRequest.SplitStrategy
- 85, // 7: gitaly.CleanupRequest.repository:type_name -> gitaly.Repository
- 85, // 8: gitaly.RepositorySizeRequest.repository:type_name -> gitaly.Repository
- 85, // 9: gitaly.ApplyGitattributesRequest.repository:type_name -> gitaly.Repository
- 85, // 10: gitaly.FetchBundleRequest.repository:type_name -> gitaly.Repository
- 85, // 11: gitaly.FetchRemoteRequest.repository:type_name -> gitaly.Repository
+ 87, // 7: gitaly.CleanupRequest.repository:type_name -> gitaly.Repository
+ 87, // 8: gitaly.RepositorySizeRequest.repository:type_name -> gitaly.Repository
+ 87, // 9: gitaly.ApplyGitattributesRequest.repository:type_name -> gitaly.Repository
+ 87, // 10: gitaly.FetchBundleRequest.repository:type_name -> gitaly.Repository
+ 87, // 11: gitaly.FetchRemoteRequest.repository:type_name -> gitaly.Repository
71, // 12: gitaly.FetchRemoteRequest.remote_params:type_name -> gitaly.Remote
- 85, // 13: gitaly.CreateRepositoryRequest.repository:type_name -> gitaly.Repository
- 85, // 14: gitaly.GetArchiveRequest.repository:type_name -> gitaly.Repository
+ 87, // 13: gitaly.CreateRepositoryRequest.repository:type_name -> gitaly.Repository
+ 87, // 14: gitaly.GetArchiveRequest.repository:type_name -> gitaly.Repository
1, // 15: gitaly.GetArchiveRequest.format:type_name -> gitaly.GetArchiveRequest.Format
- 85, // 16: gitaly.HasLocalBranchesRequest.repository:type_name -> gitaly.Repository
- 85, // 17: gitaly.FetchSourceBranchRequest.repository:type_name -> gitaly.Repository
- 85, // 18: gitaly.FetchSourceBranchRequest.source_repository:type_name -> gitaly.Repository
- 85, // 19: gitaly.FsckRequest.repository:type_name -> gitaly.Repository
- 85, // 20: gitaly.WriteRefRequest.repository:type_name -> gitaly.Repository
- 85, // 21: gitaly.FindMergeBaseRequest.repository:type_name -> gitaly.Repository
- 85, // 22: gitaly.CreateForkRequest.repository:type_name -> gitaly.Repository
- 85, // 23: gitaly.CreateForkRequest.source_repository:type_name -> gitaly.Repository
- 85, // 24: gitaly.CreateRepositoryFromURLRequest.repository:type_name -> gitaly.Repository
- 85, // 25: gitaly.CreateBundleRequest.repository:type_name -> gitaly.Repository
- 85, // 26: gitaly.CreateBundleFromRefListRequest.repository:type_name -> gitaly.Repository
- 85, // 27: gitaly.GetConfigRequest.repository:type_name -> gitaly.Repository
- 85, // 28: gitaly.RestoreCustomHooksRequest.repository:type_name -> gitaly.Repository
- 85, // 29: gitaly.BackupCustomHooksRequest.repository:type_name -> gitaly.Repository
- 85, // 30: gitaly.CreateRepositoryFromBundleRequest.repository:type_name -> gitaly.Repository
- 85, // 31: gitaly.FindLicenseRequest.repository:type_name -> gitaly.Repository
- 85, // 32: gitaly.GetInfoAttributesRequest.repository:type_name -> gitaly.Repository
- 85, // 33: gitaly.CalculateChecksumRequest.repository:type_name -> gitaly.Repository
- 85, // 34: gitaly.GetSnapshotRequest.repository:type_name -> gitaly.Repository
- 85, // 35: gitaly.CreateRepositoryFromSnapshotRequest.repository:type_name -> gitaly.Repository
- 85, // 36: gitaly.GetRawChangesRequest.repository:type_name -> gitaly.Repository
- 84, // 37: gitaly.GetRawChangesResponse.raw_changes:type_name -> gitaly.GetRawChangesResponse.RawChange
- 85, // 38: gitaly.SearchFilesByNameRequest.repository:type_name -> gitaly.Repository
- 85, // 39: gitaly.SearchFilesByContentRequest.repository:type_name -> gitaly.Repository
- 85, // 40: gitaly.GetObjectDirectorySizeRequest.repository:type_name -> gitaly.Repository
- 85, // 41: gitaly.RemoveRepositoryRequest.repository:type_name -> gitaly.Repository
- 85, // 42: gitaly.RenameRepositoryRequest.repository:type_name -> gitaly.Repository
- 85, // 43: gitaly.ReplicateRepositoryRequest.repository:type_name -> gitaly.Repository
- 85, // 44: gitaly.ReplicateRepositoryRequest.source:type_name -> gitaly.Repository
- 85, // 45: gitaly.OptimizeRepositoryRequest.repository:type_name -> gitaly.Repository
- 85, // 46: gitaly.SetFullPathRequest.repository:type_name -> gitaly.Repository
- 2, // 47: gitaly.GetRawChangesResponse.RawChange.operation:type_name -> gitaly.GetRawChangesResponse.RawChange.Operation
- 3, // 48: gitaly.RepositoryService.RepositoryExists:input_type -> gitaly.RepositoryExistsRequest
- 5, // 49: gitaly.RepositoryService.RepackIncremental:input_type -> gitaly.RepackIncrementalRequest
- 7, // 50: gitaly.RepositoryService.RepackFull:input_type -> gitaly.RepackFullRequest
- 9, // 51: gitaly.RepositoryService.MidxRepack:input_type -> gitaly.MidxRepackRequest
- 11, // 52: gitaly.RepositoryService.GarbageCollect:input_type -> gitaly.GarbageCollectRequest
- 13, // 53: gitaly.RepositoryService.WriteCommitGraph:input_type -> gitaly.WriteCommitGraphRequest
- 17, // 54: gitaly.RepositoryService.RepositorySize:input_type -> gitaly.RepositorySizeRequest
- 19, // 55: gitaly.RepositoryService.ApplyGitattributes:input_type -> gitaly.ApplyGitattributesRequest
- 23, // 56: gitaly.RepositoryService.FetchRemote:input_type -> gitaly.FetchRemoteRequest
- 25, // 57: gitaly.RepositoryService.CreateRepository:input_type -> gitaly.CreateRepositoryRequest
- 27, // 58: gitaly.RepositoryService.GetArchive:input_type -> gitaly.GetArchiveRequest
- 29, // 59: gitaly.RepositoryService.HasLocalBranches:input_type -> gitaly.HasLocalBranchesRequest
- 31, // 60: gitaly.RepositoryService.FetchSourceBranch:input_type -> gitaly.FetchSourceBranchRequest
- 33, // 61: gitaly.RepositoryService.Fsck:input_type -> gitaly.FsckRequest
- 35, // 62: gitaly.RepositoryService.WriteRef:input_type -> gitaly.WriteRefRequest
- 37, // 63: gitaly.RepositoryService.FindMergeBase:input_type -> gitaly.FindMergeBaseRequest
- 39, // 64: gitaly.RepositoryService.CreateFork:input_type -> gitaly.CreateForkRequest
- 41, // 65: gitaly.RepositoryService.CreateRepositoryFromURL:input_type -> gitaly.CreateRepositoryFromURLRequest
- 43, // 66: gitaly.RepositoryService.CreateBundle:input_type -> gitaly.CreateBundleRequest
- 45, // 67: gitaly.RepositoryService.CreateBundleFromRefList:input_type -> gitaly.CreateBundleFromRefListRequest
- 21, // 68: gitaly.RepositoryService.FetchBundle:input_type -> gitaly.FetchBundleRequest
- 53, // 69: gitaly.RepositoryService.CreateRepositoryFromBundle:input_type -> gitaly.CreateRepositoryFromBundleRequest
- 47, // 70: gitaly.RepositoryService.GetConfig:input_type -> gitaly.GetConfigRequest
- 55, // 71: gitaly.RepositoryService.FindLicense:input_type -> gitaly.FindLicenseRequest
- 57, // 72: gitaly.RepositoryService.GetInfoAttributes:input_type -> gitaly.GetInfoAttributesRequest
- 59, // 73: gitaly.RepositoryService.CalculateChecksum:input_type -> gitaly.CalculateChecksumRequest
- 15, // 74: gitaly.RepositoryService.Cleanup:input_type -> gitaly.CleanupRequest
- 61, // 75: gitaly.RepositoryService.GetSnapshot:input_type -> gitaly.GetSnapshotRequest
- 63, // 76: gitaly.RepositoryService.CreateRepositoryFromSnapshot:input_type -> gitaly.CreateRepositoryFromSnapshotRequest
- 65, // 77: gitaly.RepositoryService.GetRawChanges:input_type -> gitaly.GetRawChangesRequest
- 69, // 78: gitaly.RepositoryService.SearchFilesByContent:input_type -> gitaly.SearchFilesByContentRequest
- 67, // 79: gitaly.RepositoryService.SearchFilesByName:input_type -> gitaly.SearchFilesByNameRequest
- 49, // 80: gitaly.RepositoryService.RestoreCustomHooks:input_type -> gitaly.RestoreCustomHooksRequest
- 51, // 81: gitaly.RepositoryService.BackupCustomHooks:input_type -> gitaly.BackupCustomHooksRequest
- 72, // 82: gitaly.RepositoryService.GetObjectDirectorySize:input_type -> gitaly.GetObjectDirectorySizeRequest
- 74, // 83: gitaly.RepositoryService.RemoveRepository:input_type -> gitaly.RemoveRepositoryRequest
- 76, // 84: gitaly.RepositoryService.RenameRepository:input_type -> gitaly.RenameRepositoryRequest
- 78, // 85: gitaly.RepositoryService.ReplicateRepository:input_type -> gitaly.ReplicateRepositoryRequest
- 80, // 86: gitaly.RepositoryService.OptimizeRepository:input_type -> gitaly.OptimizeRepositoryRequest
- 82, // 87: gitaly.RepositoryService.SetFullPath:input_type -> gitaly.SetFullPathRequest
- 4, // 88: gitaly.RepositoryService.RepositoryExists:output_type -> gitaly.RepositoryExistsResponse
- 6, // 89: gitaly.RepositoryService.RepackIncremental:output_type -> gitaly.RepackIncrementalResponse
- 8, // 90: gitaly.RepositoryService.RepackFull:output_type -> gitaly.RepackFullResponse
- 10, // 91: gitaly.RepositoryService.MidxRepack:output_type -> gitaly.MidxRepackResponse
- 12, // 92: gitaly.RepositoryService.GarbageCollect:output_type -> gitaly.GarbageCollectResponse
- 14, // 93: gitaly.RepositoryService.WriteCommitGraph:output_type -> gitaly.WriteCommitGraphResponse
- 18, // 94: gitaly.RepositoryService.RepositorySize:output_type -> gitaly.RepositorySizeResponse
- 20, // 95: gitaly.RepositoryService.ApplyGitattributes:output_type -> gitaly.ApplyGitattributesResponse
- 24, // 96: gitaly.RepositoryService.FetchRemote:output_type -> gitaly.FetchRemoteResponse
- 26, // 97: gitaly.RepositoryService.CreateRepository:output_type -> gitaly.CreateRepositoryResponse
- 28, // 98: gitaly.RepositoryService.GetArchive:output_type -> gitaly.GetArchiveResponse
- 30, // 99: gitaly.RepositoryService.HasLocalBranches:output_type -> gitaly.HasLocalBranchesResponse
- 32, // 100: gitaly.RepositoryService.FetchSourceBranch:output_type -> gitaly.FetchSourceBranchResponse
- 34, // 101: gitaly.RepositoryService.Fsck:output_type -> gitaly.FsckResponse
- 36, // 102: gitaly.RepositoryService.WriteRef:output_type -> gitaly.WriteRefResponse
- 38, // 103: gitaly.RepositoryService.FindMergeBase:output_type -> gitaly.FindMergeBaseResponse
- 40, // 104: gitaly.RepositoryService.CreateFork:output_type -> gitaly.CreateForkResponse
- 42, // 105: gitaly.RepositoryService.CreateRepositoryFromURL:output_type -> gitaly.CreateRepositoryFromURLResponse
- 44, // 106: gitaly.RepositoryService.CreateBundle:output_type -> gitaly.CreateBundleResponse
- 46, // 107: gitaly.RepositoryService.CreateBundleFromRefList:output_type -> gitaly.CreateBundleFromRefListResponse
- 22, // 108: gitaly.RepositoryService.FetchBundle:output_type -> gitaly.FetchBundleResponse
- 54, // 109: gitaly.RepositoryService.CreateRepositoryFromBundle:output_type -> gitaly.CreateRepositoryFromBundleResponse
- 48, // 110: gitaly.RepositoryService.GetConfig:output_type -> gitaly.GetConfigResponse
- 56, // 111: gitaly.RepositoryService.FindLicense:output_type -> gitaly.FindLicenseResponse
- 58, // 112: gitaly.RepositoryService.GetInfoAttributes:output_type -> gitaly.GetInfoAttributesResponse
- 60, // 113: gitaly.RepositoryService.CalculateChecksum:output_type -> gitaly.CalculateChecksumResponse
- 16, // 114: gitaly.RepositoryService.Cleanup:output_type -> gitaly.CleanupResponse
- 62, // 115: gitaly.RepositoryService.GetSnapshot:output_type -> gitaly.GetSnapshotResponse
- 64, // 116: gitaly.RepositoryService.CreateRepositoryFromSnapshot:output_type -> gitaly.CreateRepositoryFromSnapshotResponse
- 66, // 117: gitaly.RepositoryService.GetRawChanges:output_type -> gitaly.GetRawChangesResponse
- 70, // 118: gitaly.RepositoryService.SearchFilesByContent:output_type -> gitaly.SearchFilesByContentResponse
- 68, // 119: gitaly.RepositoryService.SearchFilesByName:output_type -> gitaly.SearchFilesByNameResponse
- 50, // 120: gitaly.RepositoryService.RestoreCustomHooks:output_type -> gitaly.RestoreCustomHooksResponse
- 52, // 121: gitaly.RepositoryService.BackupCustomHooks:output_type -> gitaly.BackupCustomHooksResponse
- 73, // 122: gitaly.RepositoryService.GetObjectDirectorySize:output_type -> gitaly.GetObjectDirectorySizeResponse
- 75, // 123: gitaly.RepositoryService.RemoveRepository:output_type -> gitaly.RemoveRepositoryResponse
- 77, // 124: gitaly.RepositoryService.RenameRepository:output_type -> gitaly.RenameRepositoryResponse
- 79, // 125: gitaly.RepositoryService.ReplicateRepository:output_type -> gitaly.ReplicateRepositoryResponse
- 81, // 126: gitaly.RepositoryService.OptimizeRepository:output_type -> gitaly.OptimizeRepositoryResponse
- 83, // 127: gitaly.RepositoryService.SetFullPath:output_type -> gitaly.SetFullPathResponse
- 88, // [88:128] is the sub-list for method output_type
- 48, // [48:88] is the sub-list for method input_type
- 48, // [48:48] is the sub-list for extension type_name
- 48, // [48:48] is the sub-list for extension extendee
- 0, // [0:48] is the sub-list for field type_name
+ 87, // 16: gitaly.HasLocalBranchesRequest.repository:type_name -> gitaly.Repository
+ 87, // 17: gitaly.FetchSourceBranchRequest.repository:type_name -> gitaly.Repository
+ 87, // 18: gitaly.FetchSourceBranchRequest.source_repository:type_name -> gitaly.Repository
+ 87, // 19: gitaly.FsckRequest.repository:type_name -> gitaly.Repository
+ 87, // 20: gitaly.WriteRefRequest.repository:type_name -> gitaly.Repository
+ 87, // 21: gitaly.FindMergeBaseRequest.repository:type_name -> gitaly.Repository
+ 87, // 22: gitaly.CreateForkRequest.repository:type_name -> gitaly.Repository
+ 87, // 23: gitaly.CreateForkRequest.source_repository:type_name -> gitaly.Repository
+ 87, // 24: gitaly.CreateRepositoryFromURLRequest.repository:type_name -> gitaly.Repository
+ 87, // 25: gitaly.CreateBundleRequest.repository:type_name -> gitaly.Repository
+ 87, // 26: gitaly.CreateBundleFromRefListRequest.repository:type_name -> gitaly.Repository
+ 87, // 27: gitaly.GetConfigRequest.repository:type_name -> gitaly.Repository
+ 87, // 28: gitaly.RestoreCustomHooksRequest.repository:type_name -> gitaly.Repository
+ 87, // 29: gitaly.BackupCustomHooksRequest.repository:type_name -> gitaly.Repository
+ 87, // 30: gitaly.CreateRepositoryFromBundleRequest.repository:type_name -> gitaly.Repository
+ 87, // 31: gitaly.FindLicenseRequest.repository:type_name -> gitaly.Repository
+ 87, // 32: gitaly.GetInfoAttributesRequest.repository:type_name -> gitaly.Repository
+ 87, // 33: gitaly.CalculateChecksumRequest.repository:type_name -> gitaly.Repository
+ 87, // 34: gitaly.GetSnapshotRequest.repository:type_name -> gitaly.Repository
+ 87, // 35: gitaly.CreateRepositoryFromSnapshotRequest.repository:type_name -> gitaly.Repository
+ 87, // 36: gitaly.GetRawChangesRequest.repository:type_name -> gitaly.Repository
+ 86, // 37: gitaly.GetRawChangesResponse.raw_changes:type_name -> gitaly.GetRawChangesResponse.RawChange
+ 87, // 38: gitaly.SearchFilesByNameRequest.repository:type_name -> gitaly.Repository
+ 87, // 39: gitaly.SearchFilesByContentRequest.repository:type_name -> gitaly.Repository
+ 87, // 40: gitaly.GetObjectDirectorySizeRequest.repository:type_name -> gitaly.Repository
+ 87, // 41: gitaly.RemoveRepositoryRequest.repository:type_name -> gitaly.Repository
+ 87, // 42: gitaly.RenameRepositoryRequest.repository:type_name -> gitaly.Repository
+ 87, // 43: gitaly.ReplicateRepositoryRequest.repository:type_name -> gitaly.Repository
+ 87, // 44: gitaly.ReplicateRepositoryRequest.source:type_name -> gitaly.Repository
+ 87, // 45: gitaly.OptimizeRepositoryRequest.repository:type_name -> gitaly.Repository
+ 87, // 46: gitaly.PruneUnreachableObjectsRequest.repository:type_name -> gitaly.Repository
+ 87, // 47: gitaly.SetFullPathRequest.repository:type_name -> gitaly.Repository
+ 2, // 48: gitaly.GetRawChangesResponse.RawChange.operation:type_name -> gitaly.GetRawChangesResponse.RawChange.Operation
+ 3, // 49: gitaly.RepositoryService.RepositoryExists:input_type -> gitaly.RepositoryExistsRequest
+ 5, // 50: gitaly.RepositoryService.RepackIncremental:input_type -> gitaly.RepackIncrementalRequest
+ 7, // 51: gitaly.RepositoryService.RepackFull:input_type -> gitaly.RepackFullRequest
+ 9, // 52: gitaly.RepositoryService.MidxRepack:input_type -> gitaly.MidxRepackRequest
+ 11, // 53: gitaly.RepositoryService.GarbageCollect:input_type -> gitaly.GarbageCollectRequest
+ 13, // 54: gitaly.RepositoryService.WriteCommitGraph:input_type -> gitaly.WriteCommitGraphRequest
+ 17, // 55: gitaly.RepositoryService.RepositorySize:input_type -> gitaly.RepositorySizeRequest
+ 19, // 56: gitaly.RepositoryService.ApplyGitattributes:input_type -> gitaly.ApplyGitattributesRequest
+ 23, // 57: gitaly.RepositoryService.FetchRemote:input_type -> gitaly.FetchRemoteRequest
+ 25, // 58: gitaly.RepositoryService.CreateRepository:input_type -> gitaly.CreateRepositoryRequest
+ 27, // 59: gitaly.RepositoryService.GetArchive:input_type -> gitaly.GetArchiveRequest
+ 29, // 60: gitaly.RepositoryService.HasLocalBranches:input_type -> gitaly.HasLocalBranchesRequest
+ 31, // 61: gitaly.RepositoryService.FetchSourceBranch:input_type -> gitaly.FetchSourceBranchRequest
+ 33, // 62: gitaly.RepositoryService.Fsck:input_type -> gitaly.FsckRequest
+ 35, // 63: gitaly.RepositoryService.WriteRef:input_type -> gitaly.WriteRefRequest
+ 37, // 64: gitaly.RepositoryService.FindMergeBase:input_type -> gitaly.FindMergeBaseRequest
+ 39, // 65: gitaly.RepositoryService.CreateFork:input_type -> gitaly.CreateForkRequest
+ 41, // 66: gitaly.RepositoryService.CreateRepositoryFromURL:input_type -> gitaly.CreateRepositoryFromURLRequest
+ 43, // 67: gitaly.RepositoryService.CreateBundle:input_type -> gitaly.CreateBundleRequest
+ 45, // 68: gitaly.RepositoryService.CreateBundleFromRefList:input_type -> gitaly.CreateBundleFromRefListRequest
+ 21, // 69: gitaly.RepositoryService.FetchBundle:input_type -> gitaly.FetchBundleRequest
+ 53, // 70: gitaly.RepositoryService.CreateRepositoryFromBundle:input_type -> gitaly.CreateRepositoryFromBundleRequest
+ 47, // 71: gitaly.RepositoryService.GetConfig:input_type -> gitaly.GetConfigRequest
+ 55, // 72: gitaly.RepositoryService.FindLicense:input_type -> gitaly.FindLicenseRequest
+ 57, // 73: gitaly.RepositoryService.GetInfoAttributes:input_type -> gitaly.GetInfoAttributesRequest
+ 59, // 74: gitaly.RepositoryService.CalculateChecksum:input_type -> gitaly.CalculateChecksumRequest
+ 15, // 75: gitaly.RepositoryService.Cleanup:input_type -> gitaly.CleanupRequest
+ 61, // 76: gitaly.RepositoryService.GetSnapshot:input_type -> gitaly.GetSnapshotRequest
+ 63, // 77: gitaly.RepositoryService.CreateRepositoryFromSnapshot:input_type -> gitaly.CreateRepositoryFromSnapshotRequest
+ 65, // 78: gitaly.RepositoryService.GetRawChanges:input_type -> gitaly.GetRawChangesRequest
+ 69, // 79: gitaly.RepositoryService.SearchFilesByContent:input_type -> gitaly.SearchFilesByContentRequest
+ 67, // 80: gitaly.RepositoryService.SearchFilesByName:input_type -> gitaly.SearchFilesByNameRequest
+ 49, // 81: gitaly.RepositoryService.RestoreCustomHooks:input_type -> gitaly.RestoreCustomHooksRequest
+ 51, // 82: gitaly.RepositoryService.BackupCustomHooks:input_type -> gitaly.BackupCustomHooksRequest
+ 72, // 83: gitaly.RepositoryService.GetObjectDirectorySize:input_type -> gitaly.GetObjectDirectorySizeRequest
+ 74, // 84: gitaly.RepositoryService.RemoveRepository:input_type -> gitaly.RemoveRepositoryRequest
+ 76, // 85: gitaly.RepositoryService.RenameRepository:input_type -> gitaly.RenameRepositoryRequest
+ 78, // 86: gitaly.RepositoryService.ReplicateRepository:input_type -> gitaly.ReplicateRepositoryRequest
+ 80, // 87: gitaly.RepositoryService.OptimizeRepository:input_type -> gitaly.OptimizeRepositoryRequest
+ 82, // 88: gitaly.RepositoryService.PruneUnreachableObjects:input_type -> gitaly.PruneUnreachableObjectsRequest
+ 84, // 89: gitaly.RepositoryService.SetFullPath:input_type -> gitaly.SetFullPathRequest
+ 4, // 90: gitaly.RepositoryService.RepositoryExists:output_type -> gitaly.RepositoryExistsResponse
+ 6, // 91: gitaly.RepositoryService.RepackIncremental:output_type -> gitaly.RepackIncrementalResponse
+ 8, // 92: gitaly.RepositoryService.RepackFull:output_type -> gitaly.RepackFullResponse
+ 10, // 93: gitaly.RepositoryService.MidxRepack:output_type -> gitaly.MidxRepackResponse
+ 12, // 94: gitaly.RepositoryService.GarbageCollect:output_type -> gitaly.GarbageCollectResponse
+ 14, // 95: gitaly.RepositoryService.WriteCommitGraph:output_type -> gitaly.WriteCommitGraphResponse
+ 18, // 96: gitaly.RepositoryService.RepositorySize:output_type -> gitaly.RepositorySizeResponse
+ 20, // 97: gitaly.RepositoryService.ApplyGitattributes:output_type -> gitaly.ApplyGitattributesResponse
+ 24, // 98: gitaly.RepositoryService.FetchRemote:output_type -> gitaly.FetchRemoteResponse
+ 26, // 99: gitaly.RepositoryService.CreateRepository:output_type -> gitaly.CreateRepositoryResponse
+ 28, // 100: gitaly.RepositoryService.GetArchive:output_type -> gitaly.GetArchiveResponse
+ 30, // 101: gitaly.RepositoryService.HasLocalBranches:output_type -> gitaly.HasLocalBranchesResponse
+ 32, // 102: gitaly.RepositoryService.FetchSourceBranch:output_type -> gitaly.FetchSourceBranchResponse
+ 34, // 103: gitaly.RepositoryService.Fsck:output_type -> gitaly.FsckResponse
+ 36, // 104: gitaly.RepositoryService.WriteRef:output_type -> gitaly.WriteRefResponse
+ 38, // 105: gitaly.RepositoryService.FindMergeBase:output_type -> gitaly.FindMergeBaseResponse
+ 40, // 106: gitaly.RepositoryService.CreateFork:output_type -> gitaly.CreateForkResponse
+ 42, // 107: gitaly.RepositoryService.CreateRepositoryFromURL:output_type -> gitaly.CreateRepositoryFromURLResponse
+ 44, // 108: gitaly.RepositoryService.CreateBundle:output_type -> gitaly.CreateBundleResponse
+ 46, // 109: gitaly.RepositoryService.CreateBundleFromRefList:output_type -> gitaly.CreateBundleFromRefListResponse
+ 22, // 110: gitaly.RepositoryService.FetchBundle:output_type -> gitaly.FetchBundleResponse
+ 54, // 111: gitaly.RepositoryService.CreateRepositoryFromBundle:output_type -> gitaly.CreateRepositoryFromBundleResponse
+ 48, // 112: gitaly.RepositoryService.GetConfig:output_type -> gitaly.GetConfigResponse
+ 56, // 113: gitaly.RepositoryService.FindLicense:output_type -> gitaly.FindLicenseResponse
+ 58, // 114: gitaly.RepositoryService.GetInfoAttributes:output_type -> gitaly.GetInfoAttributesResponse
+ 60, // 115: gitaly.RepositoryService.CalculateChecksum:output_type -> gitaly.CalculateChecksumResponse
+ 16, // 116: gitaly.RepositoryService.Cleanup:output_type -> gitaly.CleanupResponse
+ 62, // 117: gitaly.RepositoryService.GetSnapshot:output_type -> gitaly.GetSnapshotResponse
+ 64, // 118: gitaly.RepositoryService.CreateRepositoryFromSnapshot:output_type -> gitaly.CreateRepositoryFromSnapshotResponse
+ 66, // 119: gitaly.RepositoryService.GetRawChanges:output_type -> gitaly.GetRawChangesResponse
+ 70, // 120: gitaly.RepositoryService.SearchFilesByContent:output_type -> gitaly.SearchFilesByContentResponse
+ 68, // 121: gitaly.RepositoryService.SearchFilesByName:output_type -> gitaly.SearchFilesByNameResponse
+ 50, // 122: gitaly.RepositoryService.RestoreCustomHooks:output_type -> gitaly.RestoreCustomHooksResponse
+ 52, // 123: gitaly.RepositoryService.BackupCustomHooks:output_type -> gitaly.BackupCustomHooksResponse
+ 73, // 124: gitaly.RepositoryService.GetObjectDirectorySize:output_type -> gitaly.GetObjectDirectorySizeResponse
+ 75, // 125: gitaly.RepositoryService.RemoveRepository:output_type -> gitaly.RemoveRepositoryResponse
+ 77, // 126: gitaly.RepositoryService.RenameRepository:output_type -> gitaly.RenameRepositoryResponse
+ 79, // 127: gitaly.RepositoryService.ReplicateRepository:output_type -> gitaly.ReplicateRepositoryResponse
+ 81, // 128: gitaly.RepositoryService.OptimizeRepository:output_type -> gitaly.OptimizeRepositoryResponse
+ 83, // 129: gitaly.RepositoryService.PruneUnreachableObjects:output_type -> gitaly.PruneUnreachableObjectsResponse
+ 85, // 130: gitaly.RepositoryService.SetFullPath:output_type -> gitaly.SetFullPathResponse
+ 90, // [90:131] is the sub-list for method output_type
+ 49, // [49:90] is the sub-list for method input_type
+ 49, // [49:49] is the sub-list for extension type_name
+ 49, // [49:49] is the sub-list for extension extendee
+ 0, // [0:49] is the sub-list for field type_name
}
func init() { file_repository_service_proto_init() }
@@ -6340,7 +6449,7 @@ func file_repository_service_proto_init() {
}
}
file_repository_service_proto_msgTypes[79].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*SetFullPathRequest); i {
+ switch v := v.(*PruneUnreachableObjectsRequest); i {
case 0:
return &v.state
case 1:
@@ -6352,7 +6461,7 @@ func file_repository_service_proto_init() {
}
}
file_repository_service_proto_msgTypes[80].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*SetFullPathResponse); i {
+ switch v := v.(*PruneUnreachableObjectsResponse); i {
case 0:
return &v.state
case 1:
@@ -6364,6 +6473,30 @@ func file_repository_service_proto_init() {
}
}
file_repository_service_proto_msgTypes[81].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*SetFullPathRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_repository_service_proto_msgTypes[82].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*SetFullPathResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_repository_service_proto_msgTypes[83].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetRawChangesResponse_RawChange); i {
case 0:
return &v.state
@@ -6382,7 +6515,7 @@ func file_repository_service_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_repository_service_proto_rawDesc,
NumEnums: 3,
- NumMessages: 82,
+ NumMessages: 84,
NumExtensions: 0,
NumServices: 1,
},
diff --git a/proto/go/gitalypb/repository-service_grpc.pb.go b/proto/go/gitalypb/repository-service_grpc.pb.go
index e50225974..871f667b1 100644
--- a/proto/go/gitalypb/repository-service_grpc.pb.go
+++ b/proto/go/gitalypb/repository-service_grpc.pb.go
@@ -72,6 +72,18 @@ type RepositoryServiceClient interface {
RenameRepository(ctx context.Context, in *RenameRepositoryRequest, opts ...grpc.CallOption) (*RenameRepositoryResponse, error)
ReplicateRepository(ctx context.Context, in *ReplicateRepositoryRequest, opts ...grpc.CallOption) (*ReplicateRepositoryResponse, error)
OptimizeRepository(ctx context.Context, in *OptimizeRepositoryRequest, opts ...grpc.CallOption) (*OptimizeRepositoryResponse, error)
+	// PruneUnreachableObjects will prune all objects which aren't reachable from
+ // the repository's current set of references. Because pruning can only
+ // happen for objects which aren't packed, you are required to first run
+ // OptimizeRepository to explode any unreachable objects into loose objects.
+ //
+ // Furthermore, this RPC call has a grace period of 30 minutes: any
+ // unreachable loose objects must not have been accessed or modified in the
+ // last 30 minutes. This is a hard requirement to avoid repository corruption.
+ //
+ // To make proper use of this RPC you thus need to call OptimizeRepository,
+ // wait 30 minutes, and then call PruneUnreachableObjects.
+ PruneUnreachableObjects(ctx context.Context, in *PruneUnreachableObjectsRequest, opts ...grpc.CallOption) (*PruneUnreachableObjectsResponse, error)
// SetFullPath writes the "gitlab.fullpath" configuration into the
// repository's gitconfig. This is mainly to help debugging purposes in case
// an admin inspects the repository's gitconfig such that he can easily see
@@ -742,6 +754,15 @@ func (c *repositoryServiceClient) OptimizeRepository(ctx context.Context, in *Op
return out, nil
}
+func (c *repositoryServiceClient) PruneUnreachableObjects(ctx context.Context, in *PruneUnreachableObjectsRequest, opts ...grpc.CallOption) (*PruneUnreachableObjectsResponse, error) {
+ out := new(PruneUnreachableObjectsResponse)
+ err := c.cc.Invoke(ctx, "/gitaly.RepositoryService/PruneUnreachableObjects", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
func (c *repositoryServiceClient) SetFullPath(ctx context.Context, in *SetFullPathRequest, opts ...grpc.CallOption) (*SetFullPathResponse, error) {
out := new(SetFullPathResponse)
err := c.cc.Invoke(ctx, "/gitaly.RepositoryService/SetFullPath", in, out, opts...)
@@ -809,6 +830,18 @@ type RepositoryServiceServer interface {
RenameRepository(context.Context, *RenameRepositoryRequest) (*RenameRepositoryResponse, error)
ReplicateRepository(context.Context, *ReplicateRepositoryRequest) (*ReplicateRepositoryResponse, error)
OptimizeRepository(context.Context, *OptimizeRepositoryRequest) (*OptimizeRepositoryResponse, error)
+	// PruneUnreachableObjects will prune all objects which aren't reachable from
+ // the repository's current set of references. Because pruning can only
+ // happen for objects which aren't packed, you are required to first run
+ // OptimizeRepository to explode any unreachable objects into loose objects.
+ //
+ // Furthermore, this RPC call has a grace period of 30 minutes: any
+ // unreachable loose objects must not have been accessed or modified in the
+ // last 30 minutes. This is a hard requirement to avoid repository corruption.
+ //
+ // To make proper use of this RPC you thus need to call OptimizeRepository,
+ // wait 30 minutes, and then call PruneUnreachableObjects.
+ PruneUnreachableObjects(context.Context, *PruneUnreachableObjectsRequest) (*PruneUnreachableObjectsResponse, error)
// SetFullPath writes the "gitlab.fullpath" configuration into the
// repository's gitconfig. This is mainly to help debugging purposes in case
// an admin inspects the repository's gitconfig such that he can easily see
@@ -938,6 +971,9 @@ func (UnimplementedRepositoryServiceServer) ReplicateRepository(context.Context,
func (UnimplementedRepositoryServiceServer) OptimizeRepository(context.Context, *OptimizeRepositoryRequest) (*OptimizeRepositoryResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method OptimizeRepository not implemented")
}
+func (UnimplementedRepositoryServiceServer) PruneUnreachableObjects(context.Context, *PruneUnreachableObjectsRequest) (*PruneUnreachableObjectsResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method PruneUnreachableObjects not implemented")
+}
func (UnimplementedRepositoryServiceServer) SetFullPath(context.Context, *SetFullPathRequest) (*SetFullPathResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method SetFullPath not implemented")
}
@@ -1715,6 +1751,24 @@ func _RepositoryService_OptimizeRepository_Handler(srv interface{}, ctx context.
return interceptor(ctx, in, info, handler)
}
+func _RepositoryService_PruneUnreachableObjects_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(PruneUnreachableObjectsRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(RepositoryServiceServer).PruneUnreachableObjects(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/gitaly.RepositoryService/PruneUnreachableObjects",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(RepositoryServiceServer).PruneUnreachableObjects(ctx, req.(*PruneUnreachableObjectsRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
func _RepositoryService_SetFullPath_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(SetFullPathRequest)
if err := dec(in); err != nil {
@@ -1845,6 +1899,10 @@ var RepositoryService_ServiceDesc = grpc.ServiceDesc{
Handler: _RepositoryService_OptimizeRepository_Handler,
},
{
+ MethodName: "PruneUnreachableObjects",
+ Handler: _RepositoryService_PruneUnreachableObjects_Handler,
+ },
+ {
MethodName: "SetFullPath",
Handler: _RepositoryService_SetFullPath_Handler,
},
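
The generated client above documents a two-step workflow: optimize first, wait out the 30-minute grace period, then prune. As a minimal illustration only (not part of this change), a caller-side sketch could look as follows, assuming an established *grpc.ClientConn and an existing repository; the function name and the sleep-based wait are hypothetical, and a real deployment would schedule the second call as a follow-up job instead.

// Sketch of the documented workflow: OptimizeRepository, honor the
// 30-minute grace period, then PruneUnreachableObjects.
package example

import (
	"context"
	"time"

	"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
	"google.golang.org/grpc"
)

func pruneRepository(ctx context.Context, conn *grpc.ClientConn, repo *gitalypb.Repository) error {
	client := gitalypb.NewRepositoryServiceClient(conn)

	// Repack the repository so that unreachable objects are exploded into
	// loose objects, which is a precondition for pruning them.
	if _, err := client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{
		Repository: repo,
	}); err != nil {
		return err
	}

	// Wait out the documented 30-minute grace period before pruning.
	time.Sleep(30 * time.Minute)

	_, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
		Repository: repo,
	})
	return err
}
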
diff --git a/proto/repository-service.proto b/proto/repository-service.proto
index a802cb46d..2a6c85c75 100644
--- a/proto/repository-service.proto
+++ b/proto/repository-service.proto
@@ -222,12 +222,30 @@ service RepositoryService {
op: MUTATOR
};
}
+
rpc OptimizeRepository(OptimizeRepositoryRequest) returns (OptimizeRepositoryResponse) {
option (op_type) = {
op: MUTATOR
};
}
+  // PruneUnreachableObjects will prune all objects which aren't reachable from
+ // the repository's current set of references. Because pruning can only
+ // happen for objects which aren't packed, you are required to first run
+ // OptimizeRepository to explode any unreachable objects into loose objects.
+ //
+ // Furthermore, this RPC call has a grace period of 30 minutes: any
+ // unreachable loose objects must not have been accessed or modified in the
+ // last 30 minutes. This is a hard requirement to avoid repository corruption.
+ //
+ // To make proper use of this RPC you thus need to call OptimizeRepository,
+ // wait 30 minutes, and then call PruneUnreachableObjects.
+ rpc PruneUnreachableObjects(PruneUnreachableObjectsRequest) returns (PruneUnreachableObjectsResponse) {
+ option (op_type) = {
+ op: MUTATOR
+ };
+ }
+
// SetFullPath writes the "gitlab.fullpath" configuration into the
// repository's gitconfig. This is mainly to help debugging purposes in case
// an admin inspects the repository's gitconfig such that he can easily see
@@ -721,6 +739,17 @@ message OptimizeRepositoryRequest {
message OptimizeRepositoryResponse{}
+// PruneUnreachableObjectsRequest is a request for the PruneUnreachableObjects
+// RPC call.
+message PruneUnreachableObjectsRequest {
+ Repository repository = 1 [(target_repository)=true];
+}
+
+// PruneUnreachableObjectsResponse is a response for the
+// PruneUnreachableObjects RPC call.
+message PruneUnreachableObjectsResponse {
+}
+
// SetFullPathRequest is a request for the SetFullPath RPC.
message SetFullPathRequest {
// Repository is the repository whose gitconfig should be written to.
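
Editorial note, not part of the diff: the comment on the new RPC describes a two-step maintenance flow: run OptimizeRepository so unreachable objects end up loose, wait out the 30-minute grace period, then prune. Below is a minimal, hypothetical client-side sketch of that flow, assuming the v14 gitalypb package, an already-established *grpc.ClientConn, and an in-process sleep standing in for however a real caller would schedule the second call.

// Hypothetical client sketch of the documented flow: housekeeping first,
// then pruning after the 30-minute grace period.
package main

import (
	"context"
	"time"

	"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb" // assumed v14 module path
	"google.golang.org/grpc"
)

func pruneRepository(ctx context.Context, conn *grpc.ClientConn, repo *gitalypb.Repository) error {
	client := gitalypb.NewRepositoryServiceClient(conn)

	// Step 1: OptimizeRepository repacks the repository and explodes any
	// unreachable objects into loose objects.
	if _, err := client.OptimizeRepository(ctx, &gitalypb.OptimizeRepositoryRequest{
		Repository: repo,
	}); err != nil {
		return err
	}

	// Step 2: respect the 30-minute grace period before pruning, as required
	// by the RPC documentation. A real caller would schedule this separately
	// rather than sleeping in-process.
	time.Sleep(30 * time.Minute)

	// Step 3: prune loose objects which are unreachable and older than the
	// grace period.
	_, err := client.PruneUnreachableObjects(ctx, &gitalypb.PruneUnreachableObjectsRequest{
		Repository: repo,
	})
	return err
}

However the second call is scheduled, the ordering and the grace period are the constraints the RPC documentation imposes; skipping either risks pruning objects that are still being written.
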
diff --git a/ruby/Gemfile b/ruby/Gemfile
index 70dd9c594..78296dff8 100644
--- a/ruby/Gemfile
+++ b/ruby/Gemfile
@@ -3,7 +3,7 @@ source 'https://rubygems.org'
gem 'rugged', '~> 1.2'
gem 'github-linguist', '~> 7.12', require: 'linguist'
gem 'gitlab-markup', '~> 1.7.1'
-gem 'activesupport', '~> 6.1.4.4'
+gem 'activesupport', '~> 6.1.4.6'
gem 'rdoc', '~> 6.0'
gem 'gitlab-gollum-lib', '~> 4.2.7.10.gitlab.2', require: false
gem 'gitlab-gollum-rugged_adapter', '~> 0.4.4.4.gitlab.1', require: false
diff --git a/ruby/Gemfile.lock b/ruby/Gemfile.lock
index fd0dea027..752883d80 100644
--- a/ruby/Gemfile.lock
+++ b/ruby/Gemfile.lock
@@ -2,20 +2,20 @@ GEM
remote: https://rubygems.org/
specs:
abstract_type (0.0.7)
- actionpack (6.1.4.4)
- actionview (= 6.1.4.4)
- activesupport (= 6.1.4.4)
+ actionpack (6.1.4.6)
+ actionview (= 6.1.4.6)
+ activesupport (= 6.1.4.6)
rack (~> 2.0, >= 2.0.9)
rack-test (>= 0.6.3)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.2.0)
- actionview (6.1.4.4)
- activesupport (= 6.1.4.4)
+ actionview (6.1.4.6)
+ activesupport (= 6.1.4.6)
builder (~> 3.1)
erubi (~> 1.4)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.1, >= 1.2.0)
- activesupport (6.1.4.4)
+ activesupport (6.1.4.6)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2)
minitest (>= 5.1)
@@ -88,7 +88,7 @@ GEM
google-protobuf (~> 3.18)
googleapis-common-protos-types (~> 1.0)
grpc-tools (1.42.0)
- i18n (1.8.11)
+ i18n (1.9.1)
concurrent-ruby (~> 1.0)
ice_nine (0.11.2)
jaeger-client (1.1.0)
@@ -101,7 +101,7 @@ GEM
reverse_markdown (~> 1.0)
rugged (>= 0.24, < 2.0)
thor (>= 0.19, < 2.0)
- loofah (2.13.0)
+ loofah (2.14.0)
crass (~> 1.0.2)
nokogiri (>= 1.5.9)
memoizable (0.4.2)
@@ -219,13 +219,13 @@ GEM
with_env (1.1.0)
xml-simple (1.1.9)
rexml
- zeitwerk (2.5.3)
+ zeitwerk (2.5.4)
PLATFORMS
ruby
DEPENDENCIES
- activesupport (~> 6.1.4.4)
+ activesupport (~> 6.1.4.6)
factory_bot
faraday (~> 1.0)
github-linguist (~> 7.12)
diff --git a/ruby/proto/gitaly/repository-service_pb.rb b/ruby/proto/gitaly/repository-service_pb.rb
index 9a2409ad9..9de8ca7ce 100644
--- a/ruby/proto/gitaly/repository-service_pb.rb
+++ b/ruby/proto/gitaly/repository-service_pb.rb
@@ -307,6 +307,11 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
end
add_message "gitaly.OptimizeRepositoryResponse" do
end
+ add_message "gitaly.PruneUnreachableObjectsRequest" do
+ optional :repository, :message, 1, "gitaly.Repository"
+ end
+ add_message "gitaly.PruneUnreachableObjectsResponse" do
+ end
add_message "gitaly.SetFullPathRequest" do
optional :repository, :message, 1, "gitaly.Repository"
optional :path, :string, 2
@@ -400,6 +405,8 @@ module Gitaly
ReplicateRepositoryResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.ReplicateRepositoryResponse").msgclass
OptimizeRepositoryRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.OptimizeRepositoryRequest").msgclass
OptimizeRepositoryResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.OptimizeRepositoryResponse").msgclass
+ PruneUnreachableObjectsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.PruneUnreachableObjectsRequest").msgclass
+ PruneUnreachableObjectsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.PruneUnreachableObjectsResponse").msgclass
SetFullPathRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.SetFullPathRequest").msgclass
SetFullPathResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("gitaly.SetFullPathResponse").msgclass
end
diff --git a/ruby/proto/gitaly/repository-service_services_pb.rb b/ruby/proto/gitaly/repository-service_services_pb.rb
index 3b58cae43..4eb5884ee 100644
--- a/ruby/proto/gitaly/repository-service_services_pb.rb
+++ b/ruby/proto/gitaly/repository-service_services_pb.rb
@@ -68,6 +68,18 @@ module Gitaly
rpc :RenameRepository, ::Gitaly::RenameRepositoryRequest, ::Gitaly::RenameRepositoryResponse
rpc :ReplicateRepository, ::Gitaly::ReplicateRepositoryRequest, ::Gitaly::ReplicateRepositoryResponse
rpc :OptimizeRepository, ::Gitaly::OptimizeRepositoryRequest, ::Gitaly::OptimizeRepositoryResponse
+ # PruneUnreachableObjects will prune all objects which aren't reachable from
+ # the repository's current set of references. Because pruning can only
+ # happen for objects which aren't packed, you are required to first run
+ # OptimizeRepository to explode any unreachable objects into loose objects.
+ #
+ # Furthermore, this RPC call has a grace period of 30 minutes: any
+ # unreachable loose objects must not have been accessed or modified in the
+ # last 30 minutes. This is a hard requirement to avoid repository corruption.
+ #
+ # To make proper use of this RPC you thus need to call OptimizeRepository,
+ # wait 30 minutes, and then call PruneUnreachableObjects.
+ rpc :PruneUnreachableObjects, ::Gitaly::PruneUnreachableObjectsRequest, ::Gitaly::PruneUnreachableObjectsResponse
# SetFullPath writes the "gitlab.fullpath" configuration into the
# repository's gitconfig. This is mainly to help debugging purposes in case
# an admin inspects the repository's gitconfig such that he can easily see