Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitaly.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/cmd
diff options
context:
space:
mode:
authorJames Fargher <jfargher@gitlab.com>2021-08-25 05:03:09 +0300
committerPatrick Steinhardt <psteinhardt@gitlab.com>2021-08-31 09:58:54 +0300
commitdfb6159289e75d72806586d86b5eb37cff14f292 (patch)
tree88d444751f827f7cc69976647fcddaaa3279266e /cmd
parenta8f73c4c04aa4b0468f243432c3e4baf9ec5a5b9 (diff)
Restore backups concurrently
Changelog: changed
Diffstat (limited to 'cmd')
-rw-r--r--cmd/gitaly-backup/restore.go16
1 file changed, 13 insertions, 3 deletions
diff --git a/cmd/gitaly-backup/restore.go b/cmd/gitaly-backup/restore.go
index fb614eb1e..42ce5a087 100644
--- a/cmd/gitaly-backup/restore.go
+++ b/cmd/gitaly-backup/restore.go
@@ -7,6 +7,7 @@ import (
"flag"
"fmt"
"io"
+ "runtime"
log "github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/v14/internal/backup"
@@ -23,12 +24,16 @@ type restoreRequest struct {
}
type restoreSubcommand struct {
- backupPath string
- locator string
+ backupPath string
+ parallel int
+ parallelStorage int
+ locator string
}
func (cmd *restoreSubcommand) Flags(fs *flag.FlagSet) {
fs.StringVar(&cmd.backupPath, "path", "", "repository backup path")
+ fs.IntVar(&cmd.parallel, "parallel", runtime.NumCPU(), "maximum number of parallel restores")
+ fs.IntVar(&cmd.parallelStorage, "parallel-storage", 2, "maximum number of parallel restores per storage. Note: actual parallelism when combined with `-parallel` depends on the order the repositories are received.")
fs.StringVar(&cmd.locator, "locator", "legacy", "determines how backup files are located. One of legacy, pointer. Note: The feature is not ready for production use.")
}
@@ -44,7 +49,12 @@ func (cmd *restoreSubcommand) Run(ctx context.Context, stdin io.Reader, stdout i
}
manager := backup.NewManager(sink, locator)
- pipeline := backup.NewLoggingPipeline(log.StandardLogger())
+
+ var pipeline backup.Pipeline
+ pipeline = backup.NewLoggingPipeline(log.StandardLogger())
+ if cmd.parallel > 0 || cmd.parallelStorage > 0 {
+ pipeline = backup.NewParallelCreatePipeline(pipeline, cmd.parallel, cmd.parallelStorage)
+ }
decoder := json.NewDecoder(stdin)
for {