Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
Diffstat (limited to 'workhorse/internal')
-rw-r--r--workhorse/internal/staticpages/servefile.go3
-rw-r--r--workhorse/internal/staticpages/servefile_test.go3
-rw-r--r--workhorse/internal/testhelper/testhelper.go13
-rw-r--r--workhorse/internal/upload/artifacts_store_test.go (renamed from workhorse/internal/artifacts/artifacts_store_test.go)4
-rw-r--r--workhorse/internal/upload/artifacts_upload_test.go (renamed from workhorse/internal/artifacts/artifacts_upload_test.go)27
-rw-r--r--workhorse/internal/upload/artifacts_uploader.go (renamed from workhorse/internal/artifacts/artifacts_upload.go)50
-rw-r--r--workhorse/internal/upload/body_uploader.go35
-rw-r--r--workhorse/internal/upload/body_uploader_test.go10
-rw-r--r--workhorse/internal/upload/destination/destination.go (renamed from workhorse/internal/filestore/file_handler.go)49
-rw-r--r--workhorse/internal/upload/destination/destination_test.go (renamed from workhorse/internal/filestore/file_handler_test.go)122
-rw-r--r--workhorse/internal/upload/destination/filestore/filestore.go21
-rw-r--r--workhorse/internal/upload/destination/filestore/filestore_test.go38
-rw-r--r--workhorse/internal/upload/destination/multi_hash.go (renamed from workhorse/internal/filestore/multi_hash.go)2
-rw-r--r--workhorse/internal/upload/destination/objectstore/doc.go3
-rw-r--r--workhorse/internal/upload/destination/objectstore/gocloud_object.go (renamed from workhorse/internal/objectstore/gocloud_object.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/gocloud_object_test.go (renamed from workhorse/internal/objectstore/gocloud_object_test.go)4
-rw-r--r--workhorse/internal/upload/destination/objectstore/multipart.go (renamed from workhorse/internal/objectstore/multipart.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/multipart_test.go (renamed from workhorse/internal/objectstore/multipart_test.go)4
-rw-r--r--workhorse/internal/upload/destination/objectstore/object.go (renamed from workhorse/internal/objectstore/object.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/object_test.go (renamed from workhorse/internal/objectstore/object_test.go)4
-rw-r--r--workhorse/internal/upload/destination/objectstore/prometheus.go (renamed from workhorse/internal/objectstore/prometheus.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go (renamed from workhorse/internal/objectstore/s3_complete_multipart_api.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/s3_object.go (renamed from workhorse/internal/objectstore/s3_object.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/s3_object_test.go (renamed from workhorse/internal/objectstore/s3_object_test.go)4
-rw-r--r--workhorse/internal/upload/destination/objectstore/s3_session.go (renamed from workhorse/internal/objectstore/s3_session.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/s3_session_test.go (renamed from workhorse/internal/objectstore/s3_session_test.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/test/consts.go (renamed from workhorse/internal/objectstore/test/consts.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go (renamed from workhorse/internal/objectstore/test/gocloud_stub.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go (renamed from workhorse/internal/objectstore/test/objectstore_stub.go)2
-rw-r--r--workhorse/internal/upload/destination/objectstore/test/objectstore_stub_test.go (renamed from workhorse/internal/objectstore/test/objectstore_stub_test.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/test/s3_stub.go (renamed from workhorse/internal/objectstore/test/s3_stub.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/upload_strategy.go (renamed from workhorse/internal/objectstore/upload_strategy.go)0
-rw-r--r--workhorse/internal/upload/destination/objectstore/uploader.go (renamed from workhorse/internal/objectstore/uploader.go)0
-rw-r--r--workhorse/internal/upload/destination/reader.go (renamed from workhorse/internal/filestore/reader.go)2
-rw-r--r--workhorse/internal/upload/destination/reader_test.go (renamed from workhorse/internal/filestore/reader_test.go)2
-rw-r--r--workhorse/internal/upload/destination/upload_opts.go (renamed from workhorse/internal/filestore/save_file_opts.go)18
-rw-r--r--workhorse/internal/upload/destination/upload_opts_test.go (renamed from workhorse/internal/filestore/save_file_opts_test.go)28
-rw-r--r--workhorse/internal/upload/lfs_preparer.go (renamed from workhorse/internal/lfs/lfs.go)19
-rw-r--r--workhorse/internal/upload/lfs_preparer_test.go (renamed from workhorse/internal/lfs/lfs_test.go)16
-rw-r--r--workhorse/internal/upload/multipart_uploader.go (renamed from workhorse/internal/upload/accelerate.go)11
-rw-r--r--workhorse/internal/upload/object_storage_preparer.go9
-rw-r--r--workhorse/internal/upload/preparer.go33
-rw-r--r--workhorse/internal/upload/rewrite.go10
-rw-r--r--workhorse/internal/upload/saved_file_tracker.go4
-rw-r--r--workhorse/internal/upload/saved_file_tracker_test.go6
-rw-r--r--workhorse/internal/upload/uploads.go26
-rw-r--r--workhorse/internal/upload/uploads_test.go20
-rw-r--r--workhorse/internal/upstream/.gitignore1
-rw-r--r--workhorse/internal/upstream/metrics.go103
-rw-r--r--workhorse/internal/upstream/routes.go27
-rw-r--r--workhorse/internal/upstream/routes_test.go30
51 files changed, 374 insertions, 389 deletions
diff --git a/workhorse/internal/staticpages/servefile.go b/workhorse/internal/staticpages/servefile.go
index be314f181b7..18fcdadcbed 100644
--- a/workhorse/internal/staticpages/servefile.go
+++ b/workhorse/internal/staticpages/servefile.go
@@ -67,6 +67,9 @@ func (s *Static) ServeExisting(prefix urlprefix.Prefix, cache CacheMode, notFoun
notFoundHandler.ServeHTTP(w, r)
return
}
+
+ w.Header().Set("X-Content-Type-Options", "nosniff")
+
defer content.Close()
switch cache {
diff --git a/workhorse/internal/staticpages/servefile_test.go b/workhorse/internal/staticpages/servefile_test.go
index f27bd0ccaeb..67675beccf8 100644
--- a/workhorse/internal/staticpages/servefile_test.go
+++ b/workhorse/internal/staticpages/servefile_test.go
@@ -78,6 +78,7 @@ func TestServingTheActualFile(t *testing.T) {
w := httptest.NewRecorder()
st := &Static{DocumentRoot: dir}
st.ServeExisting("/", CacheDisabled, nil).ServeHTTP(w, httpRequest)
+ testhelper.RequireResponseHeader(t, w, "X-Content-Type-Options", "nosniff")
require.Equal(t, 200, w.Code)
if w.Body.String() != fileContent {
t.Error("We should serve the file: ", w.Body.String())
@@ -109,6 +110,7 @@ func TestExcludedPaths(t *testing.T) {
st.ServeExisting("/", CacheDisabled, nil).ServeHTTP(w, httpRequest)
if tc.found {
+ testhelper.RequireResponseHeader(t, w, "X-Content-Type-Options", "nosniff")
require.Equal(t, 200, w.Code)
require.Equal(t, tc.contents, w.Body.String())
} else {
@@ -144,6 +146,7 @@ func testServingThePregzippedFile(t *testing.T, enableGzip bool) {
w := httptest.NewRecorder()
st := &Static{DocumentRoot: dir}
st.ServeExisting("/", CacheDisabled, nil).ServeHTTP(w, httpRequest)
+ testhelper.RequireResponseHeader(t, w, "X-Content-Type-Options", "nosniff")
require.Equal(t, 200, w.Code)
if enableGzip {
testhelper.RequireResponseHeader(t, w, "Content-Encoding", "gzip")
diff --git a/workhorse/internal/testhelper/testhelper.go b/workhorse/internal/testhelper/testhelper.go
index dae8f9b3149..6bbdfddcd60 100644
--- a/workhorse/internal/testhelper/testhelper.go
+++ b/workhorse/internal/testhelper/testhelper.go
@@ -167,3 +167,16 @@ func Retry(t testing.TB, timeout time.Duration, fn func() error) {
}
t.Fatalf("test timeout after %v; last error: %v", timeout, err)
}
+
+func SetupStaticFileHelper(t *testing.T, fpath, content, directory string) string {
+ cwd, err := os.Getwd()
+ require.NoError(t, err, "get working directory")
+
+ absDocumentRoot := path.Join(cwd, directory)
+ require.NoError(t, os.MkdirAll(path.Join(absDocumentRoot, path.Dir(fpath)), 0755), "create document root")
+
+ staticFile := path.Join(absDocumentRoot, fpath)
+ require.NoError(t, ioutil.WriteFile(staticFile, []byte(content), 0666), "write file content")
+
+ return absDocumentRoot
+}
diff --git a/workhorse/internal/artifacts/artifacts_store_test.go b/workhorse/internal/upload/artifacts_store_test.go
index f9fb28cf7ce..97e66fc37a4 100644
--- a/workhorse/internal/artifacts/artifacts_store_test.go
+++ b/workhorse/internal/upload/artifacts_store_test.go
@@ -1,4 +1,4 @@
-package artifacts
+package upload
import (
"archive/zip"
@@ -17,8 +17,8 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func createTestZipArchive(t *testing.T) (data []byte, md5Hash string) {
diff --git a/workhorse/internal/artifacts/artifacts_upload_test.go b/workhorse/internal/upload/artifacts_upload_test.go
index 3e8a52be1a1..0a9e4ef3869 100644
--- a/workhorse/internal/artifacts/artifacts_upload_test.go
+++ b/workhorse/internal/upload/artifacts_upload_test.go
@@ -1,4 +1,4 @@
-package artifacts
+package upload
import (
"archive/zip"
@@ -16,12 +16,13 @@ import (
"github.com/golang-jwt/jwt/v4"
+ "gitlab.com/gitlab-org/labkit/log"
+
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
@@ -35,6 +36,14 @@ const (
Path = "/url/path"
)
+func TestMain(m *testing.M) {
+ if err := testhelper.BuildExecutables(); err != nil {
+ log.WithError(err).Fatal()
+ }
+
+ os.Exit(m.Run())
+}
+
func testArtifactsUploadServer(t *testing.T, authResponse *api.Response, bodyProcessor func(w http.ResponseWriter, r *http.Request)) *httptest.Server {
mux := http.NewServeMux()
mux.HandleFunc(Path+"/authorize", func(w http.ResponseWriter, r *http.Request) {
@@ -51,7 +60,7 @@ func testArtifactsUploadServer(t *testing.T, authResponse *api.Response, bodyPro
w.Write(data)
})
mux.HandleFunc(Path, func(w http.ResponseWriter, r *http.Request) {
- opts, err := filestore.GetOpts(authResponse)
+ opts, err := destination.GetOpts(authResponse)
require.NoError(t, err)
if r.Method != "POST" {
@@ -162,7 +171,7 @@ func testUploadArtifacts(t *testing.T, contentType, url string, body io.Reader)
testhelper.ConfigureSecret()
apiClient := api.NewAPI(parsedURL, "123", roundTripper)
proxyClient := proxy.NewProxy(parsedURL, "123", roundTripper)
- UploadArtifacts(apiClient, proxyClient, &upload.DefaultPreparer{}).ServeHTTP(response, httpRequest)
+ Artifacts(apiClient, proxyClient, &DefaultPreparer{}).ServeHTTP(response, httpRequest)
return response
}
@@ -193,10 +202,10 @@ func TestUploadHandlerAddingMetadata(t *testing.T) {
t.Run(tc.desc, func(t *testing.T) {
s := setupWithTmpPath(t, "file", tc.includeFormat, tc.format, nil,
func(w http.ResponseWriter, r *http.Request) {
- token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
+ token, err := jwt.ParseWithClaims(r.Header.Get(RewrittenFieldsHeader), &MultipartClaims{}, testhelper.ParseJWT)
require.NoError(t, err)
- rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
+ rewrittenFields := token.Claims.(*MultipartClaims).RewrittenFields
require.Equal(t, 2, len(rewrittenFields))
require.Contains(t, rewrittenFields, "file")
@@ -225,10 +234,10 @@ func TestUploadHandlerAddingMetadata(t *testing.T) {
func TestUploadHandlerTarArtifact(t *testing.T) {
s := setupWithTmpPath(t, "file", true, "tar", nil,
func(w http.ResponseWriter, r *http.Request) {
- token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
+ token, err := jwt.ParseWithClaims(r.Header.Get(RewrittenFieldsHeader), &MultipartClaims{}, testhelper.ParseJWT)
require.NoError(t, err)
- rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
+ rewrittenFields := token.Claims.(*MultipartClaims).RewrittenFields
require.Equal(t, 1, len(rewrittenFields))
require.Contains(t, rewrittenFields, "file")
diff --git a/workhorse/internal/artifacts/artifacts_upload.go b/workhorse/internal/upload/artifacts_uploader.go
index f1fd69082f8..2a91a05fe3d 100644
--- a/workhorse/internal/artifacts/artifacts_upload.go
+++ b/workhorse/internal/upload/artifacts_uploader.go
@@ -1,4 +1,4 @@
-package artifacts
+package upload
import (
"context"
@@ -16,9 +16,8 @@ import (
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)
@@ -36,17 +35,33 @@ var zipSubcommandsErrorsCounter = promauto.NewCounterVec(
}, []string{"error"})
type artifactsUploadProcessor struct {
- opts *filestore.SaveFileOpts
+ opts *destination.UploadOpts
format string
- upload.SavedFileTracker
+ SavedFileTracker
}
-func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context, file *filestore.FileHandler) (*filestore.FileHandler, error) {
+// Artifacts is like a Multipart but specific for artifacts upload.
+func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
+ return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
+ opts, _, err := p.Prepare(a)
+ if err != nil {
+ helper.Fail500(w, r, fmt.Errorf("UploadArtifacts: error preparing file storage options"))
+ return
+ }
+
+ format := r.URL.Query().Get(ArtifactFormatKey)
+
+ mg := &artifactsUploadProcessor{opts: opts, format: format, SavedFileTracker: SavedFileTracker{Request: r}}
+ interceptMultipartFiles(w, r, h, a, mg, opts)
+ }, "/authorize")
+}
+
+func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context, file *destination.FileHandler) (*destination.FileHandler, error) {
metaReader, metaWriter := io.Pipe()
defer metaWriter.Close()
- metaOpts := &filestore.SaveFileOpts{
+ metaOpts := &destination.UploadOpts{
LocalTempPath: a.opts.LocalTempPath,
TempFilePrefix: "metadata.gz",
}
@@ -71,12 +86,12 @@ func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context,
type saveResult struct {
error
- *filestore.FileHandler
+ *destination.FileHandler
}
done := make(chan saveResult)
go func() {
var result saveResult
- result.FileHandler, result.error = filestore.SaveFileFromReader(ctx, metaReader, -1, metaOpts)
+ result.FileHandler, result.error = destination.Upload(ctx, metaReader, -1, metaOpts)
done <- result
}()
@@ -104,7 +119,7 @@ func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context,
return result.FileHandler, result.error
}
-func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName string, file *filestore.FileHandler, writer *multipart.Writer) error {
+func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName string, file *destination.FileHandler, writer *multipart.Writer) error {
// ProcessFile for artifacts requires file form-data field name to eq `file`
if formName != "file" {
@@ -150,18 +165,3 @@ func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName str
func (a *artifactsUploadProcessor) Name() string {
return "artifacts"
}
-
-func UploadArtifacts(myAPI *api.API, h http.Handler, p upload.Preparer) http.Handler {
- return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
- opts, _, err := p.Prepare(a)
- if err != nil {
- helper.Fail500(w, r, fmt.Errorf("UploadArtifacts: error preparing file storage options"))
- return
- }
-
- format := r.URL.Query().Get(ArtifactFormatKey)
-
- mg := &artifactsUploadProcessor{opts: opts, format: format, SavedFileTracker: upload.SavedFileTracker{Request: r}}
- upload.InterceptMultipartFiles(w, r, h, a, mg, opts)
- }, "/authorize")
-}
diff --git a/workhorse/internal/upload/body_uploader.go b/workhorse/internal/upload/body_uploader.go
index 6c53bd9241b..d831f9f43a1 100644
--- a/workhorse/internal/upload/body_uploader.go
+++ b/workhorse/internal/upload/body_uploader.go
@@ -8,41 +8,10 @@ import (
"strings"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
-type PreAuthorizer interface {
- PreAuthorizeHandler(next api.HandleFunc, suffix string) http.Handler
-}
-
-// Verifier is an optional pluggable behavior for upload paths. If
-// Verify() returns an error, Workhorse will return an error response to
-// the client instead of propagating the request to Rails. The motivating
-// use case is Git LFS, where Workhorse checks the size and SHA256
-// checksum of the uploaded file.
-type Verifier interface {
- // Verify can abort the upload by returning an error
- Verify(handler *filestore.FileHandler) error
-}
-
-// Preparer is a pluggable behavior that interprets a Rails API response
-// and either tells Workhorse how to handle the upload, via the
-// SaveFileOpts and Verifier, or it rejects the request by returning a
-// non-nil error. Its intended use is to make sure the upload gets stored
-// in the right location: either a local directory, or one of several
-// supported object storage backends.
-type Preparer interface {
- Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error)
-}
-
-type DefaultPreparer struct{}
-
-func (s *DefaultPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
- opts, err := filestore.GetOpts(a)
- return opts, nil, err
-}
-
// RequestBody is a request middleware. It will store the request body to
// a location by determined an api.Response value. It then forwards the
// request to gitlab-rails without the original request body.
@@ -54,7 +23,7 @@ func RequestBody(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
return
}
- fh, err := filestore.SaveFileFromReader(r.Context(), r.Body, r.ContentLength, opts)
+ fh, err := destination.Upload(r.Context(), r.Body, r.ContentLength, opts)
if err != nil {
helper.Fail500(w, r, fmt.Errorf("RequestBody: upload failed: %v", err))
return
diff --git a/workhorse/internal/upload/body_uploader_test.go b/workhorse/internal/upload/body_uploader_test.go
index b3d561ac131..47490db8780 100644
--- a/workhorse/internal/upload/body_uploader_test.go
+++ b/workhorse/internal/upload/body_uploader_test.go
@@ -15,8 +15,8 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
const (
@@ -169,8 +169,8 @@ type alwaysLocalPreparer struct {
prepareError error
}
-func (a *alwaysLocalPreparer) Prepare(_ *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
- opts, err := filestore.GetOpts(&api.Response{TempPath: os.TempDir()})
+func (a *alwaysLocalPreparer) Prepare(_ *api.Response) (*destination.UploadOpts, Verifier, error) {
+ opts, err := destination.GetOpts(&api.Response{TempPath: os.TempDir()})
if err != nil {
return nil, nil, err
}
@@ -180,7 +180,7 @@ func (a *alwaysLocalPreparer) Prepare(_ *api.Response) (*filestore.SaveFileOpts,
type alwaysFailsVerifier struct{}
-func (alwaysFailsVerifier) Verify(handler *filestore.FileHandler) error {
+func (alwaysFailsVerifier) Verify(handler *destination.FileHandler) error {
return fmt.Errorf("Verification failed")
}
@@ -188,7 +188,7 @@ type mockVerifier struct {
invoked bool
}
-func (m *mockVerifier) Verify(handler *filestore.FileHandler) error {
+func (m *mockVerifier) Verify(handler *destination.FileHandler) error {
m.invoked = true
return nil
diff --git a/workhorse/internal/filestore/file_handler.go b/workhorse/internal/upload/destination/destination.go
index dac8d4d6247..7a030e59a64 100644
--- a/workhorse/internal/filestore/file_handler.go
+++ b/workhorse/internal/upload/destination/destination.go
@@ -1,4 +1,7 @@
-package filestore
+// The destination package handles uploading to a specific destination (delegates
+// to filestore or objectstore packages) based on options from the pre-authorization
+// API and finalizing the upload.
+package destination
import (
"context"
@@ -14,8 +17,9 @@ import (
"gitlab.com/gitlab-org/labkit/log"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/filestore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
)
type SizeError error
@@ -107,9 +111,9 @@ type consumer interface {
Consume(context.Context, io.Reader, time.Time) (int64, error)
}
-// SaveFileFromReader persists the provided reader content to all the location specified in opts. A cleanup will be performed once ctx is Done
+// Upload persists the provided reader content to all the location specified in opts. A cleanup will be performed once ctx is Done
// Make sure the provided context will not expire before finalizing upload with GitLab Rails.
-func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts *SaveFileOpts) (*FileHandler, error) {
+func Upload(ctx context.Context, reader io.Reader, size int64, opts *UploadOpts) (*FileHandler, error) {
fh := &FileHandler{
Name: opts.TempFilePrefix,
RemoteID: opts.RemoteID,
@@ -126,7 +130,7 @@ func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts
switch {
case opts.IsLocal():
clientMode = "local"
- uploadDestination, err = fh.uploadLocalFile(ctx, opts)
+ uploadDestination, err = fh.newLocalFile(ctx, opts)
case opts.UseWorkhorseClientEnabled() && opts.ObjectStorageConfig.IsGoCloud():
clientMode = fmt.Sprintf("go_cloud:%s", opts.ObjectStorageConfig.Provider)
p := &objectstore.GoCloudObjectParams{
@@ -210,16 +214,16 @@ func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts
return fh, nil
}
-func (fh *FileHandler) uploadLocalFile(ctx context.Context, opts *SaveFileOpts) (consumer, error) {
+func (fh *FileHandler) newLocalFile(ctx context.Context, opts *UploadOpts) (consumer, error) {
// make sure TempFolder exists
err := os.MkdirAll(opts.LocalTempPath, 0700)
if err != nil {
- return nil, fmt.Errorf("uploadLocalFile: mkdir %q: %v", opts.LocalTempPath, err)
+ return nil, fmt.Errorf("newLocalFile: mkdir %q: %v", opts.LocalTempPath, err)
}
file, err := ioutil.TempFile(opts.LocalTempPath, opts.TempFilePrefix)
if err != nil {
- return nil, fmt.Errorf("uploadLocalFile: create file: %v", err)
+ return nil, fmt.Errorf("newLocalFile: create file: %v", err)
}
go func() {
@@ -228,32 +232,5 @@ func (fh *FileHandler) uploadLocalFile(ctx context.Context, opts *SaveFileOpts)
}()
fh.LocalPath = file.Name()
- return &localUpload{file}, nil
-}
-
-type localUpload struct{ io.WriteCloser }
-
-func (loc *localUpload) Consume(_ context.Context, r io.Reader, _ time.Time) (int64, error) {
- n, err := io.Copy(loc.WriteCloser, r)
- errClose := loc.Close()
- if err == nil {
- err = errClose
- }
- return n, err
-}
-
-// SaveFileFromDisk open the local file fileName and calls SaveFileFromReader
-func SaveFileFromDisk(ctx context.Context, fileName string, opts *SaveFileOpts) (fh *FileHandler, err error) {
- file, err := os.Open(fileName)
- if err != nil {
- return nil, err
- }
- defer file.Close()
-
- fi, err := file.Stat()
- if err != nil {
- return nil, err
- }
-
- return SaveFileFromReader(ctx, file, fi.Size(), opts)
+ return &filestore.LocalFile{File: file}, nil
}
diff --git a/workhorse/internal/filestore/file_handler_test.go b/workhorse/internal/upload/destination/destination_test.go
index 2fd034bb761..ddf0ea24d60 100644
--- a/workhorse/internal/filestore/file_handler_test.go
+++ b/workhorse/internal/upload/destination/destination_test.go
@@ -1,4 +1,4 @@
-package filestore_test
+package destination_test
import (
"context"
@@ -17,13 +17,13 @@ import (
"gocloud.dev/blob"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func testDeadline() time.Time {
- return time.Now().Add(filestore.DefaultObjectStoreTimeout)
+ return time.Now().Add(destination.DefaultObjectStoreTimeout)
}
func requireFileGetsRemovedAsync(t *testing.T, filePath string) {
@@ -39,7 +39,7 @@ func requireObjectStoreDeletedAsync(t *testing.T, expectedDeletes int, osStub *t
require.Eventually(t, func() bool { return osStub.DeletesCnt() == expectedDeletes }, time.Second, time.Millisecond, "Object not deleted")
}
-func TestSaveFileWrongSize(t *testing.T) {
+func TestUploadWrongSize(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -47,15 +47,15 @@ func TestSaveFileWrongSize(t *testing.T) {
require.NoError(t, err)
defer os.RemoveAll(tmpFolder)
- opts := &filestore.SaveFileOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file"}
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, opts)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file"}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, opts)
require.Error(t, err)
- _, isSizeError := err.(filestore.SizeError)
+ _, isSizeError := err.(destination.SizeError)
require.True(t, isSizeError, "Should fail with SizeError")
require.Nil(t, fh)
}
-func TestSaveFileWithKnownSizeExceedLimit(t *testing.T) {
+func TestUploadWithKnownSizeExceedLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -63,15 +63,15 @@ func TestSaveFileWithKnownSizeExceedLimit(t *testing.T) {
require.NoError(t, err)
defer os.RemoveAll(tmpFolder)
- opts := &filestore.SaveFileOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
require.Error(t, err)
- _, isSizeError := err.(filestore.SizeError)
+ _, isSizeError := err.(destination.SizeError)
require.True(t, isSizeError, "Should fail with SizeError")
require.Nil(t, fh)
}
-func TestSaveFileWithUnknownSizeExceedLimit(t *testing.T) {
+func TestUploadWithUnknownSizeExceedLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -79,22 +79,13 @@ func TestSaveFileWithUnknownSizeExceedLimit(t *testing.T) {
require.NoError(t, err)
defer os.RemoveAll(tmpFolder)
- opts := &filestore.SaveFileOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), -1, opts)
- require.Equal(t, err, filestore.ErrEntityTooLarge)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), -1, opts)
+ require.Equal(t, err, destination.ErrEntityTooLarge)
require.Nil(t, fh)
}
-func TestSaveFromDiskNotExistingFile(t *testing.T) {
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
- fh, err := filestore.SaveFileFromDisk(ctx, "/I/do/not/exist", &filestore.SaveFileOpts{})
- require.Error(t, err, "SaveFileFromDisk should fail")
- require.True(t, os.IsNotExist(err), "Provided file should not exists")
- require.Nil(t, fh, "On error FileHandler should be nil")
-}
-
-func TestSaveFileWrongETag(t *testing.T) {
+func TestUploadWrongETag(t *testing.T) {
tests := []struct {
name string
multipart bool
@@ -110,7 +101,7 @@ func TestSaveFileWrongETag(t *testing.T) {
objectURL := ts.URL + test.ObjectPath
- opts := &filestore.SaveFileOpts{
+ opts := &destination.UploadOpts{
RemoteID: "test-file",
RemoteURL: objectURL,
PresignedPut: objectURL + "?Signature=ASignature",
@@ -126,7 +117,7 @@ func TestSaveFileWrongETag(t *testing.T) {
osStub.InitiateMultipartUpload(test.ObjectPath)
}
ctx, cancel := context.WithCancel(context.Background())
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
require.Nil(t, fh)
require.Error(t, err)
require.Equal(t, 1, osStub.PutsCnt(), "File not uploaded")
@@ -138,32 +129,7 @@ func TestSaveFileWrongETag(t *testing.T) {
}
}
-func TestSaveFileFromDiskToLocalPath(t *testing.T) {
- f, err := ioutil.TempFile("", "workhorse-test")
- require.NoError(t, err)
- defer os.Remove(f.Name())
-
- _, err = fmt.Fprint(f, test.ObjectContent)
- require.NoError(t, err)
-
- tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
- require.NoError(t, err)
- defer os.RemoveAll(tmpFolder)
-
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- opts := &filestore.SaveFileOpts{LocalTempPath: tmpFolder}
- fh, err := filestore.SaveFileFromDisk(ctx, f.Name(), opts)
- require.NoError(t, err)
- require.NotNil(t, fh)
-
- require.NotEmpty(t, fh.LocalPath, "File not persisted on disk")
- _, err = os.Stat(fh.LocalPath)
- require.NoError(t, err)
-}
-
-func TestSaveFile(t *testing.T) {
+func TestUpload(t *testing.T) {
testhelper.ConfigureSecret()
type remote int
@@ -189,7 +155,7 @@ func TestSaveFile(t *testing.T) {
for _, spec := range tests {
t.Run(spec.name, func(t *testing.T) {
- var opts filestore.SaveFileOpts
+ var opts destination.UploadOpts
var expectedDeletes, expectedPuts int
osStub, ts := test.StartObjectStore()
@@ -231,7 +197,7 @@ func TestSaveFile(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
require.NoError(t, err)
require.NotNil(t, fh)
@@ -279,7 +245,7 @@ func TestSaveFile(t *testing.T) {
}
}
-func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
+func TestUploadWithS3WorkhorseClient(t *testing.T) {
tests := []struct {
name string
objectSize int64
@@ -298,7 +264,7 @@ func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
name: "unknown object size with limit",
objectSize: -1,
maxSize: test.ObjectSize - 1,
- expectedErr: filestore.ErrEntityTooLarge,
+ expectedErr: destination.ErrEntityTooLarge,
},
}
@@ -312,12 +278,12 @@ func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
defer cancel()
remoteObject := "tmp/test-file/1"
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: filestore.ObjectStorageConfig{
+ ObjectStorageConfig: destination.ObjectStorageConfig{
Provider: "AWS",
S3Credentials: s3Creds,
S3Config: s3Config,
@@ -325,7 +291,7 @@ func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
MaximumSize: tc.maxSize,
}
- _, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), tc.objectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), tc.objectSize, &opts)
if tc.expectedErr == nil {
require.NoError(t, err)
@@ -338,7 +304,7 @@ func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
}
}
-func TestSaveFileWithAzureWorkhorseClient(t *testing.T) {
+func TestUploadWithAzureWorkhorseClient(t *testing.T) {
mux, bucketDir, cleanup := test.SetupGoCloudFileBucket(t, "azblob")
defer cleanup()
@@ -346,48 +312,48 @@ func TestSaveFileWithAzureWorkhorseClient(t *testing.T) {
defer cancel()
remoteObject := "tmp/test-file/1"
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: filestore.ObjectStorageConfig{
+ ObjectStorageConfig: destination.ObjectStorageConfig{
Provider: "AzureRM",
URLMux: mux,
GoCloudConfig: config.GoCloudConfig{URL: "azblob://test-container"},
},
}
- _, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
require.NoError(t, err)
test.GoCloudObjectExists(t, bucketDir, remoteObject)
}
-func TestSaveFileWithUnknownGoCloudScheme(t *testing.T) {
+func TestUploadWithUnknownGoCloudScheme(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
mux := new(blob.URLMux)
remoteObject := "tmp/test-file/1"
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: filestore.ObjectStorageConfig{
+ ObjectStorageConfig: destination.ObjectStorageConfig{
Provider: "SomeCloud",
URLMux: mux,
GoCloudConfig: config.GoCloudConfig{URL: "foo://test-container"},
},
}
- _, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
require.Error(t, err)
}
-func TestSaveMultipartInBodyFailure(t *testing.T) {
+func TestUploadMultipartInBodyFailure(t *testing.T) {
osStub, ts := test.StartObjectStore()
defer ts.Close()
@@ -395,7 +361,7 @@ func TestSaveMultipartInBodyFailure(t *testing.T) {
// this is the only way to get an in-body failure from our ObjectStoreStub
objectPath := "/bucket-but-no-object-key"
objectURL := ts.URL + objectPath
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
RemoteID: "test-file",
RemoteURL: objectURL,
PartSize: test.ObjectSize,
@@ -409,13 +375,13 @@ func TestSaveMultipartInBodyFailure(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
require.Nil(t, fh)
require.Error(t, err)
require.EqualError(t, err, test.MultipartUploadInternalError().Error())
}
-func TestSaveRemoteFileWithLimit(t *testing.T) {
+func TestUploadRemoteFileWithLimit(t *testing.T) {
testhelper.ConfigureSecret()
type remote int
@@ -449,20 +415,20 @@ func TestSaveRemoteFileWithLimit(t *testing.T) {
testData: test.ObjectContent,
objectSize: -1,
maxSize: test.ObjectSize - 1,
- expectedErr: filestore.ErrEntityTooLarge,
+ expectedErr: destination.ErrEntityTooLarge,
},
{
name: "large object with unknown size with limit",
testData: string(make([]byte, 20000)),
objectSize: -1,
maxSize: 19000,
- expectedErr: filestore.ErrEntityTooLarge,
+ expectedErr: destination.ErrEntityTooLarge,
},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
- var opts filestore.SaveFileOpts
+ var opts destination.UploadOpts
for _, remoteType := range remoteTypes {
tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
@@ -502,7 +468,7 @@ func TestSaveRemoteFileWithLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(tc.testData), tc.objectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(tc.testData), tc.objectSize, &opts)
if tc.expectedErr == nil {
require.NoError(t, err)
@@ -516,7 +482,7 @@ func TestSaveRemoteFileWithLimit(t *testing.T) {
}
}
-func checkFileHandlerWithFields(t *testing.T, fh *filestore.FileHandler, fields map[string]string, prefix string) {
+func checkFileHandlerWithFields(t *testing.T, fh *destination.FileHandler, fields map[string]string, prefix string) {
key := func(field string) string {
if prefix == "" {
return field
diff --git a/workhorse/internal/upload/destination/filestore/filestore.go b/workhorse/internal/upload/destination/filestore/filestore.go
new file mode 100644
index 00000000000..2d88874bf25
--- /dev/null
+++ b/workhorse/internal/upload/destination/filestore/filestore.go
@@ -0,0 +1,21 @@
+// The filestore package has a consumer specific to uploading to local disk storage.
+package filestore
+
+import (
+ "context"
+ "io"
+ "time"
+)
+
+type LocalFile struct {
+ File io.WriteCloser
+}
+
+func (lf *LocalFile) Consume(_ context.Context, r io.Reader, _ time.Time) (int64, error) {
+ n, err := io.Copy(lf.File, r)
+ errClose := lf.File.Close()
+ if err == nil {
+ err = errClose
+ }
+ return n, err
+}
diff --git a/workhorse/internal/upload/destination/filestore/filestore_test.go b/workhorse/internal/upload/destination/filestore/filestore_test.go
new file mode 100644
index 00000000000..ec67eae96b9
--- /dev/null
+++ b/workhorse/internal/upload/destination/filestore/filestore_test.go
@@ -0,0 +1,38 @@
+package filestore
+
+import (
+ "context"
+ "io/ioutil"
+ "os"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestConsume(t *testing.T) {
+ f, err := ioutil.TempFile("", "filestore-local-file")
+ if f != nil {
+ defer os.Remove(f.Name())
+ }
+ require.NoError(t, err)
+ defer f.Close()
+
+ localFile := &LocalFile{File: f}
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ content := "file content"
+ reader := strings.NewReader(content)
+ var deadline time.Time
+
+ n, err := localFile.Consume(ctx, reader, deadline)
+ require.NoError(t, err)
+ require.Equal(t, int64(len(content)), n)
+
+ consumedContent, err := ioutil.ReadFile(f.Name())
+ require.NoError(t, err)
+ require.Equal(t, content, string(consumedContent))
+}
diff --git a/workhorse/internal/filestore/multi_hash.go b/workhorse/internal/upload/destination/multi_hash.go
index 40efd3a5c1f..7d4884af3dc 100644
--- a/workhorse/internal/filestore/multi_hash.go
+++ b/workhorse/internal/upload/destination/multi_hash.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import (
"crypto/md5"
diff --git a/workhorse/internal/upload/destination/objectstore/doc.go b/workhorse/internal/upload/destination/objectstore/doc.go
new file mode 100644
index 00000000000..00f98f230ec
--- /dev/null
+++ b/workhorse/internal/upload/destination/objectstore/doc.go
@@ -0,0 +1,3 @@
+// The objectstore package consists of a number of consumers specific to uploading
+// to object storage providers that are S3 compatible (e.g. AWS S3, GCP, Azure).
+package objectstore
diff --git a/workhorse/internal/objectstore/gocloud_object.go b/workhorse/internal/upload/destination/objectstore/gocloud_object.go
index 38545086994..38545086994 100644
--- a/workhorse/internal/objectstore/gocloud_object.go
+++ b/workhorse/internal/upload/destination/objectstore/gocloud_object.go
diff --git a/workhorse/internal/objectstore/gocloud_object_test.go b/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
index f320a65dbfb..57b3a35b41e 100644
--- a/workhorse/internal/objectstore/gocloud_object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
@@ -9,9 +9,9 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func TestGoCloudObjectUpload(t *testing.T) {
diff --git a/workhorse/internal/objectstore/multipart.go b/workhorse/internal/upload/destination/objectstore/multipart.go
index 4c5b64b27ee..4c5b64b27ee 100644
--- a/workhorse/internal/objectstore/multipart.go
+++ b/workhorse/internal/upload/destination/objectstore/multipart.go
diff --git a/workhorse/internal/objectstore/multipart_test.go b/workhorse/internal/upload/destination/objectstore/multipart_test.go
index 42ab5b4e535..4aff3467e30 100644
--- a/workhorse/internal/objectstore/multipart_test.go
+++ b/workhorse/internal/upload/destination/objectstore/multipart_test.go
@@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func TestMultipartUploadWithUpcaseETags(t *testing.T) {
diff --git a/workhorse/internal/objectstore/object.go b/workhorse/internal/upload/destination/objectstore/object.go
index b7c4f12f009..b7c4f12f009 100644
--- a/workhorse/internal/objectstore/object.go
+++ b/workhorse/internal/upload/destination/objectstore/object.go
diff --git a/workhorse/internal/objectstore/object_test.go b/workhorse/internal/upload/destination/objectstore/object_test.go
index b9c1fb2087b..24117891b6d 100644
--- a/workhorse/internal/objectstore/object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/object_test.go
@@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
const testTimeout = 10 * time.Second
diff --git a/workhorse/internal/objectstore/prometheus.go b/workhorse/internal/upload/destination/objectstore/prometheus.go
index 20762fb52bc..20762fb52bc 100644
--- a/workhorse/internal/objectstore/prometheus.go
+++ b/workhorse/internal/upload/destination/objectstore/prometheus.go
diff --git a/workhorse/internal/objectstore/s3_complete_multipart_api.go b/workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go
index b84f5757f49..b84f5757f49 100644
--- a/workhorse/internal/objectstore/s3_complete_multipart_api.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go
diff --git a/workhorse/internal/objectstore/s3_object.go b/workhorse/internal/upload/destination/objectstore/s3_object.go
index ce0cccc7eb1..ce0cccc7eb1 100644
--- a/workhorse/internal/objectstore/s3_object.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_object.go
diff --git a/workhorse/internal/objectstore/s3_object_test.go b/workhorse/internal/upload/destination/objectstore/s3_object_test.go
index c7426e3843b..b81b0ae2024 100644
--- a/workhorse/internal/objectstore/s3_object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_object_test.go
@@ -18,9 +18,9 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
type failedReader struct {
diff --git a/workhorse/internal/objectstore/s3_session.go b/workhorse/internal/upload/destination/objectstore/s3_session.go
index a0c1f099145..a0c1f099145 100644
--- a/workhorse/internal/objectstore/s3_session.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_session.go
diff --git a/workhorse/internal/objectstore/s3_session_test.go b/workhorse/internal/upload/destination/objectstore/s3_session_test.go
index 5d57b4f9af8..5d57b4f9af8 100644
--- a/workhorse/internal/objectstore/s3_session_test.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_session_test.go
diff --git a/workhorse/internal/objectstore/test/consts.go b/workhorse/internal/upload/destination/objectstore/test/consts.go
index 7a1bcc28d45..7a1bcc28d45 100644
--- a/workhorse/internal/objectstore/test/consts.go
+++ b/workhorse/internal/upload/destination/objectstore/test/consts.go
diff --git a/workhorse/internal/objectstore/test/gocloud_stub.go b/workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go
index cf22075e407..cf22075e407 100644
--- a/workhorse/internal/objectstore/test/gocloud_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go
diff --git a/workhorse/internal/objectstore/test/objectstore_stub.go b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
index ec5e271b759..d51a2de7456 100644
--- a/workhorse/internal/objectstore/test/objectstore_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
@@ -13,7 +13,7 @@ import (
"strings"
"sync"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
)
type partsEtagMap map[int]string
diff --git a/workhorse/internal/objectstore/test/objectstore_stub_test.go b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub_test.go
index 8c0d52a2d79..8c0d52a2d79 100644
--- a/workhorse/internal/objectstore/test/objectstore_stub_test.go
+++ b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub_test.go
diff --git a/workhorse/internal/objectstore/test/s3_stub.go b/workhorse/internal/upload/destination/objectstore/test/s3_stub.go
index 6b83426b852..6b83426b852 100644
--- a/workhorse/internal/objectstore/test/s3_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/s3_stub.go
diff --git a/workhorse/internal/objectstore/upload_strategy.go b/workhorse/internal/upload/destination/objectstore/upload_strategy.go
index 5707ba5f24e..5707ba5f24e 100644
--- a/workhorse/internal/objectstore/upload_strategy.go
+++ b/workhorse/internal/upload/destination/objectstore/upload_strategy.go
diff --git a/workhorse/internal/objectstore/uploader.go b/workhorse/internal/upload/destination/objectstore/uploader.go
index aedfbe55ead..aedfbe55ead 100644
--- a/workhorse/internal/objectstore/uploader.go
+++ b/workhorse/internal/upload/destination/objectstore/uploader.go
diff --git a/workhorse/internal/filestore/reader.go b/workhorse/internal/upload/destination/reader.go
index b1045b991fc..925a9468e14 100644
--- a/workhorse/internal/filestore/reader.go
+++ b/workhorse/internal/upload/destination/reader.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import "io"
diff --git a/workhorse/internal/filestore/reader_test.go b/workhorse/internal/upload/destination/reader_test.go
index 424d921ecaf..a26f7746a13 100644
--- a/workhorse/internal/filestore/reader_test.go
+++ b/workhorse/internal/upload/destination/reader_test.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import (
"fmt"
diff --git a/workhorse/internal/filestore/save_file_opts.go b/workhorse/internal/upload/destination/upload_opts.go
index 544101d693a..750a79d7bc2 100644
--- a/workhorse/internal/filestore/save_file_opts.go
+++ b/workhorse/internal/upload/destination/upload_opts.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import (
"errors"
@@ -27,8 +27,8 @@ type ObjectStorageConfig struct {
GoCloudConfig config.GoCloudConfig
}
-// SaveFileOpts represents all the options available for saving a file to object store
-type SaveFileOpts struct {
+// UploadOpts represents all the options available for saving a file to object store
+type UploadOpts struct {
// TempFilePrefix is the prefix used to create temporary local file
TempFilePrefix string
// LocalTempPath is the directory where to write a local copy of the file
@@ -66,28 +66,28 @@ type SaveFileOpts struct {
}
// UseWorkhorseClientEnabled checks if the options require direct access to object storage
-func (s *SaveFileOpts) UseWorkhorseClientEnabled() bool {
+func (s *UploadOpts) UseWorkhorseClientEnabled() bool {
return s.UseWorkhorseClient && s.ObjectStorageConfig.IsValid() && s.RemoteTempObjectID != ""
}
// IsLocal checks if the options require the writing of the file on disk
-func (s *SaveFileOpts) IsLocal() bool {
+func (s *UploadOpts) IsLocal() bool {
return s.LocalTempPath != ""
}
// IsMultipart checks if the options requires a Multipart upload
-func (s *SaveFileOpts) IsMultipart() bool {
+func (s *UploadOpts) IsMultipart() bool {
return s.PartSize > 0
}
-// GetOpts converts GitLab api.Response to a proper SaveFileOpts
-func GetOpts(apiResponse *api.Response) (*SaveFileOpts, error) {
+// GetOpts converts GitLab api.Response to a proper UploadOpts
+func GetOpts(apiResponse *api.Response) (*UploadOpts, error) {
timeout := time.Duration(apiResponse.RemoteObject.Timeout) * time.Second
if timeout == 0 {
timeout = DefaultObjectStoreTimeout
}
- opts := SaveFileOpts{
+ opts := UploadOpts{
LocalTempPath: apiResponse.TempPath,
RemoteID: apiResponse.RemoteObject.ID,
RemoteURL: apiResponse.RemoteObject.GetURL,
diff --git a/workhorse/internal/filestore/save_file_opts_test.go b/workhorse/internal/upload/destination/upload_opts_test.go
index f389b2054e5..fde726c985d 100644
--- a/workhorse/internal/filestore/save_file_opts_test.go
+++ b/workhorse/internal/upload/destination/upload_opts_test.go
@@ -1,4 +1,4 @@
-package filestore_test
+package destination_test
import (
"testing"
@@ -8,11 +8,11 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
-func TestSaveFileOptsLocalAndRemote(t *testing.T) {
+func TestUploadOptsLocalAndRemote(t *testing.T) {
tests := []struct {
name string
localTempPath string
@@ -43,7 +43,7 @@ func TestSaveFileOptsLocalAndRemote(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
LocalTempPath: test.localTempPath,
PresignedPut: test.presignedPut,
PartSize: test.partSize,
@@ -106,7 +106,7 @@ func TestGetOpts(t *testing.T) {
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := filestore.GetOpts(apiResponse)
+ opts, err := destination.GetOpts(apiResponse)
require.NoError(t, err)
require.Equal(t, apiResponse.TempPath, opts.LocalTempPath)
@@ -155,22 +155,22 @@ func TestGetOptsFail(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- _, err := filestore.GetOpts(tc.in)
+ _, err := destination.GetOpts(tc.in)
require.Error(t, err, "expect input to be rejected")
})
}
}
func TestGetOptsDefaultTimeout(t *testing.T) {
- deadline := time.Now().Add(filestore.DefaultObjectStoreTimeout)
- opts, err := filestore.GetOpts(&api.Response{TempPath: "/foo/bar"})
+ deadline := time.Now().Add(destination.DefaultObjectStoreTimeout)
+ opts, err := destination.GetOpts(&api.Response{TempPath: "/foo/bar"})
require.NoError(t, err)
require.WithinDuration(t, deadline, opts.Deadline, time.Minute)
}
func TestUseWorkhorseClientEnabled(t *testing.T) {
- cfg := filestore.ObjectStorageConfig{
+ cfg := destination.ObjectStorageConfig{
Provider: "AWS",
S3Config: config.S3Config{
Bucket: "test-bucket",
@@ -195,7 +195,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
name string
UseWorkhorseClient bool
remoteTempObjectID string
- objectStorageConfig filestore.ObjectStorageConfig
+ objectStorageConfig destination.ObjectStorageConfig
expected bool
}{
{
@@ -243,7 +243,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
name: "missing S3 bucket",
UseWorkhorseClient: true,
remoteTempObjectID: "test-object",
- objectStorageConfig: filestore.ObjectStorageConfig{
+ objectStorageConfig: destination.ObjectStorageConfig{
Provider: "AWS",
S3Config: config.S3Config{},
},
@@ -269,7 +269,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := filestore.GetOpts(apiResponse)
+ opts, err := destination.GetOpts(apiResponse)
require.NoError(t, err)
opts.ObjectStorageConfig = test.objectStorageConfig
@@ -323,7 +323,7 @@ func TestGoCloudConfig(t *testing.T) {
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := filestore.GetOpts(apiResponse)
+ opts, err := destination.GetOpts(apiResponse)
require.NoError(t, err)
opts.ObjectStorageConfig.URLMux = mux
diff --git a/workhorse/internal/lfs/lfs.go b/workhorse/internal/upload/lfs_preparer.go
index e26f59046ea..e7c5cf16a30 100644
--- a/workhorse/internal/lfs/lfs.go
+++ b/workhorse/internal/upload/lfs_preparer.go
@@ -1,16 +1,11 @@
-/*
-In this file we handle git lfs objects downloads and uploads
-*/
-
-package lfs
+package upload
import (
"fmt"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type object struct {
@@ -18,7 +13,7 @@ type object struct {
oid string
}
-func (l *object) Verify(fh *filestore.FileHandler) error {
+func (l *object) Verify(fh *destination.FileHandler) error {
if fh.Size != l.size {
return fmt.Errorf("LFSObject: expected size %d, wrote %d", l.size, fh.Size)
}
@@ -31,14 +26,16 @@ func (l *object) Verify(fh *filestore.FileHandler) error {
}
type uploadPreparer struct {
- objectPreparer upload.Preparer
+ objectPreparer Preparer
}
-func NewLfsUploadPreparer(c config.Config, objectPreparer upload.Preparer) upload.Preparer {
+// NewLfs returns a new preparer instance which adds capability to a wrapped
+// preparer to set options required for a LFS upload.
+func NewLfsPreparer(c config.Config, objectPreparer Preparer) Preparer {
return &uploadPreparer{objectPreparer: objectPreparer}
}
-func (l *uploadPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, upload.Verifier, error) {
+func (l *uploadPreparer) Prepare(a *api.Response) (*destination.UploadOpts, Verifier, error) {
opts, _, err := l.objectPreparer.Prepare(a)
if err != nil {
return nil, nil, err
diff --git a/workhorse/internal/lfs/lfs_test.go b/workhorse/internal/upload/lfs_preparer_test.go
index 63b2628343e..6be4a7c2955 100644
--- a/workhorse/internal/lfs/lfs_test.go
+++ b/workhorse/internal/upload/lfs_preparer_test.go
@@ -1,17 +1,15 @@
-package lfs_test
+package upload
import (
"testing"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/lfs"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
"github.com/stretchr/testify/require"
)
-func TestLfsUploadPreparerWithConfig(t *testing.T) {
+func TestLfsPreparerWithConfig(t *testing.T) {
lfsOid := "abcd1234"
creds := config.S3Credentials{
AwsAccessKeyID: "test-key",
@@ -36,8 +34,8 @@ func TestLfsUploadPreparerWithConfig(t *testing.T) {
},
}
- uploadPreparer := upload.NewObjectStoragePreparer(c)
- lfsPreparer := lfs.NewLfsUploadPreparer(c, uploadPreparer)
+ uploadPreparer := NewObjectStoragePreparer(c)
+ lfsPreparer := NewLfsPreparer(c, uploadPreparer)
opts, verifier, err := lfsPreparer.Prepare(r)
require.NoError(t, err)
@@ -48,11 +46,11 @@ func TestLfsUploadPreparerWithConfig(t *testing.T) {
require.NotNil(t, verifier)
}
-func TestLfsUploadPreparerWithNoConfig(t *testing.T) {
+func TestLfsPreparerWithNoConfig(t *testing.T) {
c := config.Config{}
r := &api.Response{RemoteObject: api.RemoteObject{ID: "the upload ID"}}
- uploadPreparer := upload.NewObjectStoragePreparer(c)
- lfsPreparer := lfs.NewLfsUploadPreparer(c, uploadPreparer)
+ uploadPreparer := NewObjectStoragePreparer(c)
+ lfsPreparer := NewLfsPreparer(c, uploadPreparer)
opts, verifier, err := lfsPreparer.Prepare(r)
require.NoError(t, err)
diff --git a/workhorse/internal/upload/accelerate.go b/workhorse/internal/upload/multipart_uploader.go
index 28d3b3dee2e..d0097f9e153 100644
--- a/workhorse/internal/upload/accelerate.go
+++ b/workhorse/internal/upload/multipart_uploader.go
@@ -4,19 +4,10 @@ import (
"fmt"
"net/http"
- "github.com/golang-jwt/jwt/v4"
-
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
)
-const RewrittenFieldsHeader = "Gitlab-Workhorse-Multipart-Fields"
-
-type MultipartClaims struct {
- RewrittenFields map[string]string `json:"rewritten_fields"`
- jwt.StandardClaims
-}
-
// Multipart is a request middleware. If the request has a MIME multipart
// request body, the middleware will iterate through the multipart parts.
// When it finds a file part (filename != ""), the middleware will save
@@ -32,6 +23,6 @@ func Multipart(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
return
}
- InterceptMultipartFiles(w, r, h, a, s, opts)
+ interceptMultipartFiles(w, r, h, a, s, opts)
}, "/authorize")
}
diff --git a/workhorse/internal/upload/object_storage_preparer.go b/workhorse/internal/upload/object_storage_preparer.go
index 241cfc2d9d2..f28f598c895 100644
--- a/workhorse/internal/upload/object_storage_preparer.go
+++ b/workhorse/internal/upload/object_storage_preparer.go
@@ -3,7 +3,7 @@ package upload
import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type ObjectStoragePreparer struct {
@@ -11,12 +11,15 @@ type ObjectStoragePreparer struct {
credentials config.ObjectStorageCredentials
}
+// NewObjectStoragePreparer returns a new preparer instance which is responsible for
+// setting the object storage credentials and settings needed by an uploader
+// to upload to object storage.
func NewObjectStoragePreparer(c config.Config) Preparer {
return &ObjectStoragePreparer{credentials: c.ObjectStorageCredentials, config: c.ObjectStorageConfig}
}
-func (p *ObjectStoragePreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
- opts, err := filestore.GetOpts(a)
+func (p *ObjectStoragePreparer) Prepare(a *api.Response) (*destination.UploadOpts, Verifier, error) {
+ opts, err := destination.GetOpts(a)
if err != nil {
return nil, nil, err
}
diff --git a/workhorse/internal/upload/preparer.go b/workhorse/internal/upload/preparer.go
new file mode 100644
index 00000000000..46a4cac01b5
--- /dev/null
+++ b/workhorse/internal/upload/preparer.go
@@ -0,0 +1,33 @@
+package upload
+
+import (
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
+)
+
+// Verifier is an optional pluggable behavior for upload paths. If
+// Verify() returns an error, Workhorse will return an error response to
+// the client instead of propagating the request to Rails. The motivating
+// use case is Git LFS, where Workhorse checks the size and SHA256
+// checksum of the uploaded file.
+type Verifier interface {
+ // Verify can abort the upload by returning an error
+ Verify(handler *destination.FileHandler) error
+}
+
+// Preparer is a pluggable behavior that interprets a Rails API response
+// and either tells Workhorse how to handle the upload, via the
+// UploadOpts and Verifier, or it rejects the request by returning a
+// non-nil error. Its intended use is to make sure the upload gets stored
+// in the right location: either a local directory, or one of several
+// supported object storage backends.
+type Preparer interface {
+ Prepare(a *api.Response) (*destination.UploadOpts, Verifier, error)
+}
+
+type DefaultPreparer struct{}
+
+func (s *DefaultPreparer) Prepare(a *api.Response) (*destination.UploadOpts, Verifier, error) {
+ opts, err := destination.GetOpts(a)
+ return opts, nil, err
+}
diff --git a/workhorse/internal/upload/rewrite.go b/workhorse/internal/upload/rewrite.go
index bbabe840ef5..ff5190226af 100644
--- a/workhorse/internal/upload/rewrite.go
+++ b/workhorse/internal/upload/rewrite.go
@@ -21,8 +21,8 @@ import (
"golang.org/x/image/tiff"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
)
@@ -68,7 +68,7 @@ type rewriter struct {
finalizedFields map[string]bool
}
-func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, preauth *api.Response, filter MultipartFormProcessor, opts *filestore.SaveFileOpts) error {
+func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, preauth *api.Response, filter MultipartFormProcessor, opts *destination.UploadOpts) error {
// Create multipart reader
reader, err := r.MultipartReader()
if err != nil {
@@ -128,7 +128,7 @@ func parseAndNormalizeContentDisposition(header textproto.MIMEHeader) (string, s
return params["name"], params["filename"]
}
-func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipart.Part, opts *filestore.SaveFileOpts) error {
+func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipart.Part, opts *destination.UploadOpts) error {
if rew.filter.Count() >= maxFilesAllowed {
return ErrTooManyFilesUploaded
}
@@ -164,10 +164,10 @@ func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipa
defer inputReader.Close()
- fh, err := filestore.SaveFileFromReader(ctx, inputReader, -1, opts)
+ fh, err := destination.Upload(ctx, inputReader, -1, opts)
if err != nil {
switch err {
- case filestore.ErrEntityTooLarge, exif.ErrRemovingExif:
+ case destination.ErrEntityTooLarge, exif.ErrRemovingExif:
return err
default:
return fmt.Errorf("persisting multipart file: %v", err)
diff --git a/workhorse/internal/upload/saved_file_tracker.go b/workhorse/internal/upload/saved_file_tracker.go
index e6f9a8c9a88..b70a303a4a4 100644
--- a/workhorse/internal/upload/saved_file_tracker.go
+++ b/workhorse/internal/upload/saved_file_tracker.go
@@ -6,8 +6,8 @@ import (
"mime/multipart"
"net/http"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type SavedFileTracker struct {
@@ -26,7 +26,7 @@ func (s *SavedFileTracker) Count() int {
return len(s.rewrittenFields)
}
-func (s *SavedFileTracker) ProcessFile(_ context.Context, fieldName string, file *filestore.FileHandler, _ *multipart.Writer) error {
+func (s *SavedFileTracker) ProcessFile(_ context.Context, fieldName string, file *destination.FileHandler, _ *multipart.Writer) error {
if _, ok := s.rewrittenFields[fieldName]; ok {
return fmt.Errorf("the %v field has already been processed", fieldName)
}
diff --git a/workhorse/internal/upload/saved_file_tracker_test.go b/workhorse/internal/upload/saved_file_tracker_test.go
index ba927db253e..4f323bf8888 100644
--- a/workhorse/internal/upload/saved_file_tracker_test.go
+++ b/workhorse/internal/upload/saved_file_tracker_test.go
@@ -10,8 +10,8 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
func TestSavedFileTracking(t *testing.T) {
@@ -23,7 +23,7 @@ func TestSavedFileTracking(t *testing.T) {
tracker := SavedFileTracker{Request: r}
require.Equal(t, "accelerate", tracker.Name())
- file := &filestore.FileHandler{}
+ file := &destination.FileHandler{}
ctx := context.Background()
tracker.ProcessFile(ctx, "test", file, nil)
require.Equal(t, 1, tracker.Count())
@@ -40,7 +40,7 @@ func TestSavedFileTracking(t *testing.T) {
func TestDuplicatedFileProcessing(t *testing.T) {
tracker := SavedFileTracker{}
- file := &filestore.FileHandler{}
+ file := &destination.FileHandler{}
require.NoError(t, tracker.ProcessFile(context.Background(), "file", file, nil))
diff --git a/workhorse/internal/upload/uploads.go b/workhorse/internal/upload/uploads.go
index 1806e7563ce..8272a3d920d 100644
--- a/workhorse/internal/upload/uploads.go
+++ b/workhorse/internal/upload/uploads.go
@@ -8,27 +8,39 @@ import (
"mime/multipart"
"net/http"
+ "github.com/golang-jwt/jwt/v4"
+
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)
+const RewrittenFieldsHeader = "Gitlab-Workhorse-Multipart-Fields"
+
+type PreAuthorizer interface {
+ PreAuthorizeHandler(next api.HandleFunc, suffix string) http.Handler
+}
+
+type MultipartClaims struct {
+ RewrittenFields map[string]string `json:"rewritten_fields"`
+ jwt.StandardClaims
+}
+
// MultipartFormProcessor abstracts away implementation differences
// between generic MIME multipart file uploads and CI artifact uploads.
type MultipartFormProcessor interface {
- ProcessFile(ctx context.Context, formName string, file *filestore.FileHandler, writer *multipart.Writer) error
+ ProcessFile(ctx context.Context, formName string, file *destination.FileHandler, writer *multipart.Writer) error
ProcessField(ctx context.Context, formName string, writer *multipart.Writer) error
Finalize(ctx context.Context) error
Name() string
Count() int
}
-// InterceptMultipartFiles is the core of the implementation of
-// Multipart. Because it is also used for CI artifact uploads it is a
-// public function.
-func InterceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Handler, preauth *api.Response, filter MultipartFormProcessor, opts *filestore.SaveFileOpts) {
+// interceptMultipartFiles is the core of the implementation of
+// Multipart.
+func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Handler, preauth *api.Response, filter MultipartFormProcessor, opts *destination.UploadOpts) {
var body bytes.Buffer
writer := multipart.NewWriter(&body)
defer writer.Close()
@@ -43,7 +55,7 @@ func InterceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Hand
helper.CaptureAndFail(w, r, err, err.Error(), http.StatusBadRequest)
case http.ErrNotMultipart:
h.ServeHTTP(w, r)
- case filestore.ErrEntityTooLarge:
+ case destination.ErrEntityTooLarge:
helper.RequestEntityTooLarge(w, r, err)
case zipartifacts.ErrBadMetadata:
helper.RequestEntityTooLarge(w, r, err)
diff --git a/workhorse/internal/upload/uploads_test.go b/workhorse/internal/upload/uploads_test.go
index f262bf94b08..9d787b10d1c 100644
--- a/workhorse/internal/upload/uploads_test.go
+++ b/workhorse/internal/upload/uploads_test.go
@@ -22,9 +22,9 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
)
@@ -78,7 +78,7 @@ func TestUploadHandlerForwardingRawData(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, handler, apiResponse, nil, opts)
+ interceptMultipartFiles(response, httpRequest, handler, apiResponse, nil, opts)
require.Equal(t, 202, response.Code)
require.Equal(t, "RESPONSE", response.Body.String(), "response body")
@@ -149,7 +149,7 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ interceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, 202, response.Code)
cancel() // this will trigger an async cleanup
@@ -218,7 +218,7 @@ func TestUploadHandlerDetectingInjectedMultiPartData(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ interceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, test.response, response.Code)
cancel() // this will trigger an async cleanup
@@ -248,7 +248,7 @@ func TestUploadProcessingField(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
+ interceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, 500, response.Code)
}
@@ -279,7 +279,7 @@ func TestUploadingMultipleFiles(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
+ interceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, 400, response.Code)
require.Equal(t, "upload request contains more than 10 files\n", response.Body.String())
@@ -335,7 +335,7 @@ func TestUploadProcessingFile(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
+ interceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, 200, response.Code)
})
@@ -381,7 +381,7 @@ func TestInvalidFileNames(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
+ interceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
require.Equal(t, testCase.code, response.Code)
require.Equal(t, testCase.expectedPrefix, opts.TempFilePrefix)
}
@@ -447,7 +447,7 @@ func TestContentDispositionRewrite(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, customHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
+ interceptMultipartFiles(response, httpRequest, customHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
upstreamRequest, err := http.ReadRequest(bufio.NewReader(&upstreamRequestBuffer))
require.NoError(t, err)
@@ -570,7 +570,7 @@ func runUploadTest(t *testing.T, image []byte, filename string, httpCode int, ts
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
- InterceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ interceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, httpCode, response.Code)
}
diff --git a/workhorse/internal/upstream/.gitignore b/workhorse/internal/upstream/.gitignore
new file mode 100644
index 00000000000..d63cd8b2c40
--- /dev/null
+++ b/workhorse/internal/upstream/.gitignore
@@ -0,0 +1 @@
+testdata/public
diff --git a/workhorse/internal/upstream/metrics.go b/workhorse/internal/upstream/metrics.go
index 1a11bdc8b53..27cc6bb045b 100644
--- a/workhorse/internal/upstream/metrics.go
+++ b/workhorse/internal/upstream/metrics.go
@@ -6,6 +6,8 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/prometheus/client_golang/prometheus/promhttp"
+
+ "gitlab.com/gitlab-org/labkit/metrics"
)
const (
@@ -13,95 +15,7 @@ const (
httpSubsystem = "http"
)
-func secondsDurationBuckets() []float64 {
- return []float64{
- 0.005, /* 5ms */
- 0.025, /* 25ms */
- 0.1, /* 100ms */
- 0.5, /* 500ms */
- 1.0, /* 1s */
- 10.0, /* 10s */
- 30.0, /* 30s */
- 60.0, /* 1m */
- 300.0, /* 10m */
- }
-}
-
-func byteSizeBuckets() []float64 {
- return []float64{
- 10,
- 64,
- 256,
- 1024, /* 1kB */
- 64 * 1024, /* 64kB */
- 256 * 1024, /* 256kB */
- 1024 * 1024, /* 1mB */
- 64 * 1024 * 1024, /* 64mB */
- }
-}
-
var (
- httpInFlightRequests = promauto.NewGauge(prometheus.GaugeOpts{
- Namespace: namespace,
- Subsystem: httpSubsystem,
- Name: "in_flight_requests",
- Help: "A gauge of requests currently being served by workhorse.",
- })
-
- httpRequestsTotal = promauto.NewCounterVec(
- prometheus.CounterOpts{
- Namespace: namespace,
- Subsystem: httpSubsystem,
- Name: "requests_total",
- Help: "A counter for requests to workhorse.",
- },
- []string{"code", "method", "route"},
- )
-
- httpRequestDurationSeconds = promauto.NewHistogramVec(
- prometheus.HistogramOpts{
- Namespace: namespace,
- Subsystem: httpSubsystem,
- Name: "request_duration_seconds",
- Help: "A histogram of latencies for requests to workhorse.",
- Buckets: secondsDurationBuckets(),
- },
- []string{"code", "method", "route"},
- )
-
- httpRequestSizeBytes = promauto.NewHistogramVec(
- prometheus.HistogramOpts{
- Namespace: namespace,
- Subsystem: httpSubsystem,
- Name: "request_size_bytes",
- Help: "A histogram of sizes of requests to workhorse.",
- Buckets: byteSizeBuckets(),
- },
- []string{"code", "method", "route"},
- )
-
- httpResponseSizeBytes = promauto.NewHistogramVec(
- prometheus.HistogramOpts{
- Namespace: namespace,
- Subsystem: httpSubsystem,
- Name: "response_size_bytes",
- Help: "A histogram of response sizes for requests to workhorse.",
- Buckets: byteSizeBuckets(),
- },
- []string{"code", "method", "route"},
- )
-
- httpTimeToWriteHeaderSeconds = promauto.NewHistogramVec(
- prometheus.HistogramOpts{
- Namespace: namespace,
- Subsystem: httpSubsystem,
- Name: "time_to_write_header_seconds",
- Help: "A histogram of request durations until the response headers are written.",
- Buckets: secondsDurationBuckets(),
- },
- []string{"code", "method", "route"},
- )
-
httpGeoProxiedRequestsTotal = promauto.NewCounterVec(
prometheus.CounterOpts{
Namespace: namespace,
@@ -111,19 +25,12 @@ var (
},
[]string{"code", "method", "route"},
)
+
+ buildHandler = metrics.NewHandlerFactory(metrics.WithNamespace(namespace), metrics.WithLabels("route"))
)
func instrumentRoute(next http.Handler, method string, regexpStr string) http.Handler {
- handler := next
-
- handler = promhttp.InstrumentHandlerCounter(httpRequestsTotal.MustCurryWith(map[string]string{"route": regexpStr}), handler)
- handler = promhttp.InstrumentHandlerDuration(httpRequestDurationSeconds.MustCurryWith(map[string]string{"route": regexpStr}), handler)
- handler = promhttp.InstrumentHandlerInFlight(httpInFlightRequests, handler)
- handler = promhttp.InstrumentHandlerRequestSize(httpRequestSizeBytes.MustCurryWith(map[string]string{"route": regexpStr}), handler)
- handler = promhttp.InstrumentHandlerResponseSize(httpResponseSizeBytes.MustCurryWith(map[string]string{"route": regexpStr}), handler)
- handler = promhttp.InstrumentHandlerTimeToWriteHeader(httpTimeToWriteHeaderSeconds.MustCurryWith(map[string]string{"route": regexpStr}), handler)
-
- return handler
+ return buildHandler(next, metrics.WithLabelValues(map[string]string{"route": regexpStr}))
}
func instrumentGeoProxyRoute(next http.Handler, method string, regexpStr string) http.Handler {
diff --git a/workhorse/internal/upstream/routes.go b/workhorse/internal/upstream/routes.go
index b8089865ffe..b1d76dfc1bd 100644
--- a/workhorse/internal/upstream/routes.go
+++ b/workhorse/internal/upstream/routes.go
@@ -20,7 +20,6 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/git"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/imageresizer"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/lfs"
proxypkg "gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/queueing"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/redis"
@@ -251,8 +250,8 @@ func configureRoutes(u *upstream) {
u.route("PUT", gitProjectPattern+`gitlab-lfs/objects/([0-9a-f]{64})/([0-9]+)\z`, upload.RequestBody(api, signingProxy, preparers.lfs), withMatcher(isContentType("application/octet-stream"))),
// CI Artifacts
- u.route("POST", apiPattern+`v4/jobs/[0-9]+/artifacts\z`, contentEncodingHandler(artifacts.UploadArtifacts(api, signingProxy, preparers.artifacts))),
- u.route("POST", ciAPIPattern+`v1/builds/[0-9]+/artifacts\z`, contentEncodingHandler(artifacts.UploadArtifacts(api, signingProxy, preparers.artifacts))),
+ u.route("POST", apiPattern+`v4/jobs/[0-9]+/artifacts\z`, contentEncodingHandler(upload.Artifacts(api, signingProxy, preparers.artifacts))),
+ u.route("POST", ciAPIPattern+`v1/builds/[0-9]+/artifacts\z`, contentEncodingHandler(upload.Artifacts(api, signingProxy, preparers.artifacts))),
// ActionCable websocket
u.wsRoute(`^/-/cable\z`, cableProxy),
@@ -318,9 +317,12 @@ func configureRoutes(u *upstream) {
// Group Import via UI upload acceleration
u.route("POST", importPattern+`gitlab_group`, upload.Multipart(api, signingProxy, preparers.uploads)),
- // Metric image upload
+ // Issuable Metric image upload
u.route("POST", apiProjectPattern+`issues/[0-9]+/metric_images\z`, upload.Multipart(api, signingProxy, preparers.uploads)),
+ // Alert Metric image upload
+ u.route("POST", apiProjectPattern+`alert_management_alerts/[0-9]+/metric_images\z`, upload.Multipart(api, signingProxy, preparers.uploads)),
+
// Requirements Import via UI upload acceleration
u.route("POST", projectPattern+`requirements_management/requirements/import_csv`, upload.Multipart(api, signingProxy, preparers.uploads)),
@@ -383,11 +385,10 @@ func configureRoutes(u *upstream) {
u.route("", "^/oauth/geo/(auth|callback|logout)$", defaultUpstream),
// Admin Area > Geo routes
- u.route("", "^/admin/geo$", defaultUpstream),
- u.route("", "^/admin/geo/", defaultUpstream),
+ u.route("", "^/admin/geo/replication/projects", defaultUpstream),
+ u.route("", "^/admin/geo/replication/designs", defaultUpstream),
// Geo API routes
- u.route("", "^/api/v4/geo_nodes", defaultUpstream),
u.route("", "^/api/v4/geo_replication", defaultUpstream),
u.route("", "^/api/v4/geo/proxy_git_ssh", defaultUpstream),
u.route("", "^/api/v4/geo/graphql", defaultUpstream),
@@ -395,6 +396,16 @@ func configureRoutes(u *upstream) {
// Internal API routes
u.route("", "^/api/v4/internal", defaultUpstream),
+ u.route(
+ "", `^/assets/`,
+ static.ServeExisting(
+ u.URLPrefix,
+ staticpages.CacheExpireMax,
+ assetsNotFoundHandler,
+ ),
+ withoutTracing(), // Tracing on assets is very noisy
+ ),
+
// Don't define a catch-all route. If a route does not match, then we know
// the request should be proxied.
}
@@ -405,7 +416,7 @@ func createUploadPreparers(cfg config.Config) uploadPreparers {
return uploadPreparers{
artifacts: defaultPreparer,
- lfs: lfs.NewLfsUploadPreparer(cfg, defaultPreparer),
+ lfs: upload.NewLfsPreparer(cfg, defaultPreparer),
packages: defaultPreparer,
uploads: defaultPreparer,
}
diff --git a/workhorse/internal/upstream/routes_test.go b/workhorse/internal/upstream/routes_test.go
index f196433f5b4..8a032519bdf 100644
--- a/workhorse/internal/upstream/routes_test.go
+++ b/workhorse/internal/upstream/routes_test.go
@@ -2,8 +2,26 @@ package upstream
import (
"testing"
+
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
)
+func TestAdminGeoPathsWithGeoProxy(t *testing.T) {
+ testCases := []testCase{
+ {"Regular admin/geo", "/admin/geo", "Geo primary received request to path /admin/geo"},
+ {"Specific object replication", "/admin/geo/replication/object_type", "Geo primary received request to path /admin/geo/replication/object_type"},
+ {"Specific object replication per-site", "/admin/geo/sites/2/replication/object_type", "Geo primary received request to path /admin/geo/sites/2/replication/object_type"},
+ {"Projects replication per-site", "/admin/geo/sites/2/replication/projects", "Geo primary received request to path /admin/geo/sites/2/replication/projects"},
+ {"Designs replication per-site", "/admin/geo/sites/2/replication/designs", "Geo primary received request to path /admin/geo/sites/2/replication/designs"},
+ {"Projects replication", "/admin/geo/replication/projects", "Local Rails server received request to path /admin/geo/replication/projects"},
+ {"Projects replication subpaths", "/admin/geo/replication/projects/2", "Local Rails server received request to path /admin/geo/replication/projects/2"},
+ {"Designs replication", "/admin/geo/replication/designs", "Local Rails server received request to path /admin/geo/replication/designs"},
+ {"Designs replication subpaths", "/admin/geo/replication/designs/3", "Local Rails server received request to path /admin/geo/replication/designs/3"},
+ }
+
+ runTestCasesWithGeoProxyEnabled(t, testCases)
+}
+
func TestProjectNotExistingGitHttpPullWithGeoProxy(t *testing.T) {
testCases := []testCase{
{"secondary info/refs", "/group/project.git/info/refs", "Local Rails server received request to path /group/project.git/info/refs"},
@@ -45,3 +63,15 @@ func TestProjectNotExistingGitSSHPushWithGeoProxy(t *testing.T) {
runTestCasesWithGeoProxyEnabled(t, testCases)
}
+
+func TestAssetsServedLocallyWithGeoProxy(t *testing.T) {
+ path := "/assets/static.txt"
+ content := "local geo asset"
+ testhelper.SetupStaticFileHelper(t, path, content, testDocumentRoot)
+
+ testCases := []testCase{
+ {"assets path", "/assets/static.txt", "local geo asset"},
+ }
+
+ runTestCasesWithGeoProxyEnabled(t, testCases)
+}