gitlab.com/gitlab-org/gitlab-foss.git

Diffstat (limited to 'workhorse/internal/upload')
-rw-r--r--  workhorse/internal/upload/accelerate.go                    |  32
-rw-r--r--  workhorse/internal/upload/body_uploader.go                 |  90
-rw-r--r--  workhorse/internal/upload/body_uploader_test.go            | 195
-rw-r--r--  workhorse/internal/upload/exif/exif.go                     | 107
-rw-r--r--  workhorse/internal/upload/exif/exif_test.go                |  95
-rw-r--r--  workhorse/internal/upload/exif/testdata/sample_exif.jpg    | bin 0 -> 33881 bytes
-rw-r--r--  workhorse/internal/upload/object_storage_preparer.go       |  28
-rw-r--r--  workhorse/internal/upload/object_storage_preparer_test.go  |  62
-rw-r--r--  workhorse/internal/upload/rewrite.go                       | 203
-rw-r--r--  workhorse/internal/upload/saved_file_tracker.go            |  55
-rw-r--r--  workhorse/internal/upload/saved_file_tracker_test.go       |  39
-rw-r--r--  workhorse/internal/upload/skip_rails_authorizer.go         |  22
-rw-r--r--  workhorse/internal/upload/uploads.go                       |  66
-rw-r--r--  workhorse/internal/upload/uploads_test.go                  | 475
14 files changed, 1469 insertions(+), 0 deletions(-)
diff --git a/workhorse/internal/upload/accelerate.go b/workhorse/internal/upload/accelerate.go
new file mode 100644
index 00000000000..7d8ea51b14d
--- /dev/null
+++ b/workhorse/internal/upload/accelerate.go
@@ -0,0 +1,32 @@
+package upload
+
+import (
+ "fmt"
+ "net/http"
+
+ "github.com/dgrijalva/jwt-go"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
+)
+
+const RewrittenFieldsHeader = "Gitlab-Workhorse-Multipart-Fields"
+
+type MultipartClaims struct {
+ RewrittenFields map[string]string `json:"rewritten_fields"`
+ jwt.StandardClaims
+}
+
+func Accelerate(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
+ return rails.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
+ s := &SavedFileTracker{Request: r}
+
+ opts, _, err := p.Prepare(a)
+ if err != nil {
+ helper.Fail500(w, r, fmt.Errorf("Accelerate: error preparing file storage options"))
+ return
+ }
+
+ HandleFileUploads(w, r, h, a, s, opts)
+ }, "/authorize")
+}
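
For illustration only (not part of the diff): Accelerate is a composition of a PreAuthorizer, a downstream handler and a Preparer. A minimal sketch of the wiring, assuming a rails client that satisfies PreAuthorizer; the helper name accelerateRoute is hypothetical.

package upload

import "net/http"

// accelerateRoute shows the intended wiring: rails performs the pre-authorization,
// DefaultPreparer derives the storage options from its response, and next receives
// the request once HandleFileUploads has rewritten the multipart body.
func accelerateRoute(rails PreAuthorizer, next http.Handler) http.Handler {
	return Accelerate(rails, next, &DefaultPreparer{})
}
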
diff --git a/workhorse/internal/upload/body_uploader.go b/workhorse/internal/upload/body_uploader.go
new file mode 100644
index 00000000000..2cee90195fb
--- /dev/null
+++ b/workhorse/internal/upload/body_uploader.go
@@ -0,0 +1,90 @@
+package upload
+
+import (
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
+)
+
+type PreAuthorizer interface {
+ PreAuthorizeHandler(next api.HandleFunc, suffix string) http.Handler
+}
+
+// Verifier allows checking an upload before it is sent to rails
+type Verifier interface {
+ // Verify can abort the upload by returning an error
+ Verify(handler *filestore.FileHandler) error
+}
+
+// Preparer allows customizing the BodyUploader configuration
+type Preparer interface {
+ // Prepare converts an api.Response into a *SaveFileOpts. It can optionally return a Verifier that will be
+ // invoked after the real upload, before finalization with rails
+ Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error)
+}
+
+type DefaultPreparer struct{}
+
+func (s *DefaultPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
+ opts, err := filestore.GetOpts(a)
+ return opts, nil, err
+}
+
+// BodyUploader is an http.Handler that performs a pre-authorization call to rails before hijacking the request body and
+// uploading it.
+// Providing a Preparer allows customizing the upload process
+func BodyUploader(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
+ return rails.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
+ opts, verifier, err := p.Prepare(a)
+ if err != nil {
+ helper.Fail500(w, r, fmt.Errorf("BodyUploader: preparation failed: %v", err))
+ return
+ }
+
+ fh, err := filestore.SaveFileFromReader(r.Context(), r.Body, r.ContentLength, opts)
+ if err != nil {
+ helper.Fail500(w, r, fmt.Errorf("BodyUploader: upload failed: %v", err))
+ return
+ }
+
+ if verifier != nil {
+ if err := verifier.Verify(fh); err != nil {
+ helper.Fail500(w, r, fmt.Errorf("BodyUploader: verification failed: %v", err))
+ return
+ }
+ }
+
+ data := url.Values{}
+ fields, err := fh.GitLabFinalizeFields("file")
+ if err != nil {
+ helper.Fail500(w, r, fmt.Errorf("BodyUploader: finalize fields failed: %v", err))
+ return
+ }
+
+ for k, v := range fields {
+ data.Set(k, v)
+ }
+
+ // Hijack body
+ body := data.Encode()
+ r.Body = ioutil.NopCloser(strings.NewReader(body))
+ r.ContentLength = int64(len(body))
+ r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
+
+ sft := SavedFileTracker{Request: r}
+ sft.Track("file", fh.LocalPath)
+ if err := sft.Finalize(r.Context()); err != nil {
+ helper.Fail500(w, r, fmt.Errorf("BodyUploader: finalize failed: %v", err))
+ return
+ }
+
+ // And proxy the request
+ h.ServeHTTP(w, r)
+ }, "/authorize")
+}
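
The Preparer and Verifier interfaces above are the extension points: a custom Preparer can hand back a Verifier that runs after the file has been written but before the request is finalized with rails. A hedged sketch of such a pair; the sizeLimitPreparer/sizeLimitVerifier names and the size check are hypothetical, not part of the diff.

package upload

import (
	"fmt"

	"gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
	"gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
)

// sizeLimitPreparer reuses the default storage options but attaches a Verifier
// that rejects files larger than maxBytes once they have been saved.
type sizeLimitPreparer struct {
	maxBytes int64
}

func (p *sizeLimitPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
	opts, err := filestore.GetOpts(a)
	if err != nil {
		return nil, nil, err
	}
	return opts, &sizeLimitVerifier{maxBytes: p.maxBytes}, nil
}

type sizeLimitVerifier struct {
	maxBytes int64
}

// Verify aborts the upload by returning an error when the saved file is too large.
func (v *sizeLimitVerifier) Verify(handler *filestore.FileHandler) error {
	if handler.Size > v.maxBytes {
		return fmt.Errorf("upload of %d bytes exceeds limit of %d bytes", handler.Size, v.maxBytes)
	}
	return nil
}
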
diff --git a/workhorse/internal/upload/body_uploader_test.go b/workhorse/internal/upload/body_uploader_test.go
new file mode 100644
index 00000000000..451d7c97fab
--- /dev/null
+++ b/workhorse/internal/upload/body_uploader_test.go
@@ -0,0 +1,195 @@
+package upload
+
+import (
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strconv"
+ "strings"
+ "testing"
+
+ "github.com/dgrijalva/jwt-go"
+ "github.com/stretchr/testify/require"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/testhelper"
+)
+
+const (
+ fileContent = "A test file content"
+ fileLen = len(fileContent)
+)
+
+func TestBodyUploader(t *testing.T) {
+ testhelper.ConfigureSecret()
+
+ body := strings.NewReader(fileContent)
+
+ resp := testUpload(&rails{}, &alwaysLocalPreparer{}, echoProxy(t, fileLen), body)
+ require.Equal(t, http.StatusOK, resp.StatusCode)
+
+ uploadEcho, err := ioutil.ReadAll(resp.Body)
+
+ require.NoError(t, err, "Can't read response body")
+ require.Equal(t, fileContent, string(uploadEcho))
+}
+
+func TestBodyUploaderCustomPreparer(t *testing.T) {
+ body := strings.NewReader(fileContent)
+
+ resp := testUpload(&rails{}, &alwaysLocalPreparer{}, echoProxy(t, fileLen), body)
+ require.Equal(t, http.StatusOK, resp.StatusCode)
+
+ uploadEcho, err := ioutil.ReadAll(resp.Body)
+ require.NoError(t, err, "Can't read response body")
+ require.Equal(t, fileContent, string(uploadEcho))
+}
+
+func TestBodyUploaderCustomVerifier(t *testing.T) {
+ body := strings.NewReader(fileContent)
+ verifier := &mockVerifier{}
+
+ resp := testUpload(&rails{}, &alwaysLocalPreparer{verifier: verifier}, echoProxy(t, fileLen), body)
+ require.Equal(t, http.StatusOK, resp.StatusCode)
+
+ uploadEcho, err := ioutil.ReadAll(resp.Body)
+ require.NoError(t, err, "Can't read response body")
+ require.Equal(t, fileContent, string(uploadEcho))
+ require.True(t, verifier.invoked, "Verifier.Verify not invoked")
+}
+
+func TestBodyUploaderAuthorizationFailure(t *testing.T) {
+ testNoProxyInvocation(t, http.StatusUnauthorized, &rails{unauthorized: true}, &alwaysLocalPreparer{})
+}
+
+func TestBodyUploaderErrors(t *testing.T) {
+ tests := []struct {
+ name string
+ preparer *alwaysLocalPreparer
+ }{
+ {name: "Prepare failure", preparer: &alwaysLocalPreparer{prepareError: fmt.Errorf("")}},
+ {name: "Verify failure", preparer: &alwaysLocalPreparer{verifier: &alwaysFailsVerifier{}}},
+ }
+
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ testNoProxyInvocation(t, http.StatusInternalServerError, &rails{}, test.preparer)
+ })
+ }
+}
+
+func testNoProxyInvocation(t *testing.T, expectedStatus int, auth PreAuthorizer, preparer Preparer) {
+ proxy := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ require.Fail(t, "request proxied upstream")
+ })
+
+ resp := testUpload(auth, preparer, proxy, nil)
+ require.Equal(t, expectedStatus, resp.StatusCode)
+}
+
+func testUpload(auth PreAuthorizer, preparer Preparer, proxy http.Handler, body io.Reader) *http.Response {
+ req := httptest.NewRequest("POST", "http://example.com/upload", body)
+ w := httptest.NewRecorder()
+
+ BodyUploader(auth, proxy, preparer).ServeHTTP(w, req)
+
+ return w.Result()
+}
+
+func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ err := r.ParseForm()
+ require.NoError(t, err)
+
+ require.Equal(t, "application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Wrong Content-Type header")
+
+ require.Contains(t, r.PostForm, "file.md5")
+ require.Contains(t, r.PostForm, "file.sha1")
+ require.Contains(t, r.PostForm, "file.sha256")
+ require.Contains(t, r.PostForm, "file.sha512")
+
+ require.Contains(t, r.PostForm, "file.path")
+ require.Contains(t, r.PostForm, "file.size")
+ require.Contains(t, r.PostForm, "file.gitlab-workhorse-upload")
+ require.Equal(t, strconv.Itoa(expectedBodyLength), r.PostFormValue("file.size"))
+
+ token, err := jwt.ParseWithClaims(r.Header.Get(RewrittenFieldsHeader), &MultipartClaims{}, testhelper.ParseJWT)
+ require.NoError(t, err, "Wrong JWT header")
+
+ rewrittenFields := token.Claims.(*MultipartClaims).RewrittenFields
+ if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
+ t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
+ }
+
+ token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
+ require.NoError(t, jwtErr, "Wrong signed upload fields")
+
+ uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
+ require.Contains(t, uploadFields, "name")
+ require.Contains(t, uploadFields, "path")
+ require.Contains(t, uploadFields, "remote_url")
+ require.Contains(t, uploadFields, "remote_id")
+ require.Contains(t, uploadFields, "size")
+ require.Contains(t, uploadFields, "md5")
+ require.Contains(t, uploadFields, "sha1")
+ require.Contains(t, uploadFields, "sha256")
+ require.Contains(t, uploadFields, "sha512")
+
+ path := r.PostFormValue("file.path")
+ uploaded, err := os.Open(path)
+ require.NoError(t, err, "File not uploaded")
+
+ // send the file back for testing purposes
+ io.Copy(w, uploaded)
+ })
+}
+
+type rails struct {
+ unauthorized bool
+}
+
+func (r *rails) PreAuthorizeHandler(next api.HandleFunc, _ string) http.Handler {
+ if r.unauthorized {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.WriteHeader(http.StatusUnauthorized)
+ })
+ }
+
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ next(w, r, &api.Response{TempPath: os.TempDir()})
+ })
+}
+
+type alwaysLocalPreparer struct {
+ verifier Verifier
+ prepareError error
+}
+
+func (a *alwaysLocalPreparer) Prepare(_ *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
+ opts, err := filestore.GetOpts(&api.Response{TempPath: os.TempDir()})
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return opts, a.verifier, a.prepareError
+}
+
+type alwaysFailsVerifier struct{}
+
+func (alwaysFailsVerifier) Verify(handler *filestore.FileHandler) error {
+ return fmt.Errorf("Verification failed")
+}
+
+type mockVerifier struct {
+ invoked bool
+}
+
+func (m *mockVerifier) Verify(handler *filestore.FileHandler) error {
+ m.invoked = true
+
+ return nil
+}
diff --git a/workhorse/internal/upload/exif/exif.go b/workhorse/internal/upload/exif/exif.go
new file mode 100644
index 00000000000..a9307b1ca90
--- /dev/null
+++ b/workhorse/internal/upload/exif/exif.go
@@ -0,0 +1,107 @@
+package exif
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "os/exec"
+ "regexp"
+
+ "gitlab.com/gitlab-org/labkit/log"
+)
+
+var ErrRemovingExif = errors.New("error while removing EXIF")
+
+type cleaner struct {
+ ctx context.Context
+ cmd *exec.Cmd
+ stdout io.Reader
+ stderr bytes.Buffer
+ eof bool
+}
+
+func NewCleaner(ctx context.Context, stdin io.Reader) (io.ReadCloser, error) {
+ c := &cleaner{ctx: ctx}
+
+ if err := c.startProcessing(stdin); err != nil {
+ return nil, err
+ }
+
+ return c, nil
+}
+
+func (c *cleaner) Close() error {
+ if c.cmd == nil {
+ return nil
+ }
+
+ return c.cmd.Wait()
+}
+
+func (c *cleaner) Read(p []byte) (int, error) {
+ if c.eof {
+ return 0, io.EOF
+ }
+
+ n, err := c.stdout.Read(p)
+ if err == io.EOF {
+ if waitErr := c.cmd.Wait(); waitErr != nil {
+ log.WithContextFields(c.ctx, log.Fields{
+ "command": c.cmd.Args,
+ "stderr": c.stderr.String(),
+ "error": waitErr.Error(),
+ }).Print("exiftool command failed")
+
+ return n, ErrRemovingExif
+ }
+
+ c.eof = true
+ }
+
+ return n, err
+}
+
+func (c *cleaner) startProcessing(stdin io.Reader) error {
+ var err error
+
+ whitelistedTags := []string{
+ "-ResolutionUnit",
+ "-XResolution",
+ "-YResolution",
+ "-YCbCrSubSampling",
+ "-YCbCrPositioning",
+ "-BitsPerSample",
+ "-ImageHeight",
+ "-ImageWidth",
+ "-ImageSize",
+ "-Copyright",
+ "-CopyrightNotice",
+ "-Orientation",
+ }
+
+ args := append([]string{"-all=", "--IPTC:all", "--XMP-iptcExt:all", "-tagsFromFile", "@"}, whitelistedTags...)
+ args = append(args, "-")
+ c.cmd = exec.CommandContext(c.ctx, "exiftool", args...)
+
+ c.cmd.Stderr = &c.stderr
+ c.cmd.Stdin = stdin
+
+ c.stdout, err = c.cmd.StdoutPipe()
+ if err != nil {
+ return fmt.Errorf("failed to create stdout pipe: %v", err)
+ }
+
+ if err = c.cmd.Start(); err != nil {
+ return fmt.Errorf("start %v: %v", c.cmd.Args, err)
+ }
+
+ return nil
+}
+
+func IsExifFile(filename string) bool {
+ filenameMatch := regexp.MustCompile(`(?i)\.(jpg|jpeg|tiff)$`)
+
+ return filenameMatch.MatchString(filename)
+}
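
To make the cleaner's contract concrete: NewCleaner returns an io.ReadCloser that streams exiftool's output, and a failed exiftool run surfaces as ErrRemovingExif from Read. A standalone sketch, illustrative only; the input filename is a placeholder and exiftool must be on PATH.

package main

import (
	"context"
	"io"
	"log"
	"os"

	"gitlab.com/gitlab-org/gitlab-workhorse/internal/upload/exif"
)

func main() {
	// photo.jpg is a placeholder input file.
	input, err := os.Open("photo.jpg")
	if err != nil {
		log.Fatal(err)
	}
	defer input.Close()

	cleaner, err := exif.NewCleaner(context.Background(), input)
	if err != nil {
		log.Fatal(err)
	}
	defer cleaner.Close()

	// Stream the stripped image to stdout; exiftool failures are reported
	// by Read as exif.ErrRemovingExif.
	if _, err := io.Copy(os.Stdout, cleaner); err != nil {
		log.Fatal(err)
	}
}
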
diff --git a/workhorse/internal/upload/exif/exif_test.go b/workhorse/internal/upload/exif/exif_test.go
new file mode 100644
index 00000000000..373d97f7fce
--- /dev/null
+++ b/workhorse/internal/upload/exif/exif_test.go
@@ -0,0 +1,95 @@
+package exif
+
+import (
+ "context"
+ "io"
+ "io/ioutil"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestIsExifFile(t *testing.T) {
+ tests := []struct {
+ name string
+ expected bool
+ }{
+ {
+ name: "/full/path.jpg",
+ expected: true,
+ },
+ {
+ name: "path.jpeg",
+ expected: true,
+ },
+ {
+ name: "path.tiff",
+ expected: true,
+ },
+ {
+ name: "path.JPG",
+ expected: true,
+ },
+ {
+ name: "path.tar",
+ expected: false,
+ },
+ {
+ name: "path",
+ expected: false,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ require.Equal(t, test.expected, IsExifFile(test.name))
+ })
+ }
+}
+
+func TestNewCleanerWithValidFile(t *testing.T) {
+ input, err := os.Open("testdata/sample_exif.jpg")
+ require.NoError(t, err)
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ cleaner, err := NewCleaner(ctx, input)
+ require.NoError(t, err, "Expected no error when creating cleaner command")
+
+ size, err := io.Copy(ioutil.Discard, cleaner)
+ require.NoError(t, err, "Expected no error when reading output")
+
+ sizeAfterStrip := int64(25399)
+ require.Equal(t, sizeAfterStrip, size, "Different size of converted image")
+}
+
+func TestNewCleanerWithInvalidFile(t *testing.T) {
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ cleaner, err := NewCleaner(ctx, strings.NewReader("invalid image"))
+ require.NoError(t, err, "Expected no error when creating cleaner command")
+
+ size, err := io.Copy(ioutil.Discard, cleaner)
+ require.Error(t, err, "Expected error when reading output")
+ require.Equal(t, int64(0), size, "Size of invalid image should be 0")
+}
+
+func TestNewCleanerReadingAfterEOF(t *testing.T) {
+ input, err := os.Open("testdata/sample_exif.jpg")
+ require.NoError(t, err)
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ cleaner, err := NewCleaner(ctx, input)
+ require.NoError(t, err, "Expected no error when creating cleaner command")
+
+ _, err = io.Copy(ioutil.Discard, cleaner)
+ require.NoError(t, err, "Expected no error when reading output")
+
+ buf := make([]byte, 1)
+ size, err := cleaner.Read(buf)
+ require.Equal(t, 0, size, "The output was already consumed by previous reads")
+ require.Equal(t, io.EOF, err, "We return EOF")
+}
diff --git a/workhorse/internal/upload/exif/testdata/sample_exif.jpg b/workhorse/internal/upload/exif/testdata/sample_exif.jpg
new file mode 100644
index 00000000000..05eda3f7f95
--- /dev/null
+++ b/workhorse/internal/upload/exif/testdata/sample_exif.jpg
Binary files differ
diff --git a/workhorse/internal/upload/object_storage_preparer.go b/workhorse/internal/upload/object_storage_preparer.go
new file mode 100644
index 00000000000..7a113fae80a
--- /dev/null
+++ b/workhorse/internal/upload/object_storage_preparer.go
@@ -0,0 +1,28 @@
+package upload
+
+import (
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/config"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
+)
+
+type ObjectStoragePreparer struct {
+ config config.ObjectStorageConfig
+ credentials config.ObjectStorageCredentials
+}
+
+func NewObjectStoragePreparer(c config.Config) Preparer {
+ return &ObjectStoragePreparer{credentials: c.ObjectStorageCredentials, config: c.ObjectStorageConfig}
+}
+
+func (p *ObjectStoragePreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
+ opts, err := filestore.GetOpts(a)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ opts.ObjectStorageConfig.URLMux = p.config.URLMux
+ opts.ObjectStorageConfig.S3Credentials = p.credentials.S3Credentials
+
+ return opts, nil, nil
+}
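
Because object-storage support is expressed as just another Preparer, switching a route from local temporary files to direct object storage is a one-line change in the wiring. A sketch under that assumption; the helper name objectStorageBodyUploader is hypothetical.

package upload

import (
	"net/http"

	"gitlab.com/gitlab-org/gitlab-workhorse/internal/config"
)

// objectStorageBodyUploader wires BodyUploader with the object-storage-aware
// Preparer, so uploads go through the configured URLMux and S3 credentials.
func objectStorageBodyUploader(rails PreAuthorizer, next http.Handler, cfg config.Config) http.Handler {
	return BodyUploader(rails, next, NewObjectStoragePreparer(cfg))
}
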
diff --git a/workhorse/internal/upload/object_storage_preparer_test.go b/workhorse/internal/upload/object_storage_preparer_test.go
new file mode 100644
index 00000000000..613b6071275
--- /dev/null
+++ b/workhorse/internal/upload/object_storage_preparer_test.go
@@ -0,0 +1,62 @@
+package upload_test
+
+import (
+ "testing"
+
+ "gocloud.dev/blob"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/config"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/upload"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestPrepareWithS3Config(t *testing.T) {
+ creds := config.S3Credentials{
+ AwsAccessKeyID: "test-key",
+ AwsSecretAccessKey: "test-secret",
+ }
+
+ c := config.Config{
+ ObjectStorageCredentials: config.ObjectStorageCredentials{
+ Provider: "AWS",
+ S3Credentials: creds,
+ },
+ ObjectStorageConfig: config.ObjectStorageConfig{
+ URLMux: new(blob.URLMux),
+ },
+ }
+
+ r := &api.Response{
+ RemoteObject: api.RemoteObject{
+ ID: "the ID",
+ UseWorkhorseClient: true,
+ ObjectStorage: &api.ObjectStorageParams{
+ Provider: "AWS",
+ },
+ },
+ }
+
+ p := upload.NewObjectStoragePreparer(c)
+ opts, v, err := p.Prepare(r)
+
+ require.NoError(t, err)
+ require.True(t, opts.ObjectStorageConfig.IsAWS())
+ require.True(t, opts.UseWorkhorseClient)
+ require.Equal(t, creds, opts.ObjectStorageConfig.S3Credentials)
+ require.NotNil(t, opts.ObjectStorageConfig.URLMux)
+ require.Equal(t, nil, v)
+}
+
+func TestPrepareWithNoConfig(t *testing.T) {
+ c := config.Config{}
+ r := &api.Response{RemoteObject: api.RemoteObject{ID: "id"}}
+ p := upload.NewObjectStoragePreparer(c)
+ opts, v, err := p.Prepare(r)
+
+ require.NoError(t, err)
+ require.False(t, opts.UseWorkhorseClient)
+ require.Nil(t, v)
+ require.Nil(t, opts.ObjectStorageConfig.URLMux)
+}
diff --git a/workhorse/internal/upload/rewrite.go b/workhorse/internal/upload/rewrite.go
new file mode 100644
index 00000000000..e51604c6ed9
--- /dev/null
+++ b/workhorse/internal/upload/rewrite.go
@@ -0,0 +1,203 @@
+package upload
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "mime/multipart"
+ "net/http"
+ "strings"
+
+ "github.com/prometheus/client_golang/prometheus"
+ "github.com/prometheus/client_golang/prometheus/promauto"
+ "gitlab.com/gitlab-org/labkit/log"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/lsif_transformer/parser"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/upload/exif"
+)
+
+// ErrInjectedClientParam means that the client sent a parameter that overrides one of our own fields
+var ErrInjectedClientParam = errors.New("injected client parameter")
+
+var (
+ multipartUploadRequests = promauto.NewCounterVec(
+ prometheus.CounterOpts{
+
+ Name: "gitlab_workhorse_multipart_upload_requests",
+ Help: "How many multipart upload requests have been processed by gitlab-workhorse. Partitioned by type.",
+ },
+ []string{"type"},
+ )
+
+ multipartFileUploadBytes = promauto.NewCounterVec(
+ prometheus.CounterOpts{
+ Name: "gitlab_workhorse_multipart_upload_bytes",
+ Help: "How many disk bytes of multipart file parts have been successfully written by gitlab-workhorse. Partitioned by type.",
+ },
+ []string{"type"},
+ )
+
+ multipartFiles = promauto.NewCounterVec(
+ prometheus.CounterOpts{
+ Name: "gitlab_workhorse_multipart_upload_files",
+ Help: "How many multipart file parts have been processed by gitlab-workhorse. Partitioned by type.",
+ },
+ []string{"type"},
+ )
+)
+
+type rewriter struct {
+ writer *multipart.Writer
+ preauth *api.Response
+ filter MultipartFormProcessor
+ finalizedFields map[string]bool
+}
+
+func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, preauth *api.Response, filter MultipartFormProcessor, opts *filestore.SaveFileOpts) error {
+ // Create multipart reader
+ reader, err := r.MultipartReader()
+ if err != nil {
+ if err == http.ErrNotMultipart {
+ // We want to be able to recognize http.ErrNotMultipart elsewhere so no fmt.Errorf
+ return http.ErrNotMultipart
+ }
+ return fmt.Errorf("get multipart reader: %v", err)
+ }
+
+ multipartUploadRequests.WithLabelValues(filter.Name()).Inc()
+
+ rew := &rewriter{
+ writer: writer,
+ preauth: preauth,
+ filter: filter,
+ finalizedFields: make(map[string]bool),
+ }
+
+ for {
+ p, err := reader.NextPart()
+ if err != nil {
+ if err == io.EOF {
+ break
+ }
+ return err
+ }
+
+ name := p.FormName()
+ if name == "" {
+ continue
+ }
+
+ if rew.finalizedFields[name] {
+ return ErrInjectedClientParam
+ }
+
+ if p.FileName() != "" {
+ err = rew.handleFilePart(r.Context(), name, p, opts)
+ } else {
+ err = rew.copyPart(r.Context(), name, p)
+ }
+
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipart.Part, opts *filestore.SaveFileOpts) error {
+ multipartFiles.WithLabelValues(rew.filter.Name()).Inc()
+
+ filename := p.FileName()
+
+ if strings.Contains(filename, "/") || filename == "." || filename == ".." {
+ return fmt.Errorf("illegal filename: %q", filename)
+ }
+
+ opts.TempFilePrefix = filename
+
+ var inputReader io.ReadCloser
+ var err error
+ switch {
+ case exif.IsExifFile(filename):
+ inputReader, err = handleExifUpload(ctx, p, filename)
+ if err != nil {
+ return err
+ }
+ case rew.preauth.ProcessLsif:
+ inputReader, err = handleLsifUpload(ctx, p, opts.LocalTempPath, filename, rew.preauth)
+ if err != nil {
+ return err
+ }
+ default:
+ inputReader = ioutil.NopCloser(p)
+ }
+
+ defer inputReader.Close()
+
+ fh, err := filestore.SaveFileFromReader(ctx, inputReader, -1, opts)
+ if err != nil {
+ switch err {
+ case filestore.ErrEntityTooLarge, exif.ErrRemovingExif:
+ return err
+ default:
+ return fmt.Errorf("persisting multipart file: %v", err)
+ }
+ }
+
+ fields, err := fh.GitLabFinalizeFields(name)
+ if err != nil {
+ return fmt.Errorf("failed to finalize fields: %v", err)
+ }
+
+ for key, value := range fields {
+ rew.writer.WriteField(key, value)
+ rew.finalizedFields[key] = true
+ }
+
+ multipartFileUploadBytes.WithLabelValues(rew.filter.Name()).Add(float64(fh.Size))
+
+ return rew.filter.ProcessFile(ctx, name, fh, rew.writer)
+}
+
+func handleExifUpload(ctx context.Context, r io.Reader, filename string) (io.ReadCloser, error) {
+ log.WithContextFields(ctx, log.Fields{
+ "filename": filename,
+ }).Print("running exiftool to remove any metadata")
+
+ cleaner, err := exif.NewCleaner(ctx, r)
+ if err != nil {
+ return nil, err
+ }
+
+ return cleaner, nil
+}
+
+func handleLsifUpload(ctx context.Context, reader io.Reader, tempPath, filename string, preauth *api.Response) (io.ReadCloser, error) {
+ parserConfig := parser.Config{
+ TempPath: tempPath,
+ }
+
+ return parser.NewParser(ctx, reader, parserConfig)
+}
+
+func (rew *rewriter) copyPart(ctx context.Context, name string, p *multipart.Part) error {
+ np, err := rew.writer.CreatePart(p.Header)
+ if err != nil {
+ return fmt.Errorf("create multipart field: %v", err)
+ }
+
+ if _, err := io.Copy(np, p); err != nil {
+ return fmt.Errorf("duplicate multipart field: %v", err)
+ }
+
+ if err := rew.filter.ProcessField(ctx, name, rew.writer); err != nil {
+ return fmt.Errorf("process multipart field: %v", err)
+ }
+
+ return nil
+}
diff --git a/workhorse/internal/upload/saved_file_tracker.go b/workhorse/internal/upload/saved_file_tracker.go
new file mode 100644
index 00000000000..7b6cade4faa
--- /dev/null
+++ b/workhorse/internal/upload/saved_file_tracker.go
@@ -0,0 +1,55 @@
+package upload
+
+import (
+ "context"
+ "fmt"
+ "mime/multipart"
+ "net/http"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/secret"
+)
+
+type SavedFileTracker struct {
+ Request *http.Request
+ rewrittenFields map[string]string
+}
+
+func (s *SavedFileTracker) Track(fieldName string, localPath string) {
+ if s.rewrittenFields == nil {
+ s.rewrittenFields = make(map[string]string)
+ }
+ s.rewrittenFields[fieldName] = localPath
+}
+
+func (s *SavedFileTracker) Count() int {
+ return len(s.rewrittenFields)
+}
+
+func (s *SavedFileTracker) ProcessFile(_ context.Context, fieldName string, file *filestore.FileHandler, _ *multipart.Writer) error {
+ s.Track(fieldName, file.LocalPath)
+ return nil
+}
+
+func (s *SavedFileTracker) ProcessField(_ context.Context, _ string, _ *multipart.Writer) error {
+ return nil
+}
+
+func (s *SavedFileTracker) Finalize(_ context.Context) error {
+ if s.rewrittenFields == nil {
+ return nil
+ }
+
+ claims := MultipartClaims{RewrittenFields: s.rewrittenFields, StandardClaims: secret.DefaultClaims}
+ tokenString, err := secret.JWTTokenString(claims)
+ if err != nil {
+ return fmt.Errorf("savedFileTracker.Finalize: %v", err)
+ }
+
+ s.Request.Header.Set(RewrittenFieldsHeader, tokenString)
+ return nil
+}
+
+func (s *SavedFileTracker) Name() string {
+ return "accelerate"
+}
diff --git a/workhorse/internal/upload/saved_file_tracker_test.go b/workhorse/internal/upload/saved_file_tracker_test.go
new file mode 100644
index 00000000000..e5a5e8f23a7
--- /dev/null
+++ b/workhorse/internal/upload/saved_file_tracker_test.go
@@ -0,0 +1,39 @@
+package upload
+
+import (
+ "context"
+
+ "github.com/dgrijalva/jwt-go"
+
+ "net/http"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/testhelper"
+)
+
+func TestSavedFileTracking(t *testing.T) {
+ testhelper.ConfigureSecret()
+
+ r, err := http.NewRequest("PUT", "/url/path", nil)
+ require.NoError(t, err)
+
+ tracker := SavedFileTracker{Request: r}
+ require.Equal(t, "accelerate", tracker.Name())
+
+ file := &filestore.FileHandler{}
+ ctx := context.Background()
+ tracker.ProcessFile(ctx, "test", file, nil)
+ require.Equal(t, 1, tracker.Count())
+
+ tracker.Finalize(ctx)
+ token, err := jwt.ParseWithClaims(r.Header.Get(RewrittenFieldsHeader), &MultipartClaims{}, testhelper.ParseJWT)
+ require.NoError(t, err)
+
+ rewrittenFields := token.Claims.(*MultipartClaims).RewrittenFields
+ require.Equal(t, 1, len(rewrittenFields))
+
+ require.Contains(t, rewrittenFields, "test")
+}
diff --git a/workhorse/internal/upload/skip_rails_authorizer.go b/workhorse/internal/upload/skip_rails_authorizer.go
new file mode 100644
index 00000000000..716467b8841
--- /dev/null
+++ b/workhorse/internal/upload/skip_rails_authorizer.go
@@ -0,0 +1,22 @@
+package upload
+
+import (
+ "net/http"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+)
+
+// SkipRailsAuthorizer implements a fake PreAuthorizer that does not call the rails API and
+// authorizes each call as a local-only upload to TempPath
+type SkipRailsAuthorizer struct {
+ // TempPath is the temporary path for a local only upload
+ TempPath string
+}
+
+// PreAuthorizeHandler implements PreAuthorizer. It always grants the upload.
+// The fake API response contains only TempPath
+func (l *SkipRailsAuthorizer) PreAuthorizeHandler(next api.HandleFunc, _ string) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ next(w, r, &api.Response{TempPath: l.TempPath})
+ })
+}
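
SkipRailsAuthorizer lets the upload handlers run without a rails round trip, authorizing everything as a local upload to TempPath. A hypothetical local-only route built from the pieces above (the helper name localBodyUploader is not in the diff):

package upload

import "net/http"

// localBodyUploader stores the raw request body under tempPath, skipping the
// rails pre-authorization, and then proxies the rewritten request to next.
func localBodyUploader(tempPath string, next http.Handler) http.Handler {
	return BodyUploader(&SkipRailsAuthorizer{TempPath: tempPath}, next, &DefaultPreparer{})
}
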
diff --git a/workhorse/internal/upload/uploads.go b/workhorse/internal/upload/uploads.go
new file mode 100644
index 00000000000..3be39f9518f
--- /dev/null
+++ b/workhorse/internal/upload/uploads.go
@@ -0,0 +1,66 @@
+package upload
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io/ioutil"
+ "mime/multipart"
+ "net/http"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/upload/exif"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/zipartifacts"
+)
+
+// These methods are allowed to have thread-unsafe implementations.
+type MultipartFormProcessor interface {
+ ProcessFile(ctx context.Context, formName string, file *filestore.FileHandler, writer *multipart.Writer) error
+ ProcessField(ctx context.Context, formName string, writer *multipart.Writer) error
+ Finalize(ctx context.Context) error
+ Name() string
+}
+
+func HandleFileUploads(w http.ResponseWriter, r *http.Request, h http.Handler, preauth *api.Response, filter MultipartFormProcessor, opts *filestore.SaveFileOpts) {
+ var body bytes.Buffer
+ writer := multipart.NewWriter(&body)
+ defer writer.Close()
+
+ // Rewrite multipart form data
+ err := rewriteFormFilesFromMultipart(r, writer, preauth, filter, opts)
+ if err != nil {
+ switch err {
+ case ErrInjectedClientParam:
+ helper.CaptureAndFail(w, r, err, "Bad Request", http.StatusBadRequest)
+ case http.ErrNotMultipart:
+ h.ServeHTTP(w, r)
+ case filestore.ErrEntityTooLarge:
+ helper.RequestEntityTooLarge(w, r, err)
+ case zipartifacts.ErrBadMetadata:
+ helper.RequestEntityTooLarge(w, r, err)
+ case exif.ErrRemovingExif:
+ helper.CaptureAndFail(w, r, err, "Failed to process image", http.StatusUnprocessableEntity)
+ default:
+ helper.Fail500(w, r, fmt.Errorf("handleFileUploads: extract files from multipart: %v", err))
+ }
+ return
+ }
+
+ // Close writer
+ writer.Close()
+
+ // Hijack the request
+ r.Body = ioutil.NopCloser(&body)
+ r.ContentLength = int64(body.Len())
+ r.Header.Set("Content-Type", writer.FormDataContentType())
+
+ if err := filter.Finalize(r.Context()); err != nil {
+ helper.Fail500(w, r, fmt.Errorf("handleFileUploads: Finalize: %v", err))
+ return
+ }
+
+ // Proxy the request
+ h.ServeHTTP(w, r)
+}
diff --git a/workhorse/internal/upload/uploads_test.go b/workhorse/internal/upload/uploads_test.go
new file mode 100644
index 00000000000..fc1a1ac57ef
--- /dev/null
+++ b/workhorse/internal/upload/uploads_test.go
@@ -0,0 +1,475 @@
+package upload
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io/ioutil"
+ "mime/multipart"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "regexp"
+ "strconv"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/objectstore/test"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/proxy"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab-workhorse/internal/upstream/roundtripper"
+)
+
+var nilHandler = http.HandlerFunc(func(http.ResponseWriter, *http.Request) {})
+
+type testFormProcessor struct{}
+
+func (a *testFormProcessor) ProcessFile(ctx context.Context, formName string, file *filestore.FileHandler, writer *multipart.Writer) error {
+ return nil
+}
+
+func (a *testFormProcessor) ProcessField(ctx context.Context, formName string, writer *multipart.Writer) error {
+ if formName != "token" && !strings.HasPrefix(formName, "file.") && !strings.HasPrefix(formName, "other.") {
+ return fmt.Errorf("illegal field: %v", formName)
+ }
+ return nil
+}
+
+func (a *testFormProcessor) Finalize(ctx context.Context) error {
+ return nil
+}
+
+func (a *testFormProcessor) Name() string {
+ return ""
+}
+
+func TestUploadTempPathRequirement(t *testing.T) {
+ apiResponse := &api.Response{}
+ preparer := &DefaultPreparer{}
+ _, _, err := preparer.Prepare(apiResponse)
+ require.Error(t, err)
+}
+
+func TestUploadHandlerForwardingRawData(t *testing.T) {
+ ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) {
+ require.Equal(t, "PATCH", r.Method, "method")
+
+ body, err := ioutil.ReadAll(r.Body)
+ require.NoError(t, err)
+ require.Equal(t, "REQUEST", string(body), "request body")
+
+ w.WriteHeader(202)
+ fmt.Fprint(w, "RESPONSE")
+ })
+ defer ts.Close()
+
+ httpRequest, err := http.NewRequest("PATCH", ts.URL+"/url/path", bytes.NewBufferString("REQUEST"))
+ require.NoError(t, err)
+
+ tempPath, err := ioutil.TempDir("", "uploads")
+ require.NoError(t, err)
+ defer os.RemoveAll(tempPath)
+
+ response := httptest.NewRecorder()
+
+ handler := newProxy(ts.URL)
+ apiResponse := &api.Response{TempPath: tempPath}
+ preparer := &DefaultPreparer{}
+ opts, _, err := preparer.Prepare(apiResponse)
+ require.NoError(t, err)
+
+ HandleFileUploads(response, httpRequest, handler, apiResponse, nil, opts)
+
+ require.Equal(t, 202, response.Code)
+ require.Equal(t, "RESPONSE", response.Body.String(), "response body")
+}
+
+func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
+ var filePath string
+
+ tempPath, err := ioutil.TempDir("", "uploads")
+ require.NoError(t, err)
+ defer os.RemoveAll(tempPath)
+
+ ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) {
+ require.Equal(t, "PUT", r.Method, "method")
+ require.NoError(t, r.ParseMultipartForm(100000))
+
+ require.Empty(t, r.MultipartForm.File, "Expected to not receive any files")
+ require.Equal(t, "test", r.FormValue("token"), "Expected to receive token")
+ require.Equal(t, "my.file", r.FormValue("file.name"), "Expected to receive a filename")
+
+ filePath = r.FormValue("file.path")
+ require.True(t, strings.HasPrefix(filePath, tempPath), "Expected the file to be in tempPath")
+
+ require.Empty(t, r.FormValue("file.remote_url"), "Expected to receive empty remote_url")
+ require.Empty(t, r.FormValue("file.remote_id"), "Expected to receive empty remote_id")
+ require.Equal(t, "4", r.FormValue("file.size"), "Expected to receive the file size")
+
+ hashes := map[string]string{
+ "md5": "098f6bcd4621d373cade4e832627b4f6",
+ "sha1": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3",
+ "sha256": "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08",
+ "sha512": "ee26b0dd4af7e749aa1a8ee3c10ae9923f618980772e473f8819a5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5fa9ad8e6f57f50028a8ff",
+ }
+
+ for algo, hash := range hashes {
+ require.Equal(t, hash, r.FormValue("file."+algo), "file hash %s", algo)
+ }
+
+ require.Len(t, r.MultipartForm.Value, 11, "multipart form values")
+
+ w.WriteHeader(202)
+ fmt.Fprint(w, "RESPONSE")
+ })
+
+ var buffer bytes.Buffer
+
+ writer := multipart.NewWriter(&buffer)
+ writer.WriteField("token", "test")
+ file, err := writer.CreateFormFile("file", "my.file")
+ require.NoError(t, err)
+ fmt.Fprint(file, "test")
+ writer.Close()
+
+ httpRequest, err := http.NewRequest("PUT", ts.URL+"/url/path", nil)
+ require.NoError(t, err)
+
+ ctx, cancel := context.WithCancel(context.Background())
+ httpRequest = httpRequest.WithContext(ctx)
+ httpRequest.Body = ioutil.NopCloser(&buffer)
+ httpRequest.ContentLength = int64(buffer.Len())
+ httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
+ response := httptest.NewRecorder()
+
+ handler := newProxy(ts.URL)
+
+ apiResponse := &api.Response{TempPath: tempPath}
+ preparer := &DefaultPreparer{}
+ opts, _, err := preparer.Prepare(apiResponse)
+ require.NoError(t, err)
+
+ HandleFileUploads(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ require.Equal(t, 202, response.Code)
+
+ cancel() // this will trigger an async cleanup
+ waitUntilDeleted(t, filePath)
+}
+
+func TestUploadHandlerDetectingInjectedMultiPartData(t *testing.T) {
+ var filePath string
+
+ tempPath, err := ioutil.TempDir("", "uploads")
+ require.NoError(t, err)
+ defer os.RemoveAll(tempPath)
+
+ tests := []struct {
+ name string
+ field string
+ response int
+ }{
+ {
+ name: "injected file.path",
+ field: "file.path",
+ response: 400,
+ },
+ {
+ name: "injected file.remote_id",
+ field: "file.remote_id",
+ response: 400,
+ },
+ {
+ name: "field with other prefix",
+ field: "other.path",
+ response: 202,
+ },
+ }
+
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) {
+ require.Equal(t, "PUT", r.Method, "method")
+
+ w.WriteHeader(202)
+ fmt.Fprint(w, "RESPONSE")
+ })
+
+ var buffer bytes.Buffer
+
+ writer := multipart.NewWriter(&buffer)
+ file, err := writer.CreateFormFile("file", "my.file")
+ require.NoError(t, err)
+ fmt.Fprint(file, "test")
+
+ writer.WriteField(test.field, "value")
+ writer.Close()
+
+ httpRequest, err := http.NewRequest("PUT", ts.URL+"/url/path", &buffer)
+ require.NoError(t, err)
+
+ ctx, cancel := context.WithCancel(context.Background())
+ httpRequest = httpRequest.WithContext(ctx)
+ httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
+ response := httptest.NewRecorder()
+
+ handler := newProxy(ts.URL)
+ apiResponse := &api.Response{TempPath: tempPath}
+ preparer := &DefaultPreparer{}
+ opts, _, err := preparer.Prepare(apiResponse)
+ require.NoError(t, err)
+
+ HandleFileUploads(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ require.Equal(t, test.response, response.Code)
+
+ cancel() // this will trigger an async cleanup
+ waitUntilDeleted(t, filePath)
+ })
+ }
+}
+
+func TestUploadProcessingField(t *testing.T) {
+ tempPath, err := ioutil.TempDir("", "uploads")
+ require.NoError(t, err)
+ defer os.RemoveAll(tempPath)
+
+ var buffer bytes.Buffer
+
+ writer := multipart.NewWriter(&buffer)
+ writer.WriteField("token2", "test")
+ writer.Close()
+
+ httpRequest, err := http.NewRequest("PUT", "/url/path", &buffer)
+ require.NoError(t, err)
+ httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
+
+ response := httptest.NewRecorder()
+ apiResponse := &api.Response{TempPath: tempPath}
+ preparer := &DefaultPreparer{}
+ opts, _, err := preparer.Prepare(apiResponse)
+ require.NoError(t, err)
+
+ HandleFileUploads(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
+
+ require.Equal(t, 500, response.Code)
+}
+
+func TestUploadProcessingFile(t *testing.T) {
+ tempPath, err := ioutil.TempDir("", "uploads")
+ require.NoError(t, err)
+ defer os.RemoveAll(tempPath)
+
+ _, testServer := test.StartObjectStore()
+ defer testServer.Close()
+
+ storeUrl := testServer.URL + test.ObjectPath
+
+ tests := []struct {
+ name string
+ preauth api.Response
+ }{
+ {
+ name: "FileStore Upload",
+ preauth: api.Response{TempPath: tempPath},
+ },
+ {
+ name: "ObjectStore Upload",
+ preauth: api.Response{RemoteObject: api.RemoteObject{StoreURL: storeUrl}},
+ },
+ {
+ name: "ObjectStore and FileStore Upload",
+ preauth: api.Response{
+ TempPath: tempPath,
+ RemoteObject: api.RemoteObject{StoreURL: storeUrl},
+ },
+ },
+ }
+
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ var buffer bytes.Buffer
+ writer := multipart.NewWriter(&buffer)
+ file, err := writer.CreateFormFile("file", "my.file")
+ require.NoError(t, err)
+ fmt.Fprint(file, "test")
+ writer.Close()
+
+ httpRequest, err := http.NewRequest("PUT", "/url/path", &buffer)
+ require.NoError(t, err)
+ httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
+
+ response := httptest.NewRecorder()
+ apiResponse := &api.Response{TempPath: tempPath}
+ preparer := &DefaultPreparer{}
+ opts, _, err := preparer.Prepare(apiResponse)
+ require.NoError(t, err)
+
+ HandleFileUploads(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
+
+ require.Equal(t, 200, response.Code)
+ })
+ }
+
+}
+
+func TestInvalidFileNames(t *testing.T) {
+ testhelper.ConfigureSecret()
+
+ tempPath, err := ioutil.TempDir("", "uploads")
+ require.NoError(t, err)
+ defer os.RemoveAll(tempPath)
+
+ for _, testCase := range []struct {
+ filename string
+ code int
+ }{
+ {"foobar", 200}, // sanity check for test setup below
+ {"foo/bar", 500},
+ {"/../../foobar", 500},
+ {".", 500},
+ {"..", 500},
+ } {
+ buffer := &bytes.Buffer{}
+
+ writer := multipart.NewWriter(buffer)
+ file, err := writer.CreateFormFile("file", testCase.filename)
+ require.NoError(t, err)
+ fmt.Fprint(file, "test")
+ writer.Close()
+
+ httpRequest, err := http.NewRequest("POST", "/example", buffer)
+ require.NoError(t, err)
+ httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
+
+ response := httptest.NewRecorder()
+ apiResponse := &api.Response{TempPath: tempPath}
+ preparer := &DefaultPreparer{}
+ opts, _, err := preparer.Prepare(apiResponse)
+ require.NoError(t, err)
+
+ HandleFileUploads(response, httpRequest, nilHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
+ require.Equal(t, testCase.code, response.Code)
+ }
+}
+
+func TestUploadHandlerRemovingExif(t *testing.T) {
+ tempPath, err := ioutil.TempDir("", "uploads")
+ require.NoError(t, err)
+ defer os.RemoveAll(tempPath)
+
+ var buffer bytes.Buffer
+
+ content, err := ioutil.ReadFile("exif/testdata/sample_exif.jpg")
+ require.NoError(t, err)
+
+ writer := multipart.NewWriter(&buffer)
+ file, err := writer.CreateFormFile("file", "test.jpg")
+ require.NoError(t, err)
+
+ _, err = file.Write(content)
+ require.NoError(t, err)
+
+ err = writer.Close()
+ require.NoError(t, err)
+
+ ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) {
+ err := r.ParseMultipartForm(100000)
+ require.NoError(t, err)
+
+ size, err := strconv.Atoi(r.FormValue("file.size"))
+ require.NoError(t, err)
+ require.True(t, size < len(content), "Expected the file to be smaller after removal of exif")
+ require.True(t, size > 0, "Expected to receive a non-empty file")
+
+ w.WriteHeader(200)
+ fmt.Fprint(w, "RESPONSE")
+ })
+ defer ts.Close()
+
+ httpRequest, err := http.NewRequest("POST", ts.URL+"/url/path", &buffer)
+ require.NoError(t, err)
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ httpRequest = httpRequest.WithContext(ctx)
+ httpRequest.ContentLength = int64(buffer.Len())
+ httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
+ response := httptest.NewRecorder()
+
+ handler := newProxy(ts.URL)
+ apiResponse := &api.Response{TempPath: tempPath}
+ preparer := &DefaultPreparer{}
+ opts, _, err := preparer.Prepare(apiResponse)
+ require.NoError(t, err)
+
+ HandleFileUploads(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ require.Equal(t, 200, response.Code)
+}
+
+func TestUploadHandlerRemovingInvalidExif(t *testing.T) {
+ tempPath, err := ioutil.TempDir("", "uploads")
+ require.NoError(t, err)
+ defer os.RemoveAll(tempPath)
+
+ var buffer bytes.Buffer
+
+ writer := multipart.NewWriter(&buffer)
+ file, err := writer.CreateFormFile("file", "test.jpg")
+ require.NoError(t, err)
+
+ fmt.Fprint(file, "this is not valid image data")
+ err = writer.Close()
+ require.NoError(t, err)
+
+ ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) {
+ err := r.ParseMultipartForm(100000)
+ require.Error(t, err)
+ })
+ defer ts.Close()
+
+ httpRequest, err := http.NewRequest("POST", ts.URL+"/url/path", &buffer)
+ require.NoError(t, err)
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ httpRequest = httpRequest.WithContext(ctx)
+ httpRequest.ContentLength = int64(buffer.Len())
+ httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
+ response := httptest.NewRecorder()
+
+ handler := newProxy(ts.URL)
+ apiResponse := &api.Response{TempPath: tempPath}
+ preparer := &DefaultPreparer{}
+ opts, _, err := preparer.Prepare(apiResponse)
+ require.NoError(t, err)
+
+ HandleFileUploads(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ require.Equal(t, 422, response.Code)
+}
+
+func newProxy(url string) *proxy.Proxy {
+ parsedURL := helper.URLMustParse(url)
+ return proxy.NewProxy(parsedURL, "123", roundtripper.NewTestBackendRoundTripper(parsedURL))
+}
+
+func waitUntilDeleted(t *testing.T, path string) {
+ var err error
+
+ // Poll because the file removal is async
+ for i := 0; i < 100; i++ {
+ _, err = os.Stat(path)
+ if err != nil {
+ break
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+
+ require.True(t, os.IsNotExist(err), "expected the file to be deleted")
+}