gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-06-17 21:09:49 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-06-17 21:09:49 +0300
commit    9a1066298169f8ebecacb9e55fe895f4f8962000
tree      f4814e70a1bbc65fb6c412bf977d72f6c78a3642 /workhorse/internal
parent    6c26db46b89172c15ae7b27d938db643721d59cb

Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'workhorse/internal')
-rw-r--r--  workhorse/internal/api/api.go                                  |   2
-rw-r--r--  workhorse/internal/lsif_transformer/parser/cache.go            |   4
-rw-r--r--  workhorse/internal/lsif_transformer/parser/cache_test.go       |   2
-rw-r--r--  workhorse/internal/lsif_transformer/parser/docs.go             |   4
-rw-r--r--  workhorse/internal/lsif_transformer/parser/docs_test.go        |   6
-rw-r--r--  workhorse/internal/lsif_transformer/parser/hovers.go           |   8
-rw-r--r--  workhorse/internal/lsif_transformer/parser/hovers_test.go      |   2
-rw-r--r--  workhorse/internal/lsif_transformer/parser/parser.go           |  10
-rw-r--r--  workhorse/internal/lsif_transformer/parser/parser_test.go      |   2
-rw-r--r--  workhorse/internal/lsif_transformer/parser/performance_test.go |   2
-rw-r--r--  workhorse/internal/lsif_transformer/parser/ranges.go           |   8
-rw-r--r--  workhorse/internal/lsif_transformer/parser/ranges_test.go      |   2
-rw-r--r--  workhorse/internal/lsif_transformer/parser/references.go       |   8
-rw-r--r--  workhorse/internal/lsif_transformer/parser/references_test.go  |   4
-rw-r--r--  workhorse/internal/upload/artifacts_uploader.go                |  31
-rw-r--r--  workhorse/internal/upload/exif.go                              |  91
-rw-r--r--  workhorse/internal/upload/rewrite.go                           | 106
-rw-r--r--  workhorse/internal/upload/saved_file_tracker.go                |  12
-rw-r--r--  workhorse/internal/upload/uploads.go                           |   1
19 files changed, 149 insertions, 156 deletions
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index 3abc735dc90..a536435a587 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -156,8 +156,6 @@ type Response struct {
ShowAllRefs bool
// Detects whether an artifact is used for code intelligence
ProcessLsif bool
- // Detects whether LSIF artifact will be parsed with references
- ProcessLsifReferences bool
// The maximum accepted size in bytes of the upload
MaximumSize int64
}
diff --git a/workhorse/internal/lsif_transformer/parser/cache.go b/workhorse/internal/lsif_transformer/parser/cache.go
index 8049bbfee00..ec64fd21aa8 100644
--- a/workhorse/internal/lsif_transformer/parser/cache.go
+++ b/workhorse/internal/lsif_transformer/parser/cache.go
@@ -14,8 +14,8 @@ type cache struct {
chunkSize int64
}
-func newCache(tempDir, filename string, data interface{}) (*cache, error) {
- f, err := os.CreateTemp(tempDir, filename)
+func newCache(filename string, data interface{}) (*cache, error) {
+ f, err := os.CreateTemp("", filename)
if err != nil {
return nil, err
}
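
Aside: dropping the tempDir argument from newCache relies on a documented standard-library behavior — when os.CreateTemp receives an empty dir, it creates the file under the system default temp directory. A minimal sketch (illustrative only, not from this commit):

    package example

    import (
    	"os"
    	"path/filepath"
    )

    // createScratchFile shows the fallback the new newCache(filename, data)
    // signature relies on: an empty dir argument means "use os.TempDir()".
    func createScratchFile() (string, error) {
    	f, err := os.CreateTemp("", "hovers-indexes") // "" => os.TempDir(), e.g. /tmp
    	if err != nil {
    		return "", err
    	}
    	defer f.Close()
    	return filepath.Dir(f.Name()), nil // directory chosen by the runtime, not the caller
    }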
diff --git a/workhorse/internal/lsif_transformer/parser/cache_test.go b/workhorse/internal/lsif_transformer/parser/cache_test.go
index cc8da2ae4ee..c5d4479d973 100644
--- a/workhorse/internal/lsif_transformer/parser/cache_test.go
+++ b/workhorse/internal/lsif_transformer/parser/cache_test.go
@@ -13,7 +13,7 @@ type chunk struct {
}
func TestCache(t *testing.T) {
- cache, err := newCache("", "test-chunks", chunk{})
+ cache, err := newCache("test-chunks", chunk{})
require.NoError(t, err)
defer cache.Close()
diff --git a/workhorse/internal/lsif_transformer/parser/docs.go b/workhorse/internal/lsif_transformer/parser/docs.go
index c626e07d3fe..f87bc7fd86c 100644
--- a/workhorse/internal/lsif_transformer/parser/docs.go
+++ b/workhorse/internal/lsif_transformer/parser/docs.go
@@ -35,8 +35,8 @@ type Metadata struct {
Root string `json:"projectRoot"`
}
-func NewDocs(config Config) (*Docs, error) {
- ranges, err := NewRanges(config)
+func NewDocs() (*Docs, error) {
+ ranges, err := NewRanges()
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/docs_test.go b/workhorse/internal/lsif_transformer/parser/docs_test.go
index 57dca8e773d..24e3eba8ac5 100644
--- a/workhorse/internal/lsif_transformer/parser/docs_test.go
+++ b/workhorse/internal/lsif_transformer/parser/docs_test.go
@@ -14,7 +14,7 @@ func createLine(id, label, uri string) []byte {
}
func TestParse(t *testing.T) {
- d, err := NewDocs(Config{})
+ d, err := NewDocs()
require.NoError(t, err)
defer d.Close()
@@ -31,7 +31,7 @@ func TestParse(t *testing.T) {
}
func TestParseContainsLine(t *testing.T) {
- d, err := NewDocs(Config{})
+ d, err := NewDocs()
require.NoError(t, err)
defer d.Close()
@@ -44,7 +44,7 @@ func TestParseContainsLine(t *testing.T) {
}
func TestParsingVeryLongLine(t *testing.T) {
- d, err := NewDocs(Config{})
+ d, err := NewDocs()
require.NoError(t, err)
defer d.Close()
diff --git a/workhorse/internal/lsif_transformer/parser/hovers.go b/workhorse/internal/lsif_transformer/parser/hovers.go
index 63ed46ba2a2..33d45829295 100644
--- a/workhorse/internal/lsif_transformer/parser/hovers.go
+++ b/workhorse/internal/lsif_transformer/parser/hovers.go
@@ -35,10 +35,8 @@ type ResultSetRef struct {
RefId Id `json:"inV"`
}
-func NewHovers(config Config) (*Hovers, error) {
- tempPath := config.TempPath
-
- file, err := os.CreateTemp(tempPath, "hovers")
+func NewHovers() (*Hovers, error) {
+ file, err := os.CreateTemp("", "hovers")
if err != nil {
return nil, err
}
@@ -47,7 +45,7 @@ func NewHovers(config Config) (*Hovers, error) {
return nil, err
}
- offsets, err := newCache(tempPath, "hovers-indexes", Offset{})
+ offsets, err := newCache("hovers-indexes", Offset{})
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/hovers_test.go b/workhorse/internal/lsif_transformer/parser/hovers_test.go
index 3037be103af..5b2166c07a1 100644
--- a/workhorse/internal/lsif_transformer/parser/hovers_test.go
+++ b/workhorse/internal/lsif_transformer/parser/hovers_test.go
@@ -19,7 +19,7 @@ func TestHoversRead(t *testing.T) {
}
func setupHovers(t *testing.T) *Hovers {
- h, err := NewHovers(Config{})
+ h, err := NewHovers()
require.NoError(t, err)
require.NoError(t, h.Read("hoverResult", []byte(`{"id":"2","label":"hoverResult","result":{"contents": ["hello"]}}`)))
diff --git a/workhorse/internal/lsif_transformer/parser/parser.go b/workhorse/internal/lsif_transformer/parser/parser.go
index ac4d60d144a..2e4f925950c 100644
--- a/workhorse/internal/lsif_transformer/parser/parser.go
+++ b/workhorse/internal/lsif_transformer/parser/parser.go
@@ -21,18 +21,14 @@ type Parser struct {
pr *io.PipeReader
}
-type Config struct {
- TempPath string
-}
-
-func NewParser(ctx context.Context, r io.Reader, config Config) (io.ReadCloser, error) {
- docs, err := NewDocs(config)
+func NewParser(ctx context.Context, r io.Reader) (io.ReadCloser, error) {
+ docs, err := NewDocs()
if err != nil {
return nil, err
}
// ZIP files need to be seekable. Don't hold it all in RAM, use a tempfile
- tempFile, err := os.CreateTemp(config.TempPath, Lsif)
+ tempFile, err := os.CreateTemp("", Lsif)
if err != nil {
return nil, err
}
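
Aside on the "ZIP files need to be seekable" comment above: archive/zip requires an io.ReaderAt plus the total archive size, which a streaming upload body cannot provide, hence the spool-to-tempfile step. A hedged sketch of that pattern (illustrative only, not from this commit; the helper name is made up):

    package example

    import (
    	"archive/zip"
    	"io"
    	"os"
    )

    // openZipFromStream spools a non-seekable stream to disk so archive/zip,
    // which needs random access and the archive size, can read it.
    func openZipFromStream(r io.Reader) (*zip.Reader, func() error, error) {
    	tmp, err := os.CreateTemp("", "zip-spool")
    	if err != nil {
    		return nil, nil, err
    	}
    	cleanup := func() error {
    		tmp.Close()
    		return os.Remove(tmp.Name())
    	}

    	size, err := io.Copy(tmp, r)
    	if err != nil {
    		cleanup()
    		return nil, nil, err
    	}

    	zr, err := zip.NewReader(tmp, size) // *os.File satisfies io.ReaderAt
    	if err != nil {
    		cleanup()
    		return nil, nil, err
    	}
    	return zr, cleanup, nil
    }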
diff --git a/workhorse/internal/lsif_transformer/parser/parser_test.go b/workhorse/internal/lsif_transformer/parser/parser_test.go
index 1925f8de310..6887f699cb3 100644
--- a/workhorse/internal/lsif_transformer/parser/parser_test.go
+++ b/workhorse/internal/lsif_transformer/parser/parser_test.go
@@ -42,7 +42,7 @@ func createFiles(t *testing.T, filePath, tmpDir string) {
file, err := os.Open(filePath)
require.NoError(t, err)
- parser, err := NewParser(context.Background(), file, Config{})
+ parser, err := NewParser(context.Background(), file)
require.NoError(t, err)
zipFileName := tmpDir + ".zip"
diff --git a/workhorse/internal/lsif_transformer/parser/performance_test.go b/workhorse/internal/lsif_transformer/parser/performance_test.go
index 392fe2b6900..f4adb7a52be 100644
--- a/workhorse/internal/lsif_transformer/parser/performance_test.go
+++ b/workhorse/internal/lsif_transformer/parser/performance_test.go
@@ -21,7 +21,7 @@ func BenchmarkGenerate(b *testing.B) {
file, err := os.Open(filePath)
require.NoError(b, err)
- parser, err := NewParser(context.Background(), file, Config{})
+ parser, err := NewParser(context.Background(), file)
require.NoError(b, err)
_, err = io.Copy(io.Discard, parser)
diff --git a/workhorse/internal/lsif_transformer/parser/ranges.go b/workhorse/internal/lsif_transformer/parser/ranges.go
index 3786e15186e..0b4bd588e16 100644
--- a/workhorse/internal/lsif_transformer/parser/ranges.go
+++ b/workhorse/internal/lsif_transformer/parser/ranges.go
@@ -50,18 +50,18 @@ type SerializedRange struct {
References []SerializedReference `json:"references,omitempty"`
}
-func NewRanges(config Config) (*Ranges, error) {
- hovers, err := NewHovers(config)
+func NewRanges() (*Ranges, error) {
+ hovers, err := NewHovers()
if err != nil {
return nil, err
}
- references, err := NewReferences(config)
+ references, err := NewReferences()
if err != nil {
return nil, err
}
- cache, err := newCache(config.TempPath, "ranges", Range{})
+ cache, err := newCache("ranges", Range{})
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/ranges_test.go b/workhorse/internal/lsif_transformer/parser/ranges_test.go
index c1400ba61da..807945b41b1 100644
--- a/workhorse/internal/lsif_transformer/parser/ranges_test.go
+++ b/workhorse/internal/lsif_transformer/parser/ranges_test.go
@@ -42,7 +42,7 @@ func TestSerialize(t *testing.T) {
}
func setup(t *testing.T) (*Ranges, func()) {
- r, err := NewRanges(Config{})
+ r, err := NewRanges()
require.NoError(t, err)
require.NoError(t, r.Read("range", []byte(`{"id":1,"label":"range","start":{"line":1,"character":2}}`)))
diff --git a/workhorse/internal/lsif_transformer/parser/references.go b/workhorse/internal/lsif_transformer/parser/references.go
index 39c34105fd1..815f6dfff49 100644
--- a/workhorse/internal/lsif_transformer/parser/references.go
+++ b/workhorse/internal/lsif_transformer/parser/references.go
@@ -19,15 +19,13 @@ type SerializedReference struct {
Path string `json:"path"`
}
-func NewReferences(config Config) (*References, error) {
- tempPath := config.TempPath
-
- items, err := newCache(tempPath, "references", Item{})
+func NewReferences() (*References, error) {
+ items, err := newCache("references", Item{})
if err != nil {
return nil, err
}
- offsets, err := newCache(tempPath, "references-offsets", ReferencesOffset{})
+ offsets, err := newCache("references-offsets", ReferencesOffset{})
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/references_test.go b/workhorse/internal/lsif_transformer/parser/references_test.go
index 7b47513bc53..0bf18e44c01 100644
--- a/workhorse/internal/lsif_transformer/parser/references_test.go
+++ b/workhorse/internal/lsif_transformer/parser/references_test.go
@@ -12,7 +12,7 @@ func TestReferencesStore(t *testing.T) {
refId = 3
)
- r, err := NewReferences(Config{})
+ r, err := NewReferences()
require.NoError(t, err)
err = r.Store(refId, []Item{{Line: 2, DocId: docId}, {Line: 3, DocId: docId}})
@@ -30,7 +30,7 @@ func TestReferencesStore(t *testing.T) {
func TestReferencesStoreEmpty(t *testing.T) {
const refId = 3
- r, err := NewReferences(Config{})
+ r, err := NewReferences()
require.NoError(t, err)
err = r.Store(refId, []Item{})
diff --git a/workhorse/internal/upload/artifacts_uploader.go b/workhorse/internal/upload/artifacts_uploader.go
index debbb9c24db..a8c944a1d33 100644
--- a/workhorse/internal/upload/artifacts_uploader.go
+++ b/workhorse/internal/upload/artifacts_uploader.go
@@ -3,6 +3,7 @@ package upload
import (
"context"
"fmt"
+ "io"
"mime/multipart"
"net/http"
"os"
@@ -16,6 +17,7 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)
@@ -34,8 +36,9 @@ var zipSubcommandsErrorsCounter = promauto.NewCounterVec(
}, []string{"error"})
type artifactsUploadProcessor struct {
- opts *destination.UploadOpts
- format string
+ format string
+ processLSIF bool
+ tempDir string
SavedFileTracker
}
@@ -43,16 +46,11 @@ type artifactsUploadProcessor struct {
// Artifacts is like a Multipart but specific for artifacts upload.
func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
- opts, err := p.Prepare(a)
- if err != nil {
- helper.Fail500(w, r, fmt.Errorf("UploadArtifacts: error preparing file storage options"))
- return
- }
-
format := r.URL.Query().Get(ArtifactFormatKey)
mg := &artifactsUploadProcessor{
- opts: opts,
format: format,
+ processLSIF: a.ProcessLsif,
+ tempDir: a.TempPath,
SavedFileTracker: SavedFileTracker{Request: r},
}
interceptMultipartFiles(w, r, h, mg, &eagerAuthorizer{a}, p)
@@ -61,7 +59,7 @@ func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context, file *destination.FileHandler) (*destination.FileHandler, error) {
metaOpts := &destination.UploadOpts{
- LocalTempPath: a.opts.LocalTempPath,
+ LocalTempPath: a.tempDir,
}
if metaOpts.LocalTempPath == "" {
metaOpts.LocalTempPath = os.TempDir()
@@ -115,10 +113,10 @@ func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context,
func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName string, file *destination.FileHandler, writer *multipart.Writer) error {
// ProcessFile for artifacts requires file form-data field name to eq `file`
-
if formName != "file" {
return fmt.Errorf("invalid form field: %q", formName)
}
+
if a.Count() > 0 {
return fmt.Errorf("artifacts request contains more than one file")
}
@@ -134,7 +132,6 @@ func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName str
return nil
}
- // TODO: can we rely on disk for shipping metadata? Not if we split workhorse and rails in 2 different PODs
metadata, err := a.generateMetadataFromZip(ctx, file)
if err != nil {
return err
@@ -156,6 +153,12 @@ func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName str
return nil
}
-func (a *artifactsUploadProcessor) Name() string {
- return "artifacts"
+func (a *artifactsUploadProcessor) Name() string { return "artifacts" }
+
+func (a *artifactsUploadProcessor) TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error) {
+ if a.processLSIF {
+ return parser.NewParser(ctx, r)
+ }
+
+ return a.SavedFileTracker.TransformContents(ctx, filename, r)
}
diff --git a/workhorse/internal/upload/exif.go b/workhorse/internal/upload/exif.go
new file mode 100644
index 00000000000..e77afb24502
--- /dev/null
+++ b/workhorse/internal/upload/exif.go
@@ -0,0 +1,91 @@
+package upload
+
+import (
+ "context"
+ "io"
+ "net/http"
+ "os"
+
+ "gitlab.com/gitlab-org/labkit/log"
+ "golang.org/x/image/tiff"
+
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
+)
+
+func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageType exif.FileType) (io.ReadCloser, error) {
+ tmpfile, err := os.CreateTemp("", "exifremove")
+ if err != nil {
+ return nil, err
+ }
+ go func() {
+ <-ctx.Done()
+ tmpfile.Close()
+ }()
+ if err := os.Remove(tmpfile.Name()); err != nil {
+ return nil, err
+ }
+
+ _, err = io.Copy(tmpfile, r)
+ if err != nil {
+ return nil, err
+ }
+
+ if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
+ return nil, err
+ }
+
+ isValidType := false
+ switch imageType {
+ case exif.TypeJPEG:
+ isValidType = isJPEG(tmpfile)
+ case exif.TypeTIFF:
+ isValidType = isTIFF(tmpfile)
+ }
+
+ if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
+ return nil, err
+ }
+
+ if !isValidType {
+ log.WithContextFields(ctx, log.Fields{
+ "filename": filename,
+ "imageType": imageType,
+ }).Info("invalid content type, not running exiftool")
+
+ return tmpfile, nil
+ }
+
+ log.WithContextFields(ctx, log.Fields{
+ "filename": filename,
+ }).Info("running exiftool to remove any metadata")
+
+ cleaner, err := exif.NewCleaner(ctx, tmpfile)
+ if err != nil {
+ return nil, err
+ }
+
+ return cleaner, nil
+}
+
+func isTIFF(r io.Reader) bool {
+ _, err := tiff.DecodeConfig(r)
+ if err == nil {
+ return true
+ }
+
+ if _, unsupported := err.(tiff.UnsupportedError); unsupported {
+ return true
+ }
+
+ return false
+}
+
+func isJPEG(r io.Reader) bool {
+ // Only the first 512 bytes are used to sniff the content type.
+ buf, err := io.ReadAll(io.LimitReader(r, 512))
+ if err != nil {
+ return false
+ }
+
+ return http.DetectContentType(buf) == "image/jpeg"
+}
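
One detail worth noting in the new file: handleExifUpload removes the temp file's name immediately after creating it. On Unix the open descriptor remains fully usable, and the data is reclaimed automatically once the file is closed, so nothing is left behind even if later cleanup never runs. A standalone sketch of that pattern (illustrative only, not from this commit):

    package example

    import (
    	"io"
    	"os"
    	"strings"
    )

    // unlinkWhileOpen demonstrates the create-then-remove temp file pattern:
    // the name disappears from the filesystem right away, but the descriptor
    // keeps working until it is closed, at which point the space is reclaimed.
    func unlinkWhileOpen() (string, error) {
    	tmp, err := os.CreateTemp("", "scratch")
    	if err != nil {
    		return "", err
    	}
    	defer tmp.Close()

    	if err := os.Remove(tmp.Name()); err != nil { // unlink immediately
    		return "", err
    	}

    	if _, err := io.Copy(tmp, strings.NewReader("still works")); err != nil {
    		return "", err
    	}
    	if _, err := tmp.Seek(0, io.SeekStart); err != nil {
    		return "", err
    	}

    	data, err := io.ReadAll(tmp)
    	return string(data), err
    }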
diff --git a/workhorse/internal/upload/rewrite.go b/workhorse/internal/upload/rewrite.go
index 7b753c8e5d7..d03445923fa 100644
--- a/workhorse/internal/upload/rewrite.go
+++ b/workhorse/internal/upload/rewrite.go
@@ -9,18 +9,15 @@ import (
"mime/multipart"
"net/http"
"net/textproto"
- "os"
"path/filepath"
"strings"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
- "golang.org/x/image/tiff"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
)
@@ -151,22 +148,11 @@ func (rew *rewriter) handleFilePart(r *http.Request, name string, p *multipart.P
return err
}
- var inputReader io.ReadCloser
ctx := r.Context()
- if imageType := exif.FileTypeFromSuffix(filename); imageType != exif.TypeUnknown {
- inputReader, err = handleExifUpload(ctx, p, filename, imageType)
- if err != nil {
- return err
- }
- } else if apiResponse.ProcessLsif {
- inputReader, err = handleLsifUpload(ctx, p, opts.LocalTempPath, filename)
- if err != nil {
- return err
- }
- } else {
- inputReader = io.NopCloser(p)
+ inputReader, err := rew.filter.TransformContents(ctx, filename, p)
+ if err != nil {
+ return err
}
-
defer inputReader.Close()
fh, err := destination.Upload(ctx, inputReader, -1, filename, opts)
@@ -194,92 +180,6 @@ func (rew *rewriter) handleFilePart(r *http.Request, name string, p *multipart.P
return rew.filter.ProcessFile(ctx, name, fh, rew.writer)
}
-func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageType exif.FileType) (io.ReadCloser, error) {
- tmpfile, err := os.CreateTemp("", "exifremove")
- if err != nil {
- return nil, err
- }
- go func() {
- <-ctx.Done()
- tmpfile.Close()
- }()
- if err := os.Remove(tmpfile.Name()); err != nil {
- return nil, err
- }
-
- _, err = io.Copy(tmpfile, r)
- if err != nil {
- return nil, err
- }
-
- if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
- return nil, err
- }
-
- isValidType := false
- switch imageType {
- case exif.TypeJPEG:
- isValidType = isJPEG(tmpfile)
- case exif.TypeTIFF:
- isValidType = isTIFF(tmpfile)
- }
-
- if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
- return nil, err
- }
-
- if !isValidType {
- log.WithContextFields(ctx, log.Fields{
- "filename": filename,
- "imageType": imageType,
- }).Info("invalid content type, not running exiftool")
-
- return tmpfile, nil
- }
-
- log.WithContextFields(ctx, log.Fields{
- "filename": filename,
- }).Info("running exiftool to remove any metadata")
-
- cleaner, err := exif.NewCleaner(ctx, tmpfile)
- if err != nil {
- return nil, err
- }
-
- return cleaner, nil
-}
-
-func isTIFF(r io.Reader) bool {
- _, err := tiff.DecodeConfig(r)
- if err == nil {
- return true
- }
-
- if _, unsupported := err.(tiff.UnsupportedError); unsupported {
- return true
- }
-
- return false
-}
-
-func isJPEG(r io.Reader) bool {
- // Only the first 512 bytes are used to sniff the content type.
- buf, err := io.ReadAll(io.LimitReader(r, 512))
- if err != nil {
- return false
- }
-
- return http.DetectContentType(buf) == "image/jpeg"
-}
-
-func handleLsifUpload(ctx context.Context, reader io.Reader, tempPath, filename string) (io.ReadCloser, error) {
- parserConfig := parser.Config{
- TempPath: tempPath,
- }
-
- return parser.NewParser(ctx, reader, parserConfig)
-}
-
func (rew *rewriter) copyPart(ctx context.Context, name string, p *multipart.Part) error {
np, err := rew.writer.CreatePart(p.Header)
if err != nil {
diff --git a/workhorse/internal/upload/saved_file_tracker.go b/workhorse/internal/upload/saved_file_tracker.go
index 77758520d94..1fad5343647 100644
--- a/workhorse/internal/upload/saved_file_tracker.go
+++ b/workhorse/internal/upload/saved_file_tracker.go
@@ -3,11 +3,13 @@ package upload
import (
"context"
"fmt"
+ "io"
"mime/multipart"
"net/http"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
)
type SavedFileTracker struct {
@@ -54,6 +56,12 @@ func (s *SavedFileTracker) Finalize(_ context.Context) error {
return nil
}
-func (s *SavedFileTracker) Name() string {
- return "accelerate"
+func (s *SavedFileTracker) Name() string { return "accelerate" }
+
+func (*SavedFileTracker) TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error) {
+ if imageType := exif.FileTypeFromSuffix(filename); imageType != exif.TypeUnknown {
+ return handleExifUpload(ctx, r, filename, imageType)
+ }
+
+ return io.NopCloser(r), nil
}
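
For orientation: the default TransformContents above only scrubs EXIF data when the filename suffix looks like an image; everything else passes through untouched. The real extension list lives in the exif package; the sketch below is a hypothetical stand-in for that dispatch, not the actual implementation:

    package example

    import (
    	"path/filepath"
    	"strings"
    )

    // fileTypeFromSuffix is a made-up analogue of exif.FileTypeFromSuffix,
    // shown only to illustrate suffix-based dispatch; the real list may differ.
    func fileTypeFromSuffix(filename string) string {
    	switch strings.ToLower(filepath.Ext(filename)) {
    	case ".jpg", ".jpeg":
    		return "jpeg"
    	case ".tif", ".tiff":
    		return "tiff"
    	default:
    		return "unknown"
    	}
    }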
diff --git a/workhorse/internal/upload/uploads.go b/workhorse/internal/upload/uploads.go
index cdbd367297e..61b419901a7 100644
--- a/workhorse/internal/upload/uploads.go
+++ b/workhorse/internal/upload/uploads.go
@@ -36,6 +36,7 @@ type MultipartFormProcessor interface {
Finalize(ctx context.Context) error
Name() string
Count() int
+ TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error)
}
// interceptMultipartFiles is the core of the implementation of
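
With TransformContents added to MultipartFormProcessor, every processor must now supply a content transformation, and processors that need no rewriting simply hand the reader back unchanged, as SavedFileTracker does. A hypothetical minimal implementation (illustrative only, the type name is made up):

    package example

    import (
    	"context"
    	"io"
    )

    // passthroughProcessor is a hypothetical processor whose TransformContents
    // leaves the upload body untouched; real processors plug in the LSIF
    // parser or EXIF scrubbing here instead.
    type passthroughProcessor struct{}

    func (passthroughProcessor) TransformContents(_ context.Context, _ string, r io.Reader) (io.ReadCloser, error) {
    	return io.NopCloser(r), nil
    }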