gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'workhorse/internal')
-rw-r--r--  workhorse/internal/api/api.go | 46
-rw-r--r--  workhorse/internal/api/block_test.go | 4
-rw-r--r--  workhorse/internal/artifacts/entry_test.go | 14
-rw-r--r--  workhorse/internal/badgateway/roundtripper.go | 4
-rw-r--r--  workhorse/internal/badgateway/roundtripper_test.go | 4
-rw-r--r--  workhorse/internal/config/config.go | 1
-rw-r--r--  workhorse/internal/git/archive.go | 3
-rw-r--r--  workhorse/internal/git/archive_test.go | 4
-rw-r--r--  workhorse/internal/git/upload-pack_test.go | 8
-rw-r--r--  workhorse/internal/helper/helpers.go | 6
-rw-r--r--  workhorse/internal/helper/tempfile.go | 3
-rw-r--r--  workhorse/internal/helper/writeafterreader.go | 3
-rw-r--r--  workhorse/internal/helper/writeafterreader_test.go | 7
-rw-r--r--  workhorse/internal/httprs/httprs.go | 3
-rw-r--r--  workhorse/internal/httprs/httprs_test.go | 11
-rw-r--r--  workhorse/internal/imageresizer/image_resizer_test.go | 10
-rw-r--r--  workhorse/internal/lsif_transformer/parser/cache.go | 5
-rw-r--r--  workhorse/internal/lsif_transformer/parser/cache_test.go | 6
-rw-r--r--  workhorse/internal/lsif_transformer/parser/docs.go | 4
-rw-r--r--  workhorse/internal/lsif_transformer/parser/docs_test.go | 6
-rw-r--r--  workhorse/internal/lsif_transformer/parser/hovers.go | 9
-rw-r--r--  workhorse/internal/lsif_transformer/parser/hovers_test.go | 2
-rw-r--r--  workhorse/internal/lsif_transformer/parser/parser.go | 11
-rw-r--r--  workhorse/internal/lsif_transformer/parser/parser_test.go | 7
-rw-r--r--  workhorse/internal/lsif_transformer/parser/performance_test.go | 5
-rw-r--r--  workhorse/internal/lsif_transformer/parser/ranges.go | 8
-rw-r--r--  workhorse/internal/lsif_transformer/parser/ranges_test.go | 2
-rw-r--r--  workhorse/internal/lsif_transformer/parser/references.go | 8
-rw-r--r--  workhorse/internal/lsif_transformer/parser/references_test.go | 4
-rw-r--r--  workhorse/internal/secret/jwt.go | 2
-rw-r--r--  workhorse/internal/secret/secret.go | 4
-rw-r--r--  workhorse/internal/senddata/contentprocessor/contentprocessor_test.go | 3
-rw-r--r--  workhorse/internal/senddata/writer_test.go | 3
-rw-r--r--  workhorse/internal/sendfile/sendfile.go | 3
-rw-r--r--  workhorse/internal/sendfile/sendfile_test.go | 15
-rw-r--r--  workhorse/internal/sendurl/sendurl_test.go | 3
-rw-r--r--  workhorse/internal/staticpages/deploy_page.go | 4
-rw-r--r--  workhorse/internal/staticpages/deploy_page_test.go | 15
-rw-r--r--  workhorse/internal/staticpages/error_pages.go | 4
-rw-r--r--  workhorse/internal/staticpages/error_pages_test.go | 39
-rw-r--r--  workhorse/internal/staticpages/servefile_test.go | 25
-rw-r--r--  workhorse/internal/testhelper/gitaly.go | 6
-rw-r--r--  workhorse/internal/testhelper/testhelper.go | 9
-rw-r--r--  workhorse/internal/upload/artifacts_store_test.go | 25
-rw-r--r--  workhorse/internal/upload/artifacts_upload_test.go | 17
-rw-r--r--  workhorse/internal/upload/artifacts_uploader.go | 67
-rw-r--r--  workhorse/internal/upload/body_uploader.go | 6
-rw-r--r--  workhorse/internal/upload/body_uploader_test.go | 5
-rw-r--r--  workhorse/internal/upload/destination/destination.go | 25
-rw-r--r--  workhorse/internal/upload/destination/destination_test.go | 54
-rw-r--r--  workhorse/internal/upload/destination/filestore/filestore_test.go | 5
-rw-r--r--  workhorse/internal/upload/destination/objectstore/gocloud_object_test.go | 3
-rw-r--r--  workhorse/internal/upload/destination/objectstore/multipart.go | 9
-rw-r--r--  workhorse/internal/upload/destination/objectstore/multipart_test.go | 4
-rw-r--r--  workhorse/internal/upload/destination/objectstore/object.go | 3
-rw-r--r--  workhorse/internal/upload/destination/objectstore/s3_object_test.go | 18
-rw-r--r--  workhorse/internal/upload/destination/objectstore/s3_session.go | 7
-rw-r--r--  workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go | 12
-rw-r--r--  workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go | 24
-rw-r--r--  workhorse/internal/upload/destination/objectstore/test/s3_stub.go | 10
-rw-r--r--  workhorse/internal/upload/destination/reader_test.go | 4
-rw-r--r--  workhorse/internal/upload/destination/upload_opts.go | 2
-rw-r--r--  workhorse/internal/upload/destination/upload_opts_test.go | 3
-rw-r--r--  workhorse/internal/upload/exif.go | 91
-rw-r--r--  workhorse/internal/upload/exif/exif_test.go | 7
-rw-r--r--  workhorse/internal/upload/multipart_uploader.go | 29
-rw-r--r--  workhorse/internal/upload/rewrite.go | 169
-rw-r--r--  workhorse/internal/upload/saved_file_tracker.go | 14
-rw-r--r--  workhorse/internal/upload/skip_rails_authorizer.go | 22
-rw-r--r--  workhorse/internal/upload/uploads.go | 11
-rw-r--r--  workhorse/internal/upload/uploads_test.go | 146
-rw-r--r--  workhorse/internal/upstream/handlers_test.go | 6
-rw-r--r--  workhorse/internal/upstream/roundtripper/roundtripper_test.go | 4
-rw-r--r--  workhorse/internal/upstream/routes.go | 2
-rw-r--r--  workhorse/internal/upstream/upstream_test.go | 3
-rw-r--r--  workhorse/internal/utils/svg/README.md | 4
-rw-r--r--  workhorse/internal/zipartifacts/metadata_test.go | 7
77 files changed, 519 insertions(+), 642 deletions(-)
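
Most of this diff is a mechanical migration off the deprecated io/ioutil package (deprecated since Go 1.16) onto its io and os replacements, plus a switch to t.TempDir() in tests. A minimal, standalone sketch of the substitutions used throughout:

package main

import (
	"fmt"
	"io"
	"os"
	"strings"
)

func main() {
	// io.ReadAll replaces ioutil.ReadAll; io.Discard and io.NopCloser replace
	// their ioutil counterparts.
	data, err := io.ReadAll(strings.NewReader("hello"))
	if err != nil {
		panic(err)
	}

	// os.CreateTemp replaces ioutil.TempFile; os.MkdirTemp replaces
	// ioutil.TempDir (tests prefer t.TempDir(), which cleans up itself).
	f, err := os.CreateTemp("", "example")
	if err != nil {
		panic(err)
	}
	defer os.Remove(f.Name())
	defer f.Close()

	// os.WriteFile / os.ReadFile replace ioutil.WriteFile / ioutil.ReadFile.
	if err := os.WriteFile(f.Name(), data, 0600); err != nil {
		panic(err)
	}
	fmt.Println(string(data))
}
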
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index 8954923ad75..a536435a587 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -156,8 +156,6 @@ type Response struct {
ShowAllRefs bool
// Detects whether an artifact is used for code intelligence
ProcessLsif bool
- // Detects whether LSIF artifact will be parsed with references
- ProcessLsifReferences bool
// The maximum accepted size in bytes of the upload
MaximumSize int64
}
@@ -263,26 +261,22 @@ func (api *API) newRequest(r *http.Request, suffix string) (*http.Request, error
// PreAuthorize performs a pre-authorization check against the API for the given HTTP request
//
-// If `outErr` is set, the other fields will be nil and it should be treated as
-// a 500 error.
+// If the returned *http.Response is not nil, the caller is responsible for closing its body
//
-// If httpResponse is present, the caller is responsible for closing its body
-//
-// authResponse will only be present if the authorization check was successful
-func (api *API) PreAuthorize(suffix string, r *http.Request) (httpResponse *http.Response, authResponse *Response, outErr error) {
+// Only upon successful authorization do we return a non-nil *Response
+func (api *API) PreAuthorize(suffix string, r *http.Request) (_ *http.Response, _ *Response, err error) {
authReq, err := api.newRequest(r, suffix)
if err != nil {
return nil, nil, fmt.Errorf("preAuthorizeHandler newUpstreamRequest: %v", err)
}
- httpResponse, err = api.doRequestWithoutRedirects(authReq)
+ httpResponse, err := api.doRequestWithoutRedirects(authReq)
if err != nil {
return nil, nil, fmt.Errorf("preAuthorizeHandler: do request: %v", err)
}
defer func() {
- if outErr != nil {
+ if err != nil {
httpResponse.Body.Close()
- httpResponse = nil
}
}()
requestsCounter.WithLabelValues(strconv.Itoa(httpResponse.StatusCode), authReq.Method).Inc()
@@ -293,17 +287,43 @@ func (api *API) PreAuthorize(suffix string, r *http.Request) (httpResponse *http
return httpResponse, nil, nil
}
- authResponse = &Response{}
+ authResponse := &Response{}
// The auth backend validated the client request and told us additional
// request metadata. We must extract this information from the auth
// response body.
if err := json.NewDecoder(httpResponse.Body).Decode(authResponse); err != nil {
- return httpResponse, nil, fmt.Errorf("preAuthorizeHandler: decode authorization response: %v", err)
+ return nil, nil, fmt.Errorf("preAuthorizeHandler: decode authorization response: %v", err)
}
return httpResponse, authResponse, nil
}
+// PreAuthorizeFixedPath makes an internal Workhorse API call to a fixed
+// path, using the HTTP headers of r.
+func (api *API) PreAuthorizeFixedPath(r *http.Request, method string, path string) (*Response, error) {
+ authReq, err := http.NewRequestWithContext(r.Context(), method, api.URL.String(), nil)
+ if err != nil {
+ return nil, fmt.Errorf("construct auth request: %w", err)
+ }
+ authReq.Header = helper.HeaderClone(r.Header)
+
+ failureResponse, apiResponse, err := api.PreAuthorize(path, authReq)
+ if err != nil {
+ return nil, fmt.Errorf("PreAuthorize: %w", err)
+ }
+
+ // We don't need the contents of failureResponse but we are responsible
+ // for closing it. Part of the reason PreAuthorizeFixedPath exists is to
+ // hide this awkwardness.
+ failureResponse.Body.Close()
+
+ if apiResponse == nil {
+ return nil, fmt.Errorf("no api response: status %d", failureResponse.StatusCode)
+ }
+
+ return apiResponse, nil
+}
+
func (api *API) PreAuthorizeHandler(next HandleFunc, suffix string) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
httpResponse, authResponse, err := api.PreAuthorize(suffix, r)
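
The hunks above tighten the PreAuthorize contract and layer PreAuthorizeFixedPath on top of it. A hypothetical caller, sketched against the signatures shown in this diff (the authorizeUpload name and the "/authorize" suffix are illustrative, not part of the change):

package apiexample

import (
	"fmt"
	"net/http"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
)

// authorizeUpload is a hypothetical caller: the returned *http.Response must
// be closed whenever it is non-nil, and a non-nil *api.Response only comes
// back when the authorization check succeeded.
func authorizeUpload(a *api.API, r *http.Request) (*api.Response, error) {
	httpResp, authResp, err := a.PreAuthorize("/authorize", r)
	if err != nil {
		return nil, err // treat as a 500
	}
	defer httpResp.Body.Close()

	if authResp == nil {
		// Denied: relay the backend's status instead of proceeding.
		return nil, fmt.Errorf("authorization denied: status %d", httpResp.StatusCode)
	}
	return authResp, nil
}
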
diff --git a/workhorse/internal/api/block_test.go b/workhorse/internal/api/block_test.go
index 85ad54f3cfd..0beb401d2f5 100644
--- a/workhorse/internal/api/block_test.go
+++ b/workhorse/internal/api/block_test.go
@@ -1,7 +1,7 @@
package api
import (
- "io/ioutil"
+ "io"
"net/http"
"net/http/httptest"
"testing"
@@ -46,7 +46,7 @@ func TestBlocker(t *testing.T) {
rw.Flush()
body := rw.Result().Body
- data, err := ioutil.ReadAll(body)
+ data, err := io.ReadAll(body)
require.NoError(t, err)
require.NoError(t, body.Close())
diff --git a/workhorse/internal/artifacts/entry_test.go b/workhorse/internal/artifacts/entry_test.go
index 800125eec91..709fd5f57a7 100644
--- a/workhorse/internal/artifacts/entry_test.go
+++ b/workhorse/internal/artifacts/entry_test.go
@@ -4,7 +4,6 @@ import (
"archive/zip"
"encoding/base64"
"fmt"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -36,7 +35,7 @@ func testEntryServer(t *testing.T, archive string, entry string) *httptest.Respo
}
func TestDownloadingFromValidArchive(t *testing.T) {
- tempFile, err := ioutil.TempFile("", "uploads")
+ tempFile, err := os.CreateTemp("", "uploads")
require.NoError(t, err)
defer tempFile.Close()
defer os.Remove(tempFile.Name())
@@ -63,9 +62,7 @@ func TestDownloadingFromValidArchive(t *testing.T) {
}
func TestDownloadingFromValidHTTPArchive(t *testing.T) {
- tempDir, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempDir)
+ tempDir := t.TempDir()
f, err := os.Create(filepath.Join(tempDir, "archive.zip"))
require.NoError(t, err)
@@ -97,10 +94,9 @@ func TestDownloadingFromValidHTTPArchive(t *testing.T) {
}
func TestDownloadingNonExistingFile(t *testing.T) {
- tempFile, err := ioutil.TempFile("", "uploads")
+ tempFile, err := os.CreateTemp(t.TempDir(), "uploads")
require.NoError(t, err)
defer tempFile.Close()
- defer os.Remove(tempFile.Name())
archive := zip.NewWriter(tempFile)
defer archive.Close()
@@ -121,9 +117,7 @@ func TestIncompleteApiResponse(t *testing.T) {
}
func TestDownloadingFromNonExistingHTTPArchive(t *testing.T) {
- tempDir, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempDir)
+ tempDir := t.TempDir()
fileServer := httptest.NewServer(http.FileServer(http.Dir(tempDir)))
defer fileServer.Close()
diff --git a/workhorse/internal/badgateway/roundtripper.go b/workhorse/internal/badgateway/roundtripper.go
index 86337e80f28..240a4ebc86b 100644
--- a/workhorse/internal/badgateway/roundtripper.go
+++ b/workhorse/internal/badgateway/roundtripper.go
@@ -4,7 +4,7 @@ import (
"bytes"
"fmt"
"html/template"
- "io/ioutil"
+ "io"
"net/http"
"strings"
"time"
@@ -60,7 +60,7 @@ func (t *roundTripper) RoundTrip(r *http.Request) (*http.Response, error) {
message, contentType = developmentModeResponse(err)
}
- injectedResponse.Body = ioutil.NopCloser(strings.NewReader(message))
+ injectedResponse.Body = io.NopCloser(strings.NewReader(message))
injectedResponse.Header.Set("Content-Type", contentType)
return injectedResponse, nil
diff --git a/workhorse/internal/badgateway/roundtripper_test.go b/workhorse/internal/badgateway/roundtripper_test.go
index fc7132f9bd7..b59cb8d2c5b 100644
--- a/workhorse/internal/badgateway/roundtripper_test.go
+++ b/workhorse/internal/badgateway/roundtripper_test.go
@@ -2,7 +2,7 @@ package badgateway
import (
"errors"
- "io/ioutil"
+ "io"
"net/http"
"testing"
@@ -45,7 +45,7 @@ func TestErrorPage502(t *testing.T) {
require.NoError(t, err, "perform roundtrip")
defer response.Body.Close()
- body, err := ioutil.ReadAll(response.Body)
+ body, err := io.ReadAll(response.Body)
require.NoError(t, err)
require.Equal(t, tc.contentType, response.Header.Get("content-type"), "content type")
diff --git a/workhorse/internal/config/config.go b/workhorse/internal/config/config.go
index e83f55f43bf..3ce88f449a9 100644
--- a/workhorse/internal/config/config.go
+++ b/workhorse/internal/config/config.go
@@ -120,6 +120,7 @@ type Config struct {
TrustedCIDRsForXForwardedFor []string `toml:"trusted_cidrs_for_x_forwarded_for"`
TrustedCIDRsForPropagation []string `toml:"trusted_cidrs_for_propagation"`
Listeners []ListenerConfig `toml:"listeners"`
+ MetricsListener *ListenerConfig `toml:"metrics_listener"`
}
var DefaultImageResizerConfig = ImageResizerConfig{
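
The new MetricsListener field is an optional listener block keyed as metrics_listener. A rough sketch of how such a block could be decoded, assuming the TOML decoder and network/addr listener fields used elsewhere in config.go (the struct mirrors here are trimmed stand-ins, not the real definitions):

package main

import (
	"fmt"

	"github.com/BurntSushi/toml" // assumed: the decoder already used by config.go
)

// Trimmed stand-ins for the real config structs, for illustration only.
type listenerConfig struct {
	Network string `toml:"network"`
	Addr    string `toml:"addr"`
}

type cfg struct {
	MetricsListener *listenerConfig `toml:"metrics_listener"`
}

func main() {
	// Hypothetical snippet showing what a metrics_listener block could look like.
	doc := `
[metrics_listener]
network = "tcp"
addr = "localhost:9229"
`
	var c cfg
	if _, err := toml.Decode(doc, &c); err != nil {
		panic(err)
	}
	fmt.Printf("metrics on %s/%s\n", c.MetricsListener.Network, c.MetricsListener.Addr)
}
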
diff --git a/workhorse/internal/git/archive.go b/workhorse/internal/git/archive.go
index e1d03828b63..5dcbb7f262e 100644
--- a/workhorse/internal/git/archive.go
+++ b/workhorse/internal/git/archive.go
@@ -7,7 +7,6 @@ package git
import (
"fmt"
"io"
- "io/ioutil"
"net/http"
"os"
"path"
@@ -180,7 +179,7 @@ func prepareArchiveTempfile(dir string, prefix string) (*os.File, error) {
if err := os.MkdirAll(dir, 0700); err != nil {
return nil, err
}
- return ioutil.TempFile(dir, prefix)
+ return os.CreateTemp(dir, prefix)
}
func finalizeCachedArchive(tempFile *os.File, archivePath string) error {
diff --git a/workhorse/internal/git/archive_test.go b/workhorse/internal/git/archive_test.go
index b96d5fdec85..b87800f492c 100644
--- a/workhorse/internal/git/archive_test.go
+++ b/workhorse/internal/git/archive_test.go
@@ -1,8 +1,8 @@
package git
import (
- "io/ioutil"
"net/http/httptest"
+ "os"
"testing"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
@@ -40,7 +40,7 @@ func TestParseBasename(t *testing.T) {
}
func TestFinalizeArchive(t *testing.T) {
- tempFile, err := ioutil.TempFile("", "gitlab-workhorse-test")
+ tempFile, err := os.CreateTemp("", "gitlab-workhorse-test")
if err != nil {
t.Fatal(err)
}
diff --git a/workhorse/internal/git/upload-pack_test.go b/workhorse/internal/git/upload-pack_test.go
index 9ffc7117790..d1184f95d88 100644
--- a/workhorse/internal/git/upload-pack_test.go
+++ b/workhorse/internal/git/upload-pack_test.go
@@ -5,10 +5,8 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"net"
"net/http/httptest"
- "os"
"path/filepath"
"testing"
"time"
@@ -57,7 +55,7 @@ func TestUploadPackTimesOut(t *testing.T) {
}
defer conn.Close()
- _, _ = io.Copy(ioutil.Discard, conn)
+ _, _ = io.Copy(io.Discard, conn)
return &gitalypb.PostUploadPackWithSidechannelResponse{}, nil
},
})
@@ -75,8 +73,7 @@ func TestUploadPackTimesOut(t *testing.T) {
func startSmartHTTPServer(t testing.TB, s gitalypb.SmartHTTPServiceServer) string {
t.Helper()
- tmp, err := ioutil.TempDir("", "")
- require.NoError(t, err)
+ tmp := t.TempDir()
socket := filepath.Join(tmp, "gitaly.sock")
ln, err := net.Listen("unix", socket)
@@ -90,7 +87,6 @@ func startSmartHTTPServer(t testing.TB, s gitalypb.SmartHTTPServiceServer) strin
t.Cleanup(func() {
srv.GracefulStop()
- require.NoError(t, os.RemoveAll(tmp), "error removing temp dir %q", tmp)
})
return fmt.Sprintf("%s://%s", ln.Addr().Network(), ln.Addr().String())
diff --git a/workhorse/internal/helper/helpers.go b/workhorse/internal/helper/helpers.go
index f9b46181579..33318407f88 100644
--- a/workhorse/internal/helper/helpers.go
+++ b/workhorse/internal/helper/helpers.go
@@ -3,7 +3,7 @@ package helper
import (
"bytes"
"errors"
- "io/ioutil"
+ "io"
"mime"
"net"
"net/http"
@@ -197,12 +197,12 @@ func ReadRequestBody(w http.ResponseWriter, r *http.Request, maxBodySize int64)
limitedBody := http.MaxBytesReader(w, r.Body, maxBodySize)
defer limitedBody.Close()
- return ioutil.ReadAll(limitedBody)
+ return io.ReadAll(limitedBody)
}
func CloneRequestWithNewBody(r *http.Request, body []byte) *http.Request {
newReq := *r
- newReq.Body = ioutil.NopCloser(bytes.NewReader(body))
+ newReq.Body = io.NopCloser(bytes.NewReader(body))
newReq.Header = HeaderClone(r.Header)
newReq.ContentLength = int64(len(body))
return &newReq
diff --git a/workhorse/internal/helper/tempfile.go b/workhorse/internal/helper/tempfile.go
index d8fc0d44698..f5864f549d0 100644
--- a/workhorse/internal/helper/tempfile.go
+++ b/workhorse/internal/helper/tempfile.go
@@ -2,12 +2,11 @@ package helper
import (
"io"
- "io/ioutil"
"os"
)
func ReadAllTempfile(r io.Reader) (tempfile *os.File, err error) {
- tempfile, err = ioutil.TempFile("", "gitlab-workhorse-read-all-tempfile")
+ tempfile, err = os.CreateTemp("", "gitlab-workhorse-read-all-tempfile")
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/helper/writeafterreader.go b/workhorse/internal/helper/writeafterreader.go
index 7df2279a86a..3626d70e493 100644
--- a/workhorse/internal/helper/writeafterreader.go
+++ b/workhorse/internal/helper/writeafterreader.go
@@ -3,7 +3,6 @@ package helper
import (
"fmt"
"io"
- "io/ioutil"
"os"
"sync"
)
@@ -131,7 +130,7 @@ func (w *coupledWriter) tempfileWrite(data []byte) (int, error) {
}
func (*coupledWriter) newTempfile() (tempfile *os.File, err error) {
- tempfile, err = ioutil.TempFile("", "gitlab-workhorse-coupledWriter")
+ tempfile, err = os.CreateTemp("", "gitlab-workhorse-coupledWriter")
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/helper/writeafterreader_test.go b/workhorse/internal/helper/writeafterreader_test.go
index 67cb3e6e542..c3da428184b 100644
--- a/workhorse/internal/helper/writeafterreader_test.go
+++ b/workhorse/internal/helper/writeafterreader_test.go
@@ -4,7 +4,6 @@ import (
"bytes"
"fmt"
"io"
- "io/ioutil"
"testing"
"testing/iotest"
)
@@ -14,7 +13,7 @@ func TestBusyReader(t *testing.T) {
r := testReader(testData)
br, _ := NewWriteAfterReader(r, &bytes.Buffer{})
- result, err := ioutil.ReadAll(br)
+ result, err := io.ReadAll(br)
if err != nil {
t.Fatal(err)
}
@@ -27,7 +26,7 @@ func TestBusyReader(t *testing.T) {
func TestFirstWriteAfterReadDone(t *testing.T) {
writeRecorder := &bytes.Buffer{}
br, cw := NewWriteAfterReader(&bytes.Buffer{}, writeRecorder)
- if _, err := io.Copy(ioutil.Discard, br); err != nil {
+ if _, err := io.Copy(io.Discard, br); err != nil {
t.Fatalf("copy from busyreader: %v", err)
}
testData := "test data"
@@ -53,7 +52,7 @@ func TestWriteDelay(t *testing.T) {
}
// Unblock the coupled writer by draining the reader
- if _, err := io.Copy(ioutil.Discard, br); err != nil {
+ if _, err := io.Copy(io.Discard, br); err != nil {
t.Fatalf("copy from busyreader: %v", err)
}
// Now it is no longer an error if 'w' receives a Write()
diff --git a/workhorse/internal/httprs/httprs.go b/workhorse/internal/httprs/httprs.go
index a38230c1968..f7767d2ee28 100644
--- a/workhorse/internal/httprs/httprs.go
+++ b/workhorse/internal/httprs/httprs.go
@@ -20,7 +20,6 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"net/http"
"github.com/mitchellh/copystructure"
@@ -148,7 +147,7 @@ func (r *HttpReadSeeker) Seek(offset int64, whence int) (int64, error) {
if r.r != nil {
// Try to read, which is cheaper than doing a request
if r.pos < offset && offset-r.pos <= shortSeekBytes {
- _, err := io.CopyN(ioutil.Discard, r, offset-r.pos)
+ _, err := io.CopyN(io.Discard, r, offset-r.pos)
if err != nil {
return 0, err
}
diff --git a/workhorse/internal/httprs/httprs_test.go b/workhorse/internal/httprs/httprs_test.go
index e26d2d21215..9dcfe75219b 100644
--- a/workhorse/internal/httprs/httprs_test.go
+++ b/workhorse/internal/httprs/httprs_test.go
@@ -3,7 +3,6 @@ package httprs
import (
"fmt"
"io"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -49,7 +48,7 @@ type fakeRoundTripper struct {
func (f *fakeRoundTripper) RoundTrip(r *http.Request) (*http.Response, error) {
fw := &fakeResponseWriter{h: http.Header{}}
var err error
- fw.tmp, err = ioutil.TempFile(os.TempDir(), "httprs")
+ fw.tmp, err = os.CreateTemp(os.TempDir(), "httprs")
if err != nil {
return nil, err
}
@@ -79,7 +78,7 @@ type RSFactory func() *HttpReadSeeker
func newRSFactory(flags int) RSFactory {
return func() *HttpReadSeeker {
- tmp, err := ioutil.TempFile(os.TempDir(), "httprs")
+ tmp, err := os.CreateTemp(os.TempDir(), "httprs")
if err != nil {
return nil
}
@@ -111,11 +110,9 @@ func newRSFactory(flags int) RSFactory {
func TestHttpWebServer(t *testing.T) {
Convey("Scenario: testing WebServer", t, func() {
- dir, err := ioutil.TempDir("", "webserver")
- So(err, ShouldBeNil)
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
- err = ioutil.WriteFile(filepath.Join(dir, "file"), make([]byte, 10000), 0755)
+ err := os.WriteFile(filepath.Join(dir, "file"), make([]byte, 10000), 0755)
So(err, ShouldBeNil)
server := httptest.NewServer(http.FileServer(http.Dir(dir)))
diff --git a/workhorse/internal/imageresizer/image_resizer_test.go b/workhorse/internal/imageresizer/image_resizer_test.go
index 8f5c8b2a6eb..f16df77f002 100644
--- a/workhorse/internal/imageresizer/image_resizer_test.go
+++ b/workhorse/internal/imageresizer/image_resizer_test.go
@@ -5,7 +5,7 @@ import (
"encoding/json"
"image"
"image/png"
- "io/ioutil"
+ "io"
"net/http"
"net/http/httptest"
"os"
@@ -184,7 +184,7 @@ func TestServeOriginalImageWhenSourceImageFormatIsNotAllowed(t *testing.T) {
cfg := config.DefaultImageResizerConfig
// SVG images are not allowed to be resized
svgImagePath := "../../testdata/image.svg"
- svgImage, err := ioutil.ReadFile(svgImagePath)
+ svgImage, err := os.ReadFile(svgImagePath)
require.NoError(t, err)
// ContentType is no longer used to perform the format validation.
// To make the test more strict, we'll use allowed, but incorrect ContentType.
@@ -193,7 +193,7 @@ func TestServeOriginalImageWhenSourceImageFormatIsNotAllowed(t *testing.T) {
resp := requestScaledImage(t, nil, params, cfg)
require.Equal(t, http.StatusOK, resp.StatusCode)
- responseData, err := ioutil.ReadAll(resp.Body)
+ responseData, err := io.ReadAll(resp.Body)
require.NoError(t, err)
require.Equal(t, svgImage, responseData, "expected original image")
}
@@ -201,7 +201,7 @@ func TestServeOriginalImageWhenSourceImageFormatIsNotAllowed(t *testing.T) {
func TestServeOriginalImageWhenSourceImageIsTooSmall(t *testing.T) {
content := []byte("PNG") // 3 bytes only, invalid as PNG/JPEG image
- img, err := ioutil.TempFile("", "*.png")
+ img, err := os.CreateTemp("", "*.png")
require.NoError(t, err)
defer img.Close()
@@ -216,7 +216,7 @@ func TestServeOriginalImageWhenSourceImageIsTooSmall(t *testing.T) {
resp := requestScaledImage(t, nil, params, cfg)
require.Equal(t, http.StatusOK, resp.StatusCode)
- responseData, err := ioutil.ReadAll(resp.Body)
+ responseData, err := io.ReadAll(resp.Body)
require.NoError(t, err)
require.Equal(t, content, responseData, "expected original image")
}
diff --git a/workhorse/internal/lsif_transformer/parser/cache.go b/workhorse/internal/lsif_transformer/parser/cache.go
index 395069cd217..ec64fd21aa8 100644
--- a/workhorse/internal/lsif_transformer/parser/cache.go
+++ b/workhorse/internal/lsif_transformer/parser/cache.go
@@ -3,7 +3,6 @@ package parser
import (
"encoding/binary"
"io"
- "io/ioutil"
"os"
)
@@ -15,8 +14,8 @@ type cache struct {
chunkSize int64
}
-func newCache(tempDir, filename string, data interface{}) (*cache, error) {
- f, err := ioutil.TempFile(tempDir, filename)
+func newCache(filename string, data interface{}) (*cache, error) {
+ f, err := os.CreateTemp("", filename)
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/cache_test.go b/workhorse/internal/lsif_transformer/parser/cache_test.go
index 23a2ac6e9a9..c5d4479d973 100644
--- a/workhorse/internal/lsif_transformer/parser/cache_test.go
+++ b/workhorse/internal/lsif_transformer/parser/cache_test.go
@@ -1,7 +1,7 @@
package parser
import (
- "io/ioutil"
+ "io"
"testing"
"github.com/stretchr/testify/require"
@@ -13,7 +13,7 @@ type chunk struct {
}
func TestCache(t *testing.T) {
- cache, err := newCache("", "test-chunks", chunk{})
+ cache, err := newCache("test-chunks", chunk{})
require.NoError(t, err)
defer cache.Close()
@@ -21,7 +21,7 @@ func TestCache(t *testing.T) {
require.NoError(t, cache.SetEntry(1, &c))
require.NoError(t, cache.setOffset(0))
- content, err := ioutil.ReadAll(cache.file)
+ content, err := io.ReadAll(cache.file)
require.NoError(t, err)
expected := []byte{0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x2, 0x0}
diff --git a/workhorse/internal/lsif_transformer/parser/docs.go b/workhorse/internal/lsif_transformer/parser/docs.go
index c626e07d3fe..f87bc7fd86c 100644
--- a/workhorse/internal/lsif_transformer/parser/docs.go
+++ b/workhorse/internal/lsif_transformer/parser/docs.go
@@ -35,8 +35,8 @@ type Metadata struct {
Root string `json:"projectRoot"`
}
-func NewDocs(config Config) (*Docs, error) {
- ranges, err := NewRanges(config)
+func NewDocs() (*Docs, error) {
+ ranges, err := NewRanges()
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/docs_test.go b/workhorse/internal/lsif_transformer/parser/docs_test.go
index 57dca8e773d..24e3eba8ac5 100644
--- a/workhorse/internal/lsif_transformer/parser/docs_test.go
+++ b/workhorse/internal/lsif_transformer/parser/docs_test.go
@@ -14,7 +14,7 @@ func createLine(id, label, uri string) []byte {
}
func TestParse(t *testing.T) {
- d, err := NewDocs(Config{})
+ d, err := NewDocs()
require.NoError(t, err)
defer d.Close()
@@ -31,7 +31,7 @@ func TestParse(t *testing.T) {
}
func TestParseContainsLine(t *testing.T) {
- d, err := NewDocs(Config{})
+ d, err := NewDocs()
require.NoError(t, err)
defer d.Close()
@@ -44,7 +44,7 @@ func TestParseContainsLine(t *testing.T) {
}
func TestParsingVeryLongLine(t *testing.T) {
- d, err := NewDocs(Config{})
+ d, err := NewDocs()
require.NoError(t, err)
defer d.Close()
diff --git a/workhorse/internal/lsif_transformer/parser/hovers.go b/workhorse/internal/lsif_transformer/parser/hovers.go
index a13c7e4c5c2..33d45829295 100644
--- a/workhorse/internal/lsif_transformer/parser/hovers.go
+++ b/workhorse/internal/lsif_transformer/parser/hovers.go
@@ -2,7 +2,6 @@ package parser
import (
"encoding/json"
- "io/ioutil"
"os"
)
@@ -36,10 +35,8 @@ type ResultSetRef struct {
RefId Id `json:"inV"`
}
-func NewHovers(config Config) (*Hovers, error) {
- tempPath := config.TempPath
-
- file, err := ioutil.TempFile(tempPath, "hovers")
+func NewHovers() (*Hovers, error) {
+ file, err := os.CreateTemp("", "hovers")
if err != nil {
return nil, err
}
@@ -48,7 +45,7 @@ func NewHovers(config Config) (*Hovers, error) {
return nil, err
}
- offsets, err := newCache(tempPath, "hovers-indexes", Offset{})
+ offsets, err := newCache("hovers-indexes", Offset{})
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/hovers_test.go b/workhorse/internal/lsif_transformer/parser/hovers_test.go
index 3037be103af..5b2166c07a1 100644
--- a/workhorse/internal/lsif_transformer/parser/hovers_test.go
+++ b/workhorse/internal/lsif_transformer/parser/hovers_test.go
@@ -19,7 +19,7 @@ func TestHoversRead(t *testing.T) {
}
func setupHovers(t *testing.T) *Hovers {
- h, err := NewHovers(Config{})
+ h, err := NewHovers()
require.NoError(t, err)
require.NoError(t, h.Read("hoverResult", []byte(`{"id":"2","label":"hoverResult","result":{"contents": ["hello"]}}`)))
diff --git a/workhorse/internal/lsif_transformer/parser/parser.go b/workhorse/internal/lsif_transformer/parser/parser.go
index 085e7a856aa..2e4f925950c 100644
--- a/workhorse/internal/lsif_transformer/parser/parser.go
+++ b/workhorse/internal/lsif_transformer/parser/parser.go
@@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"os"
"gitlab.com/gitlab-org/labkit/log"
@@ -22,18 +21,14 @@ type Parser struct {
pr *io.PipeReader
}
-type Config struct {
- TempPath string
-}
-
-func NewParser(ctx context.Context, r io.Reader, config Config) (io.ReadCloser, error) {
- docs, err := NewDocs(config)
+func NewParser(ctx context.Context, r io.Reader) (io.ReadCloser, error) {
+ docs, err := NewDocs()
if err != nil {
return nil, err
}
// ZIP files need to be seekable. Don't hold it all in RAM, use a tempfile
- tempFile, err := ioutil.TempFile(config.TempPath, Lsif)
+ tempFile, err := os.CreateTemp("", Lsif)
if err != nil {
return nil, err
}
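
With the Config struct gone, NewParser only needs a context and a reader, and its temp files now land in the system default temp directory. A small sketch of a caller under that assumption (transformLsif is an illustrative name):

package parserexample

import (
	"context"
	"io"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
)

// transformLsif streams an LSIF upload through the parser and copies the
// transformed zip to out, closing the parser when done.
func transformLsif(ctx context.Context, upload io.Reader, out io.Writer) error {
	pr, err := parser.NewParser(ctx, upload)
	if err != nil {
		return err
	}
	defer pr.Close()

	_, err = io.Copy(out, pr)
	return err
}
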
diff --git a/workhorse/internal/lsif_transformer/parser/parser_test.go b/workhorse/internal/lsif_transformer/parser/parser_test.go
index 3a4d72360e2..6887f699cb3 100644
--- a/workhorse/internal/lsif_transformer/parser/parser_test.go
+++ b/workhorse/internal/lsif_transformer/parser/parser_test.go
@@ -6,7 +6,6 @@ import (
"context"
"encoding/json"
"io"
- "io/ioutil"
"os"
"path/filepath"
"testing"
@@ -26,13 +25,13 @@ func TestGenerate(t *testing.T) {
}
func verifyCorrectnessOf(t *testing.T, tmpDir, fileName string) {
- file, err := ioutil.ReadFile(filepath.Join(tmpDir, fileName))
+ file, err := os.ReadFile(filepath.Join(tmpDir, fileName))
require.NoError(t, err)
var buf bytes.Buffer
require.NoError(t, json.Indent(&buf, file, "", " "))
- expected, err := ioutil.ReadFile(filepath.Join("testdata/expected/", fileName))
+ expected, err := os.ReadFile(filepath.Join("testdata/expected/", fileName))
require.NoError(t, err)
require.Equal(t, string(expected), buf.String())
@@ -43,7 +42,7 @@ func createFiles(t *testing.T, filePath, tmpDir string) {
file, err := os.Open(filePath)
require.NoError(t, err)
- parser, err := NewParser(context.Background(), file, Config{})
+ parser, err := NewParser(context.Background(), file)
require.NoError(t, err)
zipFileName := tmpDir + ".zip"
diff --git a/workhorse/internal/lsif_transformer/parser/performance_test.go b/workhorse/internal/lsif_transformer/parser/performance_test.go
index 5a12d90072f..f4adb7a52be 100644
--- a/workhorse/internal/lsif_transformer/parser/performance_test.go
+++ b/workhorse/internal/lsif_transformer/parser/performance_test.go
@@ -3,7 +3,6 @@ package parser
import (
"context"
"io"
- "io/ioutil"
"os"
"runtime"
"testing"
@@ -22,10 +21,10 @@ func BenchmarkGenerate(b *testing.B) {
file, err := os.Open(filePath)
require.NoError(b, err)
- parser, err := NewParser(context.Background(), file, Config{})
+ parser, err := NewParser(context.Background(), file)
require.NoError(b, err)
- _, err = io.Copy(ioutil.Discard, parser)
+ _, err = io.Copy(io.Discard, parser)
require.NoError(b, err)
require.NoError(b, parser.Close())
})
diff --git a/workhorse/internal/lsif_transformer/parser/ranges.go b/workhorse/internal/lsif_transformer/parser/ranges.go
index 3786e15186e..0b4bd588e16 100644
--- a/workhorse/internal/lsif_transformer/parser/ranges.go
+++ b/workhorse/internal/lsif_transformer/parser/ranges.go
@@ -50,18 +50,18 @@ type SerializedRange struct {
References []SerializedReference `json:"references,omitempty"`
}
-func NewRanges(config Config) (*Ranges, error) {
- hovers, err := NewHovers(config)
+func NewRanges() (*Ranges, error) {
+ hovers, err := NewHovers()
if err != nil {
return nil, err
}
- references, err := NewReferences(config)
+ references, err := NewReferences()
if err != nil {
return nil, err
}
- cache, err := newCache(config.TempPath, "ranges", Range{})
+ cache, err := newCache("ranges", Range{})
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/ranges_test.go b/workhorse/internal/lsif_transformer/parser/ranges_test.go
index c1400ba61da..807945b41b1 100644
--- a/workhorse/internal/lsif_transformer/parser/ranges_test.go
+++ b/workhorse/internal/lsif_transformer/parser/ranges_test.go
@@ -42,7 +42,7 @@ func TestSerialize(t *testing.T) {
}
func setup(t *testing.T) (*Ranges, func()) {
- r, err := NewRanges(Config{})
+ r, err := NewRanges()
require.NoError(t, err)
require.NoError(t, r.Read("range", []byte(`{"id":1,"label":"range","start":{"line":1,"character":2}}`)))
diff --git a/workhorse/internal/lsif_transformer/parser/references.go b/workhorse/internal/lsif_transformer/parser/references.go
index 39c34105fd1..815f6dfff49 100644
--- a/workhorse/internal/lsif_transformer/parser/references.go
+++ b/workhorse/internal/lsif_transformer/parser/references.go
@@ -19,15 +19,13 @@ type SerializedReference struct {
Path string `json:"path"`
}
-func NewReferences(config Config) (*References, error) {
- tempPath := config.TempPath
-
- items, err := newCache(tempPath, "references", Item{})
+func NewReferences() (*References, error) {
+ items, err := newCache("references", Item{})
if err != nil {
return nil, err
}
- offsets, err := newCache(tempPath, "references-offsets", ReferencesOffset{})
+ offsets, err := newCache("references-offsets", ReferencesOffset{})
if err != nil {
return nil, err
}
diff --git a/workhorse/internal/lsif_transformer/parser/references_test.go b/workhorse/internal/lsif_transformer/parser/references_test.go
index 7b47513bc53..0bf18e44c01 100644
--- a/workhorse/internal/lsif_transformer/parser/references_test.go
+++ b/workhorse/internal/lsif_transformer/parser/references_test.go
@@ -12,7 +12,7 @@ func TestReferencesStore(t *testing.T) {
refId = 3
)
- r, err := NewReferences(Config{})
+ r, err := NewReferences()
require.NoError(t, err)
err = r.Store(refId, []Item{{Line: 2, DocId: docId}, {Line: 3, DocId: docId}})
@@ -30,7 +30,7 @@ func TestReferencesStore(t *testing.T) {
func TestReferencesStoreEmpty(t *testing.T) {
const refId = 3
- r, err := NewReferences(Config{})
+ r, err := NewReferences()
require.NoError(t, err)
err = r.Store(refId, []Item{})
diff --git a/workhorse/internal/secret/jwt.go b/workhorse/internal/secret/jwt.go
index 804f3a9aba9..ce0de6ca38d 100644
--- a/workhorse/internal/secret/jwt.go
+++ b/workhorse/internal/secret/jwt.go
@@ -7,7 +7,7 @@ import (
)
var (
- DefaultClaims = jwt.StandardClaims{Issuer: "gitlab-workhorse"}
+ DefaultClaims = jwt.RegisteredClaims{Issuer: "gitlab-workhorse"}
)
func JWTTokenString(claims jwt.Claims) (string, error) {
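
StandardClaims is deprecated in newer golang-jwt releases in favour of RegisteredClaims; Issuer stays a plain string while time-based fields move to *jwt.NumericDate. A standalone sketch of signing with the new type (the /v4 import path is an assumption; workhorse pins its own version):

package main

import (
	"fmt"
	"time"

	"github.com/golang-jwt/jwt/v4" // assumed major version
)

func main() {
	// RegisteredClaims replaces the deprecated StandardClaims type.
	claims := jwt.RegisteredClaims{
		Issuer:    "gitlab-workhorse",
		ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Minute)),
	}

	token, err := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString([]byte("secret"))
	if err != nil {
		panic(err)
	}
	fmt.Println(token)
}
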
diff --git a/workhorse/internal/secret/secret.go b/workhorse/internal/secret/secret.go
index e8c7c25393c..664f07a52c0 100644
--- a/workhorse/internal/secret/secret.go
+++ b/workhorse/internal/secret/secret.go
@@ -3,7 +3,7 @@ package secret
import (
"encoding/base64"
"fmt"
- "io/ioutil"
+ "os"
"sync"
)
@@ -57,7 +57,7 @@ func setBytes() ([]byte, error) {
return theSecret.bytes, nil
}
- base64Bytes, err := ioutil.ReadFile(theSecret.path)
+ base64Bytes, err := os.ReadFile(theSecret.path)
if err != nil {
return nil, fmt.Errorf("secret.setBytes: read %q: %v", theSecret.path, err)
}
diff --git a/workhorse/internal/senddata/contentprocessor/contentprocessor_test.go b/workhorse/internal/senddata/contentprocessor/contentprocessor_test.go
index b009cda1a24..ce7f7921589 100644
--- a/workhorse/internal/senddata/contentprocessor/contentprocessor_test.go
+++ b/workhorse/internal/senddata/contentprocessor/contentprocessor_test.go
@@ -2,7 +2,6 @@ package contentprocessor
import (
"io"
- "io/ioutil"
"net/http"
"net/http/httptest"
"testing"
@@ -306,7 +305,7 @@ func makeRequest(t *testing.T, handler http.HandlerFunc, body string, dispositio
SetContentHeaders(handler).ServeHTTP(rw, req)
resp := rw.Result()
- respBody, err := ioutil.ReadAll(resp.Body)
+ respBody, err := io.ReadAll(resp.Body)
require.NoError(t, err)
require.Equal(t, body, string(respBody))
diff --git a/workhorse/internal/senddata/writer_test.go b/workhorse/internal/senddata/writer_test.go
index fd6f7df5e64..b0c808c6158 100644
--- a/workhorse/internal/senddata/writer_test.go
+++ b/workhorse/internal/senddata/writer_test.go
@@ -2,7 +2,6 @@ package senddata
import (
"io"
- "io/ioutil"
"net/http"
"net/http/httptest"
"strings"
@@ -47,7 +46,7 @@ func TestWriter(t *testing.T) {
recorder.Flush()
body := recorder.Result().Body
- data, err := ioutil.ReadAll(body)
+ data, err := io.ReadAll(body)
require.NoError(t, err)
require.NoError(t, body.Close())
diff --git a/workhorse/internal/sendfile/sendfile.go b/workhorse/internal/sendfile/sendfile.go
index 75e497f0564..07b1789445a 100644
--- a/workhorse/internal/sendfile/sendfile.go
+++ b/workhorse/internal/sendfile/sendfile.go
@@ -9,7 +9,6 @@ package sendfile
import (
"fmt"
"io"
- "io/ioutil"
"net/http"
"regexp"
@@ -128,7 +127,7 @@ func sendFileFromDisk(w http.ResponseWriter, r *http.Request, file string) {
countSendFileMetrics(fi.Size(), r)
if contentTypeHeaderPresent {
- data, err := ioutil.ReadAll(io.LimitReader(content, headers.MaxDetectSize))
+ data, err := io.ReadAll(io.LimitReader(content, headers.MaxDetectSize))
if err != nil {
helper.Fail500(w, r, fmt.Errorf("content type detection: %v", err))
return
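
The hunk above keeps content-type detection bounded by reading at most headers.MaxDetectSize bytes through io.LimitReader. A self-contained sketch of the same pattern using the standard library's sniffer (maxDetectSize here is a local stand-in for the workhorse constant; http.DetectContentType itself looks at no more than 512 bytes):

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

const maxDetectSize = 512

// detectContentType reads a bounded prefix of content and sniffs its type
// without pulling the whole stream into memory.
func detectContentType(content io.Reader) (string, []byte, error) {
	data, err := io.ReadAll(io.LimitReader(content, maxDetectSize))
	if err != nil {
		return "", nil, err
	}
	return http.DetectContentType(data), data, nil
}

func main() {
	ct, _, err := detectContentType(strings.NewReader("<html><body>hi</body></html>"))
	if err != nil {
		panic(err)
	}
	fmt.Println(ct) // text/html; charset=utf-8
}
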
diff --git a/workhorse/internal/sendfile/sendfile_test.go b/workhorse/internal/sendfile/sendfile_test.go
index f01bee0b799..002de7f9f3e 100644
--- a/workhorse/internal/sendfile/sendfile_test.go
+++ b/workhorse/internal/sendfile/sendfile_test.go
@@ -1,9 +1,10 @@
package sendfile
import (
- "io/ioutil"
+ "io"
"net/http"
"net/http/httptest"
+ "os"
"testing"
"github.com/stretchr/testify/require"
@@ -15,7 +16,7 @@ func TestResponseWriter(t *testing.T) {
upstreamResponse := "hello world"
fixturePath := "testdata/sent-file.txt"
- fixtureContent, err := ioutil.ReadFile(fixturePath)
+ fixtureContent, err := os.ReadFile(fixturePath)
require.NoError(t, err)
testCases := []struct {
@@ -52,7 +53,7 @@ func TestResponseWriter(t *testing.T) {
rw.Flush()
body := rw.Result().Body
- data, err := ioutil.ReadAll(body)
+ data, err := io.ReadAll(body)
require.NoError(t, err)
require.NoError(t, body.Close())
@@ -90,7 +91,7 @@ func TestSuccessOverrideContentHeadersFeatureEnabled(t *testing.T) {
func TestSuccessOverrideContentHeadersRangeRequestFeatureEnabled(t *testing.T) {
fixturePath := "../../testdata/forgedfile.png"
- fixtureContent, err := ioutil.ReadFile(fixturePath)
+ fixtureContent, err := os.ReadFile(fixturePath)
require.NoError(t, err)
r, err := http.NewRequest("GET", "/foo", nil)
@@ -113,7 +114,7 @@ func TestSuccessOverrideContentHeadersRangeRequestFeatureEnabled(t *testing.T) {
resp := rw.Result()
body := resp.Body
- data, err := ioutil.ReadAll(body)
+ data, err := io.ReadAll(body)
require.NoError(t, err)
require.NoError(t, body.Close())
@@ -138,7 +139,7 @@ func TestSuccessInlineWhitelistedTypesFeatureEnabled(t *testing.T) {
}
func makeRequest(t *testing.T, fixturePath string, httpHeaders map[string]string) *http.Response {
- fixtureContent, err := ioutil.ReadFile(fixturePath)
+ fixtureContent, err := os.ReadFile(fixturePath)
require.NoError(t, err)
r, err := http.NewRequest("GET", "/foo", nil)
@@ -161,7 +162,7 @@ func makeRequest(t *testing.T, fixturePath string, httpHeaders map[string]string
resp := rw.Result()
body := resp.Body
- data, err := ioutil.ReadAll(body)
+ data, err := io.ReadAll(body)
require.NoError(t, err)
require.NoError(t, body.Close())
diff --git a/workhorse/internal/sendurl/sendurl_test.go b/workhorse/internal/sendurl/sendurl_test.go
index cc77fff0bfd..bca6b7d3075 100644
--- a/workhorse/internal/sendurl/sendurl_test.go
+++ b/workhorse/internal/sendurl/sendurl_test.go
@@ -3,7 +3,6 @@ package sendurl
import (
"encoding/base64"
"fmt"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -40,7 +39,7 @@ func testEntryServer(t *testing.T, requestURL string, httpHeaders http.Header, a
serveFile := func(w http.ResponseWriter, r *http.Request) {
require.Equal(t, "GET", r.Method)
- tempFile, err := ioutil.TempFile("", "download_file")
+ tempFile, err := os.CreateTemp("", "download_file")
require.NoError(t, err)
require.NoError(t, os.Remove(tempFile.Name()))
defer tempFile.Close()
diff --git a/workhorse/internal/staticpages/deploy_page.go b/workhorse/internal/staticpages/deploy_page.go
index 35ebafa66e1..3dc2d982981 100644
--- a/workhorse/internal/staticpages/deploy_page.go
+++ b/workhorse/internal/staticpages/deploy_page.go
@@ -1,8 +1,8 @@
package staticpages
import (
- "io/ioutil"
"net/http"
+ "os"
"path/filepath"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
@@ -12,7 +12,7 @@ func (s *Static) DeployPage(handler http.Handler) http.Handler {
deployPage := filepath.Join(s.DocumentRoot, "index.html")
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- data, err := ioutil.ReadFile(deployPage)
+ data, err := os.ReadFile(deployPage)
if err != nil {
handler.ServeHTTP(w, r)
return
diff --git a/workhorse/internal/staticpages/deploy_page_test.go b/workhorse/internal/staticpages/deploy_page_test.go
index bc413880184..52ac69da23c 100644
--- a/workhorse/internal/staticpages/deploy_page_test.go
+++ b/workhorse/internal/staticpages/deploy_page_test.go
@@ -1,7 +1,6 @@
package staticpages
import (
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -14,11 +13,7 @@ import (
)
func TestIfNoDeployPageExist(t *testing.T) {
- dir, err := ioutil.TempDir("", "deploy")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
w := httptest.NewRecorder()
@@ -33,14 +28,10 @@ func TestIfNoDeployPageExist(t *testing.T) {
}
func TestIfDeployPageExist(t *testing.T) {
- dir, err := ioutil.TempDir("", "deploy")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
deployPage := "DEPLOY"
- ioutil.WriteFile(filepath.Join(dir, "index.html"), []byte(deployPage), 0600)
+ os.WriteFile(filepath.Join(dir, "index.html"), []byte(deployPage), 0600)
w := httptest.NewRecorder()
diff --git a/workhorse/internal/staticpages/error_pages.go b/workhorse/internal/staticpages/error_pages.go
index 6b86e21940b..e0ba7a5ceef 100644
--- a/workhorse/internal/staticpages/error_pages.go
+++ b/workhorse/internal/staticpages/error_pages.go
@@ -3,8 +3,8 @@ package staticpages
import (
"encoding/json"
"fmt"
- "io/ioutil"
"net/http"
+ "os"
"path/filepath"
"github.com/prometheus/client_golang/prometheus"
@@ -97,7 +97,7 @@ func (s *errorPageResponseWriter) writeHTML() (string, []byte) {
errorPageFile := filepath.Join(s.path, fmt.Sprintf("%d.html", s.status))
// check if custom error page exists, serve this page instead
- if data, err := ioutil.ReadFile(errorPageFile); err == nil {
+ if data, err := os.ReadFile(errorPageFile); err == nil {
return "text/html; charset=utf-8", data
}
}
diff --git a/workhorse/internal/staticpages/error_pages_test.go b/workhorse/internal/staticpages/error_pages_test.go
index c9927668fcc..12c268fb40b 100644
--- a/workhorse/internal/staticpages/error_pages_test.go
+++ b/workhorse/internal/staticpages/error_pages_test.go
@@ -2,7 +2,6 @@ package staticpages
import (
"fmt"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -15,14 +14,10 @@ import (
)
func TestIfErrorPageIsPresented(t *testing.T) {
- dir, err := ioutil.TempDir("", "error_page")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
errorPage := "ERROR"
- ioutil.WriteFile(filepath.Join(dir, "404.html"), []byte(errorPage), 0600)
+ os.WriteFile(filepath.Join(dir, "404.html"), []byte(errorPage), 0600)
w := httptest.NewRecorder()
h := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
@@ -42,11 +37,7 @@ func TestIfErrorPageIsPresented(t *testing.T) {
}
func TestIfErrorPassedIfNoErrorPageIsFound(t *testing.T) {
- dir, err := ioutil.TempDir("", "error_page")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
w := httptest.NewRecorder()
errorResponse := "ERROR"
@@ -63,14 +54,10 @@ func TestIfErrorPassedIfNoErrorPageIsFound(t *testing.T) {
}
func TestIfErrorPageIsIgnoredInDevelopment(t *testing.T) {
- dir, err := ioutil.TempDir("", "error_page")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
errorPage := "ERROR"
- ioutil.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
+ os.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
w := httptest.NewRecorder()
serverError := "Interesting Server Error"
@@ -86,14 +73,10 @@ func TestIfErrorPageIsIgnoredInDevelopment(t *testing.T) {
}
func TestIfErrorPageIsIgnoredIfCustomError(t *testing.T) {
- dir, err := ioutil.TempDir("", "error_page")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
errorPage := "ERROR"
- ioutil.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
+ os.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
w := httptest.NewRecorder()
serverError := "Interesting Server Error"
@@ -121,14 +104,10 @@ func TestErrorPageInterceptedByContentType(t *testing.T) {
}
for _, tc := range testCases {
- dir, err := ioutil.TempDir("", "error_page")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
errorPage := "ERROR"
- ioutil.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
+ os.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600)
w := httptest.NewRecorder()
serverError := "Interesting Server Error"
diff --git a/workhorse/internal/staticpages/servefile_test.go b/workhorse/internal/staticpages/servefile_test.go
index 67675beccf8..231c0e59068 100644
--- a/workhorse/internal/staticpages/servefile_test.go
+++ b/workhorse/internal/staticpages/servefile_test.go
@@ -3,7 +3,6 @@ package staticpages
import (
"bytes"
"compress/gzip"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -26,11 +25,7 @@ func TestServingNonExistingFile(t *testing.T) {
}
func TestServingDirectory(t *testing.T) {
- dir, err := ioutil.TempDir("", "deploy")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
httpRequest, _ := http.NewRequest("GET", "/file", nil)
w := httptest.NewRecorder()
@@ -64,16 +59,12 @@ func TestExecutingHandlerWhenNoFileFound(t *testing.T) {
}
func TestServingTheActualFile(t *testing.T) {
- dir, err := ioutil.TempDir("", "deploy")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
httpRequest, _ := http.NewRequest("GET", "/file", nil)
fileContent := "STATIC"
- ioutil.WriteFile(filepath.Join(dir, "file"), []byte(fileContent), 0600)
+ os.WriteFile(filepath.Join(dir, "file"), []byte(fileContent), 0600)
w := httptest.NewRecorder()
st := &Static{DocumentRoot: dir}
@@ -121,11 +112,7 @@ func TestExcludedPaths(t *testing.T) {
}
func testServingThePregzippedFile(t *testing.T, enableGzip bool) {
- dir, err := ioutil.TempDir("", "deploy")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(dir)
+ dir := t.TempDir()
httpRequest, _ := http.NewRequest("GET", "/file", nil)
@@ -140,8 +127,8 @@ func testServingThePregzippedFile(t *testing.T, enableGzip bool) {
fileGzip.Write([]byte(fileContent))
fileGzip.Close()
- ioutil.WriteFile(filepath.Join(dir, "file.gz"), fileGzipContent.Bytes(), 0600)
- ioutil.WriteFile(filepath.Join(dir, "file"), []byte(fileContent), 0600)
+ os.WriteFile(filepath.Join(dir, "file.gz"), fileGzipContent.Bytes(), 0600)
+ os.WriteFile(filepath.Join(dir, "file"), []byte(fileContent), 0600)
w := httptest.NewRecorder()
st := &Static{DocumentRoot: dir}
diff --git a/workhorse/internal/testhelper/gitaly.go b/workhorse/internal/testhelper/gitaly.go
index 747d5e6d078..488fb2eaf01 100644
--- a/workhorse/internal/testhelper/gitaly.go
+++ b/workhorse/internal/testhelper/gitaly.go
@@ -4,7 +4,7 @@ import (
"bytes"
"fmt"
"io"
- "io/ioutil"
+ "os"
"path"
"strings"
"sync"
@@ -49,10 +49,10 @@ var (
func init() {
var err error
- if GitalyReceivePackResponseMock, err = ioutil.ReadFile(path.Join(RootDir(), "testdata/receive-pack-fixture.txt")); err != nil {
+ if GitalyReceivePackResponseMock, err = os.ReadFile(path.Join(RootDir(), "testdata/receive-pack-fixture.txt")); err != nil {
log.WithError(err).Fatal("Unable to read pack response")
}
- if GitalyUploadPackResponseMock, err = ioutil.ReadFile(path.Join(RootDir(), "testdata/upload-pack-fixture.txt")); err != nil {
+ if GitalyUploadPackResponseMock, err = os.ReadFile(path.Join(RootDir(), "testdata/upload-pack-fixture.txt")); err != nil {
log.WithError(err).Fatal("Unable to read pack response")
}
}
diff --git a/workhorse/internal/testhelper/testhelper.go b/workhorse/internal/testhelper/testhelper.go
index 6bbdfddcd60..6ea5c1c73e1 100644
--- a/workhorse/internal/testhelper/testhelper.go
+++ b/workhorse/internal/testhelper/testhelper.go
@@ -4,7 +4,6 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -121,7 +120,7 @@ func RootDir() string {
func LoadFile(t *testing.T, filePath string) string {
t.Helper()
- content, err := ioutil.ReadFile(path.Join(RootDir(), filePath))
+ content, err := os.ReadFile(path.Join(RootDir(), filePath))
require.NoError(t, err)
return string(content)
}
@@ -129,7 +128,7 @@ func LoadFile(t *testing.T, filePath string) string {
func ReadAll(t *testing.T, r io.Reader) []byte {
t.Helper()
- b, err := ioutil.ReadAll(r)
+ b, err := io.ReadAll(r)
require.NoError(t, err)
return b
}
@@ -152,7 +151,7 @@ func ParseJWT(token *jwt.Token) (interface{}, error) {
// UploadClaims represents the JWT claim for upload parameters
type UploadClaims struct {
Upload map[string]string `json:"upload"`
- jwt.StandardClaims
+ jwt.RegisteredClaims
}
func Retry(t testing.TB, timeout time.Duration, fn func() error) {
@@ -176,7 +175,7 @@ func SetupStaticFileHelper(t *testing.T, fpath, content, directory string) strin
require.NoError(t, os.MkdirAll(path.Join(absDocumentRoot, path.Dir(fpath)), 0755), "create document root")
staticFile := path.Join(absDocumentRoot, fpath)
- require.NoError(t, ioutil.WriteFile(staticFile, []byte(content), 0666), "write file content")
+ require.NoError(t, os.WriteFile(staticFile, []byte(content), 0666), "write file content")
return absDocumentRoot
}
diff --git a/workhorse/internal/upload/artifacts_store_test.go b/workhorse/internal/upload/artifacts_store_test.go
index 97e66fc37a4..7032313fbde 100644
--- a/workhorse/internal/upload/artifacts_store_test.go
+++ b/workhorse/internal/upload/artifacts_store_test.go
@@ -6,7 +6,7 @@ import (
"crypto/md5"
"encoding/hex"
"fmt"
- "io/ioutil"
+ "io"
"mime/multipart"
"net/http"
"net/http/httptest"
@@ -56,16 +56,11 @@ func testUploadArtifactsFromTestZip(t *testing.T, ts *httptest.Server) *httptest
}
func TestUploadHandlerSendingToExternalStorage(t *testing.T) {
- tempPath, err := ioutil.TempDir("", "uploads")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(tempPath)
+ tempPath := t.TempDir()
archiveData, md5 := createTestZipArchive(t)
- archiveFile, err := ioutil.TempFile("", "artifact.zip")
+ archiveFile, err := os.CreateTemp(tempPath, "artifact.zip")
require.NoError(t, err)
- defer os.Remove(archiveFile.Name())
_, err = archiveFile.Write(archiveData)
require.NoError(t, err)
archiveFile.Close()
@@ -75,7 +70,7 @@ func TestUploadHandlerSendingToExternalStorage(t *testing.T) {
storeServerMux.HandleFunc("/url/put", func(w http.ResponseWriter, r *http.Request) {
require.Equal(t, "PUT", r.Method)
- receivedData, err := ioutil.ReadAll(r.Body)
+ receivedData, err := io.ReadAll(r.Body)
require.NoError(t, err)
require.Equal(t, archiveData, receivedData)
@@ -135,11 +130,7 @@ func TestUploadHandlerSendingToExternalStorage(t *testing.T) {
}
func TestUploadHandlerSendingToExternalStorageAndStorageServerUnreachable(t *testing.T) {
- tempPath, err := ioutil.TempDir("", "uploads")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(tempPath)
+ tempPath := t.TempDir()
responseProcessor := func(w http.ResponseWriter, r *http.Request) {
t.Fatal("it should not be called")
@@ -161,11 +152,7 @@ func TestUploadHandlerSendingToExternalStorageAndStorageServerUnreachable(t *tes
}
func TestUploadHandlerSendingToExternalStorageAndInvalidURLIsUsed(t *testing.T) {
- tempPath, err := ioutil.TempDir("", "uploads")
- if err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(tempPath)
+ tempPath := t.TempDir()
responseProcessor := func(w http.ResponseWriter, r *http.Request) {
t.Fatal("it should not be called")
diff --git a/workhorse/internal/upload/artifacts_upload_test.go b/workhorse/internal/upload/artifacts_upload_test.go
index 96eb3810673..c94129092c6 100644
--- a/workhorse/internal/upload/artifacts_upload_test.go
+++ b/workhorse/internal/upload/artifacts_upload_test.go
@@ -7,7 +7,6 @@ import (
"encoding/json"
"fmt"
"io"
- "io/ioutil"
"mime/multipart"
"net/http"
"net/http/httptest"
@@ -72,7 +71,7 @@ func testArtifactsUploadServer(t *testing.T, authResponse *api.Response, bodyPro
return
}
- _, err := ioutil.ReadFile(r.FormValue("file.path"))
+ _, err := os.ReadFile(r.FormValue("file.path"))
if err != nil {
t.Fatal("Expected file to be readable")
return
@@ -85,7 +84,7 @@ func testArtifactsUploadServer(t *testing.T, authResponse *api.Response, bodyPro
}
if r.FormValue("metadata.path") != "" {
- metadata, err := ioutil.ReadFile(r.FormValue("metadata.path"))
+ metadata, err := os.ReadFile(r.FormValue("metadata.path"))
if err != nil {
t.Fatal("Expected metadata to be readable")
return
@@ -96,7 +95,7 @@ func testArtifactsUploadServer(t *testing.T, authResponse *api.Response, bodyPro
return
}
defer gz.Close()
- metadata, err = ioutil.ReadAll(gz)
+ metadata, err = io.ReadAll(gz)
if err != nil {
t.Fatal("Expected metadata to be valid")
return
@@ -130,8 +129,7 @@ type testServer struct {
}
func setupWithTmpPath(t *testing.T, filename string, includeFormat bool, format string, authResponse *api.Response, bodyProcessor func(w http.ResponseWriter, r *http.Request)) *testServer {
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
+ tempPath := t.TempDir()
if authResponse == nil {
authResponse = &api.Response{TempPath: tempPath}
@@ -147,7 +145,6 @@ func setupWithTmpPath(t *testing.T, filename string, includeFormat bool, format
cleanup := func() {
ts.Close()
- require.NoError(t, os.RemoveAll(tempPath))
require.NoError(t, writer.Close())
}
@@ -292,8 +289,7 @@ func TestUploadFormProcessing(t *testing.T) {
}
func TestLsifFileProcessing(t *testing.T) {
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
+ tempPath := t.TempDir()
s := setupWithTmpPath(t, "file", true, "zip", &api.Response{TempPath: tempPath, ProcessLsif: true}, nil)
defer s.cleanup()
@@ -312,8 +308,7 @@ func TestLsifFileProcessing(t *testing.T) {
}
func TestInvalidLsifFileProcessing(t *testing.T) {
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
+ tempPath := t.TempDir()
s := setupWithTmpPath(t, "file", true, "zip", &api.Response{TempPath: tempPath, ProcessLsif: true}, nil)
defer s.cleanup()
diff --git a/workhorse/internal/upload/artifacts_uploader.go b/workhorse/internal/upload/artifacts_uploader.go
index c1c49638e21..a8c944a1d33 100644
--- a/workhorse/internal/upload/artifacts_uploader.go
+++ b/workhorse/internal/upload/artifacts_uploader.go
@@ -17,6 +17,7 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)
@@ -35,7 +36,9 @@ var zipSubcommandsErrorsCounter = promauto.NewCounterVec(
}, []string{"error"})
type artifactsUploadProcessor struct {
- format string
+ format string
+ processLSIF bool
+ tempDir string
SavedFileTracker
}
@@ -43,26 +46,23 @@ type artifactsUploadProcessor struct {
// Artifacts is like a Multipart but specific for artifacts upload.
func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
- opts, err := p.Prepare(a)
- if err != nil {
- helper.Fail500(w, r, fmt.Errorf("UploadArtifacts: error preparing file storage options"))
- return
- }
-
format := r.URL.Query().Get(ArtifactFormatKey)
-
- mg := &artifactsUploadProcessor{format: format, SavedFileTracker: SavedFileTracker{Request: r}}
- interceptMultipartFiles(w, r, h, a, mg, opts)
+ mg := &artifactsUploadProcessor{
+ format: format,
+ processLSIF: a.ProcessLsif,
+ tempDir: a.TempPath,
+ SavedFileTracker: SavedFileTracker{Request: r},
+ }
+ interceptMultipartFiles(w, r, h, mg, &eagerAuthorizer{a}, p)
}, "/authorize")
}
func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context, file *destination.FileHandler) (*destination.FileHandler, error) {
- metaReader, metaWriter := io.Pipe()
- defer metaWriter.Close()
-
metaOpts := &destination.UploadOpts{
- LocalTempPath: os.TempDir(),
- TempFilePrefix: "metadata.gz",
+ LocalTempPath: a.tempDir,
+ }
+ if metaOpts.LocalTempPath == "" {
+ metaOpts.LocalTempPath = os.TempDir()
}
fileName := file.LocalPath
@@ -73,24 +73,22 @@ func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context,
zipMd := exec.CommandContext(ctx, "gitlab-zip-metadata", fileName)
zipMd.Stderr = log.ContextLogger(ctx).Writer()
zipMd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
- zipMd.Stdout = metaWriter
+
+ zipMdOut, err := zipMd.StdoutPipe()
+ if err != nil {
+ return nil, err
+ }
+ defer zipMdOut.Close()
if err := zipMd.Start(); err != nil {
return nil, err
}
defer helper.CleanUpProcessGroup(zipMd)
- type saveResult struct {
- error
- *destination.FileHandler
+ fh, err := destination.Upload(ctx, zipMdOut, -1, "metadata.gz", metaOpts)
+ if err != nil {
+ return nil, err
}
- done := make(chan saveResult)
- go func() {
- var result saveResult
- result.FileHandler, result.error = destination.Upload(ctx, metaReader, -1, metaOpts)
-
- done <- result
- }()
if err := zipMd.Wait(); err != nil {
st, ok := helper.ExitStatus(err)
@@ -110,17 +108,15 @@ func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context,
}
}
- metaWriter.Close()
- result := <-done
- return result.FileHandler, result.error
+ return fh, nil
}
func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName string, file *destination.FileHandler, writer *multipart.Writer) error {
// ProcessFile for artifacts requires file form-data field name to eq `file`
-
if formName != "file" {
return fmt.Errorf("invalid form field: %q", formName)
}
+
if a.Count() > 0 {
return fmt.Errorf("artifacts request contains more than one file")
}
@@ -136,7 +132,6 @@ func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName str
return nil
}
- // TODO: can we rely on disk for shipping metadata? Not if we split workhorse and rails in 2 different PODs
metadata, err := a.generateMetadataFromZip(ctx, file)
if err != nil {
return err
@@ -158,6 +153,12 @@ func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName str
return nil
}
-func (a *artifactsUploadProcessor) Name() string {
- return "artifacts"
+func (a *artifactsUploadProcessor) Name() string { return "artifacts" }
+
+func (a *artifactsUploadProcessor) TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error) {
+ if a.processLSIF {
+ return parser.NewParser(ctx, r)
+ }
+
+ return a.SavedFileTracker.TransformContents(ctx, filename, r)
}
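The refactored generateMetadataFromZip drops the io.Pipe, goroutine, and result channel in favour of cmd.StdoutPipe, whose output is consumed before cmd.Wait. A minimal standalone sketch of that pattern, using echo in place of gitlab-zip-metadata (an assumption made only so the snippet runs anywhere with a POSIX shell):

package main

import (
	"context"
	"fmt"
	"io"
	"os/exec"
)

func main() {
	ctx := context.Background()
	// "echo" stands in for gitlab-zip-metadata here.
	cmd := exec.CommandContext(ctx, "echo", "fake-metadata")

	stdout, err := cmd.StdoutPipe() // read side of the child's stdout
	if err != nil {
		panic(err)
	}

	if err := cmd.Start(); err != nil {
		panic(err)
	}

	// Consume stdout fully before Wait; Wait closes the pipe once the child exits.
	out, err := io.ReadAll(stdout)
	if err != nil {
		panic(err)
	}

	if err := cmd.Wait(); err != nil {
		panic(err)
	}
	fmt.Printf("read %d bytes: %s", len(out), out)
}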
diff --git a/workhorse/internal/upload/body_uploader.go b/workhorse/internal/upload/body_uploader.go
index 6fb201fe677..4b5152c283c 100644
--- a/workhorse/internal/upload/body_uploader.go
+++ b/workhorse/internal/upload/body_uploader.go
@@ -2,7 +2,7 @@ package upload
import (
"fmt"
- "io/ioutil"
+ "io"
"net/http"
"net/url"
"strings"
@@ -23,7 +23,7 @@ func RequestBody(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
return
}
- fh, err := destination.Upload(r.Context(), r.Body, r.ContentLength, opts)
+ fh, err := destination.Upload(r.Context(), r.Body, r.ContentLength, "upload", opts)
if err != nil {
helper.Fail500(w, r, fmt.Errorf("RequestBody: upload failed: %v", err))
return
@@ -42,7 +42,7 @@ func RequestBody(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
// Hijack body
body := data.Encode()
- r.Body = ioutil.NopCloser(strings.NewReader(body))
+ r.Body = io.NopCloser(strings.NewReader(body))
r.ContentLength = int64(len(body))
r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
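body_uploader.go is one of many files in this change that migrate from io/ioutil to the io and os packages, their canonical homes since Go 1.16; ioutil.NopCloser and ioutil.ReadAll are now thin aliases for io.NopCloser and io.ReadAll. A small illustration of wrapping a plain reader so it satisfies io.ReadCloser:

package main

import (
	"fmt"
	"io"
	"strings"
)

func main() {
	body := io.NopCloser(strings.NewReader("payload"))
	defer body.Close() // Close is a no-op, but callers can treat the body uniformly

	data, err := io.ReadAll(body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data))
}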
diff --git a/workhorse/internal/upload/body_uploader_test.go b/workhorse/internal/upload/body_uploader_test.go
index 35772be5bc3..837d119e72e 100644
--- a/workhorse/internal/upload/body_uploader_test.go
+++ b/workhorse/internal/upload/body_uploader_test.go
@@ -3,7 +3,6 @@ package upload
import (
"fmt"
"io"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -32,7 +31,7 @@ func TestRequestBody(t *testing.T) {
resp := testUpload(&rails{}, &alwaysLocalPreparer{}, echoProxy(t, fileLen), body)
require.Equal(t, http.StatusOK, resp.StatusCode)
- uploadEcho, err := ioutil.ReadAll(resp.Body)
+ uploadEcho, err := io.ReadAll(resp.Body)
require.NoError(t, err, "Can't read response body")
require.Equal(t, fileContent, string(uploadEcho))
@@ -44,7 +43,7 @@ func TestRequestBodyCustomPreparer(t *testing.T) {
resp := testUpload(&rails{}, &alwaysLocalPreparer{}, echoProxy(t, fileLen), body)
require.Equal(t, http.StatusOK, resp.StatusCode)
- uploadEcho, err := ioutil.ReadAll(resp.Body)
+ uploadEcho, err := io.ReadAll(resp.Body)
require.NoError(t, err, "Can't read response body")
require.Equal(t, fileContent, string(uploadEcho))
}
diff --git a/workhorse/internal/upload/destination/destination.go b/workhorse/internal/upload/destination/destination.go
index b18b6e22a99..5e145e2cb2a 100644
--- a/workhorse/internal/upload/destination/destination.go
+++ b/workhorse/internal/upload/destination/destination.go
@@ -8,7 +8,6 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"os"
"strconv"
"time"
@@ -54,7 +53,7 @@ type FileHandler struct {
type uploadClaims struct {
Upload map[string]string `json:"upload"`
- jwt.StandardClaims
+ jwt.RegisteredClaims
}
// SHA256 hash of the handled file
@@ -97,7 +96,7 @@ func (fh *FileHandler) GitLabFinalizeFields(prefix string) (map[string]string, e
signedData[hashName] = hash
}
- claims := uploadClaims{Upload: signedData, StandardClaims: secret.DefaultClaims}
+ claims := uploadClaims{Upload: signedData, RegisteredClaims: secret.DefaultClaims}
jwtData, err := secret.JWTTokenString(claims)
if err != nil {
return nil, err
@@ -113,9 +112,9 @@ type consumer interface {
// Upload persists the provided reader content to all the location specified in opts. A cleanup will be performed once ctx is Done
// Make sure the provided context will not expire before finalizing upload with GitLab Rails.
-func Upload(ctx context.Context, reader io.Reader, size int64, opts *UploadOpts) (*FileHandler, error) {
+func Upload(ctx context.Context, reader io.Reader, size int64, name string, opts *UploadOpts) (*FileHandler, error) {
fh := &FileHandler{
- Name: opts.TempFilePrefix,
+ Name: name,
RemoteID: opts.RemoteID,
RemoteURL: opts.RemoteURL,
}
@@ -199,13 +198,13 @@ func Upload(ctx context.Context, reader io.Reader, size int64, opts *UploadOpts)
}
logger := log.WithContextFields(ctx, log.Fields{
- "copied_bytes": fh.Size,
- "is_local": opts.IsLocalTempFile(),
- "is_multipart": opts.IsMultipart(),
- "is_remote": !opts.IsLocalTempFile(),
- "remote_id": opts.RemoteID,
- "temp_file_prefix": opts.TempFilePrefix,
- "client_mode": clientMode,
+ "copied_bytes": fh.Size,
+ "is_local": opts.IsLocalTempFile(),
+ "is_multipart": opts.IsMultipart(),
+ "is_remote": !opts.IsLocalTempFile(),
+ "remote_id": opts.RemoteID,
+ "client_mode": clientMode,
+ "filename": fh.Name,
})
if opts.IsLocalTempFile() {
@@ -226,7 +225,7 @@ func (fh *FileHandler) newLocalFile(ctx context.Context, opts *UploadOpts) (cons
return nil, fmt.Errorf("newLocalFile: mkdir %q: %v", opts.LocalTempPath, err)
}
- file, err := ioutil.TempFile(opts.LocalTempPath, opts.TempFilePrefix)
+ file, err := os.CreateTemp(opts.LocalTempPath, "gitlab-workhorse-upload")
if err != nil {
return nil, fmt.Errorf("newLocalFile: create file: %v", err)
}
diff --git a/workhorse/internal/upload/destination/destination_test.go b/workhorse/internal/upload/destination/destination_test.go
index ddf0ea24d60..6ebe163468b 100644
--- a/workhorse/internal/upload/destination/destination_test.go
+++ b/workhorse/internal/upload/destination/destination_test.go
@@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
- "io/ioutil"
"os"
"path"
"strconv"
@@ -43,12 +42,10 @@ func TestUploadWrongSize(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
- require.NoError(t, err)
- defer os.RemoveAll(tmpFolder)
+ tmpFolder := t.TempDir()
- opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file"}
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, opts)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, "upload", opts)
require.Error(t, err)
_, isSizeError := err.(destination.SizeError)
require.True(t, isSizeError, "Should fail with SizeError")
@@ -59,12 +56,10 @@ func TestUploadWithKnownSizeExceedLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
- require.NoError(t, err)
- defer os.RemoveAll(tmpFolder)
+ tmpFolder := t.TempDir()
- opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder, MaximumSize: test.ObjectSize - 1}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", opts)
require.Error(t, err)
_, isSizeError := err.(destination.SizeError)
require.True(t, isSizeError, "Should fail with SizeError")
@@ -75,12 +70,10 @@ func TestUploadWithUnknownSizeExceedLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
- require.NoError(t, err)
- defer os.RemoveAll(tmpFolder)
+ tmpFolder := t.TempDir()
- opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), -1, opts)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder, MaximumSize: test.ObjectSize - 1}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), -1, "upload", opts)
require.Equal(t, err, destination.ErrEntityTooLarge)
require.Nil(t, fh)
}
@@ -117,7 +110,7 @@ func TestUploadWrongETag(t *testing.T) {
osStub.InitiateMultipartUpload(test.ObjectPath)
}
ctx, cancel := context.WithCancel(context.Background())
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", opts)
require.Nil(t, fh)
require.Error(t, err)
require.Equal(t, 1, osStub.PutsCnt(), "File not uploaded")
@@ -139,9 +132,7 @@ func TestUpload(t *testing.T) {
remoteMultipart
)
- tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
- require.NoError(t, err)
- defer os.RemoveAll(tmpFolder)
+ tmpFolder := t.TempDir()
tests := []struct {
name string
@@ -191,13 +182,12 @@ func TestUpload(t *testing.T) {
if spec.local {
opts.LocalTempPath = tmpFolder
- opts.TempFilePrefix = "test-file"
}
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
require.NoError(t, err)
require.NotNil(t, fh)
@@ -211,9 +201,6 @@ func TestUpload(t *testing.T) {
dir := path.Dir(fh.LocalPath)
require.Equal(t, opts.LocalTempPath, dir)
- filename := path.Base(fh.LocalPath)
- beginsWithPrefix := strings.HasPrefix(filename, opts.TempFilePrefix)
- require.True(t, beginsWithPrefix, fmt.Sprintf("LocalPath filename %q do not begin with TempFilePrefix %q", filename, opts.TempFilePrefix))
} else {
require.Empty(t, fh.LocalPath, "LocalPath must be empty for non local uploads")
}
@@ -291,7 +278,7 @@ func TestUploadWithS3WorkhorseClient(t *testing.T) {
MaximumSize: tc.maxSize,
}
- _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), tc.objectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), tc.objectSize, "upload", &opts)
if tc.expectedErr == nil {
require.NoError(t, err)
@@ -305,8 +292,7 @@ func TestUploadWithS3WorkhorseClient(t *testing.T) {
}
func TestUploadWithAzureWorkhorseClient(t *testing.T) {
- mux, bucketDir, cleanup := test.SetupGoCloudFileBucket(t, "azblob")
- defer cleanup()
+ mux, bucketDir := test.SetupGoCloudFileBucket(t, "azblob")
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -324,7 +310,7 @@ func TestUploadWithAzureWorkhorseClient(t *testing.T) {
},
}
- _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
require.NoError(t, err)
test.GoCloudObjectExists(t, bucketDir, remoteObject)
@@ -349,7 +335,7 @@ func TestUploadWithUnknownGoCloudScheme(t *testing.T) {
},
}
- _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
require.Error(t, err)
}
@@ -375,7 +361,7 @@ func TestUploadMultipartInBodyFailure(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
require.Nil(t, fh)
require.Error(t, err)
require.EqualError(t, err, test.MultipartUploadInternalError().Error())
@@ -431,10 +417,6 @@ func TestUploadRemoteFileWithLimit(t *testing.T) {
var opts destination.UploadOpts
for _, remoteType := range remoteTypes {
- tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
- require.NoError(t, err)
- defer os.RemoveAll(tmpFolder)
-
osStub, ts := test.StartObjectStore()
defer ts.Close()
@@ -468,7 +450,7 @@ func TestUploadRemoteFileWithLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := destination.Upload(ctx, strings.NewReader(tc.testData), tc.objectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(tc.testData), tc.objectSize, "upload", &opts)
if tc.expectedErr == nil {
require.NoError(t, err)
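Most test files in this change swap hand-rolled ioutil.TempDir plus deferred os.RemoveAll for t.TempDir, which registers its own cleanup. A minimal example test showing the pattern:

package example

import (
	"os"
	"path/filepath"
	"testing"
)

func TestWritesToTempDir(t *testing.T) {
	// t.TempDir creates a unique directory and removes it automatically
	// when the test finishes, so no defer os.RemoveAll is needed.
	dir := t.TempDir()

	path := filepath.Join(dir, "data.txt")
	if err := os.WriteFile(path, []byte("hello"), 0o600); err != nil {
		t.Fatal(err)
	}
}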
diff --git a/workhorse/internal/upload/destination/filestore/filestore_test.go b/workhorse/internal/upload/destination/filestore/filestore_test.go
index ec67eae96b9..6f0425809fe 100644
--- a/workhorse/internal/upload/destination/filestore/filestore_test.go
+++ b/workhorse/internal/upload/destination/filestore/filestore_test.go
@@ -2,7 +2,6 @@ package filestore
import (
"context"
- "io/ioutil"
"os"
"strings"
"testing"
@@ -12,7 +11,7 @@ import (
)
func TestConsume(t *testing.T) {
- f, err := ioutil.TempFile("", "filestore-local-file")
+ f, err := os.CreateTemp("", "filestore-local-file")
if f != nil {
defer os.Remove(f.Name())
}
@@ -32,7 +31,7 @@ func TestConsume(t *testing.T) {
require.NoError(t, err)
require.Equal(t, int64(len(content)), n)
- consumedContent, err := ioutil.ReadFile(f.Name())
+ consumedContent, err := os.ReadFile(f.Name())
require.NoError(t, err)
require.Equal(t, content, string(consumedContent))
}
diff --git a/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go b/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
index 57b3a35b41e..55d886087be 100644
--- a/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
@@ -15,8 +15,7 @@ import (
)
func TestGoCloudObjectUpload(t *testing.T) {
- mux, _, cleanup := test.SetupGoCloudFileBucket(t, "azuretest")
- defer cleanup()
+ mux, _ := test.SetupGoCloudFileBucket(t, "azuretest")
ctx, cancel := context.WithCancel(context.Background())
deadline := time.Now().Add(testTimeout)
diff --git a/workhorse/internal/upload/destination/objectstore/multipart.go b/workhorse/internal/upload/destination/objectstore/multipart.go
index 4c5b64b27ee..df336d2d901 100644
--- a/workhorse/internal/upload/destination/objectstore/multipart.go
+++ b/workhorse/internal/upload/destination/objectstore/multipart.go
@@ -7,7 +7,6 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"net/http"
"os"
@@ -66,7 +65,7 @@ func (m *Multipart) Upload(ctx context.Context, r io.Reader) error {
}
}
- n, err := io.Copy(ioutil.Discard, r)
+ n, err := io.Copy(io.Discard, r)
if err != nil {
return fmt.Errorf("drain pipe: %v", err)
}
@@ -93,19 +92,19 @@ func (m *Multipart) Delete() {
}
func (m *Multipart) readAndUploadOnePart(ctx context.Context, partURL string, putHeaders map[string]string, src io.Reader, partNumber int) (*completeMultipartUploadPart, error) {
- file, err := ioutil.TempFile("", "part-buffer")
+ file, err := os.CreateTemp("", "part-buffer")
if err != nil {
return nil, fmt.Errorf("create temporary buffer file: %v", err)
}
defer file.Close()
if err := os.Remove(file.Name()); err != nil {
- return nil, err
+ return nil, fmt.Errorf("remove temporary buffer file: %v", err)
}
n, err := io.Copy(file, src)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("copy to temporary buffer file: %v", err)
}
if n == 0 {
return nil, nil
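readAndUploadOnePart keeps its existing trick of unlinking the temporary buffer file immediately after creation so the open descriptor is the only reference; the change only switches to os.CreateTemp and wraps the errors. A small sketch of that anonymous-file pattern, assuming a POSIX filesystem where an open file survives removal of its name:

package main

import (
	"fmt"
	"io"
	"os"
	"strings"
)

func main() {
	f, err := os.CreateTemp("", "part-buffer")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Unlink the name right away: the open descriptor keeps the file
	// alive, and the kernel reclaims the space once it is closed.
	if err := os.Remove(f.Name()); err != nil {
		panic(err)
	}

	if _, err := io.Copy(f, strings.NewReader("buffered part")); err != nil {
		panic(err)
	}
	if _, err := f.Seek(0, io.SeekStart); err != nil {
		panic(err)
	}

	data, _ := io.ReadAll(f)
	fmt.Println(string(data))
}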
diff --git a/workhorse/internal/upload/destination/objectstore/multipart_test.go b/workhorse/internal/upload/destination/objectstore/multipart_test.go
index 4aff3467e30..2a5161e42e7 100644
--- a/workhorse/internal/upload/destination/objectstore/multipart_test.go
+++ b/workhorse/internal/upload/destination/objectstore/multipart_test.go
@@ -2,7 +2,7 @@ package objectstore_test
import (
"context"
- "io/ioutil"
+ "io"
"net/http"
"net/http/httptest"
"strings"
@@ -22,7 +22,7 @@ func TestMultipartUploadWithUpcaseETags(t *testing.T) {
var putCnt, postCnt int
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- _, err := ioutil.ReadAll(r.Body)
+ _, err := io.ReadAll(r.Body)
require.NoError(t, err)
defer r.Body.Close()
diff --git a/workhorse/internal/upload/destination/objectstore/object.go b/workhorse/internal/upload/destination/objectstore/object.go
index b7c4f12f009..68c566861af 100644
--- a/workhorse/internal/upload/destination/objectstore/object.go
+++ b/workhorse/internal/upload/destination/objectstore/object.go
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"io"
- "io/ioutil"
"net/http"
"gitlab.com/gitlab-org/labkit/mask"
@@ -53,7 +52,7 @@ func newObject(putURL, deleteURL string, putHeaders map[string]string, size int6
func (o *Object) Upload(ctx context.Context, r io.Reader) error {
// we should prevent pr.Close() otherwise it may shadow error set with pr.CloseWithError(err)
- req, err := http.NewRequest(http.MethodPut, o.putURL, ioutil.NopCloser(r))
+ req, err := http.NewRequest(http.MethodPut, o.putURL, io.NopCloser(r))
if err != nil {
return fmt.Errorf("PUT %q: %v", mask.URL(o.putURL), err)
diff --git a/workhorse/internal/upload/destination/objectstore/s3_object_test.go b/workhorse/internal/upload/destination/objectstore/s3_object_test.go
index b81b0ae2024..0ed14a2e844 100644
--- a/workhorse/internal/upload/destination/objectstore/s3_object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_object_test.go
@@ -4,8 +4,6 @@ import (
"context"
"fmt"
"io"
- "io/ioutil"
- "os"
"path/filepath"
"strings"
"sync"
@@ -47,9 +45,7 @@ func TestS3ObjectUpload(t *testing.T) {
defer ts.Close()
deadline := time.Now().Add(testTimeout)
- tmpDir, err := ioutil.TempDir("", "workhorse-test-")
- require.NoError(t, err)
- defer os.Remove(tmpDir)
+ tmpDir := t.TempDir()
objectName := filepath.Join(tmpDir, "s3-test-data")
ctx, cancel := context.WithCancel(context.Background())
@@ -87,9 +83,7 @@ func TestConcurrentS3ObjectUpload(t *testing.T) {
defer artifactsServer.Close()
deadline := time.Now().Add(testTimeout)
- tmpDir, err := ioutil.TempDir("", "workhorse-test-")
- require.NoError(t, err)
- defer os.Remove(tmpDir)
+ tmpDir := t.TempDir()
var wg sync.WaitGroup
@@ -136,9 +130,7 @@ func TestS3ObjectUploadCancel(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
deadline := time.Now().Add(testTimeout)
- tmpDir, err := ioutil.TempDir("", "workhorse-test-")
- require.NoError(t, err)
- defer os.Remove(tmpDir)
+ tmpDir := t.TempDir()
objectName := filepath.Join(tmpDir, "s3-test-data")
@@ -160,9 +152,7 @@ func TestS3ObjectUploadLimitReached(t *testing.T) {
defer ts.Close()
deadline := time.Now().Add(testTimeout)
- tmpDir, err := ioutil.TempDir("", "workhorse-test-")
- require.NoError(t, err)
- defer os.Remove(tmpDir)
+ tmpDir := t.TempDir()
objectName := filepath.Join(tmpDir, "s3-test-data")
object, err := objectstore.NewS3Object(objectName, creds, config)
diff --git a/workhorse/internal/upload/destination/objectstore/s3_session.go b/workhorse/internal/upload/destination/objectstore/s3_session.go
index aa38f18ed7a..d71b38eb22e 100644
--- a/workhorse/internal/upload/destination/objectstore/s3_session.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_session.go
@@ -10,6 +10,8 @@ import (
"github.com/aws/aws-sdk-go/aws/session"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
+
+ "gitlab.com/gitlab-org/labkit/fips"
)
type s3Session struct {
@@ -61,8 +63,9 @@ func setupS3Session(s3Credentials config.S3Credentials, s3Config config.S3Config
}
cfg := &aws.Config{
- Region: aws.String(s3Config.Region),
- S3ForcePathStyle: aws.Bool(s3Config.PathStyle),
+ Region: aws.String(s3Config.Region),
+ S3ForcePathStyle: aws.Bool(s3Config.PathStyle),
+ S3DisableContentMD5Validation: aws.Bool(fips.Enabled()),
}
// In case IAM profiles aren't being used, use the static credentials
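The new S3DisableContentMD5Validation flag turns off the SDK's MD5 checksums when the binary runs in FIPS mode, where MD5 is unavailable. A minimal sketch of the configuration, assuming aws-sdk-go v1 and labkit's fips package as imported above:

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"gitlab.com/gitlab-org/labkit/fips"
)

func main() {
	cfg := &aws.Config{
		Region:           aws.String("us-east-1"),
		S3ForcePathStyle: aws.Bool(true),
		// FIPS builds cannot use MD5, so skip the SDK's MD5 content checks.
		S3DisableContentMD5Validation: aws.Bool(fips.Enabled()),
	}
	fmt.Println(aws.BoolValue(cfg.S3DisableContentMD5Validation))
}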
diff --git a/workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go b/workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go
index cf22075e407..bff0eabaee5 100644
--- a/workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go
@@ -2,9 +2,7 @@ package test
import (
"context"
- "io/ioutil"
"net/url"
- "os"
"testing"
"github.com/stretchr/testify/require"
@@ -20,18 +18,14 @@ func (o *dirOpener) OpenBucketURL(ctx context.Context, u *url.URL) (*blob.Bucket
return fileblob.OpenBucket(o.tmpDir, nil)
}
-func SetupGoCloudFileBucket(t *testing.T, scheme string) (m *blob.URLMux, bucketDir string, cleanup func()) {
- tmpDir, err := ioutil.TempDir("", "")
- require.NoError(t, err)
+func SetupGoCloudFileBucket(t *testing.T, scheme string) (m *blob.URLMux, bucketDir string) {
+ tmpDir := t.TempDir()
mux := new(blob.URLMux)
fake := &dirOpener{tmpDir: tmpDir}
mux.RegisterBucket(scheme, fake)
- cleanup = func() {
- os.RemoveAll(tmpDir)
- }
- return mux, tmpDir, cleanup
+ return mux, tmpDir
}
func GoCloudObjectExists(t *testing.T, bucketDir string, objectName string) {
diff --git a/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
index d51a2de7456..1a380bd5083 100644
--- a/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
@@ -6,7 +6,6 @@ import (
"encoding/xml"
"fmt"
"io"
- "io/ioutil"
"net/http"
"net/http/httptest"
"strconv"
@@ -22,7 +21,8 @@ type partsEtagMap map[int]string
// Instead of storing objects it will just save md5sum.
type ObjectstoreStub struct {
// bucket contains md5sum of uploaded objects
- bucket map[string]string
+ bucket map[string]string
+ contents map[string][]byte
// overwriteMD5 contains overwrites for md5sum that should be return instead of the regular hash
overwriteMD5 map[string]string
// multipart is a map of MultipartUploads
@@ -48,6 +48,7 @@ func StartObjectStoreWithCustomMD5(md5Hashes map[string]string) (*ObjectstoreStu
multipart: make(map[string]partsEtagMap),
overwriteMD5: make(map[string]string),
headers: make(map[string]*http.Header),
+ contents: make(map[string][]byte),
}
for k, v := range md5Hashes {
@@ -82,6 +83,15 @@ func (o *ObjectstoreStub) GetObjectMD5(path string) string {
return o.bucket[path]
}
+// GetObject returns the contents of the uploaded object. The caller must
+// not modify the byte slice.
+func (o *ObjectstoreStub) GetObject(path string) []byte {
+ o.m.Lock()
+ defer o.m.Unlock()
+
+ return o.contents[path]
+}
+
// GetHeader returns a given HTTP header of the object uploaded to the path
func (o *ObjectstoreStub) GetHeader(path, key string) string {
o.m.Lock()
@@ -154,11 +164,11 @@ func (o *ObjectstoreStub) putObject(w http.ResponseWriter, r *http.Request) {
etag, overwritten := o.overwriteMD5[objectPath]
if !overwritten {
+ buf, _ := io.ReadAll(r.Body)
+ o.contents[objectPath] = buf
hasher := md5.New()
- io.Copy(hasher, r.Body)
-
- checksum := hasher.Sum(nil)
- etag = hex.EncodeToString(checksum)
+ hasher.Write(buf)
+ etag = hex.EncodeToString(hasher.Sum(nil))
}
o.headers[objectPath] = &r.Header
@@ -196,7 +206,7 @@ func (o *ObjectstoreStub) completeMultipartUpload(w http.ResponseWriter, r *http
return
}
- buf, err := ioutil.ReadAll(r.Body)
+ buf, err := io.ReadAll(r.Body)
if err != nil {
http.Error(w, err.Error(), 500)
return
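The stub now buffers each uploaded body so tests can read it back via GetObject, and derives the etag from the same bytes with hasher.Write instead of streaming through io.Copy. A standalone sketch of computing that MD5 etag from a buffered body:

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
)

func main() {
	body := strings.NewReader("test")

	// Buffer the body once so a stub can keep the contents and
	// still derive the etag from the same bytes.
	buf, err := io.ReadAll(body)
	if err != nil {
		panic(err)
	}

	sum := md5.Sum(buf)
	etag := hex.EncodeToString(sum[:])
	fmt.Println(etag) // 098f6bcd4621d373cade4e832627b4f6
}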
diff --git a/workhorse/internal/upload/destination/objectstore/test/s3_stub.go b/workhorse/internal/upload/destination/objectstore/test/s3_stub.go
index 6b83426b852..6a6b4662904 100644
--- a/workhorse/internal/upload/destination/objectstore/test/s3_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/s3_stub.go
@@ -1,7 +1,6 @@
package test
import (
- "io/ioutil"
"net/http/httptest"
"os"
"strings"
@@ -76,7 +75,7 @@ func S3ObjectExists(t *testing.T, sess *session.Session, config config.S3Config,
require.NoError(t, err)
require.Equal(t, int64(len(expectedBytes)), numBytes)
- output, err := ioutil.ReadFile(tmpfile.Name())
+ output, err := os.ReadFile(tmpfile.Name())
require.NoError(t, err)
require.Equal(t, []byte(expectedBytes), output)
@@ -124,13 +123,10 @@ func S3ObjectDoesNotExist(t *testing.T, sess *session.Session, config config.S3C
}
func downloadObject(t *testing.T, sess *session.Session, config config.S3Config, objectName string, handler func(tmpfile *os.File, numBytes int64, err error)) {
- tmpDir, err := ioutil.TempDir("", "workhorse-test-")
- require.NoError(t, err)
- defer os.Remove(tmpDir)
+ tmpDir := t.TempDir()
- tmpfile, err := ioutil.TempFile(tmpDir, "s3-output")
+ tmpfile, err := os.CreateTemp(tmpDir, "s3-output")
require.NoError(t, err)
- defer os.Remove(tmpfile.Name())
downloadSvc := s3manager.NewDownloader(sess)
numBytes, err := downloadSvc.Download(tmpfile, &s3.GetObjectInput{
diff --git a/workhorse/internal/upload/destination/reader_test.go b/workhorse/internal/upload/destination/reader_test.go
index a26f7746a13..40ff76d3866 100644
--- a/workhorse/internal/upload/destination/reader_test.go
+++ b/workhorse/internal/upload/destination/reader_test.go
@@ -2,7 +2,7 @@ package destination
import (
"fmt"
- "io/ioutil"
+ "io"
"strings"
"testing"
"testing/iotest"
@@ -19,7 +19,7 @@ func TestHardLimitReader(t *testing.T) {
},
)
- out, err := ioutil.ReadAll(r)
+ out, err := io.ReadAll(r)
require.NoError(t, err)
require.Equal(t, text, string(out))
}
diff --git a/workhorse/internal/upload/destination/upload_opts.go b/workhorse/internal/upload/destination/upload_opts.go
index 77a8927d34f..b2223fac912 100644
--- a/workhorse/internal/upload/destination/upload_opts.go
+++ b/workhorse/internal/upload/destination/upload_opts.go
@@ -29,8 +29,6 @@ type ObjectStorageConfig struct {
// UploadOpts represents all the options available for saving a file to object store
type UploadOpts struct {
- // TempFilePrefix is the prefix used to create temporary local file
- TempFilePrefix string
// LocalTempPath is the directory where to write a local copy of the file
LocalTempPath string
// RemoteID is the remote ObjectID provided by GitLab
diff --git a/workhorse/internal/upload/destination/upload_opts_test.go b/workhorse/internal/upload/destination/upload_opts_test.go
index 24a372495c6..fd9e56db194 100644
--- a/workhorse/internal/upload/destination/upload_opts_test.go
+++ b/workhorse/internal/upload/destination/upload_opts_test.go
@@ -283,8 +283,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
}
func TestGoCloudConfig(t *testing.T) {
- mux, _, cleanup := test.SetupGoCloudFileBucket(t, "azblob")
- defer cleanup()
+ mux, _ := test.SetupGoCloudFileBucket(t, "azblob")
tests := []struct {
name string
diff --git a/workhorse/internal/upload/exif.go b/workhorse/internal/upload/exif.go
new file mode 100644
index 00000000000..e77afb24502
--- /dev/null
+++ b/workhorse/internal/upload/exif.go
@@ -0,0 +1,91 @@
+package upload
+
+import (
+ "context"
+ "io"
+ "net/http"
+ "os"
+
+ "gitlab.com/gitlab-org/labkit/log"
+ "golang.org/x/image/tiff"
+
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
+)
+
+func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageType exif.FileType) (io.ReadCloser, error) {
+ tmpfile, err := os.CreateTemp("", "exifremove")
+ if err != nil {
+ return nil, err
+ }
+ go func() {
+ <-ctx.Done()
+ tmpfile.Close()
+ }()
+ if err := os.Remove(tmpfile.Name()); err != nil {
+ return nil, err
+ }
+
+ _, err = io.Copy(tmpfile, r)
+ if err != nil {
+ return nil, err
+ }
+
+ if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
+ return nil, err
+ }
+
+ isValidType := false
+ switch imageType {
+ case exif.TypeJPEG:
+ isValidType = isJPEG(tmpfile)
+ case exif.TypeTIFF:
+ isValidType = isTIFF(tmpfile)
+ }
+
+ if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
+ return nil, err
+ }
+
+ if !isValidType {
+ log.WithContextFields(ctx, log.Fields{
+ "filename": filename,
+ "imageType": imageType,
+ }).Info("invalid content type, not running exiftool")
+
+ return tmpfile, nil
+ }
+
+ log.WithContextFields(ctx, log.Fields{
+ "filename": filename,
+ }).Info("running exiftool to remove any metadata")
+
+ cleaner, err := exif.NewCleaner(ctx, tmpfile)
+ if err != nil {
+ return nil, err
+ }
+
+ return cleaner, nil
+}
+
+func isTIFF(r io.Reader) bool {
+ _, err := tiff.DecodeConfig(r)
+ if err == nil {
+ return true
+ }
+
+ if _, unsupported := err.(tiff.UnsupportedError); unsupported {
+ return true
+ }
+
+ return false
+}
+
+func isJPEG(r io.Reader) bool {
+ // Only the first 512 bytes are used to sniff the content type.
+ buf, err := io.ReadAll(io.LimitReader(r, 512))
+ if err != nil {
+ return false
+ }
+
+ return http.DetectContentType(buf) == "image/jpeg"
+}
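The new exif.go carries the content-sniffing helpers over from rewrite.go unchanged; isJPEG relies on http.DetectContentType, which only ever inspects the first 512 bytes. A small usage example, feeding it a hand-built JPEG header rather than a real file:

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

// sniff reports the detected MIME type of a reader's first 512 bytes,
// mirroring the isJPEG check above.
func sniff(r io.Reader) (string, error) {
	buf, err := io.ReadAll(io.LimitReader(r, 512))
	if err != nil {
		return "", err
	}
	return http.DetectContentType(buf), nil
}

func main() {
	// \xff\xd8\xff is the JPEG SOI signature DetectContentType keys on.
	jpegHeader := "\xff\xd8\xff\xe0" + strings.Repeat("\x00", 12)
	ct, err := sniff(strings.NewReader(jpegHeader))
	if err != nil {
		panic(err)
	}
	fmt.Println(ct) // image/jpeg
}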
diff --git a/workhorse/internal/upload/exif/exif_test.go b/workhorse/internal/upload/exif/exif_test.go
index ee5883d9e08..75ecfc51f08 100644
--- a/workhorse/internal/upload/exif/exif_test.go
+++ b/workhorse/internal/upload/exif/exif_test.go
@@ -3,7 +3,6 @@ package exif
import (
"context"
"io"
- "io/ioutil"
"os"
"strings"
"testing"
@@ -75,7 +74,7 @@ func TestNewCleanerWithValidFile(t *testing.T) {
cleaner, err := NewCleaner(ctx, input)
require.NoError(t, err, "Expected no error when creating cleaner command")
- size, err := io.Copy(ioutil.Discard, cleaner)
+ size, err := io.Copy(io.Discard, cleaner)
require.NoError(t, err, "Expected no error when reading output")
sizeAfterStrip := int64(25399)
@@ -89,7 +88,7 @@ func TestNewCleanerWithInvalidFile(t *testing.T) {
cleaner, err := NewCleaner(ctx, strings.NewReader("invalid image"))
require.NoError(t, err, "Expected no error when creating cleaner command")
- size, err := io.Copy(ioutil.Discard, cleaner)
+ size, err := io.Copy(io.Discard, cleaner)
require.Error(t, err, "Expected error when reading output")
require.Equal(t, int64(0), size, "Size of invalid image should be 0")
}
@@ -103,7 +102,7 @@ func TestNewCleanerReadingAfterEOF(t *testing.T) {
cleaner, err := NewCleaner(ctx, input)
require.NoError(t, err, "Expected no error when creating cleaner command")
- _, err = io.Copy(ioutil.Discard, cleaner)
+ _, err = io.Copy(io.Discard, cleaner)
require.NoError(t, err, "Expected no error when reading output")
buf := make([]byte, 1)
diff --git a/workhorse/internal/upload/multipart_uploader.go b/workhorse/internal/upload/multipart_uploader.go
index 34675d2aa14..2456a2c8626 100644
--- a/workhorse/internal/upload/multipart_uploader.go
+++ b/workhorse/internal/upload/multipart_uploader.go
@@ -1,11 +1,9 @@
package upload
import (
- "fmt"
"net/http"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
)
// Multipart is a request middleware. If the request has a MIME multipart
@@ -17,12 +15,27 @@ func Multipart(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
return rails.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
s := &SavedFileTracker{Request: r}
- opts, err := p.Prepare(a)
- if err != nil {
- helper.Fail500(w, r, fmt.Errorf("Multipart: error preparing file storage options"))
- return
+ interceptMultipartFiles(w, r, h, s, &eagerAuthorizer{a}, p)
+ }, "/authorize")
+}
+
+// SkipRailsPreAuthMultipart behaves like Multipart except it does not
+// pre-authorize with Rails. It is intended for use on catch-all routes
+// where we cannot pre-authorize both because we don't know which Rails
+// endpoint to call, and because eagerly pre-authorizing would add too
+// much overhead.
+func SkipRailsPreAuthMultipart(tempPath string, myAPI *api.API, h http.Handler, p Preparer) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ s := &SavedFileTracker{Request: r}
+
+ // We use testAuthorizer as a temporary measure. When
+ // https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/742 is done, we
+ // should only be using apiAuthorizer.
+ fa := &testAuthorizer{
+ test: &apiAuthorizer{myAPI},
+ actual: &eagerAuthorizer{&api.Response{TempPath: tempPath}},
}
- interceptMultipartFiles(w, r, h, a, s, opts)
- }, "/authorize")
+ interceptMultipartFiles(w, r, h, s, fa, p)
+ })
}
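SkipRailsPreAuthMultipart introduces a shadow-authorizer arrangement: the test authorizer is exercised and logged for comparison, while the eager authorizer's answer is what actually gets used. A minimal sketch of that pattern with hypothetical names (authorizer, eager, shadow), not the workhorse types themselves:

package main

import (
	"fmt"
	"log"
)

type authorizer interface {
	Authorize() (string, error)
}

type eager struct{ tempPath string }

func (e *eager) Authorize() (string, error) { return e.tempPath, nil }

type shadow struct {
	test   authorizer // exercised for comparison, result discarded
	actual authorizer // result actually used
}

func (s *shadow) Authorize() (string, error) {
	if got, err := s.test.Authorize(); err != nil {
		log.Printf("test authorizer failed: %v", err)
	} else {
		log.Printf("test authorizer returned %q", got)
	}
	return s.actual.Authorize()
}

func main() {
	a := &shadow{test: &eager{"/tmp/api"}, actual: &eager{"/tmp/uploads"}}
	path, _ := a.Authorize()
	fmt.Println(path)
}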
diff --git a/workhorse/internal/upload/rewrite.go b/workhorse/internal/upload/rewrite.go
index ff5190226af..d03445923fa 100644
--- a/workhorse/internal/upload/rewrite.go
+++ b/workhorse/internal/upload/rewrite.go
@@ -5,23 +5,19 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"mime"
"mime/multipart"
"net/http"
"net/textproto"
- "os"
"path/filepath"
"strings"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
- "gitlab.com/gitlab-org/labkit/log"
- "golang.org/x/image/tiff"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
)
@@ -62,13 +58,14 @@ var (
)
type rewriter struct {
- writer *multipart.Writer
- preauth *api.Response
+ writer *multipart.Writer
+ fileAuthorizer
+ Preparer
filter MultipartFormProcessor
finalizedFields map[string]bool
}
-func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, preauth *api.Response, filter MultipartFormProcessor, opts *destination.UploadOpts) error {
+func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, filter MultipartFormProcessor, fa fileAuthorizer, preparer Preparer) error {
// Create multipart reader
reader, err := r.MultipartReader()
if err != nil {
@@ -83,7 +80,8 @@ func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, pr
rew := &rewriter{
writer: writer,
- preauth: preauth,
+ fileAuthorizer: fa,
+ Preparer: preparer,
filter: filter,
finalizedFields: make(map[string]bool),
}
@@ -108,7 +106,7 @@ func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, pr
}
if filename != "" {
- err = rew.handleFilePart(r.Context(), name, p, opts)
+ err = rew.handleFilePart(r, name, p)
} else {
err = rew.copyPart(r.Context(), name, p)
}
@@ -128,7 +126,7 @@ func parseAndNormalizeContentDisposition(header textproto.MIMEHeader) (string, s
return params["name"], params["filename"]
}
-func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipart.Part, opts *destination.UploadOpts) error {
+func (rew *rewriter) handleFilePart(r *http.Request, name string, p *multipart.Part) error {
if rew.filter.Count() >= maxFilesAllowed {
return ErrTooManyFilesUploaded
}
@@ -141,30 +139,23 @@ func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipa
return fmt.Errorf("illegal filename: %q", filename)
}
- opts.TempFilePrefix = filename
-
- var inputReader io.ReadCloser
- var err error
-
- imageType := exif.FileTypeFromSuffix(filename)
- switch {
- case imageType != exif.TypeUnknown:
- inputReader, err = handleExifUpload(ctx, p, filename, imageType)
- if err != nil {
- return err
- }
- case rew.preauth.ProcessLsif:
- inputReader, err = handleLsifUpload(ctx, p, opts.LocalTempPath, filename, rew.preauth)
- if err != nil {
- return err
- }
- default:
- inputReader = ioutil.NopCloser(p)
+ apiResponse, err := rew.AuthorizeFile(r)
+ if err != nil {
+ return err
+ }
+ opts, err := rew.Prepare(apiResponse)
+ if err != nil {
+ return err
}
+ ctx := r.Context()
+ inputReader, err := rew.filter.TransformContents(ctx, filename, p)
+ if err != nil {
+ return err
+ }
defer inputReader.Close()
- fh, err := destination.Upload(ctx, inputReader, -1, opts)
+ fh, err := destination.Upload(ctx, inputReader, -1, filename, opts)
if err != nil {
switch err {
case destination.ErrEntityTooLarge, exif.ErrRemovingExif:
@@ -189,105 +180,63 @@ func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipa
return rew.filter.ProcessFile(ctx, name, fh, rew.writer)
}
-func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageType exif.FileType) (io.ReadCloser, error) {
- tmpfile, err := ioutil.TempFile("", "exifremove")
- if err != nil {
- return nil, err
- }
- go func() {
- <-ctx.Done()
- tmpfile.Close()
- }()
- if err := os.Remove(tmpfile.Name()); err != nil {
- return nil, err
- }
-
- _, err = io.Copy(tmpfile, r)
+func (rew *rewriter) copyPart(ctx context.Context, name string, p *multipart.Part) error {
+ np, err := rew.writer.CreatePart(p.Header)
if err != nil {
- return nil, err
- }
-
- if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
- return nil, err
- }
-
- isValidType := false
- switch imageType {
- case exif.TypeJPEG:
- isValidType = isJPEG(tmpfile)
- case exif.TypeTIFF:
- isValidType = isTIFF(tmpfile)
- }
-
- if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
- return nil, err
+ return fmt.Errorf("create multipart field: %v", err)
}
- if !isValidType {
- log.WithContextFields(ctx, log.Fields{
- "filename": filename,
- "imageType": imageType,
- }).Print("invalid content type, not running exiftool")
-
- return tmpfile, nil
+ if _, err := io.Copy(np, p); err != nil {
+ return fmt.Errorf("duplicate multipart field: %v", err)
}
- log.WithContextFields(ctx, log.Fields{
- "filename": filename,
- }).Print("running exiftool to remove any metadata")
-
- cleaner, err := exif.NewCleaner(ctx, tmpfile)
- if err != nil {
- return nil, err
+ if err := rew.filter.ProcessField(ctx, name, rew.writer); err != nil {
+ return fmt.Errorf("process multipart field: %v", err)
}
- return cleaner, nil
+ return nil
}
-func isTIFF(r io.Reader) bool {
- _, err := tiff.DecodeConfig(r)
- if err == nil {
- return true
- }
+type fileAuthorizer interface {
+ AuthorizeFile(*http.Request) (*api.Response, error)
+}
- if _, unsupported := err.(tiff.UnsupportedError); unsupported {
- return true
- }
+type eagerAuthorizer struct{ response *api.Response }
- return false
+func (ea *eagerAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
+ return ea.response, nil
}
-func isJPEG(r io.Reader) bool {
- // Only the first 512 bytes are used to sniff the content type.
- buf, err := ioutil.ReadAll(io.LimitReader(r, 512))
- if err != nil {
- return false
- }
+var _ fileAuthorizer = &eagerAuthorizer{}
- return http.DetectContentType(buf) == "image/jpeg"
+type apiAuthorizer struct {
+ api *api.API
}
-func handleLsifUpload(ctx context.Context, reader io.Reader, tempPath, filename string, preauth *api.Response) (io.ReadCloser, error) {
- parserConfig := parser.Config{
- TempPath: tempPath,
- }
-
- return parser.NewParser(ctx, reader, parserConfig)
+func (aa *apiAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
+ return aa.api.PreAuthorizeFixedPath(
+ r,
+ "POST",
+ "/api/v4/internal/workhorse/authorize_upload",
+ )
}
-func (rew *rewriter) copyPart(ctx context.Context, name string, p *multipart.Part) error {
- np, err := rew.writer.CreatePart(p.Header)
- if err != nil {
- return fmt.Errorf("create multipart field: %v", err)
- }
+var _ fileAuthorizer = &apiAuthorizer{}
- if _, err := io.Copy(np, p); err != nil {
- return fmt.Errorf("duplicate multipart field: %v", err)
- }
+type testAuthorizer struct {
+ test fileAuthorizer
+ actual fileAuthorizer
+}
- if err := rew.filter.ProcessField(ctx, name, rew.writer); err != nil {
- return fmt.Errorf("process multipart field: %v", err)
+func (ta *testAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
+ logger := log.WithRequest(r)
+ if response, err := ta.test.AuthorizeFile(r); err != nil {
+ logger.WithError(err).Error("test api preauthorize request failed")
+ } else {
+ logger.WithFields(log.Fields{
+ "temp_path": response.TempPath,
+ }).Info("test api preauthorize request")
}
- return nil
+ return ta.actual.AuthorizeFile(r)
}
diff --git a/workhorse/internal/upload/saved_file_tracker.go b/workhorse/internal/upload/saved_file_tracker.go
index b70a303a4a4..1fad5343647 100644
--- a/workhorse/internal/upload/saved_file_tracker.go
+++ b/workhorse/internal/upload/saved_file_tracker.go
@@ -3,11 +3,13 @@ package upload
import (
"context"
"fmt"
+ "io"
"mime/multipart"
"net/http"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
)
type SavedFileTracker struct {
@@ -44,7 +46,7 @@ func (s *SavedFileTracker) Finalize(_ context.Context) error {
return nil
}
- claims := MultipartClaims{RewrittenFields: s.rewrittenFields, StandardClaims: secret.DefaultClaims}
+ claims := MultipartClaims{RewrittenFields: s.rewrittenFields, RegisteredClaims: secret.DefaultClaims}
tokenString, err := secret.JWTTokenString(claims)
if err != nil {
return fmt.Errorf("savedFileTracker.Finalize: %v", err)
@@ -54,6 +56,12 @@ func (s *SavedFileTracker) Finalize(_ context.Context) error {
return nil
}
-func (s *SavedFileTracker) Name() string {
- return "accelerate"
+func (s *SavedFileTracker) Name() string { return "accelerate" }
+
+func (*SavedFileTracker) TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error) {
+ if imageType := exif.FileTypeFromSuffix(filename); imageType != exif.TypeUnknown {
+ return handleExifUpload(ctx, r, filename, imageType)
+ }
+
+ return io.NopCloser(r), nil
}
diff --git a/workhorse/internal/upload/skip_rails_authorizer.go b/workhorse/internal/upload/skip_rails_authorizer.go
deleted file mode 100644
index e74048fb6e3..00000000000
--- a/workhorse/internal/upload/skip_rails_authorizer.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package upload
-
-import (
- "net/http"
-
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
-)
-
-// SkipRailsAuthorizer implements a fake PreAuthorizer that does not call
-// the gitlab-rails API. It must be fast because it gets called on each
-// request proxied to Rails.
-type SkipRailsAuthorizer struct {
- // TempPath is a directory where workhorse can store files that can later
- // be accessed by gitlab-rails.
- TempPath string
-}
-
-func (l *SkipRailsAuthorizer) PreAuthorizeHandler(next api.HandleFunc, _ string) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- next(w, r, &api.Response{TempPath: l.TempPath})
- })
-}
diff --git a/workhorse/internal/upload/uploads.go b/workhorse/internal/upload/uploads.go
index 8272a3d920d..61b419901a7 100644
--- a/workhorse/internal/upload/uploads.go
+++ b/workhorse/internal/upload/uploads.go
@@ -4,7 +4,7 @@ import (
"bytes"
"context"
"fmt"
- "io/ioutil"
+ "io"
"mime/multipart"
"net/http"
@@ -25,7 +25,7 @@ type PreAuthorizer interface {
type MultipartClaims struct {
RewrittenFields map[string]string `json:"rewritten_fields"`
- jwt.StandardClaims
+ jwt.RegisteredClaims
}
// MultipartFormProcessor abstracts away implementation differences
@@ -36,17 +36,18 @@ type MultipartFormProcessor interface {
Finalize(ctx context.Context) error
Name() string
Count() int
+ TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error)
}
// interceptMultipartFiles is the core of the implementation of
// Multipart.
-func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Handler, preauth *api.Response, filter MultipartFormProcessor, opts *destination.UploadOpts) {
+func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Handler, filter MultipartFormProcessor, fa fileAuthorizer, p Preparer) {
var body bytes.Buffer
writer := multipart.NewWriter(&body)
defer writer.Close()
// Rewrite multipart form data
- err := rewriteFormFilesFromMultipart(r, writer, preauth, filter, opts)
+ err := rewriteFormFilesFromMultipart(r, writer, filter, fa, p)
if err != nil {
switch err {
case ErrInjectedClientParam:
@@ -71,7 +72,7 @@ func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Hand
writer.Close()
// Hijack the request
- r.Body = ioutil.NopCloser(&body)
+ r.Body = io.NopCloser(&body)
r.ContentLength = int64(body.Len())
r.Header.Set("Content-Type", writer.FormDataContentType())
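MultipartFormProcessor gains a TransformContents hook so each processor decides how a file's bytes are rewritten before upload (EXIF stripping for images, LSIF parsing for artifacts). A toy sketch of such a hook, using a made-up upper-casing transform rather than the real ones:

package main

import (
	"fmt"
	"io"
	"path/filepath"
	"strings"
)

// transform picks a content transformation per filename, in the spirit of
// the TransformContents hook; here it just upper-cases .txt uploads.
func transform(filename string, r io.Reader) (io.ReadCloser, error) {
	if filepath.Ext(filename) == ".txt" {
		data, err := io.ReadAll(r)
		if err != nil {
			return nil, err
		}
		return io.NopCloser(strings.NewReader(strings.ToUpper(string(data)))), nil
	}
	return io.NopCloser(r), nil // default: pass the bytes through untouched
}

func main() {
	rc, _ := transform("note.txt", strings.NewReader("hello"))
	out, _ := io.ReadAll(rc)
	fmt.Println(string(out)) // HELLO
}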
diff --git a/workhorse/internal/upload/uploads_test.go b/workhorse/internal/upload/uploads_test.go
index a9c8834d4be..ffe9fec302e 100644
--- a/workhorse/internal/upload/uploads_test.go
+++ b/workhorse/internal/upload/uploads_test.go
@@ -6,12 +6,12 @@ import (
"context"
"fmt"
"io"
- "io/ioutil"
"mime/multipart"
"net/http"
"net/http/httptest"
"net/textproto"
"os"
+ "path"
"regexp"
"strconv"
"strings"
@@ -54,7 +54,7 @@ func TestUploadHandlerForwardingRawData(t *testing.T) {
ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) {
require.Equal(t, "PATCH", r.Method, "method")
- body, err := ioutil.ReadAll(r.Body)
+ body, err := io.ReadAll(r.Body)
require.NoError(t, err)
require.Equal(t, "REQUEST", string(body), "request body")
@@ -66,19 +66,14 @@ func TestUploadHandlerForwardingRawData(t *testing.T) {
httpRequest, err := http.NewRequest("PATCH", ts.URL+"/url/path", bytes.NewBufferString("REQUEST"))
require.NoError(t, err)
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
-
+ tempPath := t.TempDir()
response := httptest.NewRecorder()
handler := newProxy(ts.URL)
- apiResponse := &api.Response{TempPath: tempPath}
+ fa := &eagerAuthorizer{&api.Response{TempPath: tempPath}}
preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
- interceptMultipartFiles(response, httpRequest, handler, apiResponse, nil, opts)
+ interceptMultipartFiles(response, httpRequest, handler, nil, fa, preparer)
require.Equal(t, 202, response.Code)
require.Equal(t, "RESPONSE", response.Body.String(), "response body")
@@ -86,10 +81,7 @@ func TestUploadHandlerForwardingRawData(t *testing.T) {
func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
var filePath string
-
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
+ tempPath := t.TempDir()
ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) {
require.Equal(t, "PUT", r.Method, "method")
@@ -137,19 +129,17 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
httpRequest = httpRequest.WithContext(ctx)
- httpRequest.Body = ioutil.NopCloser(&buffer)
+ httpRequest.Body = io.NopCloser(&buffer)
httpRequest.ContentLength = int64(buffer.Len())
httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
response := httptest.NewRecorder()
handler := newProxy(ts.URL)
- apiResponse := &api.Response{TempPath: tempPath}
+ fa := &eagerAuthorizer{&api.Response{TempPath: tempPath}}
preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
- interceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ interceptMultipartFiles(response, httpRequest, handler, &testFormProcessor{}, fa, preparer)
require.Equal(t, 202, response.Code)
cancel() // this will trigger an async cleanup
@@ -159,10 +149,6 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
func TestUploadHandlerDetectingInjectedMultiPartData(t *testing.T) {
var filePath string
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
-
tests := []struct {
name string
field string
@@ -213,12 +199,8 @@ func TestUploadHandlerDetectingInjectedMultiPartData(t *testing.T) {
response := httptest.NewRecorder()
handler := newProxy(ts.URL)
- apiResponse := &api.Response{TempPath: tempPath}
- preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
- interceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ testInterceptMultipartFiles(t, response, httpRequest, handler, &testFormProcessor{})
require.Equal(t, test.response, response.Code)
cancel() // this will trigger an async cleanup
@@ -228,10 +210,6 @@ func TestUploadHandlerDetectingInjectedMultiPartData(t *testing.T) {
}
func TestUploadProcessingField(t *testing.T) {
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
-
var buffer bytes.Buffer
writer := multipart.NewWriter(&buffer)
@@ -243,12 +221,8 @@ func TestUploadProcessingField(t *testing.T) {
httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
response := httptest.NewRecorder()
- apiResponse := &api.Response{TempPath: tempPath}
- preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
- interceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
+ testInterceptMultipartFiles(t, response, httpRequest, nilHandler, &testFormProcessor{})
require.Equal(t, 500, response.Code)
}
@@ -256,15 +230,11 @@ func TestUploadProcessingField(t *testing.T) {
func TestUploadingMultipleFiles(t *testing.T) {
testhelper.ConfigureSecret()
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
-
var buffer bytes.Buffer
writer := multipart.NewWriter(&buffer)
for i := 0; i < 11; i++ {
- _, err = writer.CreateFormFile(fmt.Sprintf("file %v", i), "my.file")
+ _, err := writer.CreateFormFile(fmt.Sprintf("file %v", i), "my.file")
require.NoError(t, err)
}
require.NoError(t, writer.Close())
@@ -274,23 +244,18 @@ func TestUploadingMultipleFiles(t *testing.T) {
httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
response := httptest.NewRecorder()
- apiResponse := &api.Response{TempPath: tempPath}
- preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
- interceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
+ testInterceptMultipartFiles(t, response, httpRequest, nilHandler, &testFormProcessor{})
require.Equal(t, 400, response.Code)
require.Equal(t, "upload request contains more than 10 files\n", response.Body.String())
}
func TestUploadProcessingFile(t *testing.T) {
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
+ testhelper.ConfigureSecret()
+ tempPath := t.TempDir()
- _, testServer := test.StartObjectStore()
+ objectStore, testServer := test.StartObjectStore()
defer testServer.Close()
storeUrl := testServer.URL + test.ObjectPath
@@ -298,21 +263,24 @@ func TestUploadProcessingFile(t *testing.T) {
tests := []struct {
name string
preauth *api.Response
+ content func(t *testing.T) []byte
}{
{
name: "FileStore Upload",
preauth: &api.Response{TempPath: tempPath},
+ content: func(t *testing.T) []byte {
+ entries, err := os.ReadDir(tempPath)
+ require.NoError(t, err)
+ require.Len(t, entries, 1)
+ content, err := os.ReadFile(path.Join(tempPath, entries[0].Name()))
+ require.NoError(t, err)
+ return content
+ },
},
{
name: "ObjectStore Upload",
- preauth: &api.Response{RemoteObject: api.RemoteObject{StoreURL: storeUrl}},
- },
- {
- name: "ObjectStore and FileStore Upload",
- preauth: &api.Response{
- TempPath: tempPath,
- RemoteObject: api.RemoteObject{StoreURL: storeUrl},
- },
+ preauth: &api.Response{RemoteObject: api.RemoteObject{StoreURL: storeUrl, ID: "123"}},
+ content: func(*testing.T) []byte { return objectStore.GetObject(test.ObjectPath) },
},
}
@@ -330,26 +298,20 @@ func TestUploadProcessingFile(t *testing.T) {
httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
response := httptest.NewRecorder()
- apiResponse := &api.Response{TempPath: tempPath}
+ fa := &eagerAuthorizer{test.preauth}
preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
- interceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
+ interceptMultipartFiles(response, httpRequest, nilHandler, &testFormProcessor{}, fa, preparer)
require.Equal(t, 200, response.Code)
+ require.Equal(t, "test", string(test.content(t)))
})
}
-
}
func TestInvalidFileNames(t *testing.T) {
testhelper.ConfigureSecret()
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
-
for _, testCase := range []struct {
filename string
code int
@@ -376,24 +338,14 @@ func TestInvalidFileNames(t *testing.T) {
httpRequest.Header.Set("Content-Type", writer.FormDataContentType())
response := httptest.NewRecorder()
- apiResponse := &api.Response{TempPath: tempPath}
- preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
-
- interceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
+ testInterceptMultipartFiles(t, response, httpRequest, nilHandler, &SavedFileTracker{Request: httpRequest})
require.Equal(t, testCase.code, response.Code)
- require.Equal(t, testCase.expectedPrefix, opts.TempFilePrefix)
}
}
func TestContentDispositionRewrite(t *testing.T) {
testhelper.ConfigureSecret()
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
-
tests := []struct {
desc string
header string
@@ -442,12 +394,7 @@ func TestContentDispositionRewrite(t *testing.T) {
})
response := httptest.NewRecorder()
- apiResponse := &api.Response{TempPath: tempPath}
- preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
-
- interceptMultipartFiles(response, httpRequest, customHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
+ testInterceptMultipartFiles(t, response, httpRequest, customHandler, &SavedFileTracker{Request: httpRequest})
upstreamRequest, err := http.ReadRequest(bufio.NewReader(&upstreamRequestBuffer))
require.NoError(t, err)
@@ -471,7 +418,7 @@ func TestContentDispositionRewrite(t *testing.T) {
}
func TestUploadHandlerRemovingExif(t *testing.T) {
- content, err := ioutil.ReadFile("exif/testdata/sample_exif.jpg")
+ content, err := os.ReadFile("exif/testdata/sample_exif.jpg")
require.NoError(t, err)
runUploadTest(t, content, "sample_exif.jpg", 200, func(w http.ResponseWriter, r *http.Request) {
@@ -489,7 +436,7 @@ func TestUploadHandlerRemovingExif(t *testing.T) {
}
func TestUploadHandlerRemovingExifTiff(t *testing.T) {
- content, err := ioutil.ReadFile("exif/testdata/sample_exif.tiff")
+ content, err := os.ReadFile("exif/testdata/sample_exif.tiff")
require.NoError(t, err)
runUploadTest(t, content, "sample_exif.tiff", 200, func(w http.ResponseWriter, r *http.Request) {
@@ -507,14 +454,14 @@ func TestUploadHandlerRemovingExifTiff(t *testing.T) {
}
func TestUploadHandlerRemovingExifInvalidContentType(t *testing.T) {
- content, err := ioutil.ReadFile("exif/testdata/sample_exif_invalid.jpg")
+ content, err := os.ReadFile("exif/testdata/sample_exif_invalid.jpg")
require.NoError(t, err)
runUploadTest(t, content, "sample_exif_invalid.jpg", 200, func(w http.ResponseWriter, r *http.Request) {
err := r.ParseMultipartForm(100000)
require.NoError(t, err)
- output, err := ioutil.ReadFile(r.FormValue("file.path"))
+ output, err := os.ReadFile(r.FormValue("file.path"))
require.NoError(t, err)
require.Equal(t, content, output, "Expected the file to be same as before")
@@ -524,7 +471,7 @@ func TestUploadHandlerRemovingExifInvalidContentType(t *testing.T) {
}
func TestUploadHandlerRemovingExifCorruptedFile(t *testing.T) {
- content, err := ioutil.ReadFile("exif/testdata/sample_exif_corrupted.jpg")
+ content, err := os.ReadFile("exif/testdata/sample_exif_corrupted.jpg")
require.NoError(t, err)
runUploadTest(t, content, "sample_exif_corrupted.jpg", 422, func(w http.ResponseWriter, r *http.Request) {
@@ -534,10 +481,6 @@ func TestUploadHandlerRemovingExifCorruptedFile(t *testing.T) {
}
func runUploadTest(t *testing.T, image []byte, filename string, httpCode int, tsHandler func(http.ResponseWriter, *http.Request)) {
- tempPath, err := ioutil.TempDir("", "uploads")
- require.NoError(t, err)
- defer os.RemoveAll(tempPath)
-
var buffer bytes.Buffer
writer := multipart.NewWriter(&buffer)
@@ -565,12 +508,8 @@ func runUploadTest(t *testing.T, image []byte, filename string, httpCode int, ts
response := httptest.NewRecorder()
handler := newProxy(ts.URL)
- apiResponse := &api.Response{TempPath: tempPath}
- preparer := &DefaultPreparer{}
- opts, err := preparer.Prepare(apiResponse)
- require.NoError(t, err)
- interceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
+ testInterceptMultipartFiles(t, response, httpRequest, handler, &testFormProcessor{})
require.Equal(t, httpCode, response.Code)
}
@@ -587,3 +526,12 @@ func waitUntilDeleted(t *testing.T, path string) {
}, 10*time.Second, 10*time.Millisecond)
require.True(t, os.IsNotExist(err), "expected the file to be deleted")
}
+
+func testInterceptMultipartFiles(t *testing.T, w http.ResponseWriter, r *http.Request, h http.Handler, filter MultipartFormProcessor) {
+ t.Helper()
+
+ fa := &eagerAuthorizer{&api.Response{TempPath: t.TempDir()}}
+ preparer := &DefaultPreparer{}
+
+ interceptMultipartFiles(w, r, h, filter, fa, preparer)
+}
diff --git a/workhorse/internal/upstream/handlers_test.go b/workhorse/internal/upstream/handlers_test.go
index 10c7479f5c5..03ca80ddcb9 100644
--- a/workhorse/internal/upstream/handlers_test.go
+++ b/workhorse/internal/upstream/handlers_test.go
@@ -4,7 +4,7 @@ import (
"bytes"
"compress/gzip"
"fmt"
- "io/ioutil"
+ "io"
"net/http"
"net/http/httptest"
"testing"
@@ -20,7 +20,7 @@ func TestGzipEncoding(t *testing.T) {
fmt.Fprint(w, "test")
w.Close()

- body := ioutil.NopCloser(&b)
+ body := io.NopCloser(&b)

req, err := http.NewRequest("POST", "http://address/test", body)
require.NoError(t, err)
@@ -38,7 +38,7 @@ func TestNoEncoding(t *testing.T) {
resp := httptest.NewRecorder()

var b bytes.Buffer
- body := ioutil.NopCloser(&b)
+ body := io.NopCloser(&b)

req, err := http.NewRequest("POST", "http://address/test", body)
require.NoError(t, err)
diff --git a/workhorse/internal/upstream/roundtripper/roundtripper_test.go b/workhorse/internal/upstream/roundtripper/roundtripper_test.go
index eed71cc5bae..d3cec80d49b 100644
--- a/workhorse/internal/upstream/roundtripper/roundtripper_test.go
+++ b/workhorse/internal/upstream/roundtripper/roundtripper_test.go
@@ -4,7 +4,7 @@ import (
"crypto/tls"
"crypto/x509"
"fmt"
- "io/ioutil"
+ "io"
"net/http"
"net/http/httptest"
"net/url"
@@ -86,7 +86,7 @@ func testNewBackendRoundTripper(t *testing.T, ts *httptest.Server, tlsClientConf
require.NoError(t, err, "perform roundtrip")
defer response.Body.Close()

- body, err := ioutil.ReadAll(response.Body)
+ body, err := io.ReadAll(response.Body)
require.NoError(t, err)

require.Equal(t, expectedResponseBody, string(body))
diff --git a/workhorse/internal/upstream/routes.go b/workhorse/internal/upstream/routes.go
index dd106053f8b..95c9b99b833 100644
--- a/workhorse/internal/upstream/routes.go
+++ b/workhorse/internal/upstream/routes.go
@@ -223,7 +223,7 @@ func configureRoutes(u *upstream) {
mimeMultipartUploader := upload.Multipart(api, signingProxy, preparer)

uploadPath := path.Join(u.DocumentRoot, "uploads/tmp")
- tempfileMultipartProxy := upload.Multipart(&upload.SkipRailsAuthorizer{TempPath: uploadPath}, proxy, preparer)
+ tempfileMultipartProxy := upload.SkipRailsPreAuthMultipart(uploadPath, api, proxy, preparer)

ciAPIProxyQueue := queueing.QueueRequests("ci_api_job_requests", tempfileMultipartProxy, u.APILimit, u.APIQueueLimit, u.APIQueueTimeout)
ciAPILongPolling := builds.RegisterHandler(ciAPIProxyQueue, redis.WatchKey, u.APICILongPollingDuration)
diff --git a/workhorse/internal/upstream/upstream_test.go b/workhorse/internal/upstream/upstream_test.go
index 8f054f5ccef..f931c1b31b3 100644
--- a/workhorse/internal/upstream/upstream_test.go
+++ b/workhorse/internal/upstream/upstream_test.go
@@ -3,7 +3,6 @@ package upstream
import (
"fmt"
"io"
- "io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -287,7 +286,7 @@ func runTestCases(t *testing.T, ws *httptest.Server, testCases []testCase) {
require.NoError(t, err)
defer resp.Body.Close()

- body, err := ioutil.ReadAll(resp.Body)
+ body, err := io.ReadAll(resp.Body)
require.NoError(t, err)

require.Equal(t, 200, resp.StatusCode, "response code")
diff --git a/workhorse/internal/utils/svg/README.md b/workhorse/internal/utils/svg/README.md
index e5531f47473..4057316927c 100644
--- a/workhorse/internal/utils/svg/README.md
+++ b/workhorse/internal/utils/svg/README.md
@@ -15,13 +15,13 @@ package main
import (
"fmt"
- "io/ioutil"
+ "os"
svg "github.com/h2non/go-is-svg"
)

func main() {
- buf, err := ioutil.ReadFile("_example/example.svg")
+ buf, err := os.ReadFile("_example/example.svg")
if err != nil {
fmt.Printf("Error: %s\n", err)
return
diff --git a/workhorse/internal/zipartifacts/metadata_test.go b/workhorse/internal/zipartifacts/metadata_test.go
index 353ed4376f6..e4799ba4a59 100644
--- a/workhorse/internal/zipartifacts/metadata_test.go
+++ b/workhorse/internal/zipartifacts/metadata_test.go
@@ -6,7 +6,6 @@ import (
"context"
"fmt"
"io"
- "io/ioutil"
"os"
"testing"
@@ -41,7 +40,7 @@ func validateMetadata(r io.Reader) error {
return err
}
- meta, err := ioutil.ReadAll(gz)
+ meta, err := io.ReadAll(gz)
if err != nil {
return err
}
@@ -59,7 +58,7 @@ func validateMetadata(r io.Reader) error {
func TestGenerateZipMetadataFromFile(t *testing.T) {
var metaBuffer bytes.Buffer
- f, err := ioutil.TempFile("", "workhorse-metadata.zip-")
+ f, err := os.CreateTemp("", "workhorse-metadata.zip-")
if f != nil {
defer os.Remove(f.Name())
}
@@ -84,7 +83,7 @@ func TestGenerateZipMetadataFromFile(t *testing.T) {
}
func TestErrNotAZip(t *testing.T) {
- f, err := ioutil.TempFile("", "workhorse-metadata.zip-")
+ f, err := os.CreateTemp("", "workhorse-metadata.zip-")
if f != nil {
defer os.Remove(f.Name())
}