Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-01-18 22:00:14 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2023-01-18 22:00:14 +0300
commit05f0ebba3a2c8ddf39e436f412dc2ab5bf1353b2 (patch)
tree11d0f2a6ec31c7793c184106cedc2ded3d9a2cc5 /workhorse
parentec73467c23693d0db63a797d10194da9e72a74af (diff)
Add latest changes from gitlab-org/gitlab@15-8-stable-eev15.8.0-rc42
Diffstat (limited to 'workhorse')
-rw-r--r--workhorse/Makefile7
-rwxr-xr-xworkhorse/_support/detect-external-tests.sh11
-rw-r--r--workhorse/go.mod10
-rw-r--r--workhorse/go.sum24
-rw-r--r--workhorse/internal/api/api.go4
-rw-r--r--workhorse/internal/lsif_transformer/parser/docs.go10
-rw-r--r--workhorse/internal/lsif_transformer/parser/docs_test.go32
-rw-r--r--workhorse/internal/upload/body_uploader_test.go12
-rw-r--r--workhorse/internal/upload/destination/destination.go10
-rw-r--r--workhorse/internal/upload/destination/destination_test.go148
-rw-r--r--workhorse/internal/upload/destination/filestore/filestore.go4
-rw-r--r--workhorse/internal/upload/destination/multi_hash.go39
-rw-r--r--workhorse/internal/upload/destination/multi_hash_test.go52
-rw-r--r--workhorse/internal/upload/destination/objectstore/gocloud_object_test.go11
-rw-r--r--workhorse/internal/upload/destination/objectstore/multipart.go10
-rw-r--r--workhorse/internal/upload/destination/objectstore/multipart_test.go5
-rw-r--r--workhorse/internal/upload/destination/objectstore/object_test.go11
-rw-r--r--workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go38
-rw-r--r--workhorse/internal/upload/destination/objectstore/s3_object_test.go11
-rw-r--r--workhorse/internal/upload/destination/objectstore/s3api/s3api.go37
-rw-r--r--workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go10
-rw-r--r--workhorse/internal/upload/destination/objectstore/uploader.go34
-rw-r--r--workhorse/internal/upload/destination/upload_opts.go26
-rw-r--r--workhorse/internal/upload/destination/upload_opts_test.go25
-rw-r--r--workhorse/internal/upload/object_storage_preparer_test.go7
-rw-r--r--workhorse/internal/upload/uploads_test.go9
-rw-r--r--workhorse/internal/zipartifacts/metadata_test.go12
-rw-r--r--workhorse/upload_test.go163
28 files changed, 466 insertions, 306 deletions
diff --git a/workhorse/Makefile b/workhorse/Makefile
index a0412f5e2e1..4236a1a0d8e 100644
--- a/workhorse/Makefile
+++ b/workhorse/Makefile
@@ -144,7 +144,7 @@ testdata/scratch:
mkdir -p testdata/scratch
.PHONY: verify
-verify: lint vet detect-context detect-assert check-formatting staticcheck deps-check
+verify: lint vet detect-context detect-assert detect-external-tests check-formatting staticcheck deps-check
.PHONY: lint
lint:
@@ -167,6 +167,11 @@ detect-assert:
$(call message,Verify: $@)
_support/detect-assert.sh
+.PHONY: detect-external-tests
+detect-external-tests:
+ $(call message,Verify: $@)
+ _support/detect-external-tests.sh
+
.PHONY: check-formatting
check-formatting: install-goimports
$(call message,Verify: $@)
diff --git a/workhorse/_support/detect-external-tests.sh b/workhorse/_support/detect-external-tests.sh
new file mode 100755
index 00000000000..865bd1447e1
--- /dev/null
+++ b/workhorse/_support/detect-external-tests.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+go list -f '{{join .XTestGoFiles "\n"}}' ./... | awk '
+ { print }
+ END {
+ if(NR>0) {
+ print "Please avoid using external test packages (package foobar_test) in Workhorse."
+ print "See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107373."
+ exit(1)
+ }
+ }
+'
diff --git a/workhorse/go.mod b/workhorse/go.mod
index 80c017ad1cb..bc40134cdeb 100644
--- a/workhorse/go.mod
+++ b/workhorse/go.mod
@@ -7,7 +7,7 @@ require (
github.com/BurntSushi/toml v1.2.1
github.com/FZambia/sentinel v1.1.1
github.com/alecthomas/chroma/v2 v2.4.0
- github.com/aws/aws-sdk-go v1.44.157
+ github.com/aws/aws-sdk-go v1.44.180
github.com/disintegration/imaging v1.6.2
github.com/getsentry/raven-go v0.2.0
github.com/golang-jwt/jwt/v4 v4.4.3
@@ -17,7 +17,7 @@ require (
github.com/gorilla/websocket v1.5.0
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0
- github.com/johannesboyne/gofakes3 v0.0.0-20221128113635-c2f5cc6b5294
+ github.com/johannesboyne/gofakes3 v0.0.0-20230108161031-df26ca44a1e9
github.com/jpillora/backoff v1.0.0
github.com/mitchellh/copystructure v1.2.0
github.com/prometheus/client_golang v1.14.0
@@ -26,9 +26,9 @@ require (
github.com/sirupsen/logrus v1.9.0
github.com/smartystreets/goconvey v1.7.2
github.com/stretchr/testify v1.8.1
- gitlab.com/gitlab-org/gitaly/v15 v15.6.2
+ gitlab.com/gitlab-org/gitaly/v15 v15.7.0
gitlab.com/gitlab-org/golang-archive-zip v0.1.1
- gitlab.com/gitlab-org/labkit v1.16.1
+ gitlab.com/gitlab-org/labkit v1.17.0
gocloud.dev v0.27.0
golang.org/x/image v0.0.0-20220722155232-062f8c9fd539
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616
@@ -111,7 +111,7 @@ require (
golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e // indirect
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
golang.org/x/sync v0.1.0 // indirect
- golang.org/x/sys v0.1.0 // indirect
+ golang.org/x/sys v0.2.0 // indirect
golang.org/x/text v0.4.0 // indirect
golang.org/x/time v0.2.0 // indirect
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
diff --git a/workhorse/go.sum b/workhorse/go.sum
index 5e095f5b417..836c53afcd2 100644
--- a/workhorse/go.sum
+++ b/workhorse/go.sum
@@ -221,15 +221,15 @@ github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:W
github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU=
github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
-github.com/aws/aws-sdk-go v1.17.4/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
+github.com/aws/aws-sdk-go v1.33.0/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
github.com/aws/aws-sdk-go v1.38.35/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
github.com/aws/aws-sdk-go v1.43.11/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
github.com/aws/aws-sdk-go v1.43.31/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
github.com/aws/aws-sdk-go v1.44.45/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
github.com/aws/aws-sdk-go v1.44.68/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
-github.com/aws/aws-sdk-go v1.44.157 h1:JVBPpEWC8+yA7CbfAuTl/ZFFlHS3yoqWFqxFyTCISwg=
-github.com/aws/aws-sdk-go v1.44.157/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.180 h1:VLZuAHI9fa/3WME5JjpVjcPCNfpGHVMiHx8sLHWhMgI=
+github.com/aws/aws-sdk-go v1.44.180/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g=
github.com/aws/aws-sdk-go-v2 v1.16.8 h1:gOe9UPR98XSf7oEJCcojYg+N2/jCRm4DdeIsP85pIyQ=
github.com/aws/aws-sdk-go-v2 v1.16.8/go.mod h1:6CpKuLXg2w7If3ABZCl/qZ6rEgwtjZTn4eAf4RcEyuw=
@@ -629,6 +629,7 @@ github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
github.com/go-resty/resty/v2 v2.1.1-0.20191201195748-d7b97669fe48/go.mod h1:dZGr0i9PLlaaTD4H/hoZIDjQ+r6xq8mgbRzHZf7f2J8=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
+github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.6.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
@@ -974,13 +975,14 @@ github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
+github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8=
-github.com/johannesboyne/gofakes3 v0.0.0-20221128113635-c2f5cc6b5294 h1:AJISYN7tPo3lGqwYmEYQdlftcQz48i8LNk/BRUKCTig=
-github.com/johannesboyne/gofakes3 v0.0.0-20221128113635-c2f5cc6b5294/go.mod h1:LIAXxPvcUXwOcTIj9LSNSUpE9/eMHalTWxsP/kmWxQI=
+github.com/johannesboyne/gofakes3 v0.0.0-20230108161031-df26ca44a1e9 h1:PqhUbDge60cL99naOP9m3W0MiQtWc5kwteQQ9oU36PA=
+github.com/johannesboyne/gofakes3 v0.0.0-20230108161031-df26ca44a1e9/go.mod h1:Cnosl0cRZIfKjTMuH49sQog2LeNsU5Hf4WnPIDWIDV0=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8=
@@ -1486,12 +1488,12 @@ github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
-gitlab.com/gitlab-org/gitaly/v15 v15.6.2 h1:ivbMoXWgkDSJebuIFtPYGAIQ9/2P5ShxJoHt0cflwfo=
-gitlab.com/gitlab-org/gitaly/v15 v15.6.2/go.mod h1:RKa+3ADKfTonDb1pe8AtppdNHNeOM+ChtMmB7T0QWhY=
+gitlab.com/gitlab-org/gitaly/v15 v15.7.0 h1:dpcupsBqQSjp+AJ3Wy2UdNSIZKUAtyXbUcktq2uRakw=
+gitlab.com/gitlab-org/gitaly/v15 v15.7.0/go.mod h1:s37u+W94lg3T7cv+i+v5WtstyHvuKV1JlwYJNznZVJE=
gitlab.com/gitlab-org/golang-archive-zip v0.1.1 h1:35k9giivbxwF03+8A05Cm8YoxoakU8FBCj5gysjCTCE=
gitlab.com/gitlab-org/golang-archive-zip v0.1.1/go.mod h1:ZDtqpWPGPB9qBuZnZDrKQjIdJtkN7ZAoVwhT6H2o2kE=
-gitlab.com/gitlab-org/labkit v1.16.1 h1:J+HmNVR5bvPfrv9/fgKICFis2nmEugRXHMeRPvsVZUg=
-gitlab.com/gitlab-org/labkit v1.16.1/go.mod h1:tzZLVHeb0/Jrm9fPFdYuCrKmrYjfjEA0NmuLPXvvM+0=
+gitlab.com/gitlab-org/labkit v1.17.0 h1:mEkoLzXorLNdt8NkfgYS5xMDhdqCsIJaeEVtSf7d8cU=
+gitlab.com/gitlab-org/labkit v1.17.0/go.mod h1:nlLJvKgXcIclqWMI+rga2TckNBVHOtRCHMxBoVByNoE=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
@@ -1680,7 +1682,6 @@ golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190310074541-c10a0554eabf/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
@@ -1935,8 +1936,9 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220731174439-a90be440212d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index 1758bb5a6a8..8a7fb191ec4 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -98,6 +98,8 @@ type RemoteObject struct {
GetURL string
// DeleteURL is a presigned S3 RemoveObject URL
DeleteURL string
+ // Whether Workhorse needs to delete the temporary object or not.
+ SkipDelete bool
// StoreURL is the temporary presigned S3 PutObject URL to which upload the first found file
StoreURL string
// Boolean to indicate whether to use headers included in PutHeaders
@@ -161,6 +163,8 @@ type Response struct {
ProcessLsif bool
// The maximum accepted size in bytes of the upload
MaximumSize int64
+ // A list of permitted hash functions. If empty, then all available are permitted.
+ UploadHashFunctions []string
}
type GitalyServer struct {
diff --git a/workhorse/internal/lsif_transformer/parser/docs.go b/workhorse/internal/lsif_transformer/parser/docs.go
index f87bc7fd86c..9cdec4c8d42 100644
--- a/workhorse/internal/lsif_transformer/parser/docs.go
+++ b/workhorse/internal/lsif_transformer/parser/docs.go
@@ -5,6 +5,7 @@ import (
"bufio"
"encoding/json"
"io"
+ "path/filepath"
"strings"
)
@@ -116,7 +117,7 @@ func (d *Docs) addMetadata(line []byte) error {
return err
}
- d.Root = strings.TrimSpace(metadata.Root) + "/"
+ d.Root = strings.TrimSpace(metadata.Root)
return nil
}
@@ -127,7 +128,12 @@ func (d *Docs) addDocument(line []byte) error {
return err
}
- d.Entries[doc.Id] = strings.TrimPrefix(doc.Uri, d.Root)
+ relativePath, err := filepath.Rel(d.Root, doc.Uri)
+ if err != nil {
+ relativePath = doc.Uri
+ }
+
+ d.Entries[doc.Id] = relativePath
return nil
}
diff --git a/workhorse/internal/lsif_transformer/parser/docs_test.go b/workhorse/internal/lsif_transformer/parser/docs_test.go
index 24e3eba8ac5..805bc53c0b7 100644
--- a/workhorse/internal/lsif_transformer/parser/docs_test.go
+++ b/workhorse/internal/lsif_transformer/parser/docs_test.go
@@ -18,16 +18,32 @@ func TestParse(t *testing.T) {
require.NoError(t, err)
defer d.Close()
- data := []byte(`{"id":"1","label":"metaData","projectRoot":"file:///Users/nested"}` + "\n")
- data = append(data, createLine("2", "document", "file:///Users/nested/file.rb")...)
- data = append(data, createLine("3", "document", "file:///Users/nested/folder/file.rb")...)
- data = append(data, createLine("4", "document", "file:///Users/wrong/file.rb")...)
+ for _, root := range []string{
+ "file:///Users/nested",
+ "file:///Users/nested/.",
+ "file:///Users/nested/",
+ } {
+ t.Run("Document with root: "+root, func(t *testing.T) {
+ data := []byte(`{"id":"1","label":"metaData","projectRoot":"` + root + `"}` + "\n")
+ data = append(data, createLine("2", "document", "file:///Users/nested/file.rb")...)
+ data = append(data, createLine("3", "document", "file:///Users/nested/folder/file.rb")...)
- require.NoError(t, d.Parse(bytes.NewReader(data)))
+ require.NoError(t, d.Parse(bytes.NewReader(data)))
+
+ require.Equal(t, "file.rb", d.Entries[2])
+ require.Equal(t, "folder/file.rb", d.Entries[3])
+ })
+ }
+
+ t.Run("Relative path cannot be calculated", func(t *testing.T) {
+ originalUri := "file:///Users/nested/folder/file.rb"
+ data := []byte(`{"id":"1","label":"metaData","projectRoot":"/a"}` + "\n")
+ data = append(data, createLine("2", "document", originalUri)...)
+
+ require.NoError(t, d.Parse(bytes.NewReader(data)))
- require.Equal(t, d.Entries[2], "file.rb")
- require.Equal(t, d.Entries[3], "folder/file.rb")
- require.Equal(t, d.Entries[4], "file:///Users/wrong/file.rb")
+ require.Equal(t, originalUri, d.Entries[2])
+ })
}
func TestParseContainsLine(t *testing.T) {
diff --git a/workhorse/internal/upload/body_uploader_test.go b/workhorse/internal/upload/body_uploader_test.go
index eff33757845..837d119e72e 100644
--- a/workhorse/internal/upload/body_uploader_test.go
+++ b/workhorse/internal/upload/body_uploader_test.go
@@ -92,11 +92,7 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
require.Equal(t, "application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Wrong Content-Type header")
- if destination.FIPSEnabled() {
- require.NotContains(t, r.PostForm, "file.md5")
- } else {
- require.Contains(t, r.PostForm, "file.md5")
- }
+ require.Contains(t, r.PostForm, "file.md5")
require.Contains(t, r.PostForm, "file.sha1")
require.Contains(t, r.PostForm, "file.sha256")
require.Contains(t, r.PostForm, "file.sha512")
@@ -123,11 +119,7 @@ func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
require.Contains(t, uploadFields, "remote_url")
require.Contains(t, uploadFields, "remote_id")
require.Contains(t, uploadFields, "size")
- if destination.FIPSEnabled() {
- require.NotContains(t, uploadFields, "md5")
- } else {
- require.Contains(t, uploadFields, "md5")
- }
+ require.Contains(t, uploadFields, "md5")
require.Contains(t, uploadFields, "sha1")
require.Contains(t, uploadFields, "sha256")
require.Contains(t, uploadFields, "sha512")
diff --git a/workhorse/internal/upload/destination/destination.go b/workhorse/internal/upload/destination/destination.go
index 5e145e2cb2a..a9fb81540d5 100644
--- a/workhorse/internal/upload/destination/destination.go
+++ b/workhorse/internal/upload/destination/destination.go
@@ -108,6 +108,7 @@ func (fh *FileHandler) GitLabFinalizeFields(prefix string) (map[string]string, e
type consumer interface {
Consume(context.Context, io.Reader, time.Time) (int64, error)
+ ConsumeWithoutDelete(context.Context, io.Reader, time.Time) (int64, error)
}
// Upload persists the provided reader content to all the location specified in opts. A cleanup will be performed once ctx is Done
@@ -120,7 +121,7 @@ func Upload(ctx context.Context, reader io.Reader, size int64, name string, opts
}
uploadStartTime := time.Now()
defer func() { fh.uploadDuration = time.Since(uploadStartTime).Seconds() }()
- hashes := newMultiHash()
+ hashes := newMultiHash(opts.UploadHashFunctions)
reader = io.TeeReader(reader, hashes.Writer)
var clientMode string
@@ -185,7 +186,12 @@ func Upload(ctx context.Context, reader io.Reader, size int64, name string, opts
reader = hlr
}
- fh.Size, err = uploadDestination.Consume(ctx, reader, opts.Deadline)
+ if opts.SkipDelete {
+ fh.Size, err = uploadDestination.ConsumeWithoutDelete(ctx, reader, opts.Deadline)
+ } else {
+ fh.Size, err = uploadDestination.Consume(ctx, reader, opts.Deadline)
+ }
+
if err != nil {
if (err == objectstore.ErrNotEnoughParts) || (hlr != nil && hlr.n < 0) {
err = ErrEntityTooLarge
diff --git a/workhorse/internal/upload/destination/destination_test.go b/workhorse/internal/upload/destination/destination_test.go
index 97645be168f..69dd02ca7c2 100644
--- a/workhorse/internal/upload/destination/destination_test.go
+++ b/workhorse/internal/upload/destination/destination_test.go
@@ -1,4 +1,4 @@
-package destination_test
+package destination
import (
"context"
@@ -17,12 +17,11 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func testDeadline() time.Time {
- return time.Now().Add(destination.DefaultObjectStoreTimeout)
+ return time.Now().Add(DefaultObjectStoreTimeout)
}
func requireFileGetsRemovedAsync(t *testing.T, filePath string) {
@@ -44,10 +43,10 @@ func TestUploadWrongSize(t *testing.T) {
tmpFolder := t.TempDir()
- opts := &destination.UploadOpts{LocalTempPath: tmpFolder}
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, "upload", opts)
+ opts := &UploadOpts{LocalTempPath: tmpFolder}
+ fh, err := Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, "upload", opts)
require.Error(t, err)
- _, isSizeError := err.(destination.SizeError)
+ _, isSizeError := err.(SizeError)
require.True(t, isSizeError, "Should fail with SizeError")
require.Nil(t, fh)
}
@@ -58,10 +57,10 @@ func TestUploadWithKnownSizeExceedLimit(t *testing.T) {
tmpFolder := t.TempDir()
- opts := &destination.UploadOpts{LocalTempPath: tmpFolder, MaximumSize: test.ObjectSize - 1}
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", opts)
+ opts := &UploadOpts{LocalTempPath: tmpFolder, MaximumSize: test.ObjectSize - 1}
+ fh, err := Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", opts)
require.Error(t, err)
- _, isSizeError := err.(destination.SizeError)
+ _, isSizeError := err.(SizeError)
require.True(t, isSizeError, "Should fail with SizeError")
require.Nil(t, fh)
}
@@ -72,9 +71,9 @@ func TestUploadWithUnknownSizeExceedLimit(t *testing.T) {
tmpFolder := t.TempDir()
- opts := &destination.UploadOpts{LocalTempPath: tmpFolder, MaximumSize: test.ObjectSize - 1}
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), -1, "upload", opts)
- require.Equal(t, err, destination.ErrEntityTooLarge)
+ opts := &UploadOpts{LocalTempPath: tmpFolder, MaximumSize: test.ObjectSize - 1}
+ fh, err := Upload(ctx, strings.NewReader(test.ObjectContent), -1, "upload", opts)
+ require.Equal(t, err, ErrEntityTooLarge)
require.Nil(t, fh)
}
@@ -94,7 +93,7 @@ func TestUploadWrongETag(t *testing.T) {
objectURL := ts.URL + test.ObjectPath
- opts := &destination.UploadOpts{
+ opts := &UploadOpts{
RemoteID: "test-file",
RemoteURL: objectURL,
PresignedPut: objectURL + "?Signature=ASignature",
@@ -110,7 +109,7 @@ func TestUploadWrongETag(t *testing.T) {
osStub.InitiateMultipartUpload(test.ObjectPath)
}
ctx, cancel := context.WithCancel(context.Background())
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", opts)
+ fh, err := Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", opts)
require.Nil(t, fh)
require.Error(t, err)
require.Equal(t, 1, osStub.PutsCnt(), "File not uploaded")
@@ -135,18 +134,22 @@ func TestUpload(t *testing.T) {
tmpFolder := t.TempDir()
tests := []struct {
- name string
- local bool
- remote remote
+ name string
+ local bool
+ remote remote
+ skipDelete bool
}{
{name: "Local only", local: true},
{name: "Remote Single only", remote: remoteSingle},
{name: "Remote Multipart only", remote: remoteMultipart},
+ {name: "Local only With SkipDelete", local: true, skipDelete: true},
+ {name: "Remote Single only With SkipDelete", remote: remoteSingle, skipDelete: true},
+ {name: "Remote Multipart only With SkipDelete", remote: remoteMultipart, skipDelete: true},
}
for _, spec := range tests {
t.Run(spec.name, func(t *testing.T) {
- var opts destination.UploadOpts
+ var opts UploadOpts
var expectedDeletes, expectedPuts int
osStub, ts := test.StartObjectStore()
@@ -184,10 +187,16 @@ func TestUpload(t *testing.T) {
opts.LocalTempPath = tmpFolder
}
+ opts.SkipDelete = spec.skipDelete
+
+ if opts.SkipDelete {
+ expectedDeletes = 0
+ }
+
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
+ fh, err := Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
require.NoError(t, err)
require.NotNil(t, fh)
@@ -206,11 +215,7 @@ func TestUpload(t *testing.T) {
}
require.Equal(t, test.ObjectSize, fh.Size)
- if destination.FIPSEnabled() {
- require.Empty(t, fh.MD5())
- } else {
- require.Equal(t, test.ObjectMD5, fh.MD5())
- }
+ require.Equal(t, test.ObjectMD5, fh.MD5())
require.Equal(t, test.ObjectSHA256, fh.SHA256())
require.Equal(t, expectedPuts, osStub.PutsCnt(), "ObjectStore PutObject count mismatch")
@@ -255,7 +260,7 @@ func TestUploadWithS3WorkhorseClient(t *testing.T) {
name: "unknown object size with limit",
objectSize: -1,
maxSize: test.ObjectSize - 1,
- expectedErr: destination.ErrEntityTooLarge,
+ expectedErr: ErrEntityTooLarge,
},
}
@@ -269,12 +274,12 @@ func TestUploadWithS3WorkhorseClient(t *testing.T) {
defer cancel()
remoteObject := "tmp/test-file/1"
- opts := destination.UploadOpts{
+ opts := UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: destination.ObjectStorageConfig{
+ ObjectStorageConfig: ObjectStorageConfig{
Provider: "AWS",
S3Credentials: s3Creds,
S3Config: s3Config,
@@ -282,7 +287,7 @@ func TestUploadWithS3WorkhorseClient(t *testing.T) {
MaximumSize: tc.maxSize,
}
- _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), tc.objectSize, "upload", &opts)
+ _, err := Upload(ctx, strings.NewReader(test.ObjectContent), tc.objectSize, "upload", &opts)
if tc.expectedErr == nil {
require.NoError(t, err)
@@ -302,19 +307,19 @@ func TestUploadWithAzureWorkhorseClient(t *testing.T) {
defer cancel()
remoteObject := "tmp/test-file/1"
- opts := destination.UploadOpts{
+ opts := UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: destination.ObjectStorageConfig{
+ ObjectStorageConfig: ObjectStorageConfig{
Provider: "AzureRM",
URLMux: mux,
GoCloudConfig: config.GoCloudConfig{URL: "azblob://test-container"},
},
}
- _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
+ _, err := Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
require.NoError(t, err)
test.GoCloudObjectExists(t, bucketDir, remoteObject)
@@ -327,48 +332,65 @@ func TestUploadWithUnknownGoCloudScheme(t *testing.T) {
mux := new(blob.URLMux)
remoteObject := "tmp/test-file/1"
- opts := destination.UploadOpts{
+ opts := UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: destination.ObjectStorageConfig{
+ ObjectStorageConfig: ObjectStorageConfig{
Provider: "SomeCloud",
URLMux: mux,
GoCloudConfig: config.GoCloudConfig{URL: "foo://test-container"},
},
}
- _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
+ _, err := Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
require.Error(t, err)
}
func TestUploadMultipartInBodyFailure(t *testing.T) {
- osStub, ts := test.StartObjectStore()
- defer ts.Close()
-
- // this is a broken path because it contains bucket name but no key
- // this is the only way to get an in-body failure from our ObjectStoreStub
- objectPath := "/bucket-but-no-object-key"
- objectURL := ts.URL + objectPath
- opts := destination.UploadOpts{
- RemoteID: "test-file",
- RemoteURL: objectURL,
- PartSize: test.ObjectSize,
- PresignedParts: []string{objectURL + "?partNumber=1", objectURL + "?partNumber=2"},
- PresignedCompleteMultipart: objectURL + "?Signature=CompleteSignature",
- Deadline: testDeadline(),
+ tests := []struct {
+ name string
+ skipDelete bool
+ }{
+ {name: "With skipDelete false", skipDelete: false},
+ {name: "With skipDelete true", skipDelete: true},
}
- osStub.InitiateMultipartUpload(objectPath)
+ for _, spec := range tests {
+ t.Run(spec.name, func(t *testing.T) {
+ osStub, ts := test.StartObjectStore()
+ defer ts.Close()
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
+ // this is a broken path because it contains bucket name but no key
+ // this is the only way to get an in-body failure from our ObjectStoreStub
+ objectPath := "/bucket-but-no-object-key"
+ objectURL := ts.URL + objectPath
+ opts := UploadOpts{
+ RemoteID: "test-file",
+ RemoteURL: objectURL,
+ PartSize: test.ObjectSize,
+ PresignedParts: []string{objectURL + "?partNumber=1", objectURL + "?partNumber=2"},
+ PresignedCompleteMultipart: objectURL + "?Signature=CompleteSignature",
+ PresignedDelete: objectURL + "?Signature=AnotherSignature",
+ Deadline: testDeadline(),
+ SkipDelete: spec.skipDelete,
+ }
- fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
- require.Nil(t, fh)
- require.Error(t, err)
- require.EqualError(t, err, test.MultipartUploadInternalError().Error())
+ osStub.InitiateMultipartUpload(objectPath)
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ fh, err := Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, "upload", &opts)
+ require.Nil(t, fh)
+ require.Error(t, err)
+ require.EqualError(t, err, test.MultipartUploadInternalError().Error())
+
+ cancel() // this will trigger an async cleanup
+ requireObjectStoreDeletedAsync(t, 1, osStub)
+ })
+ }
}
func TestUploadRemoteFileWithLimit(t *testing.T) {
@@ -405,20 +427,20 @@ func TestUploadRemoteFileWithLimit(t *testing.T) {
testData: test.ObjectContent,
objectSize: -1,
maxSize: test.ObjectSize - 1,
- expectedErr: destination.ErrEntityTooLarge,
+ expectedErr: ErrEntityTooLarge,
},
{
name: "large object with unknown size with limit",
testData: string(make([]byte, 20000)),
objectSize: -1,
maxSize: 19000,
- expectedErr: destination.ErrEntityTooLarge,
+ expectedErr: ErrEntityTooLarge,
},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
- var opts destination.UploadOpts
+ var opts UploadOpts
for _, remoteType := range remoteTypes {
osStub, ts := test.StartObjectStore()
@@ -454,7 +476,7 @@ func TestUploadRemoteFileWithLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := destination.Upload(ctx, strings.NewReader(tc.testData), tc.objectSize, "upload", &opts)
+ fh, err := Upload(ctx, strings.NewReader(tc.testData), tc.objectSize, "upload", &opts)
if tc.expectedErr == nil {
require.NoError(t, err)
@@ -468,7 +490,7 @@ func TestUploadRemoteFileWithLimit(t *testing.T) {
}
}
-func checkFileHandlerWithFields(t *testing.T, fh *destination.FileHandler, fields map[string]string, prefix string) {
+func checkFileHandlerWithFields(t *testing.T, fh *FileHandler, fields map[string]string, prefix string) {
key := func(field string) string {
if prefix == "" {
return field
@@ -482,11 +504,7 @@ func checkFileHandlerWithFields(t *testing.T, fh *destination.FileHandler, field
require.Equal(t, fh.RemoteURL, fields[key("remote_url")])
require.Equal(t, fh.RemoteID, fields[key("remote_id")])
require.Equal(t, strconv.FormatInt(test.ObjectSize, 10), fields[key("size")])
- if destination.FIPSEnabled() {
- require.Empty(t, fields[key("md5")])
- } else {
- require.Equal(t, test.ObjectMD5, fields[key("md5")])
- }
+ require.Equal(t, test.ObjectMD5, fields[key("md5")])
require.Equal(t, test.ObjectSHA1, fields[key("sha1")])
require.Equal(t, test.ObjectSHA256, fields[key("sha256")])
require.Equal(t, test.ObjectSHA512, fields[key("sha512")])
diff --git a/workhorse/internal/upload/destination/filestore/filestore.go b/workhorse/internal/upload/destination/filestore/filestore.go
index 2d88874bf25..6b2d8270b51 100644
--- a/workhorse/internal/upload/destination/filestore/filestore.go
+++ b/workhorse/internal/upload/destination/filestore/filestore.go
@@ -19,3 +19,7 @@ func (lf *LocalFile) Consume(_ context.Context, r io.Reader, _ time.Time) (int64
}
return n, err
}
+
+func (lf *LocalFile) ConsumeWithoutDelete(outerCtx context.Context, reader io.Reader, deadLine time.Time) (_ int64, err error) {
+ return lf.Consume(outerCtx, reader, deadLine)
+}
diff --git a/workhorse/internal/upload/destination/multi_hash.go b/workhorse/internal/upload/destination/multi_hash.go
index 8d5bf4424a8..3f8b0cbd903 100644
--- a/workhorse/internal/upload/destination/multi_hash.go
+++ b/workhorse/internal/upload/destination/multi_hash.go
@@ -8,9 +8,6 @@ import (
"encoding/hex"
"hash"
"io"
- "os"
-
- "gitlab.com/gitlab-org/labkit/fips"
)
var hashFactories = map[string](func() hash.Hash){
@@ -20,39 +17,39 @@ var hashFactories = map[string](func() hash.Hash){
"sha512": sha512.New,
}
-var fipsHashFactories = map[string](func() hash.Hash){
- "sha1": sha1.New,
- "sha256": sha256.New,
- "sha512": sha512.New,
-}
-
func factories() map[string](func() hash.Hash) {
- if FIPSEnabled() {
- return fipsHashFactories
- }
-
return hashFactories
}
-func FIPSEnabled() bool {
- if fips.Enabled() {
+type multiHash struct {
+ io.Writer
+ hashes map[string]hash.Hash
+}
+
+func permittedHashFunction(hashFunctions []string, hash string) bool {
+ if len(hashFunctions) == 0 {
return true
}
- return os.Getenv("WORKHORSE_TEST_FIPS_ENABLED") == "1"
-}
+ for _, name := range hashFunctions {
+ if name == hash {
+ return true
+ }
+ }
-type multiHash struct {
- io.Writer
- hashes map[string]hash.Hash
+ return false
}
-func newMultiHash() (m *multiHash) {
+func newMultiHash(hashFunctions []string) (m *multiHash) {
m = &multiHash{}
m.hashes = make(map[string]hash.Hash)
var writers []io.Writer
for hash, hashFactory := range factories() {
+ if !permittedHashFunction(hashFunctions, hash) {
+ continue
+ }
+
writer := hashFactory()
m.hashes[hash] = writer
diff --git a/workhorse/internal/upload/destination/multi_hash_test.go b/workhorse/internal/upload/destination/multi_hash_test.go
new file mode 100644
index 00000000000..9a976f5d25d
--- /dev/null
+++ b/workhorse/internal/upload/destination/multi_hash_test.go
@@ -0,0 +1,52 @@
+package destination
+
+import (
+ "sort"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewMultiHash(t *testing.T) {
+ tests := []struct {
+ name string
+ allowedHashes []string
+ expectedHashes []string
+ }{
+ {
+ name: "default",
+ allowedHashes: nil,
+ expectedHashes: []string{"md5", "sha1", "sha256", "sha512"},
+ },
+ {
+ name: "blank",
+ allowedHashes: []string{},
+ expectedHashes: []string{"md5", "sha1", "sha256", "sha512"},
+ },
+ {
+ name: "no MD5",
+ allowedHashes: []string{"sha1", "sha256", "sha512"},
+ expectedHashes: []string{"sha1", "sha256", "sha512"},
+ },
+
+ {
+ name: "unlisted hash",
+ allowedHashes: []string{"sha1", "sha256", "sha512", "sha3-256"},
+ expectedHashes: []string{"sha1", "sha256", "sha512"},
+ },
+ }
+
+ for _, test := range tests {
+ mh := newMultiHash(test.allowedHashes)
+
+ require.Equal(t, len(test.expectedHashes), len(mh.hashes))
+
+ var keys []string
+ for key := range mh.hashes {
+ keys = append(keys, key)
+ }
+
+ sort.Strings(keys)
+ require.Equal(t, test.expectedHashes, keys)
+ }
+}
diff --git a/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go b/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
index 55d886087be..5a6a4b90b34 100644
--- a/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
@@ -1,4 +1,4 @@
-package objectstore_test
+package objectstore
import (
"context"
@@ -10,7 +10,6 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
@@ -22,8 +21,8 @@ func TestGoCloudObjectUpload(t *testing.T) {
objectName := "test.png"
testURL := "azuretest://azure.example.com/test-container"
- p := &objectstore.GoCloudObjectParams{Ctx: ctx, Mux: mux, BucketURL: testURL, ObjectName: objectName}
- object, err := objectstore.NewGoCloudObject(p)
+ p := &GoCloudObjectParams{Ctx: ctx, Mux: mux, BucketURL: testURL, ObjectName: objectName}
+ object, err := NewGoCloudObject(p)
require.NotNil(t, object)
require.NoError(t, err)
@@ -48,8 +47,8 @@ func TestGoCloudObjectUpload(t *testing.T) {
if exists {
return fmt.Errorf("file %s is still present", objectName)
- } else {
- return nil
}
+
+ return nil
})
}
diff --git a/workhorse/internal/upload/destination/objectstore/multipart.go b/workhorse/internal/upload/destination/objectstore/multipart.go
index df336d2d901..900ca040dad 100644
--- a/workhorse/internal/upload/destination/objectstore/multipart.go
+++ b/workhorse/internal/upload/destination/objectstore/multipart.go
@@ -11,6 +11,8 @@ import (
"os"
"gitlab.com/gitlab-org/labkit/mask"
+
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/s3api"
)
// ErrNotEnoughParts will be used when writing more than size * len(partURLs)
@@ -51,7 +53,7 @@ func NewMultipart(partURLs []string, completeURL, abortURL, deleteURL string, pu
}
func (m *Multipart) Upload(ctx context.Context, r io.Reader) error {
- cmu := &CompleteMultipartUpload{}
+ cmu := &s3api.CompleteMultipartUpload{}
for i, partURL := range m.PartURLs {
src := io.LimitReader(r, m.partSize)
part, err := m.readAndUploadOnePart(ctx, partURL, m.PutHeaders, src, i+1)
@@ -91,7 +93,7 @@ func (m *Multipart) Delete() {
deleteURL(m.DeleteURL)
}
-func (m *Multipart) readAndUploadOnePart(ctx context.Context, partURL string, putHeaders map[string]string, src io.Reader, partNumber int) (*completeMultipartUploadPart, error) {
+func (m *Multipart) readAndUploadOnePart(ctx context.Context, partURL string, putHeaders map[string]string, src io.Reader, partNumber int) (*s3api.CompleteMultipartUploadPart, error) {
file, err := os.CreateTemp("", "part-buffer")
if err != nil {
return nil, fmt.Errorf("create temporary buffer file: %v", err)
@@ -118,7 +120,7 @@ func (m *Multipart) readAndUploadOnePart(ctx context.Context, partURL string, pu
if err != nil {
return nil, fmt.Errorf("upload part %d: %v", partNumber, err)
}
- return &completeMultipartUploadPart{PartNumber: partNumber, ETag: etag}, nil
+ return &s3api.CompleteMultipartUploadPart{PartNumber: partNumber, ETag: etag}, nil
}
func (m *Multipart) uploadPart(ctx context.Context, url string, headers map[string]string, body io.Reader, size int64) (string, error) {
@@ -142,7 +144,7 @@ func (m *Multipart) uploadPart(ctx context.Context, url string, headers map[stri
return part.ETag(), nil
}
-func (m *Multipart) complete(ctx context.Context, cmu *CompleteMultipartUpload) error {
+func (m *Multipart) complete(ctx context.Context, cmu *s3api.CompleteMultipartUpload) error {
body, err := xml.Marshal(cmu)
if err != nil {
return fmt.Errorf("marshal CompleteMultipartUpload request: %v", err)
diff --git a/workhorse/internal/upload/destination/objectstore/multipart_test.go b/workhorse/internal/upload/destination/objectstore/multipart_test.go
index 2a5161e42e7..00244a5c50b 100644
--- a/workhorse/internal/upload/destination/objectstore/multipart_test.go
+++ b/workhorse/internal/upload/destination/objectstore/multipart_test.go
@@ -1,4 +1,4 @@
-package objectstore_test
+package objectstore
import (
"context"
@@ -11,7 +11,6 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
@@ -48,7 +47,7 @@ func TestMultipartUploadWithUpcaseETags(t *testing.T) {
deadline := time.Now().Add(testTimeout)
- m, err := objectstore.NewMultipart(
+ m, err := NewMultipart(
[]string{ts.URL}, // a single presigned part URL
ts.URL, // the complete multipart upload URL
"", // no abort
diff --git a/workhorse/internal/upload/destination/objectstore/object_test.go b/workhorse/internal/upload/destination/objectstore/object_test.go
index 24117891b6d..2b94cd9e3b1 100644
--- a/workhorse/internal/upload/destination/objectstore/object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/object_test.go
@@ -1,4 +1,4 @@
-package objectstore_test
+package objectstore
import (
"context"
@@ -11,7 +11,6 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
@@ -35,7 +34,7 @@ func testObjectUploadNoErrors(t *testing.T, startObjectStore osFactory, useDelet
defer cancel()
deadline := time.Now().Add(testTimeout)
- object, err := objectstore.NewObject(objectURL, deleteURL, putHeaders, test.ObjectSize)
+ object, err := NewObject(objectURL, deleteURL, putHeaders, test.ObjectSize)
require.NoError(t, err)
// copy data
@@ -97,12 +96,12 @@ func TestObjectUpload404(t *testing.T) {
deadline := time.Now().Add(testTimeout)
objectURL := ts.URL + test.ObjectPath
- object, err := objectstore.NewObject(objectURL, "", map[string]string{}, test.ObjectSize)
+ object, err := NewObject(objectURL, "", map[string]string{}, test.ObjectSize)
require.NoError(t, err)
_, err = object.Consume(ctx, strings.NewReader(test.ObjectContent), deadline)
require.Error(t, err)
- _, isStatusCodeError := err.(objectstore.StatusCodeError)
+ _, isStatusCodeError := err.(StatusCodeError)
require.True(t, isStatusCodeError, "Should fail with StatusCodeError")
require.Contains(t, err.Error(), "404")
}
@@ -140,7 +139,7 @@ func TestObjectUploadBrokenConnection(t *testing.T) {
deadline := time.Now().Add(testTimeout)
objectURL := ts.URL + test.ObjectPath
- object, err := objectstore.NewObject(objectURL, "", map[string]string{}, -1)
+ object, err := NewObject(objectURL, "", map[string]string{}, -1)
require.NoError(t, err)
_, copyErr := object.Consume(ctx, &endlessReader{}, deadline)
diff --git a/workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go b/workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go
index b84f5757f49..02799d0b9b0 100644
--- a/workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go
@@ -2,45 +2,15 @@ package objectstore
import (
"encoding/xml"
- "fmt"
-)
-
-// CompleteMultipartUpload is the S3 CompleteMultipartUpload body
-type CompleteMultipartUpload struct {
- Part []*completeMultipartUploadPart
-}
-type completeMultipartUploadPart struct {
- PartNumber int
- ETag string
-}
-
-// CompleteMultipartUploadResult is the S3 answer to CompleteMultipartUpload request
-type CompleteMultipartUploadResult struct {
- Location string
- Bucket string
- Key string
- ETag string
-}
-
-// CompleteMultipartUploadError is the in-body error structure
-// https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html#mpUploadComplete-examples
-// the answer contains other fields we are not using
-type CompleteMultipartUploadError struct {
- XMLName xml.Name `xml:"Error"`
- Code string
- Message string
-}
-
-func (c *CompleteMultipartUploadError) Error() string {
- return fmt.Sprintf("CompleteMultipartUpload remote error %q: %s", c.Code, c.Message)
-}
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/s3api"
+)
// compoundCompleteMultipartUploadResult holds both CompleteMultipartUploadResult and CompleteMultipartUploadError
// this allow us to deserialize the response body where the root element can either be Error orCompleteMultipartUploadResult
type compoundCompleteMultipartUploadResult struct {
- *CompleteMultipartUploadResult
- *CompleteMultipartUploadError
+ *s3api.CompleteMultipartUploadResult
+ *s3api.CompleteMultipartUploadError
// XMLName this overrides CompleteMultipartUploadError.XMLName tags
XMLName xml.Name
diff --git a/workhorse/internal/upload/destination/objectstore/s3_object_test.go b/workhorse/internal/upload/destination/objectstore/s3_object_test.go
index 0ed14a2e844..c99712d18ad 100644
--- a/workhorse/internal/upload/destination/objectstore/s3_object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_object_test.go
@@ -1,4 +1,4 @@
-package objectstore_test
+package objectstore
import (
"context"
@@ -17,7 +17,6 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
@@ -50,7 +49,7 @@ func TestS3ObjectUpload(t *testing.T) {
objectName := filepath.Join(tmpDir, "s3-test-data")
ctx, cancel := context.WithCancel(context.Background())
- object, err := objectstore.NewS3Object(objectName, creds, config)
+ object, err := NewS3Object(objectName, creds, config)
require.NoError(t, err)
// copy data
@@ -107,7 +106,7 @@ func TestConcurrentS3ObjectUpload(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- object, err := objectstore.NewS3Object(objectName, creds, config)
+ object, err := NewS3Object(objectName, creds, config)
require.NoError(t, err)
// copy data
@@ -134,7 +133,7 @@ func TestS3ObjectUploadCancel(t *testing.T) {
objectName := filepath.Join(tmpDir, "s3-test-data")
- object, err := objectstore.NewS3Object(objectName, creds, config)
+ object, err := NewS3Object(objectName, creds, config)
require.NoError(t, err)
@@ -155,7 +154,7 @@ func TestS3ObjectUploadLimitReached(t *testing.T) {
tmpDir := t.TempDir()
objectName := filepath.Join(tmpDir, "s3-test-data")
- object, err := objectstore.NewS3Object(objectName, creds, config)
+ object, err := NewS3Object(objectName, creds, config)
require.NoError(t, err)
_, err = object.Consume(context.Background(), &failedReader{}, deadline)
diff --git a/workhorse/internal/upload/destination/objectstore/s3api/s3api.go b/workhorse/internal/upload/destination/objectstore/s3api/s3api.go
new file mode 100644
index 00000000000..49ab9347911
--- /dev/null
+++ b/workhorse/internal/upload/destination/objectstore/s3api/s3api.go
@@ -0,0 +1,37 @@
+package s3api
+
+import (
+ "encoding/xml"
+ "fmt"
+)
+
+// CompleteMultipartUploadError is the in-body error structure
+// https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html#mpUploadComplete-examples
+// the answer contains other fields we are not using
+type CompleteMultipartUploadError struct {
+ XMLName xml.Name `xml:"Error"`
+ Code string
+ Message string
+}
+
+func (c *CompleteMultipartUploadError) Error() string {
+ return fmt.Sprintf("CompleteMultipartUpload remote error %q: %s", c.Code, c.Message)
+}
+
+// CompleteMultipartUploadResult is the S3 answer to CompleteMultipartUpload request
+type CompleteMultipartUploadResult struct {
+ Location string
+ Bucket string
+ Key string
+ ETag string
+}
+
+// CompleteMultipartUpload is the S3 CompleteMultipartUpload body
+type CompleteMultipartUpload struct {
+ Part []*CompleteMultipartUploadPart
+}
+
+type CompleteMultipartUploadPart struct {
+ PartNumber int
+ ETag string
+}
diff --git a/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
index 1a380bd5083..8fbb746d6ce 100644
--- a/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
@@ -12,7 +12,7 @@ import (
"strings"
"sync"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/s3api"
)
type partsEtagMap map[int]string
@@ -190,8 +190,8 @@ func (o *ObjectstoreStub) putObject(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(200)
}
-func MultipartUploadInternalError() *objectstore.CompleteMultipartUploadError {
- return &objectstore.CompleteMultipartUploadError{Code: "InternalError", Message: "malformed object path"}
+func MultipartUploadInternalError() *s3api.CompleteMultipartUploadError {
+ return &s3api.CompleteMultipartUploadError{Code: "InternalError", Message: "malformed object path"}
}
func (o *ObjectstoreStub) completeMultipartUpload(w http.ResponseWriter, r *http.Request) {
@@ -212,7 +212,7 @@ func (o *ObjectstoreStub) completeMultipartUpload(w http.ResponseWriter, r *http
return
}
- var msg objectstore.CompleteMultipartUpload
+ var msg s3api.CompleteMultipartUpload
err = xml.Unmarshal(buf, &msg)
if err != nil {
http.Error(w, err.Error(), 400)
@@ -245,7 +245,7 @@ func (o *ObjectstoreStub) completeMultipartUpload(w http.ResponseWriter, r *http
bucket := split[0]
key := split[1]
- answer := objectstore.CompleteMultipartUploadResult{
+ answer := s3api.CompleteMultipartUploadResult{
Location: r.URL.String(),
Bucket: bucket,
Key: key,
diff --git a/workhorse/internal/upload/destination/objectstore/uploader.go b/workhorse/internal/upload/destination/objectstore/uploader.go
index 43e573872ee..798a693aa93 100644
--- a/workhorse/internal/upload/destination/objectstore/uploader.go
+++ b/workhorse/internal/upload/destination/objectstore/uploader.go
@@ -38,11 +38,21 @@ func newETagCheckUploader(strategy uploadStrategy, metrics bool) *uploader {
func hexString(h hash.Hash) string { return hex.EncodeToString(h.Sum(nil)) }
+func (u *uploader) Consume(outerCtx context.Context, reader io.Reader, deadLine time.Time) (_ int64, err error) {
+ return u.consume(outerCtx, reader, deadLine, false)
+}
+
+func (u *uploader) ConsumeWithoutDelete(outerCtx context.Context, reader io.Reader, deadLine time.Time) (_ int64, err error) {
+ return u.consume(outerCtx, reader, deadLine, true)
+}
+
// Consume reads the reader until it reaches EOF or an error. It spawns a
// goroutine that waits for outerCtx to be done, after which the remote
// file is deleted. The deadline applies to the upload performed inside
// Consume, not to outerCtx.
-func (u *uploader) Consume(outerCtx context.Context, reader io.Reader, deadline time.Time) (_ int64, err error) {
+// skipDelete, when true, skips the deferred Delete() call on the strategy
+// once rails is done handling the upload request.
+func (u *uploader) consume(outerCtx context.Context, reader io.Reader, deadLine time.Time, skipDelete bool) (_ int64, err error) {
if u.metrics {
objectStorageUploadsOpen.Inc()
defer func(started time.Time) {
@@ -59,17 +69,25 @@ func (u *uploader) Consume(outerCtx context.Context, reader io.Reader, deadline
// "delete" them.
if err != nil {
u.strategy.Abort()
+
+ if skipDelete {
+ // skipDelete bypassed the deferred object removal (see the goroutine below).
+ // Make sure here that the object is deleted if the upload was aborted.
+ u.strategy.Delete()
+ }
}
}()
- go func() {
- // Once gitlab-rails is done handling the request, we are supposed to
- // delete the upload from its temporary location.
- <-outerCtx.Done()
- u.strategy.Delete()
- }()
+ if !skipDelete {
+ go func() {
+ // Once gitlab-rails is done handling the request, we are supposed to
+ // delete the upload from its temporary location.
+ <-outerCtx.Done()
+ u.strategy.Delete()
+ }()
+ }
- uploadCtx, cancelFn := context.WithDeadline(outerCtx, deadline)
+ uploadCtx, cancelFn := context.WithDeadline(outerCtx, deadLine)
defer cancelFn()
var hasher hash.Hash
diff --git a/workhorse/internal/upload/destination/upload_opts.go b/workhorse/internal/upload/destination/upload_opts.go
index 58427b38b30..72efaebc16c 100644
--- a/workhorse/internal/upload/destination/upload_opts.go
+++ b/workhorse/internal/upload/destination/upload_opts.go
@@ -39,6 +39,8 @@ type UploadOpts struct {
PresignedPut string
// PresignedDelete is a presigned S3 DeleteObject compatible URL.
PresignedDelete string
+ // Whether Workhorse needs to delete the temporary object or not.
+ SkipDelete bool
// HTTP headers to be sent along with PUT request
PutHeaders map[string]string
// Whether to ignore Rails pre-signed URLs and have Workhorse directly access object storage provider
@@ -61,6 +63,8 @@ type UploadOpts struct {
PresignedCompleteMultipart string
// PresignedAbortMultipart is a presigned URL for AbortMultipartUpload
PresignedAbortMultipart string
+ // UploadHashFunctions contains a list of allowed hash functions (md5, sha1, etc.)
+ UploadHashFunctions []string
}
// UseWorkhorseClientEnabled checks if the options require direct access to object storage
@@ -90,16 +94,18 @@ func GetOpts(apiResponse *api.Response) (*UploadOpts, error) {
}
opts := UploadOpts{
- LocalTempPath: apiResponse.TempPath,
- RemoteID: apiResponse.RemoteObject.ID,
- RemoteURL: apiResponse.RemoteObject.GetURL,
- PresignedPut: apiResponse.RemoteObject.StoreURL,
- PresignedDelete: apiResponse.RemoteObject.DeleteURL,
- PutHeaders: apiResponse.RemoteObject.PutHeaders,
- UseWorkhorseClient: apiResponse.RemoteObject.UseWorkhorseClient,
- RemoteTempObjectID: apiResponse.RemoteObject.RemoteTempObjectID,
- Deadline: time.Now().Add(timeout),
- MaximumSize: apiResponse.MaximumSize,
+ LocalTempPath: apiResponse.TempPath,
+ RemoteID: apiResponse.RemoteObject.ID,
+ RemoteURL: apiResponse.RemoteObject.GetURL,
+ PresignedPut: apiResponse.RemoteObject.StoreURL,
+ PresignedDelete: apiResponse.RemoteObject.DeleteURL,
+ SkipDelete: apiResponse.RemoteObject.SkipDelete,
+ PutHeaders: apiResponse.RemoteObject.PutHeaders,
+ UseWorkhorseClient: apiResponse.RemoteObject.UseWorkhorseClient,
+ RemoteTempObjectID: apiResponse.RemoteObject.RemoteTempObjectID,
+ Deadline: time.Now().Add(timeout),
+ MaximumSize: apiResponse.MaximumSize,
+ UploadHashFunctions: apiResponse.UploadHashFunctions,
}
if opts.LocalTempPath != "" && opts.RemoteID != "" {
diff --git a/workhorse/internal/upload/destination/upload_opts_test.go b/workhorse/internal/upload/destination/upload_opts_test.go
index fd9e56db194..0fda3bd2381 100644
--- a/workhorse/internal/upload/destination/upload_opts_test.go
+++ b/workhorse/internal/upload/destination/upload_opts_test.go
@@ -1,4 +1,4 @@
-package destination_test
+package destination
import (
"testing"
@@ -8,7 +8,6 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
@@ -43,7 +42,7 @@ func TestUploadOptsLocalAndRemote(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
- opts := destination.UploadOpts{
+ opts := UploadOpts{
LocalTempPath: test.localTempPath,
PresignedPut: test.presignedPut,
PartSize: test.partSize,
@@ -103,15 +102,17 @@ func TestGetOpts(t *testing.T) {
MultipartUpload: test.multipart,
CustomPutHeaders: test.customPutHeaders,
PutHeaders: test.putHeaders,
+ SkipDelete: true,
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := destination.GetOpts(apiResponse)
+ opts, err := GetOpts(apiResponse)
require.NoError(t, err)
require.Equal(t, apiResponse.TempPath, opts.LocalTempPath)
require.WithinDuration(t, deadline, opts.Deadline, time.Second)
require.Equal(t, apiResponse.RemoteObject.ID, opts.RemoteID)
+ require.Equal(t, apiResponse.RemoteObject.SkipDelete, opts.SkipDelete)
require.Equal(t, apiResponse.RemoteObject.GetURL, opts.RemoteURL)
require.Equal(t, apiResponse.RemoteObject.StoreURL, opts.PresignedPut)
require.Equal(t, apiResponse.RemoteObject.DeleteURL, opts.PresignedDelete)
@@ -155,22 +156,22 @@ func TestGetOptsFail(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- _, err := destination.GetOpts(tc.in)
+ _, err := GetOpts(tc.in)
require.Error(t, err, "expect input to be rejected")
})
}
}
func TestGetOptsDefaultTimeout(t *testing.T) {
- deadline := time.Now().Add(destination.DefaultObjectStoreTimeout)
- opts, err := destination.GetOpts(&api.Response{TempPath: "/foo/bar"})
+ deadline := time.Now().Add(DefaultObjectStoreTimeout)
+ opts, err := GetOpts(&api.Response{TempPath: "/foo/bar"})
require.NoError(t, err)
require.WithinDuration(t, deadline, opts.Deadline, time.Minute)
}
func TestUseWorkhorseClientEnabled(t *testing.T) {
- cfg := destination.ObjectStorageConfig{
+ cfg := ObjectStorageConfig{
Provider: "AWS",
S3Config: config.S3Config{
Bucket: "test-bucket",
@@ -195,7 +196,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
name string
UseWorkhorseClient bool
remoteTempObjectID string
- objectStorageConfig destination.ObjectStorageConfig
+ objectStorageConfig ObjectStorageConfig
expected bool
}{
{
@@ -243,7 +244,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
name: "missing S3 bucket",
UseWorkhorseClient: true,
remoteTempObjectID: "test-object",
- objectStorageConfig: destination.ObjectStorageConfig{
+ objectStorageConfig: ObjectStorageConfig{
Provider: "AWS",
S3Config: config.S3Config{},
},
@@ -269,7 +270,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := destination.GetOpts(apiResponse)
+ opts, err := GetOpts(apiResponse)
require.NoError(t, err)
opts.ObjectStorageConfig = test.objectStorageConfig
@@ -322,7 +323,7 @@ func TestGoCloudConfig(t *testing.T) {
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := destination.GetOpts(apiResponse)
+ opts, err := GetOpts(apiResponse)
require.NoError(t, err)
opts.ObjectStorageConfig.URLMux = mux
diff --git a/workhorse/internal/upload/object_storage_preparer_test.go b/workhorse/internal/upload/object_storage_preparer_test.go
index 56de6bbf7d6..b983d68f1ad 100644
--- a/workhorse/internal/upload/object_storage_preparer_test.go
+++ b/workhorse/internal/upload/object_storage_preparer_test.go
@@ -1,4 +1,4 @@
-package upload_test
+package upload
import (
"testing"
@@ -7,7 +7,6 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
"github.com/stretchr/testify/require"
)
@@ -38,7 +37,7 @@ func TestPrepareWithS3Config(t *testing.T) {
},
}
- p := upload.NewObjectStoragePreparer(c)
+ p := NewObjectStoragePreparer(c)
opts, err := p.Prepare(r)
require.NoError(t, err)
@@ -51,7 +50,7 @@ func TestPrepareWithS3Config(t *testing.T) {
func TestPrepareWithNoConfig(t *testing.T) {
c := config.Config{}
r := &api.Response{RemoteObject: api.RemoteObject{ID: "id"}}
- p := upload.NewObjectStoragePreparer(c)
+ p := NewObjectStoragePreparer(c)
opts, err := p.Prepare(r)
require.NoError(t, err)
diff --git a/workhorse/internal/upload/uploads_test.go b/workhorse/internal/upload/uploads_test.go
index cc786079e36..69baa2dab6e 100644
--- a/workhorse/internal/upload/uploads_test.go
+++ b/workhorse/internal/upload/uploads_test.go
@@ -24,7 +24,6 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
)
@@ -111,13 +110,7 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
expectedLen := 12
- if destination.FIPSEnabled() {
- expectedLen--
- require.Empty(t, r.FormValue("file.md5"), "file hash md5")
- } else {
- require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
- }
-
+ require.Equal(t, "098f6bcd4621d373cade4e832627b4f6", r.FormValue("file.md5"), "file hash md5")
require.Len(t, r.MultipartForm.Value, expectedLen, "multipart form values")
w.WriteHeader(202)
diff --git a/workhorse/internal/zipartifacts/metadata_test.go b/workhorse/internal/zipartifacts/metadata_test.go
index e4799ba4a59..6bde56ef27d 100644
--- a/workhorse/internal/zipartifacts/metadata_test.go
+++ b/workhorse/internal/zipartifacts/metadata_test.go
@@ -1,4 +1,4 @@
-package zipartifacts_test
+package zipartifacts
import (
"bytes"
@@ -11,8 +11,6 @@ import (
"github.com/stretchr/testify/require"
zip "gitlab.com/gitlab-org/golang-archive-zip"
-
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)
func generateTestArchive(w io.Writer) error {
@@ -72,10 +70,10 @@ func TestGenerateZipMetadataFromFile(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- archive, err := zipartifacts.OpenArchive(ctx, f.Name())
+ archive, err := OpenArchive(ctx, f.Name())
require.NoError(t, err, "zipartifacts: OpenArchive failed")
- err = zipartifacts.GenerateZipMetadata(&metaBuffer, archive)
+ err = GenerateZipMetadata(&metaBuffer, archive)
require.NoError(t, err, "zipartifacts: GenerateZipMetadata failed")
err = validateMetadata(&metaBuffer)
@@ -96,6 +94,6 @@ func TestErrNotAZip(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- _, err = zipartifacts.OpenArchive(ctx, f.Name())
- require.Equal(t, zipartifacts.ErrorCode[zipartifacts.CodeNotZip], err, "OpenArchive requires a zip file")
+ _, err = OpenArchive(ctx, f.Name())
+ require.Equal(t, ErrorCode[CodeNotZip], err, "OpenArchive requires a zip file")
}
diff --git a/workhorse/upload_test.go b/workhorse/upload_test.go
index 22abed25928..333301091c7 100644
--- a/workhorse/upload_test.go
+++ b/workhorse/upload_test.go
@@ -20,7 +20,6 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type uploadArtifactsFunction func(url, contentType string, body io.Reader) (*http.Response, string, error)
@@ -66,13 +65,20 @@ func expectSignedRequest(t *testing.T, r *http.Request) {
require.NoError(t, err)
}
-func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
+func uploadTestServer(t *testing.T, allowedHashFunctions []string, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
return testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
if strings.HasSuffix(r.URL.Path, "/authorize") || r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
expectSignedRequest(t, r)
w.Header().Set("Content-Type", api.ResponseContentType)
- _, err := fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir)
+ var err error
+
+ if len(allowedHashFunctions) == 0 {
+ _, err = fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir)
+ } else {
+ _, err = fmt.Fprintf(w, `{"TempPath":"%s", "UploadHashFunctions": ["%s"]}`, scratchDir, strings.Join(allowedHashFunctions, `","`))
+ }
+
require.NoError(t, err)
if authorizeTests != nil {
@@ -83,15 +89,23 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
require.NoError(t, r.ParseMultipartForm(100000))
- var nValues int // file name, path, remote_url, remote_id, size, md5, sha1, sha256, sha512, upload_duration, gitlab-workhorse-upload for just the upload (no metadata because we are not POSTing a valid zip file)
- if destination.FIPSEnabled() {
- nValues = 10
+ nValues := len([]string{
+ "name",
+ "path",
+ "remote_url",
+ "remote_id",
+ "size",
+ "upload_duration",
+ "gitlab-workhorse-upload",
+ })
+
+ if n := len(allowedHashFunctions); n > 0 {
+ nValues += n
} else {
- nValues = 11
+ nValues += len([]string{"md5", "sha1", "sha256", "sha512"}) // Default hash functions
}
require.Len(t, r.MultipartForm.Value, nValues)
-
require.Empty(t, r.MultipartForm.File, "multipart form files")
if extraTests != nil {
@@ -104,7 +118,7 @@ func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraT
func signedUploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
t.Helper()
- return uploadTestServer(t, authorizeTests, func(r *http.Request) {
+ return uploadTestServer(t, nil, authorizeTests, func(r *http.Request) {
expectSignedRequest(t, r)
if extraTests != nil {
@@ -167,67 +181,80 @@ func TestAcceleratedUpload(t *testing.T) {
{"POST", "/api/v4/projects/group%2Fsubgroup%2Fproject/packages/helm/api/stable/charts", true},
}
- for _, tt := range tests {
- t.Run(tt.resource, func(t *testing.T) {
- ts := uploadTestServer(t,
- func(r *http.Request) {
- if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
- // Nothing to validate: this is a hard coded URL
- return
- }
- resource := strings.TrimRight(tt.resource, "/")
- // Validate %2F characters haven't been unescaped
- require.Equal(t, resource+"/authorize", r.URL.String())
- },
- func(r *http.Request) {
- if tt.signedFinalization {
- expectSignedRequest(t, r)
- }
-
- token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
- require.NoError(t, err)
-
- rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
- if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
- t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
- }
-
- token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
- require.NoError(t, jwtErr)
-
- uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
- require.Contains(t, uploadFields, "name")
- require.Contains(t, uploadFields, "path")
- require.Contains(t, uploadFields, "remote_url")
- require.Contains(t, uploadFields, "remote_id")
- require.Contains(t, uploadFields, "size")
- if destination.FIPSEnabled() {
- require.NotContains(t, uploadFields, "md5")
- } else {
- require.Contains(t, uploadFields, "md5")
- }
- require.Contains(t, uploadFields, "sha1")
- require.Contains(t, uploadFields, "sha256")
- require.Contains(t, uploadFields, "sha512")
- })
-
- defer ts.Close()
- ws := startWorkhorseServer(ts.URL)
- defer ws.Close()
-
- reqBody, contentType, err := multipartBodyWithFile()
- require.NoError(t, err)
-
- req, err := http.NewRequest(tt.method, ws.URL+tt.resource, reqBody)
- require.NoError(t, err)
+ allowedHashFunctions := map[string][]string{
+ "default": nil,
+ "sha2_only": {"sha256", "sha512"},
+ }
- req.Header.Set("Content-Type", contentType)
- resp, err := http.DefaultClient.Do(req)
- require.NoError(t, err)
- require.Equal(t, 200, resp.StatusCode)
+ for _, tt := range tests {
+ for hashSet, hashFunctions := range allowedHashFunctions {
+ t.Run(tt.resource, func(t *testing.T) {
+
+ ts := uploadTestServer(t,
+ hashFunctions,
+ func(r *http.Request) {
+ if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
+ // Nothing to validate: this is a hard coded URL
+ return
+ }
+ resource := strings.TrimRight(tt.resource, "/")
+ // Validate %2F characters haven't been unescaped
+ require.Equal(t, resource+"/authorize", r.URL.String())
+ },
+ func(r *http.Request) {
+ if tt.signedFinalization {
+ expectSignedRequest(t, r)
+ }
+
+ token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
+ require.NoError(t, err)
+
+ rewrittenFields := token.Claims.(*upload.MultipartClaims).RewrittenFields
+ if len(rewrittenFields) != 1 || len(rewrittenFields["file"]) == 0 {
+ t.Fatalf("Unexpected rewritten_fields value: %v", rewrittenFields)
+ }
+
+ token, jwtErr := jwt.ParseWithClaims(r.PostFormValue("file.gitlab-workhorse-upload"), &testhelper.UploadClaims{}, testhelper.ParseJWT)
+ require.NoError(t, jwtErr)
+
+ uploadFields := token.Claims.(*testhelper.UploadClaims).Upload
+ require.Contains(t, uploadFields, "name")
+ require.Contains(t, uploadFields, "path")
+ require.Contains(t, uploadFields, "remote_url")
+ require.Contains(t, uploadFields, "remote_id")
+ require.Contains(t, uploadFields, "size")
+
+ if hashSet == "default" {
+ require.Contains(t, uploadFields, "md5")
+ require.Contains(t, uploadFields, "sha1")
+ require.Contains(t, uploadFields, "sha256")
+ require.Contains(t, uploadFields, "sha512")
+ } else {
+ require.NotContains(t, uploadFields, "md5")
+ require.NotContains(t, uploadFields, "sha1")
+ require.Contains(t, uploadFields, "sha256")
+ require.Contains(t, uploadFields, "sha512")
+ }
+ })
+
+ defer ts.Close()
+ ws := startWorkhorseServer(ts.URL)
+ defer ws.Close()
+
+ reqBody, contentType, err := multipartBodyWithFile()
+ require.NoError(t, err)
+
+ req, err := http.NewRequest(tt.method, ws.URL+tt.resource, reqBody)
+ require.NoError(t, err)
+
+ req.Header.Set("Content-Type", contentType)
+ resp, err := http.DefaultClient.Do(req)
+ require.NoError(t, err)
+ require.Equal(t, 200, resp.StatusCode)
- resp.Body.Close()
- })
+ resp.Body.Close()
+ })
+ }
}
}