Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorSean McGivern <sean@mcgivern.me.uk>2018-02-02 16:59:43 +0300
committerKamil Trzciński <ayufan@ayufan.eu>2018-02-28 22:58:15 +0300
commita7dae52e9d27adde427ef8aa066c0761071a3cd9 (patch)
tree8b6229e4e0afe7e71f9754089758cee8acd56cde /spec
parent45d2c31643017807cb3fc66c0be6e9cad9964faf (diff)
Merge branch '4163-move-uploads-to-object-storage' into 'master'
Move uploads to object storage Closes #4163 See merge request gitlab-org/gitlab-ee!3867
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/groups/uploads_controller_spec.rb4
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb6
-rw-r--r--spec/controllers/projects/raw_controller_spec.rb4
-rw-r--r--spec/controllers/uploads_controller_spec.rb13
-rw-r--r--spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb270
-rw-r--r--spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb22
-rw-r--r--spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb414
-rw-r--r--spec/ee/spec/models/ee/lfs_object_spec.rb8
-rw-r--r--spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb50
-rw-r--r--spec/ee/spec/services/geo/file_download_service_spec.rb227
-rw-r--r--spec/ee/spec/services/geo/files_expire_service_spec.rb51
-rw-r--r--spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb83
-rw-r--r--spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb291
-rw-r--r--spec/ee/workers/object_storage_upload_worker_spec.rb4
-rw-r--r--spec/factories/ci/job_artifacts.rb2
-rw-r--r--spec/factories/geo/event_log.rb121
-rw-r--r--spec/factories/groups.rb2
-rw-r--r--spec/factories/notes.rb4
-rw-r--r--spec/factories/projects.rb2
-rw-r--r--spec/factories/uploads.rb27
-rw-r--r--spec/factories/users.rb2
-rw-r--r--spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb57
-rw-r--r--spec/lib/gitlab/gfm/uploads_rewriter_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/uploads_restorer_spec.rb9
-rw-r--r--spec/lib/gitlab/import_export/uploads_saver_spec.rb4
-rw-r--r--spec/migrations/remove_empty_fork_networks_spec.rb4
-rw-r--r--spec/models/namespace_spec.rb2
-rw-r--r--spec/models/upload_spec.rb73
-rw-r--r--spec/requests/api/runner_spec.rb8
-rw-r--r--spec/requests/lfs_http_spec.rb8
-rw-r--r--spec/services/issues/move_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb4
-rw-r--r--spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb62
-rw-r--r--spec/support/shared_examples/uploaders/object_storage_shared_examples.rb126
-rw-r--r--spec/support/stub_object_storage.rb7
-rw-r--r--spec/support/test_env.rb2
-rw-r--r--spec/support/track_untracked_uploads_helpers.rb2
-rw-r--r--spec/tasks/gitlab/artifacts_rake_spec.rb32
-rw-r--r--spec/tasks/gitlab/lfs_rake_spec.rb4
-rw-r--r--spec/uploaders/attachment_uploader_spec.rb41
-rw-r--r--spec/uploaders/avatar_uploader_spec.rb44
-rw-r--r--spec/uploaders/file_mover_spec.rb18
-rw-r--r--spec/uploaders/file_uploader_spec.rb128
-rw-r--r--spec/uploaders/job_artifact_uploader_spec.rb46
-rw-r--r--spec/uploaders/legacy_artifact_uploader_spec.rb52
-rw-r--r--spec/uploaders/lfs_object_uploader_spec.rb43
-rw-r--r--spec/uploaders/namespace_file_uploader_spec.rb36
-rw-r--r--spec/uploaders/object_storage_spec.rb350
-rw-r--r--spec/uploaders/object_store_uploader_spec.rb315
-rw-r--r--spec/uploaders/personal_file_uploader_spec.rb45
-rw-r--r--spec/uploaders/records_uploads_spec.rb73
-rw-r--r--spec/workers/upload_checksum_worker_spec.rb29
52 files changed, 2451 insertions, 784 deletions
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 67a11e56e94..6a1869d1a48 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -6,5 +6,7 @@ describe Groups::UploadsController do
{ group_id: model }
end
- it_behaves_like 'handle uploads'
+ it_behaves_like 'handle uploads' do
+ let(:uploader_class) { NamespaceFileUploader }
+ end
end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 46d618fa682..4ea6f869aa3 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -145,8 +145,8 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do
it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
- let(:store) { ObjectStoreUploader::LOCAL_STORE }
- let(:archive_path) { JobArtifactUploader.local_store_path }
+ let(:store) { ObjectStorage::Store::LOCAL }
+ let(:archive_path) { JobArtifactUploader.root }
end
end
@@ -158,7 +158,7 @@ describe Projects::ArtifactsController do
it_behaves_like 'a valid file' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
- let(:store) { ObjectStoreUploader::REMOTE_STORE }
+ let(:store) { ObjectStorage::Store::REMOTE }
let(:archive_path) { 'https://' }
end
end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index e4310a4847b..08e2ccf893a 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -47,7 +47,7 @@ describe Projects::RawController do
end
it 'serves the file' do
- expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
+ expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
@@ -58,7 +58,7 @@ describe Projects::RawController do
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
stub_lfs_object_storage
- lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect to file' do
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index b1f601a19e5..376b229ffc9 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -180,6 +180,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
+
response
end
end
@@ -196,6 +197,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
+
response
end
end
@@ -220,6 +222,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
response
end
end
@@ -239,6 +242,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
response
end
end
@@ -291,6 +295,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
response
end
end
@@ -322,6 +327,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
response
end
end
@@ -341,6 +347,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
response
end
end
@@ -384,6 +391,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
response
end
end
@@ -420,6 +428,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
response
end
end
@@ -439,6 +448,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
response
end
end
@@ -491,6 +501,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
response
end
end
@@ -522,6 +533,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png'
+
response
end
end
@@ -541,6 +553,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png'
+
response
end
end
diff --git a/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb b/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb
new file mode 100644
index 00000000000..9f0f5f2ab87
--- /dev/null
+++ b/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb
@@ -0,0 +1,270 @@
+require 'spec_helper'
+
+describe Geo::AttachmentRegistryFinder, :geo do
+ include ::EE::GeoHelpers
+
+ let(:secondary) { create(:geo_node) }
+
+ let(:synced_group) { create(:group) }
+ let(:synced_subgroup) { create(:group, parent: synced_group) }
+ let(:unsynced_group) { create(:group) }
+ let(:synced_project) { create(:project, group: synced_group) }
+ let(:unsynced_project) { create(:project, group: unsynced_group, repository_storage: 'broken') }
+
+ let!(:upload_1) { create(:upload, model: synced_group) }
+ let!(:upload_2) { create(:upload, model: unsynced_group) }
+ let!(:upload_3) { create(:upload, :issuable_upload, model: synced_project) }
+ let!(:upload_4) { create(:upload, model: unsynced_project) }
+ let(:upload_5) { create(:upload, model: synced_project) }
+ let(:upload_6) { create(:upload, :personal_snippet_upload) }
+ let(:upload_7) { create(:upload, model: synced_subgroup) }
+ let(:lfs_object) { create(:lfs_object) }
+
+ subject { described_class.new(current_node: secondary) }
+
+ before do
+ stub_current_geo_node(secondary)
+ end
+
+ # Disable transactions via :delete method because a foreign table
+ # can't see changes inside a transaction of a different connection.
+ context 'FDW', :delete do
+ before do
+ skip('FDW is not configured') if Gitlab::Database.postgresql? && !Gitlab::Geo.fdw?
+ end
+
+ describe '#find_synced_attachments' do
+ it 'delegates to #fdw_find_synced_attachments' do
+ expect(subject).to receive(:fdw_find_synced_attachments).and_call_original
+
+ subject.find_synced_attachments
+ end
+
+ it 'returns synced avatars, attachment, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ synced_attachments = subject.find_synced_attachments
+
+ expect(synced_attachments.pluck(:id)).to match_array([upload_1.id, upload_2.id, upload_6.id, upload_7.id])
+ end
+
+ context 'with selective sync' do
+ it 'falls back to legacy queries' do
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+ expect(subject).to receive(:legacy_find_synced_attachments)
+
+ subject.find_synced_attachments
+ end
+ end
+ end
+
+ describe '#find_failed_attachments' do
+ it 'delegates to #fdw_find_failed_attachments' do
+ expect(subject).to receive(:fdw_find_failed_attachments).and_call_original
+
+ subject.find_failed_attachments
+ end
+
+ it 'returns failed avatars, attachment, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+ failed_attachments = subject.find_failed_attachments
+
+ expect(failed_attachments.pluck(:id)).to match_array([upload_3.id, upload_6.id, upload_7.id])
+ end
+
+ context 'with selective sync' do
+ it 'falls back to legacy queries' do
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+ expect(subject).to receive(:legacy_find_failed_attachments)
+
+ subject.find_failed_attachments
+ end
+ end
+ end
+
+ describe '#find_unsynced_attachments' do
+ it 'delegates to #fdw_find_unsynced_attachments' do
+ expect(subject).to receive(:fdw_find_unsynced_attachments).and_call_original
+
+ subject.find_unsynced_attachments(batch_size: 10)
+ end
+
+ it 'returns uploads without an entry on the tracking database' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+ uploads = subject.find_unsynced_attachments(batch_size: 10)
+
+ expect(uploads.map(&:id)).to match_array([upload_2.id, upload_3.id, upload_4.id])
+ end
+
+ it 'excludes uploads without an entry on the tracking database' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+ uploads = subject.find_unsynced_attachments(batch_size: 10, except_registry_ids: [upload_2.id])
+
+ expect(uploads.map(&:id)).to match_array([upload_3.id, upload_4.id])
+ end
+ end
+ end
+
+ context 'Legacy' do
+ before do
+ allow(Gitlab::Geo).to receive(:fdw?).and_return(false)
+ end
+
+ describe '#find_synced_attachments' do
+ it 'delegates to #legacy_find_synced_attachments' do
+ expect(subject).to receive(:legacy_find_synced_attachments).and_call_original
+
+ subject.find_synced_attachments
+ end
+
+ it 'returns synced avatars, attachment, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ synced_attachments = subject.find_synced_attachments
+
+ expect(synced_attachments).to match_array([upload_1, upload_2, upload_6, upload_7])
+ end
+
+ context 'with selective sync by namespace' do
+ it 'returns synced avatars, attachment, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id)
+ create(:geo_file_registry, :avatar, file_id: upload_4.id)
+ create(:geo_file_registry, :avatar, file_id: upload_5.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+ synced_attachments = subject.find_synced_attachments
+
+ expect(synced_attachments).to match_array([upload_1, upload_3, upload_6, upload_7])
+ end
+ end
+
+ context 'with selective sync by shard' do
+ it 'returns synced avatars, attachment, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id)
+ create(:geo_file_registry, :avatar, file_id: upload_4.id)
+ create(:geo_file_registry, :avatar, file_id: upload_5.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['default'])
+
+ synced_attachments = subject.find_synced_attachments
+
+ expect(synced_attachments).to match_array([upload_1, upload_3, upload_6])
+ end
+ end
+ end
+
+ describe '#find_failed_attachments' do
+ it 'delegates to #legacy_find_failed_attachments' do
+ expect(subject).to receive(:legacy_find_failed_attachments).and_call_original
+
+ subject.find_failed_attachments
+ end
+
+ it 'returns failed avatars, attachment, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+ failed_attachments = subject.find_failed_attachments
+
+ expect(failed_attachments).to match_array([upload_3, upload_6, upload_7])
+ end
+
+ context 'with selective sync by namespace' do
+ it 'returns failed avatars, attachment, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_4.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_5.id)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+ failed_attachments = subject.find_failed_attachments
+
+ expect(failed_attachments).to match_array([upload_1, upload_3, upload_6, upload_7])
+ end
+ end
+
+ context 'with selective sync by shard' do
+ it 'returns failed avatars, attachment, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_4.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_5.id)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+ secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['default'])
+
+ failed_attachments = subject.find_failed_attachments
+
+ expect(failed_attachments).to match_array([upload_1, upload_3, upload_6])
+ end
+ end
+ end
+
+ describe '#find_unsynced_attachments' do
+ it 'delegates to #legacy_find_unsynced_attachments' do
+ expect(subject).to receive(:legacy_find_unsynced_attachments).and_call_original
+
+ subject.find_unsynced_attachments(batch_size: 10)
+ end
+
+ it 'returns LFS objects without an entry on the tracking database' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+ uploads = subject.find_unsynced_attachments(batch_size: 10)
+
+ expect(uploads).to match_array([upload_2, upload_3, upload_4])
+ end
+
+ it 'excludes uploads without an entry on the tracking database' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+ uploads = subject.find_unsynced_attachments(batch_size: 10, except_registry_ids: [upload_2.id])
+
+ expect(uploads).to match_array([upload_3, upload_4])
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb b/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb
new file mode 100644
index 00000000000..4cb2a1ec08f
--- /dev/null
+++ b/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe Gitlab::Geo::FileTransfer do
+ let(:user) { create(:user, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+ let(:upload) { Upload.find_by(model: user, uploader: 'AvatarUploader') }
+
+ subject { described_class.new(:file, upload) }
+
+ describe '#execute' do
+ context 'user avatar' do
+ it 'sets an absolute path' do
+ expect(subject.file_type).to eq(:file)
+ expect(subject.file_id).to eq(upload.id)
+ expect(subject.filename).to eq(upload.absolute_path)
+ expect(Pathname.new(subject.filename).absolute?).to be_truthy
+ expect(subject.request_data).to eq({ id: upload.model_id,
+ type: 'User',
+ checksum: upload.checksum })
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb b/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb
new file mode 100644
index 00000000000..af475a966a0
--- /dev/null
+++ b/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb
@@ -0,0 +1,414 @@
+require 'spec_helper'
+
+describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared_state do
+ include ::EE::GeoHelpers
+
+ set(:primary) { create(:geo_node, :primary) }
+ set(:secondary) { create(:geo_node) }
+
+ let(:options) { {} }
+ subject(:daemon) { described_class.new(options) }
+
+ around do |example|
+ Sidekiq::Testing.fake! { example.run }
+ end
+
+ before do
+ stub_current_geo_node(secondary)
+
+ allow(daemon).to receive(:trap_signals)
+ allow(daemon).to receive(:arbitrary_sleep).and_return(0.1)
+ end
+
+ describe '#run!' do
+ it 'traps signals' do
+ is_expected.to receive(:exit?).and_return(true)
+ is_expected.to receive(:trap_signals)
+
+ daemon.run!
+ end
+
+ it 'delegates to #run_once! in a loop' do
+ is_expected.to receive(:exit?).and_return(false, false, false, true)
+ is_expected.to receive(:run_once!).twice
+
+ daemon.run!
+ end
+
+ it 'skips execution if cannot achieve a lease' do
+ is_expected.to receive(:exit?).and_return(false, true)
+ is_expected.not_to receive(:run_once!)
+ expect_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain_with_ttl).and_return({ ttl: 1, uuid: false })
+
+ daemon.run!
+ end
+
+ it 'skips execution if not a Geo node' do
+ stub_current_geo_node(nil)
+
+ is_expected.to receive(:exit?).and_return(false, true)
+ is_expected.to receive(:sleep).with(1.minute)
+ is_expected.not_to receive(:run_once!)
+
+ daemon.run!
+ end
+
+ it 'skips execution if the current node is a primary' do
+ stub_current_geo_node(primary)
+
+ is_expected.to receive(:exit?).and_return(false, true)
+ is_expected.to receive(:sleep).with(1.minute)
+ is_expected.not_to receive(:run_once!)
+
+ daemon.run!
+ end
+ end
+
+ describe '#run_once!' do
+ context 'when replaying a repository created event' do
+ let(:project) { create(:project) }
+ let(:repository_created_event) { create(:geo_repository_created_event, project: project) }
+ let(:event_log) { create(:geo_event_log, repository_created_event: repository_created_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+ it 'creates a new project registry' do
+ expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+ end
+
+ it 'sets resync attributes to true' do
+ daemon.run_once!
+
+ registry = Geo::ProjectRegistry.last
+
+ expect(registry).to have_attributes(project_id: project.id, resync_repository: true, resync_wiki: true)
+ end
+
+ it 'sets resync_wiki to false if wiki_path is nil' do
+ repository_created_event.update!(wiki_path: nil)
+
+ daemon.run_once!
+
+ registry = Geo::ProjectRegistry.last
+
+ expect(registry).to have_attributes(project_id: project.id, resync_repository: true, resync_wiki: false)
+ end
+
+ it 'performs Geo::ProjectSyncWorker' do
+ expect(Geo::ProjectSyncWorker).to receive(:perform_async)
+ .with(project.id, anything).once
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when replaying a repository updated event' do
+ let(:project) { create(:project) }
+ let(:repository_updated_event) { create(:geo_repository_updated_event, project: project) }
+ let(:event_log) { create(:geo_event_log, repository_updated_event: repository_updated_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+ it 'creates a new project registry if it does not exist' do
+ expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+ end
+
+ it 'sets resync_repository to true if event source is repository' do
+ repository_updated_event.update!(source: Geo::RepositoryUpdatedEvent::REPOSITORY)
+ registry = create(:geo_project_registry, :synced, project: repository_updated_event.project)
+
+ daemon.run_once!
+
+ expect(registry.reload.resync_repository).to be true
+ end
+
+ it 'sets resync_wiki to true if event source is wiki' do
+ repository_updated_event.update!(source: Geo::RepositoryUpdatedEvent::WIKI)
+ registry = create(:geo_project_registry, :synced, project: repository_updated_event.project)
+
+ daemon.run_once!
+
+ expect(registry.reload.resync_wiki).to be true
+ end
+
+ it 'performs Geo::ProjectSyncWorker' do
+ expect(Geo::ProjectSyncWorker).to receive(:perform_async)
+ .with(project.id, anything).once
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when replaying a repository deleted event' do
+ let(:event_log) { create(:geo_event_log, :deleted_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:repository_deleted_event) { event_log.repository_deleted_event }
+ let(:project) { repository_deleted_event.project }
+
+ it 'does not create a tracking database entry' do
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'schedules a GeoRepositoryDestroyWorker' do
+ project_id = repository_deleted_event.project_id
+ project_name = repository_deleted_event.deleted_project_name
+ project_path = repository_deleted_event.deleted_path
+
+ expect(::GeoRepositoryDestroyWorker).to receive(:perform_async)
+ .with(project_id, project_name, project_path, project.repository_storage)
+
+ daemon.run_once!
+ end
+
+ it 'removes the tracking database entry if exist' do
+ create(:geo_project_registry, :synced, project: project)
+
+ expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(-1)
+ end
+ end
+
+ context 'when replaying a repositories changed event' do
+ let(:repositories_changed_event) { create(:geo_repositories_changed_event, geo_node: secondary) }
+ let(:event_log) { create(:geo_event_log, repositories_changed_event: repositories_changed_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+ it 'schedules a GeoRepositoryDestroyWorker when event node is the current node' do
+ expect(Geo::RepositoriesCleanUpWorker).to receive(:perform_in).with(within(5.minutes).of(1.hour), secondary.id)
+
+ daemon.run_once!
+ end
+
+ it 'does not schedule a GeoRepositoryDestroyWorker when event node is not the current node' do
+ stub_current_geo_node(build(:geo_node))
+
+ expect(Geo::RepositoriesCleanUpWorker).not_to receive(:perform_in)
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when node has namespace restrictions' do
+ let(:group_1) { create(:group) }
+ let(:group_2) { create(:group) }
+ let(:project) { create(:project, group: group_1) }
+ let(:repository_updated_event) { create(:geo_repository_updated_event, project: project) }
+ let(:event_log) { create(:geo_event_log, repository_updated_event: repository_updated_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+ before do
+ allow(Geo::ProjectSyncWorker).to receive(:perform_async)
+ end
+
+ it 'replays events for projects that belong to selected namespaces to replicate' do
+ secondary.update!(namespaces: [group_1])
+
+ expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+ end
+
+ it 'does not replay events for projects that do not belong to selected namespaces to replicate' do
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [group_2])
+
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'does not replay events for projects that do not belong to selected shards to replicate' do
+ secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['broken'])
+
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+ end
+
+ context 'when processing a repository renamed event' do
+ let(:event_log) { create(:geo_event_log, :renamed_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:repository_renamed_event) { event_log.repository_renamed_event }
+
+ it 'does not create a new project registry' do
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'schedules a Geo::RenameRepositoryWorker' do
+ project_id = repository_renamed_event.project_id
+ old_path_with_namespace = repository_renamed_event.old_path_with_namespace
+ new_path_with_namespace = repository_renamed_event.new_path_with_namespace
+
+ expect(::Geo::RenameRepositoryWorker).to receive(:perform_async)
+ .with(project_id, old_path_with_namespace, new_path_with_namespace)
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when processing a hashed storage migration event' do
+ let(:event_log) { create(:geo_event_log, :hashed_storage_migration_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:hashed_storage_migrated_event) { event_log.hashed_storage_migrated_event }
+
+ it 'does not create a new project registry' do
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'schedules a Geo::HashedStorageMigrationWorker' do
+ project = hashed_storage_migrated_event.project
+ old_disk_path = hashed_storage_migrated_event.old_disk_path
+ new_disk_path = hashed_storage_migrated_event.new_disk_path
+ old_storage_version = project.storage_version
+
+ expect(::Geo::HashedStorageMigrationWorker).to receive(:perform_async)
+ .with(project.id, old_disk_path, new_disk_path, old_storage_version)
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when processing an attachment migration event to hashed storage' do
+ let(:event_log) { create(:geo_event_log, :hashed_storage_attachments_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:hashed_storage_attachments_event) { event_log.hashed_storage_attachments_event }
+
+ it 'does not create a new project registry' do
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'schedules a Geo::HashedStorageAttachmentsMigrationWorker' do
+ project = hashed_storage_attachments_event.project
+ old_attachments_path = hashed_storage_attachments_event.old_attachments_path
+ new_attachments_path = hashed_storage_attachments_event.new_attachments_path
+
+ expect(::Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async)
+ .with(project.id, old_attachments_path, new_attachments_path)
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when replaying a LFS object deleted event' do
+ let(:event_log) { create(:geo_event_log, :lfs_object_deleted_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:lfs_object_deleted_event) { event_log.lfs_object_deleted_event }
+ let(:lfs_object) { lfs_object_deleted_event.lfs_object }
+
+ it 'does not create a tracking database entry' do
+ expect { daemon.run_once! }.not_to change(Geo::FileRegistry, :count)
+ end
+
+ it 'schedules a Geo::FileRemovalWorker' do
+ file_path = File.join(LfsObjectUploader.root, lfs_object_deleted_event.file_path)
+
+ expect(::Geo::FileRemovalWorker).to receive(:perform_async)
+ .with(file_path)
+
+ daemon.run_once!
+ end
+
+ it 'removes the tracking database entry if exist' do
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ expect { daemon.run_once! }.to change(Geo::FileRegistry.lfs_objects, :count).by(-1)
+ end
+ end
+
+ context 'when replaying a job artifact event' do
+ let(:event_log) { create(:geo_event_log, :job_artifact_deleted_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:job_artifact_deleted_event) { event_log.job_artifact_deleted_event }
+ let(:job_artifact) { job_artifact_deleted_event.job_artifact }
+
+ context 'with a tracking database entry' do
+ before do
+ create(:geo_file_registry, :job_artifact, file_id: job_artifact.id)
+ end
+
+ context 'with a file' do
+ context 'when the delete succeeds' do
+ it 'removes the tracking database entry' do
+ expect { daemon.run_once! }.to change(Geo::FileRegistry.job_artifacts, :count).by(-1)
+ end
+
+ it 'deletes the file' do
+ expect { daemon.run_once! }.to change { File.exist?(job_artifact.file.path) }.from(true).to(false)
+ end
+ end
+
+ context 'when the delete fails' do
+ before do
+ expect(daemon).to receive(:delete_file).and_return(false)
+ end
+
+ it 'does not remove the tracking database entry' do
+ expect { daemon.run_once! }.not_to change(Geo::FileRegistry.job_artifacts, :count)
+ end
+ end
+ end
+
+ context 'without a file' do
+ before do
+ FileUtils.rm(job_artifact.file.path)
+ end
+
+ it 'removes the tracking database entry' do
+ expect { daemon.run_once! }.to change(Geo::FileRegistry.job_artifacts, :count).by(-1)
+ end
+ end
+ end
+
+ context 'without a tracking database entry' do
+ it 'does not create a tracking database entry' do
+ expect { daemon.run_once! }.not_to change(Geo::FileRegistry, :count)
+ end
+
+ it 'does not delete the file (yet, due to possible race condition)' do
+ expect { daemon.run_once! }.not_to change { File.exist?(job_artifact.file.path) }.from(true)
+ end
+ end
+ end
+ end
+
+ describe '#delete_file' do
+ context 'when the file exists' do
+ let!(:file) { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png") }
+
+ context 'when the delete does not raise an exception' do
+ it 'returns true' do
+ expect(daemon.send(:delete_file, file.path)).to be_truthy
+ end
+
+ it 'does not log an error' do
+ expect(daemon).not_to receive(:logger)
+
+ daemon.send(:delete_file, file.path)
+ end
+ end
+
+ context 'when the delete raises an exception' do
+ before do
+ expect(File).to receive(:delete).and_raise('something went wrong')
+ end
+
+ it 'returns false' do
+ expect(daemon.send(:delete_file, file.path)).to be_falsey
+ end
+
+ it 'logs an error' do
+ logger = double(logger)
+ expect(daemon).to receive(:logger).and_return(logger)
+ expect(logger).to receive(:error).with('Failed to remove file', exception: 'RuntimeError', details: 'something went wrong', filename: file.path)
+
+ daemon.send(:delete_file, file.path)
+ end
+ end
+ end
+
+ context 'when the file does not exist' do
+ it 'returns false' do
+ expect(daemon.send(:delete_file, '/does/not/exist')).to be_falsey
+ end
+
+ it 'logs an error' do
+ logger = double(logger)
+ expect(daemon).to receive(:logger).and_return(logger)
+ expect(logger).to receive(:error).with('Failed to remove file', exception: 'Errno::ENOENT', details: 'No such file or directory @ unlink_internal - /does/not/exist', filename: '/does/not/exist')
+
+ daemon.send(:delete_file, '/does/not/exist')
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/models/ee/lfs_object_spec.rb b/spec/ee/spec/models/ee/lfs_object_spec.rb
index b02327b4c73..e425f5bc112 100644
--- a/spec/ee/spec/models/ee/lfs_object_spec.rb
+++ b/spec/ee/spec/models/ee/lfs_object_spec.rb
@@ -8,14 +8,14 @@ describe LfsObject do
expect(subject.local_store?).to eq true
end
- it 'returns true when file_store is equal to LfsObjectUploader::LOCAL_STORE' do
- subject.file_store = LfsObjectUploader::LOCAL_STORE
+ it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
+ subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true
end
- it 'returns false whe file_store is equal to LfsObjectUploader::REMOTE_STORE' do
- subject.file_store = LfsObjectUploader::REMOTE_STORE
+ it 'returns false whe file_store is equal to LfsObjectUploader::Store::REMOTE' do
+ subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false
end
diff --git a/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
new file mode 100644
index 00000000000..9fa618fdc47
--- /dev/null
+++ b/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -0,0 +1,50 @@
+require 'spec_helper'
+
+describe Projects::HashedStorage::MigrateAttachmentsService do
+ let(:project) { create(:project, storage_version: 1) }
+ let(:service) { described_class.new(project) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+ let(:old_attachments_path) { legacy_storage.disk_path }
+ let(:new_attachments_path) { hashed_storage.disk_path }
+
+ describe '#execute' do
+ set(:primary) { create(:geo_node, :primary) }
+ set(:secondary) { create(:geo_node) }
+
+ context 'on success' do
+ before do
+ TestEnv.clean_test_path
+ FileUtils.mkdir_p(File.join(FileUploader.root, old_attachments_path))
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+
+ it 'creates a Geo::HashedStorageAttachmentsEvent' do
+ expect { service.execute }.to change(Geo::EventLog, :count).by(1)
+
+ event = Geo::EventLog.first.event
+
+ expect(event).to be_a(Geo::HashedStorageAttachmentsEvent)
+ expect(event).to have_attributes(
+ old_attachments_path: old_attachments_path,
+ new_attachments_path: new_attachments_path
+ )
+ end
+ end
+
+ context 'on failure' do
+ it 'does not create a Geo event when skipped' do
+ expect { service.execute }.not_to change { Geo::EventLog.count }
+ end
+
+ it 'does not create a Geo event on failure' do
+ expect(service).to receive(:move_folder!).and_raise(::Projects::HashedStorage::AttachmentMigrationError)
+ expect { service.execute }.to raise_error(::Projects::HashedStorage::AttachmentMigrationError)
+ expect(Geo::EventLog.count).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/services/geo/file_download_service_spec.rb b/spec/ee/spec/services/geo/file_download_service_spec.rb
new file mode 100644
index 00000000000..4fb0d89dbde
--- /dev/null
+++ b/spec/ee/spec/services/geo/file_download_service_spec.rb
@@ -0,0 +1,227 @@
+require 'spec_helper'
+
+describe Geo::FileDownloadService do
+ include ::EE::GeoHelpers
+
+ set(:primary) { create(:geo_node, :primary) }
+ set(:secondary) { create(:geo_node) }
+
+ before do
+ stub_current_geo_node(secondary)
+
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
+ end
+
+ describe '#execute' do
+ context 'user avatar' do
+ let(:user) { create(:user, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+ let(:upload) { Upload.find_by(model: user, uploader: 'AvatarUploader') }
+
+ subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+ it 'downloads a user avatar' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ expect(Geo::FileRegistry.last.retry_count).to eq(1)
+ expect(Geo::FileRegistry.last.retry_at).to be_present
+ end
+
+ it 'registers when the download fails with some other error' do
+ stub_transfer(Gitlab::Geo::FileTransfer, nil)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'group avatar' do
+ let(:group) { create(:group, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+ let(:upload) { Upload.find_by(model: group, uploader: 'AvatarUploader') }
+
+ subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+ it 'downloads a group avatar' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'project avatar' do
+ let(:project) { create(:project, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+ let(:upload) { Upload.find_by(model: project, uploader: 'AvatarUploader') }
+
+ subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+ it 'downloads a project avatar' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'with an attachment' do
+ let(:note) { create(:note, :with_attachment) }
+ let(:upload) { Upload.find_by(model: note, uploader: 'AttachmentUploader') }
+
+ subject(:execute!) { described_class.new(:attachment, upload.id).execute }
+
+ it 'downloads the attachment' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'with a snippet' do
+ let(:upload) { create(:upload, :personal_snippet_upload) }
+
+ subject(:execute!) { described_class.new(:personal_file, upload.id).execute }
+
+ it 'downloads the file' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'with file upload' do
+ let(:project) { create(:project) }
+ let(:upload) { Upload.find_by(model: project, uploader: 'FileUploader') }
+
+ subject { described_class.new(:file, upload.id) }
+
+ before do
+ FileUploader.new(project).store!(fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png'))
+ end
+
+ it 'downloads the file' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'with namespace file upload' do
+ let(:group) { create(:group) }
+ let(:upload) { Upload.find_by(model: group, uploader: 'NamespaceFileUploader') }
+
+ subject { described_class.new(:file, upload.id) }
+
+ before do
+ NamespaceFileUploader.new(group).store!(fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png'))
+ end
+
+ it 'downloads the file' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'LFS object' do
+ let(:lfs_object) { create(:lfs_object) }
+
+ subject { described_class.new(:lfs, lfs_object.id) }
+
+ it 'downloads an LFS object' do
+ stub_transfer(Gitlab::Geo::LfsTransfer, 100)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::LfsTransfer, -1)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+
+ it 'logs a message' do
+ stub_transfer(Gitlab::Geo::LfsTransfer, 100)
+
+ expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, success: true, bytes_downloaded: 100)).and_call_original
+
+ subject.execute
+ end
+ end
+
+ context 'job artifacts' do
+ let(:job_artifact) { create(:ci_job_artifact) }
+
+ subject { described_class.new(:job_artifact, job_artifact.id) }
+
+ it 'downloads a job artifact' do
+ stub_transfer(Gitlab::Geo::JobArtifactTransfer, 100)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::JobArtifactTransfer, -1)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+
+ it 'logs a message' do
+ stub_transfer(Gitlab::Geo::JobArtifactTransfer, 100)
+
+ expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, success: true, bytes_downloaded: 100)).and_call_original
+
+ subject.execute
+ end
+ end
+
+ context 'bad object type' do
+ it 'raises an error' do
+ expect { described_class.new(:bad, 1).execute }.to raise_error(NameError)
+ end
+ end
+
+ def stub_transfer(kls, result)
+ instance = double("(instance of #{kls})", download_from_primary: result)
+ allow(kls).to receive(:new).and_return(instance)
+ end
+ end
+end
diff --git a/spec/ee/spec/services/geo/files_expire_service_spec.rb b/spec/ee/spec/services/geo/files_expire_service_spec.rb
new file mode 100644
index 00000000000..09b0b386ed1
--- /dev/null
+++ b/spec/ee/spec/services/geo/files_expire_service_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+# Disable transactions via :delete method because a foreign table
+# can't see changes inside a transaction of a different connection.
+describe Geo::FilesExpireService, :geo, :delete do
+ let(:project) { create(:project) }
+ let!(:old_full_path) { project.full_path }
+ subject { described_class.new(project, old_full_path) }
+
+ describe '#execute' do
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
+ let!(:file_registry) { create(:geo_file_registry, file_id: upload.id) }
+
+ before do
+ project.update(path: "#{project.path}_renamed")
+ end
+
+ context 'when in Geo secondary node' do
+ before do
+ allow(Gitlab::Geo).to receive(:secondary?) { true }
+ end
+
+ it 'remove file from disk' do
+ file_path = File.join(subject.base_dir, upload.path)
+ expect(File.exist?(file_path)).to be_truthy
+
+ Sidekiq::Testing.inline! { subject.execute }
+
+ expect(File.exist?(file_path)).to be_falsey
+ end
+
+ it 'removes file_registry associates with upload' do
+ expect(file_registry.success).to be_truthy
+
+ subject.execute
+
+ expect { file_registry.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'when not in Geo secondary node' do
+ it 'no-op execute action' do
+ expect(subject).not_to receive(:schedule_file_removal)
+ expect(subject).not_to receive(:mark_for_resync!)
+
+ subject.execute
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb b/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb
new file mode 100644
index 00000000000..40e06705cf5
--- /dev/null
+++ b/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb
@@ -0,0 +1,83 @@
+require 'spec_helper'
+
+def base_path(storage)
+ File.join(FileUploader.root, storage.disk_path)
+end
+
+describe Geo::HashedStorageAttachmentsMigrationService do
+ let!(:project) { create(:project) }
+
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
+ let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
+
+ subject(:service) do
+ described_class.new(project.id,
+ old_attachments_path: legacy_storage.disk_path,
+ new_attachments_path: hashed_storage.disk_path)
+ end
+
+ describe '#execute' do
+ context 'when succeeds' do
+ it 'moves attachments to hashed storage layout' do
+ expect(File.file?(old_path)).to be_truthy
+ expect(File.file?(new_path)).to be_falsey
+ expect(File.exist?(base_path(legacy_storage))).to be_truthy
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(FileUtils).to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)).and_call_original
+
+ service.execute
+
+ expect(File.exist?(base_path(hashed_storage))).to be_truthy
+ expect(File.exist?(base_path(legacy_storage))).to be_falsey
+ expect(File.file?(old_path)).to be_falsey
+ expect(File.file?(new_path)).to be_truthy
+ end
+ end
+
+ context 'when original folder does not exist anymore' do
+ before do
+ FileUtils.rm_rf(base_path(legacy_storage))
+ end
+
+ it 'skips moving folders and go to next' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ service.execute
+
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(File.file?(new_path)).to be_falsey
+ end
+ end
+
+ context 'when target folder already exists' do
+ before do
+ FileUtils.mkdir_p(base_path(hashed_storage))
+ end
+
+ it 'raises AttachmentMigrationError' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ expect { service.execute }.to raise_error(::Geo::AttachmentMigrationError)
+ end
+ end
+ end
+
+ describe '#async_execute' do
+ it 'starts the worker' do
+ expect(Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async)
+
+ service.async_execute
+ end
+
+ it 'returns job id' do
+ allow(Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async).and_return('foo')
+
+ expect(service.async_execute).to eq('foo')
+ end
+ end
+end
diff --git a/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb b/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb
new file mode 100644
index 00000000000..ad7cad3128a
--- /dev/null
+++ b/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb
@@ -0,0 +1,291 @@
+require 'spec_helper'
+
+describe Geo::FileDownloadDispatchWorker, :geo do
+ include ::EE::GeoHelpers
+
+ let(:primary) { create(:geo_node, :primary, host: 'primary-geo-node') }
+ let(:secondary) { create(:geo_node) }
+
+ before do
+ stub_current_geo_node(secondary)
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:renew).and_return(true)
+ allow_any_instance_of(described_class).to receive(:over_time?).and_return(false)
+ WebMock.stub_request(:get, /primary-geo-node/).to_return(status: 200, body: "", headers: {})
+ end
+
+ subject { described_class.new }
+
+ shared_examples '#perform' do |skip_tests|
+ before do
+ skip('FDW is not configured') if skip_tests
+ end
+
+ it 'does not schedule anything when tracking database is not configured' do
+ create(:lfs_object, :with_file)
+
+ allow(Gitlab::Geo).to receive(:geo_database_configured?) { false }
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+ subject.perform
+
+ # We need to unstub here or the DatabaseCleaner will have issues since it
+ # will appear as though the tracking DB were not available
+ allow(Gitlab::Geo).to receive(:geo_database_configured?).and_call_original
+ end
+
+ it 'does not schedule anything when node is disabled' do
+ create(:lfs_object, :with_file)
+
+ secondary.enabled = false
+ secondary.save
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+ subject.perform
+ end
+
+ context 'with LFS objects' do
+ let!(:lfs_object_local_store) { create(:lfs_object, :with_file) }
+ let!(:lfs_object_remote_store) { create(:lfs_object, :with_file) }
+
+ before do
+ stub_lfs_object_storage
+ lfs_object_remote_store.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'filters S3-backed files' do
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).with(:lfs, lfs_object_local_store.id)
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with(:lfs, lfs_object_remote_store.id)
+
+ subject.perform
+ end
+ end
+
+ context 'with job artifacts' do
+ it 'performs Geo::FileDownloadWorker for unsynced job artifacts' do
+ artifact = create(:ci_job_artifact)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with(:job_artifact, artifact.id).once.and_return(spy)
+
+ subject.perform
+ end
+
+ it 'performs Geo::FileDownloadWorker for failed-sync job artifacts' do
+ artifact = create(:ci_job_artifact)
+
+ Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 0, success: false)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with('job_artifact', artifact.id).once.and_return(spy)
+
+ subject.perform
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for synced job artifacts' do
+ artifact = create(:ci_job_artifact)
+
+ Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 1234, success: true)
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+ subject.perform
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for synced job artifacts even with 0 bytes downloaded' do
+ artifact = create(:ci_job_artifact)
+
+ Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 0, success: true)
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+ subject.perform
+ end
+ end
+
+ # Test the case where we have:
+ #
+ # 1. A total of 10 files in the queue, and we can load a maximimum of 5 and send 2 at a time.
+ # 2. We send 2, wait for 1 to finish, and then send again.
+ it 'attempts to load a new batch without pending downloads' do
+ stub_const('Geo::BaseSchedulerWorker::DB_RETRIEVE_BATCH_SIZE', 5)
+ secondary.update!(files_max_capacity: 2)
+ allow_any_instance_of(::Gitlab::Geo::Transfer).to receive(:download_from_primary).and_return(100)
+
+ avatar = fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'))
+ create_list(:lfs_object, 2, :with_file)
+ create_list(:user, 2, avatar: avatar)
+ create_list(:note, 2, :with_attachment)
+ create_list(:upload, 1, :personal_snippet_upload)
+ create_list(:ci_job_artifact, 1)
+ create(:appearance, logo: avatar, header_logo: avatar)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).exactly(10).times.and_call_original
+ # For 10 downloads, we expect four database reloads:
+ # 1. Load the first batch of 5.
+ # 2. 4 get sent out, 1 remains. This triggers another reload, which loads in the next 5.
+ # 3. Those 4 get sent out, and 1 remains.
+ # 3. Since the second reload filled the pipe with 4, we need to do a final reload to ensure
+ # zero are left.
+ expect(subject).to receive(:load_pending_resources).exactly(4).times.and_call_original
+
+ Sidekiq::Testing.inline! do
+ subject.perform
+ end
+ end
+
+ context 'with a failed file' do
+ let(:failed_registry) { create(:geo_file_registry, :lfs, file_id: 999, success: false) }
+
+ it 'does not stall backfill' do
+ unsynced = create(:lfs_object, :with_file)
+
+ stub_const('Geo::BaseSchedulerWorker::DB_RETRIEVE_BATCH_SIZE', 1)
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with(:lfs, failed_registry.file_id)
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).with(:lfs, unsynced.id)
+
+ subject.perform
+ end
+
+ it 'retries failed files' do
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+ subject.perform
+ end
+
+ it 'does not retries failed files when retry_at is tomorrow' do
+ failed_registry = create(:geo_file_registry, :lfs, file_id: 999, success: false, retry_at: Date.tomorrow)
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+ subject.perform
+ end
+
+ it 'does not retries failed files when retry_at is in the past' do
+ failed_registry = create(:geo_file_registry, :lfs, file_id: 999, success: false, retry_at: Date.yesterday)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+ subject.perform
+ end
+ end
+
+ context 'when node has namespace restrictions' do
+ let(:synced_group) { create(:group) }
+ let(:project_in_synced_group) { create(:project, group: synced_group) }
+ let(:unsynced_project) { create(:project) }
+
+ before do
+ allow(ProjectCacheWorker).to receive(:perform_async).and_return(true)
+
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for LFS object that does not belong to selected namespaces to replicate' do
+ lfs_object_in_synced_group = create(:lfs_objects_project, project: project_in_synced_group)
+ create(:lfs_objects_project, project: unsynced_project)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with(:lfs, lfs_object_in_synced_group.lfs_object_id).once.and_return(spy)
+
+ subject.perform
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for job artifact that does not belong to selected namespaces to replicate' do
+ create(:ci_job_artifact, project: unsynced_project)
+ job_artifact_in_synced_group = create(:ci_job_artifact, project: project_in_synced_group)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with(:job_artifact, job_artifact_in_synced_group.id).once.and_return(spy)
+
+ subject.perform
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for upload objects that do not belong to selected namespaces to replicate' do
+ avatar = fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'))
+ avatar_in_synced_group = create(:upload, model: synced_group, path: avatar)
+ create(:upload, model: create(:group), path: avatar)
+ avatar_in_project_in_synced_group = create(:upload, model: project_in_synced_group, path: avatar)
+ create(:upload, model: unsynced_project, path: avatar)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with('avatar', avatar_in_project_in_synced_group.id).once.and_return(spy)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with('avatar', avatar_in_synced_group.id).once.and_return(spy)
+
+ subject.perform
+ end
+ end
+ end
+
+ # Disable transactions via :delete method because a foreign table
+ # can't see changes inside a transaction of a different connection.
+ describe 'when PostgreSQL FDW is available', :geo, :delete do
+ # Skip if FDW isn't activated on this database
+ it_behaves_like '#perform', Gitlab::Database.postgresql? && !Gitlab::Geo.fdw?
+ end
+
+ describe 'when PostgreSQL FDW is not enabled', :geo do
+ before do
+ allow(Gitlab::Geo).to receive(:fdw?).and_return(false)
+ end
+
+ it_behaves_like '#perform', false
+ end
+
+ describe '#take_batch' do
+ it 'returns a batch of jobs' do
+ a = [[2, :lfs], [3, :lfs]]
+ b = []
+ c = [[3, :job_artifact], [8, :job_artifact], [9, :job_artifact]]
+ expect(subject).to receive(:db_retrieve_batch_size).and_return(4)
+
+ expect(subject.send(:take_batch, a, b, c)).to eq([
+ [3, :job_artifact],
+ [2, :lfs],
+ [8, :job_artifact],
+ [3, :lfs]
+ ])
+ end
+ end
+
+ describe '#interleave' do
+ # Notice ties are resolved by taking the "first" tied element
+ it 'interleaves 2 arrays' do
+ a = %w{1 2 3}
+ b = %w{A B C}
+ expect(subject.send(:interleave, a, b)).to eq(%w{1 A 2 B 3 C})
+ end
+
+ # Notice there are no ties in this call
+ it 'interleaves 2 arrays with a longer second array' do
+ a = %w{1 2}
+ b = %w{A B C}
+ expect(subject.send(:interleave, a, b)).to eq(%w{A 1 B 2 C})
+ end
+
+ it 'interleaves 2 arrays with a longer first array' do
+ a = %w{1 2 3}
+ b = %w{A B}
+ expect(subject.send(:interleave, a, b)).to eq(%w{1 A 2 B 3})
+ end
+
+ it 'interleaves 3 arrays' do
+ a = %w{1 2 3}
+ b = %w{A B C}
+ c = %w{i ii iii}
+ expect(subject.send(:interleave, a, b, c)).to eq(%w{1 A i 2 B ii 3 C iii})
+ end
+
+ it 'interleaves 3 arrays of unequal length' do
+ a = %w{1 2}
+ b = %w{A}
+ c = %w{i ii iii iiii}
+ expect(subject.send(:interleave, a, b, c)).to eq(%w{i 1 ii A iii 2 iiii})
+ end
+ end
+end
diff --git a/spec/ee/workers/object_storage_upload_worker_spec.rb b/spec/ee/workers/object_storage_upload_worker_spec.rb
index d421fdf95a9..32ddcbe9757 100644
--- a/spec/ee/workers/object_storage_upload_worker_spec.rb
+++ b/spec/ee/workers/object_storage_upload_worker_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe ObjectStorageUploadWorker do
- let(:local) { ObjectStoreUploader::LOCAL_STORE }
- let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+ let(:local) { ObjectStorage::Store::LOCAL }
+ let(:remote) { ObjectStorage::Store::REMOTE }
def perform
described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 436735e7ed3..9bb456e89ff 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -6,7 +6,7 @@ FactoryBot.define do
file_type :archive
trait :remote_store do
- file_store JobArtifactUploader::REMOTE_STORE
+ file_store JobArtifactUploader::Store::REMOTE
end
after :build do |artifact|
diff --git a/spec/factories/geo/event_log.rb b/spec/factories/geo/event_log.rb
new file mode 100644
index 00000000000..dbe2f400f97
--- /dev/null
+++ b/spec/factories/geo/event_log.rb
@@ -0,0 +1,121 @@
+FactoryBot.define do
+ factory :geo_event_log, class: Geo::EventLog do
+ trait :created_event do
+ repository_created_event factory: :geo_repository_created_event
+ end
+
+ trait :updated_event do
+ repository_updated_event factory: :geo_repository_updated_event
+ end
+
+ trait :deleted_event do
+ repository_deleted_event factory: :geo_repository_deleted_event
+ end
+
+ trait :renamed_event do
+ repository_renamed_event factory: :geo_repository_renamed_event
+ end
+
+ trait :hashed_storage_migration_event do
+ hashed_storage_migrated_event factory: :geo_hashed_storage_migrated_event
+ end
+
+ trait :hashed_storage_attachments_event do
+ hashed_storage_attachments_event factory: :geo_hashed_storage_attachments_event
+ end
+
+ trait :lfs_object_deleted_event do
+ lfs_object_deleted_event factory: :geo_lfs_object_deleted_event
+ end
+
+ trait :job_artifact_deleted_event do
+ job_artifact_deleted_event factory: :geo_job_artifact_deleted_event
+ end
+ end
+
+ factory :geo_repository_created_event, class: Geo::RepositoryCreatedEvent do
+ project
+
+ repository_storage_name { project.repository_storage }
+ repository_storage_path { project.repository_storage_path }
+ add_attribute(:repo_path) { project.disk_path }
+ project_name { project.name }
+ wiki_path { project.wiki.disk_path }
+ end
+
+ factory :geo_repository_updated_event, class: Geo::RepositoryUpdatedEvent do
+ project
+
+ source 0
+ branches_affected 0
+ tags_affected 0
+ end
+
+ factory :geo_repository_deleted_event, class: Geo::RepositoryDeletedEvent do
+ project
+
+ repository_storage_name { project.repository_storage }
+ repository_storage_path { project.repository_storage_path }
+ deleted_path { project.path_with_namespace }
+ deleted_project_name { project.name }
+ end
+
+ factory :geo_repositories_changed_event, class: Geo::RepositoriesChangedEvent do
+ geo_node
+ end
+
+ factory :geo_repository_renamed_event, class: Geo::RepositoryRenamedEvent do
+ project { create(:project, :repository) }
+
+ repository_storage_name { project.repository_storage }
+ repository_storage_path { project.repository_storage_path }
+ old_path_with_namespace { project.path_with_namespace }
+ new_path_with_namespace { project.path_with_namespace + '_new' }
+ old_wiki_path_with_namespace { project.wiki.path_with_namespace }
+ new_wiki_path_with_namespace { project.wiki.path_with_namespace + '_new' }
+ old_path { project.path }
+ new_path { project.path + '_new' }
+ end
+
+ factory :geo_hashed_storage_migrated_event, class: Geo::HashedStorageMigratedEvent do
+ project { create(:project, :repository) }
+
+ repository_storage_name { project.repository_storage }
+ repository_storage_path { project.repository_storage_path }
+ old_disk_path { project.path_with_namespace }
+ new_disk_path { project.path_with_namespace + '_new' }
+ old_wiki_disk_path { project.wiki.path_with_namespace }
+ new_wiki_disk_path { project.wiki.path_with_namespace + '_new' }
+ new_storage_version { Project::HASHED_STORAGE_FEATURES[:repository] }
+ end
+
+ factory :geo_hashed_storage_attachments_event, class: Geo::HashedStorageAttachmentsEvent do
+ project { create(:project, :repository) }
+
+ old_attachments_path { Storage::LegacyProject.new(project).disk_path }
+ new_attachments_path { Storage::HashedProject.new(project).disk_path }
+ end
+
+ factory :geo_lfs_object_deleted_event, class: Geo::LfsObjectDeletedEvent do
+ lfs_object { create(:lfs_object, :with_file) }
+
+ after(:build, :stub) do |event, _|
+ local_store_path = Pathname.new(LfsObjectUploader.root)
+ relative_path = Pathname.new(event.lfs_object.file.path).relative_path_from(local_store_path)
+
+ event.oid = event.lfs_object.oid
+ event.file_path = relative_path
+ end
+ end
+
+ factory :geo_job_artifact_deleted_event, class: Geo::JobArtifactDeletedEvent do
+ job_artifact { create(:ci_job_artifact, :archive) }
+
+ after(:build, :stub) do |event, _|
+ local_store_path = Pathname.new(JobArtifactUploader.root)
+ relative_path = Pathname.new(event.job_artifact.file.path).relative_path_from(local_store_path)
+
+ event.file_path = relative_path
+ end
+ end
+end
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 1512f5a0e58..8c531cf5909 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -18,7 +18,7 @@ FactoryBot.define do
end
trait :with_avatar do
- avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :access_requestable do
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 707ecbd6be5..0889c5090fb 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -122,11 +122,11 @@ FactoryBot.define do
end
trait :with_attachment do
- attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") }
+ attachment { fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png") }
end
trait :with_svg_attachment do
- attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") }
+ attachment { fixture_file_upload(Rails.root.join("spec/fixtures/unsanitized.svg"), "image/svg+xml") }
end
transient do
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index d0f3911f730..16d328a5bc2 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -90,7 +90,7 @@ FactoryBot.define do
end
trait :with_avatar do
- avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :broken_storage do
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index c39500faea1..9e8a55eaedb 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -1,24 +1,43 @@
FactoryBot.define do
factory :upload do
model { build(:project) }
- path { "uploads/-/system/project/avatar/avatar.jpg" }
size 100.kilobytes
uploader "AvatarUploader"
+ store ObjectStorage::Store::LOCAL
- trait :personal_snippet do
+ # we should build a mount-agnostic upload by default
+ transient do
+ mounted_as :avatar
+ secret SecureRandom.hex
+ end
+
+ # this needs to comply with RecordsUpload::Concern#upload_path
+ path { File.join("uploads/-/system", model.class.to_s.underscore, mounted_as.to_s, 'avatar.jpg') }
+
+ trait :personal_snippet_upload do
model { build(:personal_snippet) }
+ path { File.join(secret, 'myfile.jpg') }
uploader "PersonalFileUploader"
end
trait :issuable_upload do
- path { "#{SecureRandom.hex}/myfile.jpg" }
+ path { File.join(secret, 'myfile.jpg') }
uploader "FileUploader"
end
trait :namespace_upload do
- path { "#{SecureRandom.hex}/myfile.jpg" }
model { build(:group) }
+ path { File.join(secret, 'myfile.jpg') }
uploader "NamespaceFileUploader"
end
+
+ trait :attachment_upload do
+ transient do
+ mounted_as :attachment
+ end
+
+ model { build(:note) }
+ uploader "AttachmentUploader"
+ end
end
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index e62e0b263ca..769fd656e7a 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -38,7 +38,7 @@ FactoryBot.define do
end
trait :with_avatar do
- avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :two_factor_via_otp do
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
index 8bb9ebe0419..370c2490b97 100644
--- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -23,6 +23,27 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
+ # E.g. The installation is in use at the time of migration, and someone has
+ # just uploaded a file
+ shared_examples 'does not add files in /uploads/tmp' do
+ let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+ before do
+ FileUtils.mkdir(File.dirname(tmp_file))
+ FileUtils.touch(tmp_file)
+ end
+
+ after do
+ FileUtils.rm(tmp_file)
+ end
+
+ it 'does not add files from /uploads/tmp' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+ end
+
it 'ensures the untracked_files_for_uploads table exists' do
expect do
described_class.new.perform
@@ -109,24 +130,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
- # E.g. The installation is in use at the time of migration, and someone has
- # just uploaded a file
context 'when there are files in /uploads/tmp' do
- let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
- before do
- FileUtils.touch(tmp_file)
- end
-
- after do
- FileUtils.rm(tmp_file)
- end
-
- it 'does not add files from /uploads/tmp' do
- described_class.new.perform
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
+ it_behaves_like 'does not add files in /uploads/tmp'
end
end
end
@@ -197,24 +202,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
- # E.g. The installation is in use at the time of migration, and someone has
- # just uploaded a file
context 'when there are files in /uploads/tmp' do
- let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
- before do
- FileUtils.touch(tmp_file)
- end
-
- after do
- FileUtils.rm(tmp_file)
- end
-
- it 'does not add files from /uploads/tmp' do
- described_class.new.perform
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
+ it_behaves_like 'does not add files in /uploads/tmp'
end
end
end
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index 39e3b875c49..326ed2f2ecf 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -17,7 +17,7 @@ describe Gitlab::Gfm::UploadsRewriter do
end
let(:text) do
- "Text and #{image_uploader.to_markdown} and #{zip_uploader.to_markdown}"
+ "Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
end
describe '#rewrite' do
diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
index 63992ea8ab8..a685521cbf0 100644
--- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
@@ -4,7 +4,6 @@ describe Gitlab::ImportExport::UploadsRestorer do
describe 'bundle a project Git repo' do
let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
- let(:uploads_path) { FileUploader.dynamic_path_segment(project) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
@@ -26,9 +25,9 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
it 'copies the uploads to the project path' do
- restorer.restore
+ subject.restore
- uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+ uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
@@ -44,9 +43,9 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
it 'copies the uploads to the project path' do
- restorer.restore
+ subject.restore
- uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+ uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
index e8948de1f3a..959779523f4 100644
--- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::ImportExport::UploadsSaver do
it 'copies the uploads to the export path' do
saver.save
- uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+ uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
@@ -52,7 +52,7 @@ describe Gitlab::ImportExport::UploadsSaver do
it 'copies the uploads to the export path' do
saver.save
- uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+ uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb
index cf6ae5cda74..ca9086a84d0 100644
--- a/spec/migrations/remove_empty_fork_networks_spec.rb
+++ b/spec/migrations/remove_empty_fork_networks_spec.rb
@@ -12,6 +12,10 @@ describe RemoveEmptyForkNetworks, :migration do
deleted_project.destroy!
end
+ after do
+ Upload.reset_column_information
+ end
+
it 'deletes only the fork network without members' do
expect(fork_networks.count).to eq(2)
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index b3f160f3119..138b2a4935f 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -204,7 +204,7 @@ describe Namespace do
let(:parent) { create(:group, name: 'parent', path: 'parent') }
let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
- let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) }
+ let(:uploads_dir) { FileUploader.root }
let(:pages_dir) { File.join(TestEnv.pages_path) }
before do
diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb
index 345382ea8c7..42f3d609770 100644
--- a/spec/models/upload_spec.rb
+++ b/spec/models/upload_spec.rb
@@ -45,51 +45,6 @@ describe Upload do
end
end
- describe '.remove_path' do
- it 'removes all records at the given path' do
- described_class.create!(
- size: File.size(__FILE__),
- path: __FILE__,
- model: build_stubbed(:user),
- uploader: 'AvatarUploader'
- )
-
- expect { described_class.remove_path(__FILE__) }
- .to change { described_class.count }.from(1).to(0)
- end
- end
-
- describe '.record' do
- let(:fake_uploader) do
- double(
- file: double(size: 12_345),
- relative_path: 'foo/bar.jpg',
- model: build_stubbed(:user),
- class: 'AvatarUploader'
- )
- end
-
- it 'removes existing paths before creation' do
- expect(described_class).to receive(:remove_path)
- .with(fake_uploader.relative_path)
-
- described_class.record(fake_uploader)
- end
-
- it 'creates a new record and assigns size, path, model, and uploader' do
- upload = described_class.record(fake_uploader)
-
- aggregate_failures do
- expect(upload).to be_persisted
- expect(upload.size).to eq fake_uploader.file.size
- expect(upload.path).to eq fake_uploader.relative_path
- expect(upload.model_id).to eq fake_uploader.model.id
- expect(upload.model_type).to eq fake_uploader.model.class.to_s
- expect(upload.uploader).to eq fake_uploader.class
- end
- end
- end
-
describe '#absolute_path' do
it 'returns the path directly when already absolute' do
path = '/path/to/namespace/project/secret/file.jpg'
@@ -111,27 +66,27 @@ describe Upload do
end
end
- describe '#calculate_checksum' do
- it 'calculates the SHA256 sum' do
- upload = described_class.new(
- path: __FILE__,
- size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
- )
+ describe '#calculate_checksum!' do
+ let(:upload) do
+ described_class.new(path: __FILE__,
+ size: described_class::CHECKSUM_THRESHOLD - 1.megabyte)
+ end
+
+ it 'sets `checksum` to SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest
- expect { upload.calculate_checksum }
+ expect { upload.calculate_checksum! }
.to change { upload.checksum }.from(nil).to(expected)
end
- it 'returns nil for a non-existant file' do
- upload = described_class.new(
- path: __FILE__,
- size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
- )
-
+ it 'sets `checksum` to nil for a non-existent file' do
expect(upload).to receive(:exist?).and_return(false)
- expect(upload.calculate_checksum).to be_nil
+ checksum = Digest::SHA256.file(__FILE__).hexdigest
+ upload.checksum = checksum
+
+ expect { upload.calculate_checksum! }
+ .to change { upload.checksum }.from(checksum).to(nil)
end
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 5c6eee09285..8086b91a488 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -947,7 +947,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow to pass temp file from any path
- allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
+ allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
end
context 'when job has been erased' do
@@ -1124,7 +1124,7 @@ describe API::Runner do
# by configuring this path we allow to pass file from @tmpdir only
# but all temporary files are stored in system tmp directory
@tmpdir = Dir.mktmpdir
- allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
+ allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
end
after do
@@ -1153,7 +1153,7 @@ describe API::Runner do
context 'when job has artifacts' do
let(:job) { create(:ci_build) }
- let(:store) { JobArtifactUploader::LOCAL_STORE }
+ let(:store) { JobArtifactUploader::Store::LOCAL }
before do
create(:ci_job_artifact, :archive, file_store: store, job: job)
@@ -1175,7 +1175,7 @@ describe API::Runner do
end
context 'when artifacts are stored remotely' do
- let(:store) { JobArtifactUploader::REMOTE_STORE }
+ let(:store) { JobArtifactUploader::Store::REMOTE }
let!(:job) { create(:ci_build) }
it 'download artifacts' do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 8bfc8693981..0a8788fd57e 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -245,7 +245,7 @@ describe 'Git LFS API and storage' do
context 'when LFS uses object storage' do
let(:before_get) do
stub_lfs_object_storage
- lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect' do
@@ -975,7 +975,7 @@ describe 'Git LFS API and storage' do
end
it 'responds with status 200, location of lfs store and object details' do
- expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
+ expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
@@ -1132,7 +1132,7 @@ describe 'Git LFS API and storage' do
end
it 'with location of lfs store and object details' do
- expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
+ expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
@@ -1246,7 +1246,7 @@ describe 'Git LFS API and storage' do
end
def setup_tempfile(lfs_tmp)
- upload_path = "#{Gitlab.config.lfs.storage_path}/tmp/upload"
+ upload_path = LfsObjectUploader.workhorse_upload_path
FileUtils.mkdir_p(upload_path)
FileUtils.touch(File.join(upload_path, lfs_tmp))
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 53ea88332fb..dfe9adbbcdc 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -244,7 +244,7 @@ describe Issues::MoveService do
context 'issue description with uploads' do
let(:uploader) { build(:file_uploader, project: old_project) }
- let(:description) { "Text and #{uploader.to_markdown}" }
+ let(:description) { "Text and #{uploader.markdown_link}" }
include_context 'issue move executed'
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index 50e59954f73..15699574b3a 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -6,7 +6,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
- let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
+ let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
@@ -58,6 +58,6 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
end
def base_path(storage)
- FileUploader.dynamic_path_builder(storage.disk_path)
+ File.join(FileUploader.root, storage.disk_path)
end
end
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index 935c08221e0..7ce80c82439 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -2,6 +2,8 @@ shared_examples 'handle uploads' do
let(:user) { create(:user) }
let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
+ let(:secret) { FileUploader.generate_secret }
+ let(:uploader_class) { FileUploader }
describe "POST #create" do
context 'when a user is not authorized to upload a file' do
@@ -65,7 +67,12 @@ shared_examples 'handle uploads' do
describe "GET #show" do
let(:show_upload) do
- get :show, params.merge(secret: "123456", filename: "image.jpg")
+ get :show, params.merge(secret: secret, filename: "rails_sample.jpg")
+ end
+
+ before do
+ expect(FileUploader).to receive(:generate_secret).and_return(secret)
+ UploadService.new(model, jpg, uploader_class).execute
end
context "when the model is public" do
@@ -75,11 +82,6 @@ shared_examples 'handle uploads' do
context "when not signed in" do
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
it "responds with status 200" do
show_upload
@@ -88,6 +90,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
@@ -102,11 +108,6 @@ shared_examples 'handle uploads' do
end
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
it "responds with status 200" do
show_upload
@@ -115,6 +116,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
@@ -131,11 +136,6 @@ shared_examples 'handle uploads' do
context "when not signed in" do
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
context "when the file is an image" do
before do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -149,6 +149,10 @@ shared_examples 'handle uploads' do
end
context "when the file is not an image" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
+ end
+
it "redirects to the sign in page" do
show_upload
@@ -158,6 +162,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "redirects to the sign in page" do
show_upload
@@ -177,11 +185,6 @@ shared_examples 'handle uploads' do
end
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
it "responds with status 200" do
show_upload
@@ -190,6 +193,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
@@ -200,11 +207,6 @@ shared_examples 'handle uploads' do
context "when the user doesn't have access to the model" do
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
context "when the file is an image" do
before do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -218,6 +220,10 @@ shared_examples 'handle uploads' do
end
context "when the file is not an image" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
@@ -227,6 +233,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
new file mode 100644
index 00000000000..0022b2f803f
--- /dev/null
+++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
@@ -0,0 +1,126 @@
+shared_context 'with storage' do |store, **stub_params|
+ before do
+ subject.object_store = store
+ end
+end
+
+shared_examples "migrates" do |to_store:, from_store: nil|
+ let(:to) { to_store }
+ let(:from) { from_store || subject.object_store }
+
+ def migrate(to)
+ subject.migrate!(to)
+ end
+
+ def checksum
+ Digest::SHA256.hexdigest(subject.read)
+ end
+
+ before do
+ migrate(from)
+ end
+
+ it 'does nothing when migrating to the current store' do
+ expect { migrate(from) }.not_to change { subject.object_store }.from(from)
+ end
+
+ it 'migrate to the specified store' do
+ from_checksum = checksum
+
+ expect { migrate(to) }.to change { subject.object_store }.from(from).to(to)
+ expect(checksum).to eq(from_checksum)
+ end
+
+ it 'removes the original file after the migration' do
+ original_file = subject.file.path
+ migrate(to)
+
+ expect(File.exist?(original_file)).to be_falsey
+ end
+
+ context 'migration is unsuccessful' do
+ shared_examples "handles gracefully" do |error:|
+ it 'does not update the object_store' do
+ expect { migrate(to) }.to raise_error(error)
+ expect(subject.object_store).to eq(from)
+ end
+
+ it 'does not delete the original file' do
+ expect { migrate(to) }.to raise_error(error)
+ expect(subject.exists?).to be_truthy
+ end
+ end
+
+ context 'when the store is not supported' do
+ let(:to) { -1 } # not a valid store
+
+ include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError
+ end
+
+ context 'upon a fog failure' do
+ before do
+ storage_class = subject.send(:storage_for, to).class
+ expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.")
+ end
+
+ include_examples "handles gracefully", error: "Store failure."
+ end
+
+ context 'upon a database failure' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.")
+ end
+
+ include_examples "handles gracefully", error: "ActiveRecord failure."
+ end
+ end
+end
+
+shared_examples "matches the method pattern" do |method|
+ let(:target) { subject }
+ let(:args) { nil }
+ let(:pattern) { patterns[method] }
+
+ it do
+ return skip "No pattern provided, skipping." unless pattern
+
+ expect(target.method(method).call(*args)).to match(pattern)
+ end
+end
+
+shared_examples "builds correct paths" do |**patterns|
+ let(:patterns) { patterns }
+
+ before do
+ allow(subject).to receive(:filename).and_return('<filename>')
+ end
+
+ describe "#store_dir" do
+ it_behaves_like "matches the method pattern", :store_dir
+ end
+
+ describe "#cache_dir" do
+ it_behaves_like "matches the method pattern", :cache_dir
+ end
+
+ describe "#work_dir" do
+ it_behaves_like "matches the method pattern", :work_dir
+ end
+
+ describe "#upload_path" do
+ it_behaves_like "matches the method pattern", :upload_path
+ end
+
+ describe ".absolute_path" do
+ it_behaves_like "matches the method pattern", :absolute_path do
+ let(:target) { subject.class }
+ let(:args) { [upload] }
+ end
+ end
+
+ describe ".base_dir" do
+ it_behaves_like "matches the method pattern", :base_dir do
+ let(:target) { subject.class }
+ end
+ end
+end
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
index 4f469648d5c..93477e513f2 100644
--- a/spec/support/stub_object_storage.rb
+++ b/spec/support/stub_object_storage.rb
@@ -30,4 +30,11 @@ module StubConfiguration
remote_directory: 'lfs-objects',
**params)
end
+
+ def stub_uploads_object_storage(uploader = described_class, **params)
+ stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
+ uploader: uploader,
+ remote_directory: 'uploads',
+ **params)
+ end
end
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index 664698fcbaf..3b79d769e02 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -239,7 +239,7 @@ module TestEnv
end
def artifacts_path
- Gitlab.config.artifacts.path
+ Gitlab.config.artifacts.storage_path
end
# When no cached assets exist, manually hit the root path to create them
diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb
index d05eda08201..5752078d2a0 100644
--- a/spec/support/track_untracked_uploads_helpers.rb
+++ b/spec/support/track_untracked_uploads_helpers.rb
@@ -1,6 +1,6 @@
module TrackUntrackedUploadsHelpers
def uploaded_file
- fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
+ fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg')
fixture_file_upload(fixture_path)
end
diff --git a/spec/tasks/gitlab/artifacts_rake_spec.rb b/spec/tasks/gitlab/artifacts_rake_spec.rb
index a30823b8875..570c7fa7503 100644
--- a/spec/tasks/gitlab/artifacts_rake_spec.rb
+++ b/spec/tasks/gitlab/artifacts_rake_spec.rb
@@ -18,7 +18,7 @@ describe 'gitlab:artifacts namespace rake task' do
let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
context 'when local storage is used' do
- let(:store) { ObjectStoreUploader::LOCAL_STORE }
+ let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
@@ -27,8 +27,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
- expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
- expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
@@ -38,8 +38,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
- expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
- expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
@@ -47,8 +47,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "fails to migrate to remote storage" do
subject
- expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
- expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
@@ -56,13 +56,13 @@ describe 'gitlab:artifacts namespace rake task' do
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
- let(:store) { ObjectStoreUploader::REMOTE_STORE }
+ let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
- expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
- expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
@@ -72,7 +72,7 @@ describe 'gitlab:artifacts namespace rake task' do
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
context 'when local storage is used' do
- let(:store) { ObjectStoreUploader::LOCAL_STORE }
+ let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
@@ -81,7 +81,7 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
- expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
@@ -91,7 +91,7 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
- expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
@@ -99,19 +99,19 @@ describe 'gitlab:artifacts namespace rake task' do
it "fails to migrate to remote storage" do
subject
- expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
- let(:store) { ObjectStoreUploader::REMOTE_STORE }
+ let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
- expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
diff --git a/spec/tasks/gitlab/lfs_rake_spec.rb b/spec/tasks/gitlab/lfs_rake_spec.rb
index faed24f2010..f1b677bd6ee 100644
--- a/spec/tasks/gitlab/lfs_rake_spec.rb
+++ b/spec/tasks/gitlab/lfs_rake_spec.rb
@@ -6,8 +6,8 @@ describe 'gitlab:lfs namespace rake task' do
end
describe 'migrate' do
- let(:local) { ObjectStoreUploader::LOCAL_STORE }
- let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+ let(:local) { ObjectStorage::Store::LOCAL }
+ let(:remote) { ObjectStorage::Store::REMOTE }
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
def lfs_migrate
diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb
index 04ee6e9bfad..70618f6bc19 100644
--- a/spec/uploaders/attachment_uploader_spec.rb
+++ b/spec/uploaders/attachment_uploader_spec.rb
@@ -1,28 +1,37 @@
require 'spec_helper'
describe AttachmentUploader do
- let(:uploader) { described_class.new(build_stubbed(:user)) }
+ let(:note) { create(:note, :with_attachment) }
+ let(:uploader) { note.attachment }
+ let(:upload) { create(:upload, :attachment_upload, model: uploader.model) }
- describe "#store_dir" do
- it "stores in the system dir" do
- expect(uploader.store_dir).to start_with("uploads/-/system/user")
- end
+ subject { uploader }
- it "uses the old path when using object storage" do
- expect(described_class).to receive(:file_storage?).and_return(false)
- expect(uploader.store_dir).to start_with("uploads/user")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/note/attachment/],
+ upload_path: %r[uploads/-/system/note/attachment/],
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[note/attachment/],
+ upload_path: %r[note/attachment/]
end
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index 1dc574699d8..6f4dbae26ab 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -1,28 +1,40 @@
require 'spec_helper'
describe AvatarUploader do
- let(:uploader) { described_class.new(build_stubbed(:user)) }
+ let(:model) { build_stubbed(:user) }
+ let(:uploader) { described_class.new(model, :avatar) }
+ let(:upload) { create(:upload, model: model) }
- describe "#store_dir" do
- it "stores in the system dir" do
- expect(uploader.store_dir).to start_with("uploads/-/system/user")
- end
+ subject { uploader }
- it "uses the old path when using object storage" do
- expect(described_class).to receive(:file_storage?).and_return(false)
- expect(uploader.store_dir).to start_with("uploads/user")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/user/avatar/],
+ upload_path: %r[uploads/-/system/user/avatar/],
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
- describe '#move_to_cache' do
- it 'is false' do
- expect(uploader.move_to_cache).to eq(false)
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[user/avatar/],
+ upload_path: %r[user/avatar/]
end
- describe '#move_to_store' do
- it 'is false' do
- expect(uploader.move_to_store).to eq(false)
+ context "with a file" do
+ let(:project) { create(:project, :with_avatar) }
+ let(:uploader) { project.avatar }
+ let(:upload) { uploader.upload }
+
+ before do
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index 0cf462e9553..bc024cd307c 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -3,13 +3,13 @@ require 'spec_helper'
describe FileMover do
let(:filename) { 'banana_sample.gif' }
let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) }
+ let(:temp_file_path) { File.join('uploads/-/system/temp', 'secret55', filename) }
+
let(:temp_description) do
- 'test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\
- '(/uploads/-/system/temp/secret55/banana_sample.gif)'
+ "test ![banana_sample](/#{temp_file_path}) "\
+ "same ![banana_sample](/#{temp_file_path}) "
end
- let(:temp_file_path) { File.join('secret55', filename).to_s }
- let(:file_path) { File.join('uploads', '-', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s }
-
+ let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, 'secret55', filename) }
let(:snippet) { create(:personal_snippet, description: temp_description) }
subject { described_class.new(file_path, snippet).execute }
@@ -28,8 +28,8 @@ describe FileMover do
expect(snippet.reload.description)
.to eq(
- "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\
- " same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"
+ "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "\
+ "same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "
)
end
@@ -50,8 +50,8 @@ describe FileMover do
expect(snippet.reload.description)
.to eq(
- "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"\
- " same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"
+ "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "\
+ "same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "
)
end
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index fd195d6f9b8..b92d52727c1 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -1,118 +1,78 @@
require 'spec_helper'
describe FileUploader do
- let(:uploader) { described_class.new(build_stubbed(:project)) }
+ let(:group) { create(:group, name: 'awesome') }
+ let(:project) { create(:project, namespace: group, name: 'project') }
+ let(:uploader) { described_class.new(project) }
+ let(:upload) { double(model: project, path: 'secret/foo.jpg') }
- context 'legacy storage' do
- let(:project) { build_stubbed(:project) }
-
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
-
- dynamic_segment = project.full_path
-
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
- end
+ subject { uploader }
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
-
- expect(uploader.store_dir).to include(project.full_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
+ shared_examples 'builds correct legacy storage paths' do
+ include_examples 'builds correct paths',
+ store_dir: %r{awesome/project/\h+},
+ absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
end
- context 'hashed storage' do
+ shared_examples 'uses hashed storage' do
context 'when rolled out attachments' do
- let(:project) { build_stubbed(:project, :hashed) }
-
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
-
- dynamic_segment = project.disk_path
-
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
+ before do
+ allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
end
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
+ let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') }
- expect(uploader.store_dir).to include(project.disk_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r{ca/fe/fe/ed/\h+},
+ absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
end
context 'when only repositories are rolled out' do
- let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
+ let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
+ it_behaves_like 'builds correct legacy storage paths'
+ end
+ end
- dynamic_segment = project.full_path
+ context 'legacy storage' do
+ it_behaves_like 'builds correct legacy storage paths'
+ include_examples 'uses hashed storage'
+ end
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
- end
+ context 'object store is remote' do
+ before do
+ stub_uploads_object_storage
+ end
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
+ include_context 'with storage', described_class::Store::REMOTE
- expect(uploader.store_dir).to include(project.full_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
- end
+ it_behaves_like 'builds correct legacy storage paths'
+ include_examples 'uses hashed storage'
end
describe 'initialize' do
- it 'generates a secret if none is provided' do
- expect(SecureRandom).to receive(:hex).and_return('secret')
-
- uploader = described_class.new(double)
-
- expect(uploader.secret).to eq 'secret'
- end
+ let(:uploader) { described_class.new(double, 'secret') }
it 'accepts a secret parameter' do
- expect(SecureRandom).not_to receive(:hex)
-
- uploader = described_class.new(double, 'secret')
-
- expect(uploader.secret).to eq 'secret'
+ expect(described_class).not_to receive(:generate_secret)
+ expect(uploader.secret).to eq('secret')
end
end
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
+ describe '#secret' do
+ it 'generates a secret if none is provided' do
+ expect(described_class).to receive(:generate_secret).and_return('secret')
+ expect(uploader.secret).to eq('secret')
end
end
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
+ stub_uploads_object_storage
end
- end
-
- describe '#relative_path' do
- it 'removes the leading dynamic path segment' do
- fixture = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
- uploader.store!(fixture_file_upload(fixture))
- expect(uploader.relative_path).to match(/\A\h{32}\/rails_sample.jpg\z/)
- end
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index decea35c86d..fda70a8441b 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -1,46 +1,26 @@
require 'spec_helper'
describe JobArtifactUploader do
- let(:store) { described_class::LOCAL_STORE }
+ let(:store) { described_class::Store::LOCAL }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
- let(:local_path) { Gitlab.config.artifacts.path }
- describe '#store_dir' do
- subject { uploader.store_dir }
+ subject { uploader }
- let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z],
+ cache_dir: %r[artifacts/tmp/cache],
+ work_dir: %r[artifacts/tmp/work]
- context 'when using local storage' do
- it { is_expected.to start_with(local_path) }
- it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
- it { is_expected.to end_with(path) }
- end
-
- context 'when using remote storage' do
- let(:store) { described_class::REMOTE_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
- it { is_expected.to end_with(path) }
+ context "object store is REMOTE" do
+ before do
+ stub_artifacts_object_storage
end
- end
-
- describe '#cache_dir' do
- subject { uploader.cache_dir }
-
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
- describe '#work_dir' do
- subject { uploader.work_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
end
context 'file is stored in valid local_path' do
@@ -55,7 +35,7 @@ describe JobArtifactUploader do
subject { uploader.file.path }
- it { is_expected.to start_with(local_path) }
+ it { is_expected.to start_with("#{uploader.root}/#{uploader.class.base_dir}") }
it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
it { is_expected.to include("/#{job_artifact.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index 7b316072f47..eeb6fd90c9d 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -1,51 +1,35 @@
require 'rails_helper'
describe LegacyArtifactUploader do
- let(:store) { described_class::LOCAL_STORE }
+ let(:store) { described_class::Store::LOCAL }
let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
- let(:local_path) { Gitlab.config.artifacts.path }
+ let(:local_path) { described_class.root }
- describe '.local_store_path' do
- subject { described_class.local_store_path }
+ subject { uploader }
- it "delegate to artifacts path" do
- expect(Gitlab.config.artifacts).to receive(:path)
-
- subject
- end
- end
-
- describe '.artifacts_upload_path' do
- subject { described_class.artifacts_upload_path }
+ # TODO: move to Workhorse::UploadPath
+ describe '.workhorse_upload_path' do
+ subject { described_class.workhorse_upload_path }
it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('tmp/uploads/') }
+ it { is_expected.to end_with('tmp/uploads') }
end
- describe '#store_dir' do
- subject { uploader.store_dir }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z],
+ cache_dir: %r[artifacts/tmp/cache],
+ work_dir: %r[artifacts/tmp/work]
- let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
-
- context 'when using local storage' do
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with(path) }
+ context 'object store is remote' do
+ before do
+ stub_artifacts_object_storage
end
- end
- describe '#cache_dir' do
- subject { uploader.cache_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
-
- describe '#work_dir' do
- subject { uploader.work_dir }
-
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
end
describe '#filename' do
@@ -70,7 +54,7 @@ describe LegacyArtifactUploader do
subject { uploader.file.path }
- it { is_expected.to start_with(local_path) }
+ it { is_expected.to start_with("#{uploader.root}") }
it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
it { is_expected.to include("/#{job.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 9b8e2835ebc..2e4bd008afe 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -5,37 +5,22 @@ describe LfsObjectUploader do
let(:uploader) { described_class.new(lfs_object, :file) }
let(:path) { Gitlab.config.lfs.storage_path }
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
- end
- end
-
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
- end
- end
+ subject { uploader }
- describe '#store_dir' do
- subject { uploader.store_dir }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}],
+ cache_dir: %r[/lfs-objects/tmp/cache],
+ work_dir: %r[/lfs-objects/tmp/work]
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with("#{lfs_object.oid[0, 2]}/#{lfs_object.oid[2, 2]}") }
- end
-
- describe '#cache_dir' do
- subject { uploader.cache_dir }
-
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
+ context "object store is REMOTE" do
+ before do
+ stub_lfs_object_storage
+ end
- describe '#work_dir' do
- subject { uploader.work_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}]
end
describe 'migration to object storage' do
@@ -73,7 +58,7 @@ describe LfsObjectUploader do
end
describe 'remote file' do
- let(:remote) { described_class::REMOTE_STORE }
+ let(:remote) { described_class::Store::REMOTE }
let(:lfs_object) { create(:lfs_object, file_store: remote) }
context 'with object storage enabled' do
@@ -103,7 +88,7 @@ describe LfsObjectUploader do
end
def store_file(lfs_object)
- lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
+ lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "`/png")
lfs_object.save!
end
end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index c6c4500c179..2f2c27127fc 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -1,21 +1,39 @@
require 'spec_helper'
+IDENTIFIER = %r{\h+/\S+}
+
describe NamespaceFileUploader do
let(:group) { build_stubbed(:group) }
let(:uploader) { described_class.new(group) }
+ let(:upload) { create(:upload, :namespace_upload, model: group) }
+
+ subject { uploader }
- describe "#store_dir" do
- it "stores in the namespace id directory" do
- expect(uploader.store_dir).to include(group.id.to_s)
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/namespace/\d+],
+ upload_path: IDENTIFIER,
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
+
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
- end
- describe ".absolute_path" do
- it "stores in thecorrect directory" do
- upload_record = create(:upload, :namespace_upload, model: group)
+ include_context 'with storage', described_class::Store::REMOTE
- expect(described_class.absolute_path(upload_record))
- .to include("-/system/namespace/#{group.id}")
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[namespace/\d+/\h+],
+ upload_path: IDENTIFIER
+ end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
new file mode 100644
index 00000000000..e01ad9af1dc
--- /dev/null
+++ b/spec/uploaders/object_storage_spec.rb
@@ -0,0 +1,350 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
+class Implementation < GitlabUploader
+ include ObjectStorage::Concern
+ include ::RecordsUploads::Concern
+ prepend ::ObjectStorage::Extension::RecordsUploads
+
+ storage_options Gitlab.config.uploads
+
+ private
+
+ # user/:id
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, model.id.to_s)
+ end
+end
+
+describe ObjectStorage do
+ let(:uploader_class) { Implementation }
+ let(:object) { build_stubbed(:user) }
+ let(:uploader) { uploader_class.new(object, :file) }
+
+ before do
+ allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
+ end
+
+ describe '#object_store=' do
+ it "reload the local storage" do
+ uploader.object_store = described_class::Store::LOCAL
+ expect(uploader.file_storage?).to be_truthy
+ end
+
+ it "reload the REMOTE storage" do
+ uploader.object_store = described_class::Store::REMOTE
+ expect(uploader.file_storage?).to be_falsey
+ end
+ end
+
+ context 'object_store is Store::LOCAL' do
+ before do
+ uploader.object_store = described_class::Store::LOCAL
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (base_dir, dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("uploads/-/system/user/")
+ end
+ end
+ end
+
+ context 'object_store is Store::REMOTE' do
+ before do
+ uploader.object_store = described_class::Store::REMOTE
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("user/")
+ end
+ end
+ end
+
+ describe '#object_store' do
+ it "delegates to <mount>_store on model" do
+ expect(object).to receive(:file_store)
+
+ uploader.object_store
+ end
+
+ context 'when store is null' do
+ before do
+ expect(object).to receive(:file_store).and_return(nil)
+ end
+
+ it "returns Store::LOCAL" do
+ expect(uploader.object_store).to eq(described_class::Store::LOCAL)
+ end
+ end
+
+ context 'when value is set' do
+ before do
+ expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
+ end
+
+ it "returns the given value" do
+ expect(uploader.object_store).to eq(described_class::Store::REMOTE)
+ end
+ end
+ end
+
+ describe '#file_cache_storage?' do
+ context 'when file storage is used' do
+ before do
+ uploader_class.cache_storage(:file)
+ end
+
+ it { expect(uploader).to be_file_cache_storage }
+ end
+
+ context 'when is remote storage' do
+ before do
+ uploader_class.cache_storage(:fog)
+ end
+
+ it { expect(uploader).not_to be_file_cache_storage }
+ end
+ end
+
+ # this means the model shall include
+ # include RecordsUpload::Concern
+ # prepend ObjectStorage::Extension::RecordsUploads
+ # the object_store persistence is delegated to the `Upload` model.
+ #
+ context 'when persist_object_store? is false' do
+ let(:object) { create(:project, :with_avatar) }
+ let(:uploader) { object.avatar }
+
+ it { expect(object).to be_a(Avatarable) }
+ it { expect(uploader.persist_object_store?).to be_falsey }
+
+ describe 'delegates the object_store logic to the `Upload` model' do
+ it 'sets @upload to the found `upload`' do
+ expect(uploader.upload).to eq(uploader.upload)
+ end
+
+ it 'sets @object_store to the `Upload` value' do
+ expect(uploader.object_store).to eq(uploader.upload.store)
+ end
+ end
+ end
+
+ # this means the model holds an <mounted_as>_store attribute directly
+ # and do not delegate the object_store persistence to the `Upload` model.
+ #
+ context 'persist_object_store? is true' do
+ context 'when using JobArtifactsUploader' do
+ let(:store) { described_class::Store::LOCAL }
+ let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
+ let(:uploader) { object.file }
+
+ context 'checking described_class' do
+ it "uploader include described_class::Concern" do
+ expect(uploader).to be_a(described_class::Concern)
+ end
+ end
+
+ describe '#use_file' do
+ context 'when file is stored locally' do
+ it "calls a regular path" do
+ expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
+ end
+ end
+
+ context 'when file is stored remotely' do
+ let(:store) { described_class::Store::REMOTE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "calls a cache path" do
+ expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
+ end
+ end
+ end
+
+ describe '#migrate!' do
+ subject { uploader.migrate!(new_store) }
+
+ shared_examples "updates the underlying <mounted>_store" do
+ it do
+ subject
+
+ expect(object.file_store).to eq(new_store)
+ end
+ end
+
+ context 'when using the same storage' do
+ let(:new_store) { store }
+
+ it "to not migrate the storage" do
+ subject
+
+ expect(uploader).not_to receive(:store!)
+ expect(uploader.object_store).to eq(store)
+ end
+ end
+
+ context 'when migrating to local storage' do
+ let(:store) { described_class::Store::REMOTE }
+ let(:new_store) { described_class::Store::LOCAL }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "local file does not exist" do
+ expect(File.exist?(uploader.path)).to eq(false)
+ end
+
+ it "remote file exist" do
+ expect(uploader.file.exists?).to be_truthy
+ end
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(uploader.path)).to eq(true)
+ end
+ end
+
+ context 'when migrating to remote storage' do
+ let(:new_store) { described_class::Store::REMOTE }
+ let!(:current_path) { uploader.path }
+
+ it "file does exist" do
+ expect(File.exist?(current_path)).to eq(true)
+ end
+
+ context 'when storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it "to raise an error" do
+ expect { subject }.to raise_error(/Object Storage is not enabled/)
+ end
+ end
+
+ context 'when storage is unlicensed' do
+ before do
+ stub_artifacts_object_storage(licensed: false)
+ end
+
+ it "raises an error" do
+ expect { subject }.to raise_error(/Object Storage feature is missing/)
+ end
+ end
+
+ context 'when credentials are set' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ end
+
+ it "does delete original file" do
+ subject
+
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ context 'when subject save fails' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
+ end
+
+ it "original file is not removed" do
+ expect { subject }.to raise_error(/exception/)
+
+ expect(File.exist?(current_path)).to eq(true)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '#fog_directory' do
+ let(:remote_directory) { 'directory' }
+
+ before do
+ uploader_class.storage_options double(object_store: double(remote_directory: remote_directory))
+ end
+
+ subject { uploader.fog_directory }
+
+ it { is_expected.to eq(remote_directory) }
+ end
+
+ describe '#fog_credentials' do
+ let(:connection) { Settingslogic.new("provider" => "AWS") }
+
+ before do
+ uploader_class.storage_options double(object_store: double(connection: connection))
+ end
+
+ subject { uploader.fog_credentials }
+
+ it { is_expected.to eq(provider: 'AWS') }
+ end
+
+ describe '#fog_public' do
+ subject { uploader.fog_public }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#verify_license!' do
+ subject { uploader.verify_license!(nil) }
+
+ context 'when using local storage' do
+ before do
+ expect(object).to receive(:file_store) { described_class::Store::LOCAL }
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when using remote storage' do
+ before do
+ uploader_class.storage_options double(object_store: double(enabled: true))
+ expect(object).to receive(:file_store) { described_class::Store::REMOTE }
+ end
+
+ context 'feature is not available' do
+ before do
+ expect(License).to receive(:feature_available?).with(:object_storage).and_return(false)
+ end
+
+ it "does raise an error" do
+ expect { subject }.to raise_error(/Object Storage feature is missing/)
+ end
+ end
+
+ context 'feature is available' do
+ before do
+ expect(License).to receive(:feature_available?).with(:object_storage).and_return(true)
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
deleted file mode 100644
index 2f52867bb91..00000000000
--- a/spec/uploaders/object_store_uploader_spec.rb
+++ /dev/null
@@ -1,315 +0,0 @@
-require 'rails_helper'
-require 'carrierwave/storage/fog'
-
-describe ObjectStoreUploader do
- let(:uploader_class) { Class.new(described_class) }
- let(:object) { double }
- let(:uploader) { uploader_class.new(object, :file) }
-
- before do
- allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil }
- end
-
- describe '#object_store' do
- it "calls artifacts_file_store on object" do
- expect(object).to receive(:file_store)
-
- uploader.object_store
- end
-
- context 'when store is null' do
- before do
- expect(object).to receive(:file_store).twice.and_return(nil)
- end
-
- it "returns LOCAL_STORE" do
- expect(uploader.real_object_store).to be_nil
- expect(uploader.object_store).to eq(described_class::LOCAL_STORE)
- end
- end
-
- context 'when value is set' do
- before do
- expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE)
- end
-
- it "returns given value" do
- expect(uploader.real_object_store).not_to be_nil
- expect(uploader.object_store).to eq(described_class::REMOTE_STORE)
- end
- end
- end
-
- describe '#object_store=' do
- it "calls artifacts_file_store= on object" do
- expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE)
-
- uploader.object_store = described_class::REMOTE_STORE
- end
- end
-
- describe '#file_storage?' do
- context 'when file storage is used' do
- before do
- expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE)
- end
-
- it { expect(uploader).to be_file_storage }
- end
-
- context 'when is remote storage' do
- before do
- uploader_class.storage_options double(
- object_store: double(enabled: true))
- expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
- end
-
- it { expect(uploader).not_to be_file_storage }
- end
- end
-
- describe '#file_cache_storage?' do
- context 'when file storage is used' do
- before do
- uploader_class.cache_storage(:file)
- end
-
- it { expect(uploader).to be_file_cache_storage }
- end
-
- context 'when is remote storage' do
- before do
- uploader_class.cache_storage(:fog)
- end
-
- it { expect(uploader).not_to be_file_cache_storage }
- end
- end
-
- context 'when using JobArtifactsUploader' do
- let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
- let(:uploader) { artifact.file }
-
- context 'checking described_class' do
- let(:store) { described_class::LOCAL_STORE }
-
- it "uploader is of a described_class" do
- expect(uploader).to be_a(described_class)
- end
-
- it 'moves files locally' do
- expect(uploader.move_to_store).to be(true)
- expect(uploader.move_to_cache).to be(true)
- end
- end
-
- context 'when store is null' do
- let(:store) { nil }
-
- it "sets the store to LOCAL_STORE" do
- expect(artifact.file_store).to eq(described_class::LOCAL_STORE)
- end
- end
-
- describe '#use_file' do
- context 'when file is stored locally' do
- let(:store) { described_class::LOCAL_STORE }
-
- it "calls a regular path" do
- expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
- end
- end
-
- context 'when file is stored remotely' do
- let(:store) { described_class::REMOTE_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it "calls a cache path" do
- expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
- end
- end
- end
-
- describe '#migrate!' do
- let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
- let(:uploader) { artifact.file }
- let(:store) { described_class::LOCAL_STORE }
-
- subject { uploader.migrate!(new_store) }
-
- context 'when using the same storage' do
- let(:new_store) { store }
-
- it "to not migrate the storage" do
- subject
-
- expect(uploader.object_store).to eq(store)
- end
- end
-
- context 'when migrating to local storage' do
- let(:store) { described_class::REMOTE_STORE }
- let(:new_store) { described_class::LOCAL_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it "local file does not exist" do
- expect(File.exist?(uploader.path)).to eq(false)
- end
-
- it "does migrate the file" do
- subject
-
- expect(uploader.object_store).to eq(new_store)
- expect(File.exist?(uploader.path)).to eq(true)
- end
- end
-
- context 'when migrating to remote storage' do
- let(:new_store) { described_class::REMOTE_STORE }
- let!(:current_path) { uploader.path }
-
- it "file does exist" do
- expect(File.exist?(current_path)).to eq(true)
- end
-
- context 'when storage is disabled' do
- before do
- stub_artifacts_object_storage(enabled: false)
- end
-
- it "to raise an error" do
- expect { subject }.to raise_error(/Object Storage is not enabled/)
- end
- end
-
- context 'when storage is unlicensed' do
- before do
- stub_artifacts_object_storage(licensed: false)
- end
-
- it "raises an error" do
- expect { subject }.to raise_error(/Object Storage feature is missing/)
- end
- end
-
- context 'when credentials are set' do
- before do
- stub_artifacts_object_storage
- end
-
- it "does migrate the file" do
- subject
-
- expect(uploader.object_store).to eq(new_store)
- expect(File.exist?(current_path)).to eq(false)
- end
-
- it "does delete original file" do
- subject
-
- expect(File.exist?(current_path)).to eq(false)
- end
-
- context 'when subject save fails' do
- before do
- expect(artifact).to receive(:save!).and_raise(RuntimeError, "exception")
- end
-
- it "does catch an error" do
- expect { subject }.to raise_error(/exception/)
- end
-
- it "original file is not removed" do
- begin
- subject
- rescue
- end
-
- expect(File.exist?(current_path)).to eq(true)
- end
- end
- end
- end
- end
- end
-
- describe '#fog_directory' do
- let(:remote_directory) { 'directory' }
-
- before do
- uploader_class.storage_options double(
- object_store: double(remote_directory: remote_directory))
- end
-
- subject { uploader.fog_directory }
-
- it { is_expected.to eq(remote_directory) }
- end
-
- describe '#fog_credentials' do
- let(:connection) { 'connection' }
-
- before do
- uploader_class.storage_options double(
- object_store: double(connection: connection))
- end
-
- subject { uploader.fog_credentials }
-
- it { is_expected.to eq(connection) }
- end
-
- describe '#fog_public' do
- subject { uploader.fog_public }
-
- it { is_expected.to eq(false) }
- end
-
- describe '#verify_license!' do
- subject { uploader.verify_license!(nil) }
-
- context 'when using local storage' do
- before do
- expect(object).to receive(:file_store) { described_class::LOCAL_STORE }
- end
-
- it "does not raise an error" do
- expect { subject }.not_to raise_error
- end
- end
-
- context 'when using remote storage' do
- before do
- uploader_class.storage_options double(
- object_store: double(enabled: true))
- expect(object).to receive(:file_store) { described_class::REMOTE_STORE }
- end
-
- context 'feature is not available' do
- before do
- expect(License).to receive(:feature_available?).with(:object_storage) { false }
- end
-
- it "does raise an error" do
- expect { subject }.to raise_error(/Object Storage feature is missing/)
- end
- end
-
- context 'feature is available' do
- before do
- expect(License).to receive(:feature_available?).with(:object_storage) { true }
- end
-
- it "does not raise an error" do
- expect { subject }.not_to raise_error
- end
- end
- end
- end
-end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index cbafa9f478d..ef5a70f668b 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -1,25 +1,40 @@
require 'spec_helper'
+IDENTIFIER = %r{\h+/\S+}
+
describe PersonalFileUploader do
- let(:uploader) { described_class.new(build_stubbed(:project)) }
- let(:snippet) { create(:personal_snippet) }
+ let(:model) { create(:personal_snippet) }
+ let(:uploader) { described_class.new(model) }
+ let(:upload) { create(:upload, :personal_snippet_upload) }
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: snippet, path: 'secret/foo.jpg')
+ subject { uploader }
- dynamic_segment = "personal_snippet/#{snippet.id}"
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/personal_snippet/\d+],
+ upload_path: IDENTIFIER,
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
- expect(described_class.absolute_path(upload)).to end_with("/-/system/#{dynamic_segment}/secret/foo.jpg")
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[\d+/\h+],
+ upload_path: IDENTIFIER
end
describe '#to_h' do
- it 'returns the hass' do
- uploader = described_class.new(snippet, 'secret')
+ before do
+ subject.instance_variable_set(:@secret, 'secret')
+ end
+ it 'is correct' do
allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
- expected_url = "/uploads/-/system/personal_snippet/#{snippet.id}/secret/file_name"
+ expected_url = "/uploads/-/system/personal_snippet/#{model.id}/secret/file_name"
expect(uploader.to_h).to eq(
alt: 'file_name',
@@ -28,4 +43,14 @@ describe PersonalFileUploader do
)
end
end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 7ef7fb7d758..9a3e5d83e01 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -3,16 +3,16 @@ require 'rails_helper'
describe RecordsUploads do
let!(:uploader) do
class RecordsUploadsExampleUploader < GitlabUploader
- include RecordsUploads
+ include RecordsUploads::Concern
storage :file
- def model
- FactoryBot.build_stubbed(:user)
+ def dynamic_segment
+ 'co/fe/ee'
end
end
- RecordsUploadsExampleUploader.new
+ RecordsUploadsExampleUploader.new(build_stubbed(:user))
end
def upload_fixture(filename)
@@ -20,48 +20,55 @@ describe RecordsUploads do
end
describe 'callbacks' do
- it 'calls `record_upload` after `store`' do
+ let(:upload) { create(:upload) }
+
+ before do
+ uploader.upload = upload
+ end
+
+ it '#record_upload after `store`' do
expect(uploader).to receive(:record_upload).once
uploader.store!(upload_fixture('doc_sample.txt'))
end
- it 'calls `destroy_upload` after `remove`' do
- expect(uploader).to receive(:destroy_upload).once
-
+ it '#destroy_upload after `remove`' do
uploader.store!(upload_fixture('doc_sample.txt'))
+ expect(uploader).to receive(:destroy_upload).once
uploader.remove!
end
end
describe '#record_upload callback' do
- it 'returns early when not using file storage' do
- allow(uploader).to receive(:file_storage?).and_return(false)
- expect(Upload).not_to receive(:record)
-
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ it 'creates an Upload record after store' do
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.to change { Upload.count }.by(1)
end
- it "returns early when the file doesn't exist" do
- allow(uploader).to receive(:file).and_return(double(exists?: false))
- expect(Upload).not_to receive(:record)
-
+ it 'creates a new record and assigns size, path, model, and uploader' do
uploader.store!(upload_fixture('rails_sample.jpg'))
+
+ upload = uploader.upload
+ aggregate_failures do
+ expect(upload).to be_persisted
+ expect(upload.size).to eq uploader.file.size
+ expect(upload.path).to eq uploader.upload_path
+ expect(upload.model_id).to eq uploader.model.id
+ expect(upload.model_type).to eq uploader.model.class.to_s
+ expect(upload.uploader).to eq uploader.class.to_s
+ end
end
- it 'creates an Upload record after store' do
- expect(Upload).to receive(:record)
- .with(uploader)
+ it "does not create an Upload record when the file doesn't exist" do
+ allow(uploader).to receive(:file).and_return(double(exists?: false))
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
end
it 'does not create an Upload record if model is missing' do
- expect_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
- expect(Upload).not_to receive(:record).with(uploader)
+ allow_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
end
it 'it destroys Upload records at the same path before recording' do
@@ -72,29 +79,15 @@ describe RecordsUploads do
uploader: uploader.class.to_s
)
+ uploader.upload = existing
uploader.store!(upload_fixture('rails_sample.jpg'))
expect { existing.reload }.to raise_error(ActiveRecord::RecordNotFound)
- expect(Upload.count).to eq 1
+ expect(Upload.count).to eq(1)
end
end
describe '#destroy_upload callback' do
- it 'returns early when not using file storage' do
- uploader.store!(upload_fixture('rails_sample.jpg'))
-
- allow(uploader).to receive(:file_storage?).and_return(false)
- expect(Upload).not_to receive(:remove_path)
-
- uploader.remove!
- end
-
- it 'returns early when file is nil' do
- expect(Upload).not_to receive(:remove_path)
-
- uploader.remove!
- end
-
it 'it destroys Upload records at the same path after removal' do
uploader.store!(upload_fixture('rails_sample.jpg'))
diff --git a/spec/workers/upload_checksum_worker_spec.rb b/spec/workers/upload_checksum_worker_spec.rb
index 911360da66c..9e50ce15871 100644
--- a/spec/workers/upload_checksum_worker_spec.rb
+++ b/spec/workers/upload_checksum_worker_spec.rb
@@ -2,18 +2,31 @@ require 'rails_helper'
describe UploadChecksumWorker do
describe '#perform' do
- it 'rescues ActiveRecord::RecordNotFound' do
- expect { described_class.new.perform(999_999) }.not_to raise_error
+ subject { described_class.new }
+
+ context 'without a valid record' do
+ it 'rescues ActiveRecord::RecordNotFound' do
+ expect { subject.perform(999_999) }.not_to raise_error
+ end
end
- it 'calls calculate_checksum_without_delay and save!' do
- upload = spy
- expect(Upload).to receive(:find).with(999_999).and_return(upload)
+ context 'with a valid record' do
+ let(:upload) { create(:user, :with_avatar).avatar.upload }
+
+ before do
+ expect(Upload).to receive(:find).and_return(upload)
+ allow(upload).to receive(:foreground_checksumable?).and_return(false)
+ end
- described_class.new.perform(999_999)
+ it 'calls calculate_checksum!' do
+ expect(upload).to receive(:calculate_checksum!)
+ subject.perform(upload.id)
+ end
- expect(upload).to have_received(:calculate_checksum)
- expect(upload).to have_received(:save!)
+ it 'calls save!' do
+ expect(upload).to receive(:save!)
+ subject.perform(upload.id)
+ end
end
end
end