gitlab.com/gitlab-org/gitlab-foss.git
author    Sean McGivern <sean@mcgivern.me.uk>  2018-02-02 16:59:43 +0300
committer Kamil Trzciński <ayufan@ayufan.eu>   2018-02-28 22:58:15 +0300
commit    a7dae52e9d27adde427ef8aa066c0761071a3cd9 (patch)
tree      8b6229e4e0afe7e71f9754089758cee8acd56cde /spec/uploaders
parent    45d2c31643017807cb3fc66c0be6e9cad9964faf (diff)
Merge branch '4163-move-uploads-to-object-storage' into 'master'
Move uploads to object storage
Closes #4163
See merge request gitlab-org/gitlab-ee!3867
Diffstat (limited to 'spec/uploaders')
-rw-r--r--  spec/uploaders/attachment_uploader_spec.rb      |  41
-rw-r--r--  spec/uploaders/avatar_uploader_spec.rb          |  44
-rw-r--r--  spec/uploaders/file_mover_spec.rb               |  18
-rw-r--r--  spec/uploaders/file_uploader_spec.rb            | 128
-rw-r--r--  spec/uploaders/job_artifact_uploader_spec.rb    |  46
-rw-r--r--  spec/uploaders/legacy_artifact_uploader_spec.rb |  52
-rw-r--r--  spec/uploaders/lfs_object_uploader_spec.rb      |  43
-rw-r--r--  spec/uploaders/namespace_file_uploader_spec.rb  |  36
-rw-r--r--  spec/uploaders/object_storage_spec.rb           | 350
-rw-r--r--  spec/uploaders/object_store_uploader_spec.rb    | 315
-rw-r--r--  spec/uploaders/personal_file_uploader_spec.rb   |  45
-rw-r--r--  spec/uploaders/records_uploads_spec.rb          |  73
12 files changed, 596 insertions(+), 595 deletions(-)
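Note: the specs in this diff lean on a small set of RSpec shared examples and shared contexts ('builds correct paths', 'migrates', 'with storage') whose definitions live under spec/support and are not part of this diff. As a rough sketch of the pattern only — the parameter names are taken from the invocations below, the matching logic is an assumption:

shared_examples 'builds correct paths' do |**patterns|
  # `subject` is the uploader instance set up by the including spec.
  patterns.each do |method, pattern|
    it "builds a #{method} matching the expected pattern" do
      # In the real suite :absolute_path is resolved against the uploader
      # class and the Upload record; this sketch only covers instance methods.
      expect(subject.public_send(method).to_s).to match(pattern)
    end
  end
end

shared_context 'with storage' do |store|
  before do
    # Switch the uploader under test to the given storage backend.
    subject.object_store = store
  end
end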
diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb
index 04ee6e9bfad..70618f6bc19 100644
--- a/spec/uploaders/attachment_uploader_spec.rb
+++ b/spec/uploaders/attachment_uploader_spec.rb
@@ -1,28 +1,37 @@
require 'spec_helper'
describe AttachmentUploader do
- let(:uploader) { described_class.new(build_stubbed(:user)) }
+ let(:note) { create(:note, :with_attachment) }
+ let(:uploader) { note.attachment }
+ let(:upload) { create(:upload, :attachment_upload, model: uploader.model) }
- describe "#store_dir" do
- it "stores in the system dir" do
- expect(uploader.store_dir).to start_with("uploads/-/system/user")
- end
+ subject { uploader }
- it "uses the old path when using object storage" do
- expect(described_class).to receive(:file_storage?).and_return(false)
- expect(uploader.store_dir).to start_with("uploads/user")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/note/attachment/],
+ upload_path: %r[uploads/-/system/note/attachment/],
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[note/attachment/],
+ upload_path: %r[note/attachment/]
end
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
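The "#migrate!" block above relies on the "migrates" shared example, also defined under spec/support rather than in this diff. A minimal sketch under that assumption (the setup in the real definition may differ):

shared_examples 'migrates' do |to_store:, from_store: nil|
  it "migrates the stored file to store #{to_store}" do
    subject.migrate!(from_store) if from_store  # move to the starting store first
    subject.migrate!(to_store)

    expect(subject.object_store).to eq(to_store)
    expect(subject.file.exists?).to be_truthy   # file is readable from the target store
  end
end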
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index 1dc574699d8..6f4dbae26ab 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -1,28 +1,40 @@
require 'spec_helper'
describe AvatarUploader do
- let(:uploader) { described_class.new(build_stubbed(:user)) }
+ let(:model) { build_stubbed(:user) }
+ let(:uploader) { described_class.new(model, :avatar) }
+ let(:upload) { create(:upload, model: model) }
- describe "#store_dir" do
- it "stores in the system dir" do
- expect(uploader.store_dir).to start_with("uploads/-/system/user")
- end
+ subject { uploader }
- it "uses the old path when using object storage" do
- expect(described_class).to receive(:file_storage?).and_return(false)
- expect(uploader.store_dir).to start_with("uploads/user")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/user/avatar/],
+ upload_path: %r[uploads/-/system/user/avatar/],
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
- describe '#move_to_cache' do
- it 'is false' do
- expect(uploader.move_to_cache).to eq(false)
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[user/avatar/],
+ upload_path: %r[user/avatar/]
end
- describe '#move_to_store' do
- it 'is false' do
- expect(uploader.move_to_store).to eq(false)
+ context "with a file" do
+ let(:project) { create(:project, :with_avatar) }
+ let(:uploader) { project.avatar }
+ let(:upload) { uploader.upload }
+
+ before do
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index 0cf462e9553..bc024cd307c 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -3,13 +3,13 @@ require 'spec_helper'
describe FileMover do
let(:filename) { 'banana_sample.gif' }
let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) }
+ let(:temp_file_path) { File.join('uploads/-/system/temp', 'secret55', filename) }
+
let(:temp_description) do
- 'test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\
- '(/uploads/-/system/temp/secret55/banana_sample.gif)'
+ "test ![banana_sample](/#{temp_file_path}) "\
+ "same ![banana_sample](/#{temp_file_path}) "
end
- let(:temp_file_path) { File.join('secret55', filename).to_s }
- let(:file_path) { File.join('uploads', '-', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s }
-
+ let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, 'secret55', filename) }
let(:snippet) { create(:personal_snippet, description: temp_description) }
subject { described_class.new(file_path, snippet).execute }
@@ -28,8 +28,8 @@ describe FileMover do
expect(snippet.reload.description)
.to eq(
- "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\
- " same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"
+ "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "\
+ "same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "
)
end
@@ -50,8 +50,8 @@ describe FileMover do
expect(snippet.reload.description)
.to eq(
- "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"\
- " same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"
+ "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "\
+ "same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "
)
end
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index fd195d6f9b8..b92d52727c1 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -1,118 +1,78 @@
require 'spec_helper'
describe FileUploader do
- let(:uploader) { described_class.new(build_stubbed(:project)) }
+ let(:group) { create(:group, name: 'awesome') }
+ let(:project) { create(:project, namespace: group, name: 'project') }
+ let(:uploader) { described_class.new(project) }
+ let(:upload) { double(model: project, path: 'secret/foo.jpg') }
- context 'legacy storage' do
- let(:project) { build_stubbed(:project) }
-
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
-
- dynamic_segment = project.full_path
-
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
- end
+ subject { uploader }
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
-
- expect(uploader.store_dir).to include(project.full_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
+ shared_examples 'builds correct legacy storage paths' do
+ include_examples 'builds correct paths',
+ store_dir: %r{awesome/project/\h+},
+ absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
end
- context 'hashed storage' do
+ shared_examples 'uses hashed storage' do
context 'when rolled out attachments' do
- let(:project) { build_stubbed(:project, :hashed) }
-
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
-
- dynamic_segment = project.disk_path
-
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
+ before do
+ allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
end
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
+ let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') }
- expect(uploader.store_dir).to include(project.disk_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r{ca/fe/fe/ed/\h+},
+ absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
end
context 'when only repositories are rolled out' do
- let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
+ let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
+ it_behaves_like 'builds correct legacy storage paths'
+ end
+ end
- dynamic_segment = project.full_path
+ context 'legacy storage' do
+ it_behaves_like 'builds correct legacy storage paths'
+ include_examples 'uses hashed storage'
+ end
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
- end
+ context 'object store is remote' do
+ before do
+ stub_uploads_object_storage
+ end
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
+ include_context 'with storage', described_class::Store::REMOTE
- expect(uploader.store_dir).to include(project.full_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
- end
+ it_behaves_like 'builds correct legacy storage paths'
+ include_examples 'uses hashed storage'
end
describe 'initialize' do
- it 'generates a secret if none is provided' do
- expect(SecureRandom).to receive(:hex).and_return('secret')
-
- uploader = described_class.new(double)
-
- expect(uploader.secret).to eq 'secret'
- end
+ let(:uploader) { described_class.new(double, 'secret') }
it 'accepts a secret parameter' do
- expect(SecureRandom).not_to receive(:hex)
-
- uploader = described_class.new(double, 'secret')
-
- expect(uploader.secret).to eq 'secret'
+ expect(described_class).not_to receive(:generate_secret)
+ expect(uploader.secret).to eq('secret')
end
end
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
+ describe '#secret' do
+ it 'generates a secret if none is provided' do
+ expect(described_class).to receive(:generate_secret).and_return('secret')
+ expect(uploader.secret).to eq('secret')
end
end
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
+ stub_uploads_object_storage
end
- end
-
- describe '#relative_path' do
- it 'removes the leading dynamic path segment' do
- fixture = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
- uploader.store!(fixture_file_upload(fixture))
- expect(uploader.relative_path).to match(/\A\h{32}\/rails_sample.jpg\z/)
- end
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index decea35c86d..fda70a8441b 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -1,46 +1,26 @@
require 'spec_helper'
describe JobArtifactUploader do
- let(:store) { described_class::LOCAL_STORE }
+ let(:store) { described_class::Store::LOCAL }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
- let(:local_path) { Gitlab.config.artifacts.path }
- describe '#store_dir' do
- subject { uploader.store_dir }
+ subject { uploader }
- let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z],
+ cache_dir: %r[artifacts/tmp/cache],
+ work_dir: %r[artifacts/tmp/work]
- context 'when using local storage' do
- it { is_expected.to start_with(local_path) }
- it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
- it { is_expected.to end_with(path) }
- end
-
- context 'when using remote storage' do
- let(:store) { described_class::REMOTE_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
- it { is_expected.to end_with(path) }
+ context "object store is REMOTE" do
+ before do
+ stub_artifacts_object_storage
end
- end
-
- describe '#cache_dir' do
- subject { uploader.cache_dir }
-
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
- describe '#work_dir' do
- subject { uploader.work_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
end
context 'file is stored in valid local_path' do
@@ -55,7 +35,7 @@ describe JobArtifactUploader do
subject { uploader.file.path }
- it { is_expected.to start_with(local_path) }
+ it { is_expected.to start_with("#{uploader.root}/#{uploader.class.base_dir}") }
it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
it { is_expected.to include("/#{job_artifact.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index 7b316072f47..eeb6fd90c9d 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -1,51 +1,35 @@
require 'rails_helper'
describe LegacyArtifactUploader do
- let(:store) { described_class::LOCAL_STORE }
+ let(:store) { described_class::Store::LOCAL }
let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
- let(:local_path) { Gitlab.config.artifacts.path }
+ let(:local_path) { described_class.root }
- describe '.local_store_path' do
- subject { described_class.local_store_path }
+ subject { uploader }
- it "delegate to artifacts path" do
- expect(Gitlab.config.artifacts).to receive(:path)
-
- subject
- end
- end
-
- describe '.artifacts_upload_path' do
- subject { described_class.artifacts_upload_path }
+ # TODO: move to Workhorse::UploadPath
+ describe '.workhorse_upload_path' do
+ subject { described_class.workhorse_upload_path }
it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('tmp/uploads/') }
+ it { is_expected.to end_with('tmp/uploads') }
end
- describe '#store_dir' do
- subject { uploader.store_dir }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z],
+ cache_dir: %r[artifacts/tmp/cache],
+ work_dir: %r[artifacts/tmp/work]
- let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
-
- context 'when using local storage' do
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with(path) }
+ context 'object store is remote' do
+ before do
+ stub_artifacts_object_storage
end
- end
- describe '#cache_dir' do
- subject { uploader.cache_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
-
- describe '#work_dir' do
- subject { uploader.work_dir }
-
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
end
describe '#filename' do
@@ -70,7 +54,7 @@ describe LegacyArtifactUploader do
subject { uploader.file.path }
- it { is_expected.to start_with(local_path) }
+ it { is_expected.to start_with("#{uploader.root}") }
it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
it { is_expected.to include("/#{job.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 9b8e2835ebc..2e4bd008afe 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -5,37 +5,22 @@ describe LfsObjectUploader do
let(:uploader) { described_class.new(lfs_object, :file) }
let(:path) { Gitlab.config.lfs.storage_path }
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
- end
- end
-
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
- end
- end
+ subject { uploader }
- describe '#store_dir' do
- subject { uploader.store_dir }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}],
+ cache_dir: %r[/lfs-objects/tmp/cache],
+ work_dir: %r[/lfs-objects/tmp/work]
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with("#{lfs_object.oid[0, 2]}/#{lfs_object.oid[2, 2]}") }
- end
-
- describe '#cache_dir' do
- subject { uploader.cache_dir }
-
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
+ context "object store is REMOTE" do
+ before do
+ stub_lfs_object_storage
+ end
- describe '#work_dir' do
- subject { uploader.work_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}]
end
describe 'migration to object storage' do
@@ -73,7 +58,7 @@ describe LfsObjectUploader do
end
describe 'remote file' do
- let(:remote) { described_class::REMOTE_STORE }
+ let(:remote) { described_class::Store::REMOTE }
let(:lfs_object) { create(:lfs_object, file_store: remote) }
context 'with object storage enabled' do
@@ -103,7 +88,7 @@ describe LfsObjectUploader do
end
def store_file(lfs_object)
- lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
+ lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "`/png")
lfs_object.save!
end
end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index c6c4500c179..2f2c27127fc 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -1,21 +1,39 @@
require 'spec_helper'
+IDENTIFIER = %r{\h+/\S+}
+
describe NamespaceFileUploader do
let(:group) { build_stubbed(:group) }
let(:uploader) { described_class.new(group) }
+ let(:upload) { create(:upload, :namespace_upload, model: group) }
+
+ subject { uploader }
- describe "#store_dir" do
- it "stores in the namespace id directory" do
- expect(uploader.store_dir).to include(group.id.to_s)
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/namespace/\d+],
+ upload_path: IDENTIFIER,
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
+
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
- end
- describe ".absolute_path" do
- it "stores in thecorrect directory" do
- upload_record = create(:upload, :namespace_upload, model: group)
+ include_context 'with storage', described_class::Store::REMOTE
- expect(described_class.absolute_path(upload_record))
- .to include("-/system/namespace/#{group.id}")
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[namespace/\d+/\h+],
+ upload_path: IDENTIFIER
+ end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
new file mode 100644
index 00000000000..e01ad9af1dc
--- /dev/null
+++ b/spec/uploaders/object_storage_spec.rb
@@ -0,0 +1,350 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
+class Implementation < GitlabUploader
+ include ObjectStorage::Concern
+ include ::RecordsUploads::Concern
+ prepend ::ObjectStorage::Extension::RecordsUploads
+
+ storage_options Gitlab.config.uploads
+
+ private
+
+ # user/:id
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, model.id.to_s)
+ end
+end
+
+describe ObjectStorage do
+ let(:uploader_class) { Implementation }
+ let(:object) { build_stubbed(:user) }
+ let(:uploader) { uploader_class.new(object, :file) }
+
+ before do
+ allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
+ end
+
+ describe '#object_store=' do
+ it "reload the local storage" do
+ uploader.object_store = described_class::Store::LOCAL
+ expect(uploader.file_storage?).to be_truthy
+ end
+
+ it "reload the REMOTE storage" do
+ uploader.object_store = described_class::Store::REMOTE
+ expect(uploader.file_storage?).to be_falsey
+ end
+ end
+
+ context 'object_store is Store::LOCAL' do
+ before do
+ uploader.object_store = described_class::Store::LOCAL
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (base_dir, dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("uploads/-/system/user/")
+ end
+ end
+ end
+
+ context 'object_store is Store::REMOTE' do
+ before do
+ uploader.object_store = described_class::Store::REMOTE
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("user/")
+ end
+ end
+ end
+
+ describe '#object_store' do
+ it "delegates to <mount>_store on model" do
+ expect(object).to receive(:file_store)
+
+ uploader.object_store
+ end
+
+ context 'when store is null' do
+ before do
+ expect(object).to receive(:file_store).and_return(nil)
+ end
+
+ it "returns Store::LOCAL" do
+ expect(uploader.object_store).to eq(described_class::Store::LOCAL)
+ end
+ end
+
+ context 'when value is set' do
+ before do
+ expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
+ end
+
+ it "returns the given value" do
+ expect(uploader.object_store).to eq(described_class::Store::REMOTE)
+ end
+ end
+ end
+
+ describe '#file_cache_storage?' do
+ context 'when file storage is used' do
+ before do
+ uploader_class.cache_storage(:file)
+ end
+
+ it { expect(uploader).to be_file_cache_storage }
+ end
+
+ context 'when is remote storage' do
+ before do
+ uploader_class.cache_storage(:fog)
+ end
+
+ it { expect(uploader).not_to be_file_cache_storage }
+ end
+ end
+
+ # this means the model shall include
+ # include RecordsUpload::Concern
+ # prepend ObjectStorage::Extension::RecordsUploads
+ # the object_store persistence is delegated to the `Upload` model.
+ #
+ context 'when persist_object_store? is false' do
+ let(:object) { create(:project, :with_avatar) }
+ let(:uploader) { object.avatar }
+
+ it { expect(object).to be_a(Avatarable) }
+ it { expect(uploader.persist_object_store?).to be_falsey }
+
+ describe 'delegates the object_store logic to the `Upload` model' do
+ it 'sets @upload to the found `upload`' do
+ expect(uploader.upload).to eq(uploader.upload)
+ end
+
+ it 'sets @object_store to the `Upload` value' do
+ expect(uploader.object_store).to eq(uploader.upload.store)
+ end
+ end
+ end
+
+ # this means the model holds an <mounted_as>_store attribute directly
+ # and do not delegate the object_store persistence to the `Upload` model.
+ #
+ context 'persist_object_store? is true' do
+ context 'when using JobArtifactsUploader' do
+ let(:store) { described_class::Store::LOCAL }
+ let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
+ let(:uploader) { object.file }
+
+ context 'checking described_class' do
+ it "uploader include described_class::Concern" do
+ expect(uploader).to be_a(described_class::Concern)
+ end
+ end
+
+ describe '#use_file' do
+ context 'when file is stored locally' do
+ it "calls a regular path" do
+ expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
+ end
+ end
+
+ context 'when file is stored remotely' do
+ let(:store) { described_class::Store::REMOTE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "calls a cache path" do
+ expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
+ end
+ end
+ end
+
+ describe '#migrate!' do
+ subject { uploader.migrate!(new_store) }
+
+ shared_examples "updates the underlying <mounted>_store" do
+ it do
+ subject
+
+ expect(object.file_store).to eq(new_store)
+ end
+ end
+
+ context 'when using the same storage' do
+ let(:new_store) { store }
+
+ it "to not migrate the storage" do
+ subject
+
+ expect(uploader).not_to receive(:store!)
+ expect(uploader.object_store).to eq(store)
+ end
+ end
+
+ context 'when migrating to local storage' do
+ let(:store) { described_class::Store::REMOTE }
+ let(:new_store) { described_class::Store::LOCAL }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "local file does not exist" do
+ expect(File.exist?(uploader.path)).to eq(false)
+ end
+
+ it "remote file exist" do
+ expect(uploader.file.exists?).to be_truthy
+ end
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(uploader.path)).to eq(true)
+ end
+ end
+
+ context 'when migrating to remote storage' do
+ let(:new_store) { described_class::Store::REMOTE }
+ let!(:current_path) { uploader.path }
+
+ it "file does exist" do
+ expect(File.exist?(current_path)).to eq(true)
+ end
+
+ context 'when storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it "to raise an error" do
+ expect { subject }.to raise_error(/Object Storage is not enabled/)
+ end
+ end
+
+ context 'when storage is unlicensed' do
+ before do
+ stub_artifacts_object_storage(licensed: false)
+ end
+
+ it "raises an error" do
+ expect { subject }.to raise_error(/Object Storage feature is missing/)
+ end
+ end
+
+ context 'when credentials are set' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ end
+
+ it "does delete original file" do
+ subject
+
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ context 'when subject save fails' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
+ end
+
+ it "original file is not removed" do
+ expect { subject }.to raise_error(/exception/)
+
+ expect(File.exist?(current_path)).to eq(true)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '#fog_directory' do
+ let(:remote_directory) { 'directory' }
+
+ before do
+ uploader_class.storage_options double(object_store: double(remote_directory: remote_directory))
+ end
+
+ subject { uploader.fog_directory }
+
+ it { is_expected.to eq(remote_directory) }
+ end
+
+ describe '#fog_credentials' do
+ let(:connection) { Settingslogic.new("provider" => "AWS") }
+
+ before do
+ uploader_class.storage_options double(object_store: double(connection: connection))
+ end
+
+ subject { uploader.fog_credentials }
+
+ it { is_expected.to eq(provider: 'AWS') }
+ end
+
+ describe '#fog_public' do
+ subject { uploader.fog_public }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#verify_license!' do
+ subject { uploader.verify_license!(nil) }
+
+ context 'when using local storage' do
+ before do
+ expect(object).to receive(:file_store) { described_class::Store::LOCAL }
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when using remote storage' do
+ before do
+ uploader_class.storage_options double(object_store: double(enabled: true))
+ expect(object).to receive(:file_store) { described_class::Store::REMOTE }
+ end
+
+ context 'feature is not available' do
+ before do
+ expect(License).to receive(:feature_available?).with(:object_storage).and_return(false)
+ end
+
+ it "does raise an error" do
+ expect { subject }.to raise_error(/Object Storage feature is missing/)
+ end
+ end
+
+ context 'feature is available' do
+ before do
+ expect(License).to receive(:feature_available?).with(:object_storage).and_return(true)
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+end
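Taken together, the new spec above describes the migration API that ObjectStorage::Concern adds to an uploader. A usage sketch, assuming a model whose mounted store column (here file_store) backs the uploader; the error handling only mirrors the messages asserted above, since the concrete exception class is not shown in this diff:

uploader = Implementation.new(user, :file)  # `user` must expose a file_store column
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))

begin
  # Copies the file to the remote store, persists the new store value
  # (directly or via the Upload record), then removes the local copy.
  uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue => e
  # "Object Storage is not enabled" / "Object Storage feature is missing"
  # are raised when the target store is disabled or unlicensed.
  Rails.logger.warn("upload migration skipped: #{e.message}")
end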
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
deleted file mode 100644
index 2f52867bb91..00000000000
--- a/spec/uploaders/object_store_uploader_spec.rb
+++ /dev/null
@@ -1,315 +0,0 @@
-require 'rails_helper'
-require 'carrierwave/storage/fog'
-
-describe ObjectStoreUploader do
- let(:uploader_class) { Class.new(described_class) }
- let(:object) { double }
- let(:uploader) { uploader_class.new(object, :file) }
-
- before do
- allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil }
- end
-
- describe '#object_store' do
- it "calls artifacts_file_store on object" do
- expect(object).to receive(:file_store)
-
- uploader.object_store
- end
-
- context 'when store is null' do
- before do
- expect(object).to receive(:file_store).twice.and_return(nil)
- end
-
- it "returns LOCAL_STORE" do
- expect(uploader.real_object_store).to be_nil
- expect(uploader.object_store).to eq(described_class::LOCAL_STORE)
- end
- end
-
- context 'when value is set' do
- before do
- expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE)
- end
-
- it "returns given value" do
- expect(uploader.real_object_store).not_to be_nil
- expect(uploader.object_store).to eq(described_class::REMOTE_STORE)
- end
- end
- end
-
- describe '#object_store=' do
- it "calls artifacts_file_store= on object" do
- expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE)
-
- uploader.object_store = described_class::REMOTE_STORE
- end
- end
-
- describe '#file_storage?' do
- context 'when file storage is used' do
- before do
- expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE)
- end
-
- it { expect(uploader).to be_file_storage }
- end
-
- context 'when is remote storage' do
- before do
- uploader_class.storage_options double(
- object_store: double(enabled: true))
- expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
- end
-
- it { expect(uploader).not_to be_file_storage }
- end
- end
-
- describe '#file_cache_storage?' do
- context 'when file storage is used' do
- before do
- uploader_class.cache_storage(:file)
- end
-
- it { expect(uploader).to be_file_cache_storage }
- end
-
- context 'when is remote storage' do
- before do
- uploader_class.cache_storage(:fog)
- end
-
- it { expect(uploader).not_to be_file_cache_storage }
- end
- end
-
- context 'when using JobArtifactsUploader' do
- let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
- let(:uploader) { artifact.file }
-
- context 'checking described_class' do
- let(:store) { described_class::LOCAL_STORE }
-
- it "uploader is of a described_class" do
- expect(uploader).to be_a(described_class)
- end
-
- it 'moves files locally' do
- expect(uploader.move_to_store).to be(true)
- expect(uploader.move_to_cache).to be(true)
- end
- end
-
- context 'when store is null' do
- let(:store) { nil }
-
- it "sets the store to LOCAL_STORE" do
- expect(artifact.file_store).to eq(described_class::LOCAL_STORE)
- end
- end
-
- describe '#use_file' do
- context 'when file is stored locally' do
- let(:store) { described_class::LOCAL_STORE }
-
- it "calls a regular path" do
- expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
- end
- end
-
- context 'when file is stored remotely' do
- let(:store) { described_class::REMOTE_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it "calls a cache path" do
- expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
- end
- end
- end
-
- describe '#migrate!' do
- let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
- let(:uploader) { artifact.file }
- let(:store) { described_class::LOCAL_STORE }
-
- subject { uploader.migrate!(new_store) }
-
- context 'when using the same storage' do
- let(:new_store) { store }
-
- it "to not migrate the storage" do
- subject
-
- expect(uploader.object_store).to eq(store)
- end
- end
-
- context 'when migrating to local storage' do
- let(:store) { described_class::REMOTE_STORE }
- let(:new_store) { described_class::LOCAL_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it "local file does not exist" do
- expect(File.exist?(uploader.path)).to eq(false)
- end
-
- it "does migrate the file" do
- subject
-
- expect(uploader.object_store).to eq(new_store)
- expect(File.exist?(uploader.path)).to eq(true)
- end
- end
-
- context 'when migrating to remote storage' do
- let(:new_store) { described_class::REMOTE_STORE }
- let!(:current_path) { uploader.path }
-
- it "file does exist" do
- expect(File.exist?(current_path)).to eq(true)
- end
-
- context 'when storage is disabled' do
- before do
- stub_artifacts_object_storage(enabled: false)
- end
-
- it "to raise an error" do
- expect { subject }.to raise_error(/Object Storage is not enabled/)
- end
- end
-
- context 'when storage is unlicensed' do
- before do
- stub_artifacts_object_storage(licensed: false)
- end
-
- it "raises an error" do
- expect { subject }.to raise_error(/Object Storage feature is missing/)
- end
- end
-
- context 'when credentials are set' do
- before do
- stub_artifacts_object_storage
- end
-
- it "does migrate the file" do
- subject
-
- expect(uploader.object_store).to eq(new_store)
- expect(File.exist?(current_path)).to eq(false)
- end
-
- it "does delete original file" do
- subject
-
- expect(File.exist?(current_path)).to eq(false)
- end
-
- context 'when subject save fails' do
- before do
- expect(artifact).to receive(:save!).and_raise(RuntimeError, "exception")
- end
-
- it "does catch an error" do
- expect { subject }.to raise_error(/exception/)
- end
-
- it "original file is not removed" do
- begin
- subject
- rescue
- end
-
- expect(File.exist?(current_path)).to eq(true)
- end
- end
- end
- end
- end
- end
-
- describe '#fog_directory' do
- let(:remote_directory) { 'directory' }
-
- before do
- uploader_class.storage_options double(
- object_store: double(remote_directory: remote_directory))
- end
-
- subject { uploader.fog_directory }
-
- it { is_expected.to eq(remote_directory) }
- end
-
- describe '#fog_credentials' do
- let(:connection) { 'connection' }
-
- before do
- uploader_class.storage_options double(
- object_store: double(connection: connection))
- end
-
- subject { uploader.fog_credentials }
-
- it { is_expected.to eq(connection) }
- end
-
- describe '#fog_public' do
- subject { uploader.fog_public }
-
- it { is_expected.to eq(false) }
- end
-
- describe '#verify_license!' do
- subject { uploader.verify_license!(nil) }
-
- context 'when using local storage' do
- before do
- expect(object).to receive(:file_store) { described_class::LOCAL_STORE }
- end
-
- it "does not raise an error" do
- expect { subject }.not_to raise_error
- end
- end
-
- context 'when using remote storage' do
- before do
- uploader_class.storage_options double(
- object_store: double(enabled: true))
- expect(object).to receive(:file_store) { described_class::REMOTE_STORE }
- end
-
- context 'feature is not available' do
- before do
- expect(License).to receive(:feature_available?).with(:object_storage) { false }
- end
-
- it "does raise an error" do
- expect { subject }.to raise_error(/Object Storage feature is missing/)
- end
- end
-
- context 'feature is available' do
- before do
- expect(License).to receive(:feature_available?).with(:object_storage) { true }
- end
-
- it "does not raise an error" do
- expect { subject }.not_to raise_error
- end
- end
- end
- end
-end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index cbafa9f478d..ef5a70f668b 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -1,25 +1,40 @@
require 'spec_helper'
+IDENTIFIER = %r{\h+/\S+}
+
describe PersonalFileUploader do
- let(:uploader) { described_class.new(build_stubbed(:project)) }
- let(:snippet) { create(:personal_snippet) }
+ let(:model) { create(:personal_snippet) }
+ let(:uploader) { described_class.new(model) }
+ let(:upload) { create(:upload, :personal_snippet_upload) }
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: snippet, path: 'secret/foo.jpg')
+ subject { uploader }
- dynamic_segment = "personal_snippet/#{snippet.id}"
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/personal_snippet/\d+],
+ upload_path: IDENTIFIER,
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
- expect(described_class.absolute_path(upload)).to end_with("/-/system/#{dynamic_segment}/secret/foo.jpg")
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[\d+/\h+],
+ upload_path: IDENTIFIER
end
describe '#to_h' do
- it 'returns the hass' do
- uploader = described_class.new(snippet, 'secret')
+ before do
+ subject.instance_variable_set(:@secret, 'secret')
+ end
+ it 'is correct' do
allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
- expected_url = "/uploads/-/system/personal_snippet/#{snippet.id}/secret/file_name"
+ expected_url = "/uploads/-/system/personal_snippet/#{model.id}/secret/file_name"
expect(uploader.to_h).to eq(
alt: 'file_name',
@@ -28,4 +43,14 @@ describe PersonalFileUploader do
)
end
end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 7ef7fb7d758..9a3e5d83e01 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -3,16 +3,16 @@ require 'rails_helper'
describe RecordsUploads do
let!(:uploader) do
class RecordsUploadsExampleUploader < GitlabUploader
- include RecordsUploads
+ include RecordsUploads::Concern
storage :file
- def model
- FactoryBot.build_stubbed(:user)
+ def dynamic_segment
+ 'co/fe/ee'
end
end
- RecordsUploadsExampleUploader.new
+ RecordsUploadsExampleUploader.new(build_stubbed(:user))
end
def upload_fixture(filename)
@@ -20,48 +20,55 @@ describe RecordsUploads do
end
describe 'callbacks' do
- it 'calls `record_upload` after `store`' do
+ let(:upload) { create(:upload) }
+
+ before do
+ uploader.upload = upload
+ end
+
+ it '#record_upload after `store`' do
expect(uploader).to receive(:record_upload).once
uploader.store!(upload_fixture('doc_sample.txt'))
end
- it 'calls `destroy_upload` after `remove`' do
- expect(uploader).to receive(:destroy_upload).once
-
+ it '#destroy_upload after `remove`' do
uploader.store!(upload_fixture('doc_sample.txt'))
+ expect(uploader).to receive(:destroy_upload).once
uploader.remove!
end
end
describe '#record_upload callback' do
- it 'returns early when not using file storage' do
- allow(uploader).to receive(:file_storage?).and_return(false)
- expect(Upload).not_to receive(:record)
-
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ it 'creates an Upload record after store' do
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.to change { Upload.count }.by(1)
end
- it "returns early when the file doesn't exist" do
- allow(uploader).to receive(:file).and_return(double(exists?: false))
- expect(Upload).not_to receive(:record)
-
+ it 'creates a new record and assigns size, path, model, and uploader' do
uploader.store!(upload_fixture('rails_sample.jpg'))
+
+ upload = uploader.upload
+ aggregate_failures do
+ expect(upload).to be_persisted
+ expect(upload.size).to eq uploader.file.size
+ expect(upload.path).to eq uploader.upload_path
+ expect(upload.model_id).to eq uploader.model.id
+ expect(upload.model_type).to eq uploader.model.class.to_s
+ expect(upload.uploader).to eq uploader.class.to_s
+ end
end
- it 'creates an Upload record after store' do
- expect(Upload).to receive(:record)
- .with(uploader)
+ it "does not create an Upload record when the file doesn't exist" do
+ allow(uploader).to receive(:file).and_return(double(exists?: false))
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
end
it 'does not create an Upload record if model is missing' do
- expect_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
- expect(Upload).not_to receive(:record).with(uploader)
+ allow_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
end
it 'it destroys Upload records at the same path before recording' do
@@ -72,29 +79,15 @@ describe RecordsUploads do
uploader: uploader.class.to_s
)
+ uploader.upload = existing
uploader.store!(upload_fixture('rails_sample.jpg'))
expect { existing.reload }.to raise_error(ActiveRecord::RecordNotFound)
- expect(Upload.count).to eq 1
+ expect(Upload.count).to eq(1)
end
end
describe '#destroy_upload callback' do
- it 'returns early when not using file storage' do
- uploader.store!(upload_fixture('rails_sample.jpg'))
-
- allow(uploader).to receive(:file_storage?).and_return(false)
- expect(Upload).not_to receive(:remove_path)
-
- uploader.remove!
- end
-
- it 'returns early when file is nil' do
- expect(Upload).not_to receive(:remove_path)
-
- uploader.remove!
- end
-
it 'it destroys Upload records at the same path after removal' do
uploader.store!(upload_fixture('rails_sample.jpg'))