Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-03-16 12:11:17 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-03-16 12:11:17 +0300
commit67cdffe4deb5c887c17115d4f974c0e8a267ffd2 (patch)
tree05023f6d748b4ca308eb9d61d28696726cad85f3 /spec/services/ci/job_artifacts
parent93c27b216aa57d57ebd8f5f2581e45dc300324b8 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec/services/ci/job_artifacts')
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb279
-rw-r--r--spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb252
-rw-r--r--spec/services/ci/job_artifacts/destroy_batch_service_spec.rb81
3 files changed, 612 insertions, 0 deletions
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
new file mode 100644
index 00000000000..22aa9e62c6f
--- /dev/null
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -0,0 +1,279 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobArtifacts::CreateService do
+ let_it_be(:project) { create(:project) }
+ let(:service) { described_class.new(job) }
+ let(:job) { create(:ci_build, project: project) }
+ let(:artifacts_sha256) { '0' * 64 }
+ let(:metadata_file) { nil }
+
+ let(:artifacts_file) do
+ file_to_upload('spec/fixtures/ci_build_artifacts.zip', sha256: artifacts_sha256)
+ end
+
+ let(:params) do
+ {
+ 'artifact_type' => 'archive',
+ 'artifact_format' => 'zip'
+ }.with_indifferent_access
+ end
+
+ def file_to_upload(path, params = {})
+ upload = Tempfile.new('upload')
+ FileUtils.copy(path, upload.path)
+
+ UploadedFile.new(upload.path, **params)
+ end
+
+ def unique_metrics_report_uploaders
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: described_class::METRICS_REPORT_UPLOAD_EVENT_NAME,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ end
+
+ describe '#execute' do
+ subject { service.execute(artifacts_file, params, metadata_file: metadata_file) }
+
+ context 'when artifacts file is uploaded' do
+ it 'saves artifact for the given type' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact.project).to eq(job.project)
+ expect(new_artifact.file).to be_present
+ expect(new_artifact.file_type).to eq(params['artifact_type'])
+ expect(new_artifact.file_format).to eq(params['artifact_format'])
+ expect(new_artifact.file_sha256).to eq(artifacts_sha256)
+ end
+
+ it 'does not track the job user_id' do
+ subject
+
+ expect(unique_metrics_report_uploaders).to eq(0)
+ end
+
+ context 'when metadata file is also uploaded' do
+ let(:metadata_file) do
+ file_to_upload('spec/fixtures/ci_build_artifacts_metadata.gz', sha256: artifacts_sha256)
+ end
+
+ before do
+ stub_application_setting(default_artifacts_expire_in: '1 day')
+ end
+
+ it 'saves metadata artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(2)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact.project).to eq(job.project)
+ expect(new_artifact.file).to be_present
+ expect(new_artifact.file_type).to eq('metadata')
+ expect(new_artifact.file_format).to eq('gzip')
+ expect(new_artifact.file_sha256).to eq(artifacts_sha256)
+ end
+
+ it 'sets expiration date according to application settings' do
+ expected_expire_at = 1.day.from_now
+
+ expect(subject).to match(a_hash_including(status: :success))
+ archive_artifact, metadata_artifact = job.job_artifacts.last(2)
+
+ expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
+ expect(archive_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
+ expect(metadata_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
+ end
+
+ context 'when expire_in params is set to a specific value' do
+ before do
+ params.merge!('expire_in' => '2 hours')
+ end
+
+ it 'sets expiration date according to the parameter' do
+ expected_expire_at = 2.hours.from_now
+
+ expect(subject).to match(a_hash_including(status: :success))
+ archive_artifact, metadata_artifact = job.job_artifacts.last(2)
+
+ expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
+ expect(archive_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
+ expect(metadata_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
+ end
+ end
+
+ context 'when expire_in params is set to `never`' do
+ before do
+ params.merge!('expire_in' => 'never')
+ end
+
+ it 'sets expiration date according to the parameter' do
+ expected_expire_at = nil
+
+ expect(subject).to be_truthy
+ archive_artifact, metadata_artifact = job.job_artifacts.last(2)
+
+ expect(job.artifacts_expire_at).to eq(expected_expire_at)
+ expect(archive_artifact.expire_at).to eq(expected_expire_at)
+ expect(metadata_artifact.expire_at).to eq(expected_expire_at)
+ end
+ end
+ end
+ end
+
+ context 'when artifacts file already exists' do
+ let!(:existing_artifact) do
+ create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
+ end
+
+ context 'when sha256 of uploading artifact is the same of the existing one' do
+ let(:existing_sha256) { artifacts_sha256 }
+
+ it 'ignores the changes' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ expect(subject).to match(a_hash_including(status: :success))
+ end
+ end
+
+ context 'when sha256 of uploading artifact is different than the existing one' do
+ let(:existing_sha256) { '1' * 64 }
+
+ it 'returns error status' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
+
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ expect(subject).to match(
+ a_hash_including(http_status: :bad_request,
+ message: 'another artifact of the same type already exists',
+ status: :error))
+ end
+ end
+ end
+
+ context 'when artifact type is dotenv' do
+ let(:artifacts_file) do
+ file_to_upload('spec/fixtures/build.env.gz', sha256: artifacts_sha256)
+ end
+
+ let(:params) do
+ {
+ 'artifact_type' => 'dotenv',
+ 'artifact_format' => 'gzip'
+ }.with_indifferent_access
+ end
+
+ it 'calls parse service' do
+ expect_any_instance_of(Ci::ParseDotenvArtifactService) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ expect(subject[:status]).to eq(:success)
+ expect(job.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'),
+ hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv'))
+ end
+
+ context 'when ci_synchronous_artifact_parsing feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_synchronous_artifact_parsing: false)
+ end
+
+ it 'does not call parse service' do
+ expect(Ci::ParseDotenvArtifactService).not_to receive(:new)
+
+ expect(subject[:status]).to eq(:success)
+ end
+ end
+ end
+
+ context 'when artifact_type is metrics' do
+ before do
+ allow(job).to receive(:user_id).and_return(123)
+ end
+
+ let(:params) { { 'artifact_type' => 'metrics', 'artifact_format' => 'gzip' }.with_indifferent_access }
+
+ it 'tracks the job user_id' do
+ subject
+
+ expect(unique_metrics_report_uploaders).to eq(1)
+ end
+ end
+
+ context 'when artifact type is cluster_applications' do
+ let(:artifacts_file) do
+ file_to_upload('spec/fixtures/helm/helm_list_v2_prometheus_missing.json.gz', sha256: artifacts_sha256)
+ end
+
+ let(:params) do
+ {
+ 'artifact_type' => 'cluster_applications',
+ 'artifact_format' => 'gzip'
+ }.with_indifferent_access
+ end
+
+ it 'calls cluster applications parse service' do
+ expect_next_instance_of(Clusters::ParseClusterApplicationsArtifactService) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ subject
+ end
+
+ context 'when there is a deployment cluster' do
+ let(:user) { project.owner }
+
+ before do
+ job.update!(user: user)
+ end
+
+ it 'calls cluster applications parse service with job and job user', :aggregate_failures do
+ expect(Clusters::ParseClusterApplicationsArtifactService).to receive(:new).with(job, user).and_call_original
+
+ subject
+ end
+ end
+
+ context 'when ci_synchronous_artifact_parsing feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_synchronous_artifact_parsing: false)
+ end
+
+ it 'does not call parse service' do
+ expect(Clusters::ParseClusterApplicationsArtifactService).not_to receive(:new)
+
+ expect(subject[:status]).to eq(:success)
+ end
+ end
+ end
+
+ shared_examples 'rescues object storage error' do |klass, message, expected_message|
+ it "handles #{klass}" do
+ allow_next_instance_of(JobArtifactUploader) do |uploader|
+ allow(uploader).to receive(:store!).and_raise(klass, message)
+ end
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .and_call_original
+
+ expect(subject).to match(
+ a_hash_including(
+ http_status: :service_unavailable,
+ message: expected_message || message,
+ status: :error))
+ end
+ end
+
+ it_behaves_like 'rescues object storage error',
+ Errno::EIO, 'some/path', 'Input/output error - some/path'
+
+ it_behaves_like 'rescues object storage error',
+ Google::Apis::ServerError, 'Server error'
+
+ it_behaves_like 'rescues object storage error',
+ Signet::RemoteServerError, 'The service is currently unavailable'
+ end
+end
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
new file mode 100644
index 00000000000..04fa55068f2
--- /dev/null
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -0,0 +1,252 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_shared_state do
+ include ExclusiveLeaseHelpers
+
+ let(:service) { described_class.new }
+
+ describe '.execute' do
+ subject { service.execute }
+
+ let_it_be(:artifact, refind: true) do
+ create(:ci_job_artifact, expire_at: 1.day.ago)
+ end
+
+ before(:all) do
+ artifact.job.pipeline.unlocked!
+ end
+
+ context 'when artifact is expired' do
+ context 'with preloaded relationships' do
+ before do
+ job = create(:ci_build, pipeline: artifact.job.pipeline)
+ create(:ci_job_artifact, :archive, :expired, job: job)
+
+ stub_const("#{described_class}::LOOP_LIMIT", 1)
+ end
+
+ it 'performs the smallest number of queries for job_artifacts' do
+ log = ActiveRecord::QueryRecorder.new { subject }
+
+ # SELECT expired ci_job_artifacts - 3 queries from each_batch
+ # PRELOAD projects, routes, project_statistics
+ # BEGIN
+ # INSERT into ci_deleted_objects
+ # DELETE loaded ci_job_artifacts
+ # DELETE security_findings -- for EE
+ # COMMIT
+ # SELECT next expired ci_job_artifacts
+
+ expect(log.count).to be_within(1).of(11)
+ end
+ end
+
+ context 'when artifact is not locked' do
+ it 'deletes job artifact record' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+
+ context 'when the artifact does not a file attached to it' do
+ it 'does not create deleted objects' do
+ expect(artifact.exists?).to be_falsy # sanity check
+
+ expect { subject }.not_to change { Ci::DeletedObject.count }
+ end
+ end
+
+ context 'when the artifact has a file attached to it' do
+ before do
+ artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
+ artifact.save!
+ end
+
+ it 'creates a deleted object' do
+ expect { subject }.to change { Ci::DeletedObject.count }.by(1)
+ end
+
+ it 'resets project statistics' do
+ expect(ProjectStatistics).to receive(:increment_statistic).once
+ .with(artifact.project, :build_artifacts_size, -artifact.file.size)
+ .and_call_original
+
+ subject
+ end
+
+ it 'does not remove the files' do
+ expect { subject }.not_to change { artifact.file.exists? }
+ end
+ end
+ end
+
+ context 'when artifact is locked' do
+ before do
+ artifact.job.pipeline.artifacts_locked!
+ end
+
+ it 'does not destroy job artifact' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ end
+ end
+ end
+
+ context 'when artifact is not expired' do
+ before do
+ artifact.update_column(:expire_at, 1.day.since)
+ end
+
+ it 'does not destroy expired job artifacts' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ end
+ end
+
+ context 'when artifact is permanent' do
+ before do
+ artifact.update_column(:expire_at, nil)
+ end
+
+ it 'does not destroy expired job artifacts' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ end
+ end
+
+ context 'when failed to destroy artifact' do
+ before do
+ stub_const("#{described_class}::LOOP_LIMIT", 10)
+ end
+
+ context 'when the import fails' do
+ before do
+ expect(Ci::DeletedObject)
+ .to receive(:bulk_import)
+ .once
+ .and_raise(ActiveRecord::RecordNotDestroyed)
+ end
+
+ it 'raises an exception and stop destroying' do
+ expect { subject }.to raise_error(ActiveRecord::RecordNotDestroyed)
+ .and not_change { Ci::JobArtifact.count }.from(1)
+ end
+ end
+
+ context 'when the delete fails' do
+ before do
+ expect(Ci::JobArtifact)
+ .to receive(:id_in)
+ .once
+ .and_raise(ActiveRecord::RecordNotDestroyed)
+ end
+
+ it 'raises an exception rolls back the insert' do
+ expect { subject }.to raise_error(ActiveRecord::RecordNotDestroyed)
+ .and not_change { Ci::DeletedObject.count }.from(0)
+ end
+ end
+ end
+
+ context 'when exclusive lease has already been taken by the other instance' do
+ before do
+ stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY, timeout: described_class::LOCK_TIMEOUT)
+ end
+
+ it 'raises an error and does not start destroying' do
+ expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
+
+ context 'when timeout happens' do
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+
+ before do
+ stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ second_artifact.job.pipeline.unlocked!
+ end
+
+ it 'destroys one artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(1)
+ end
+ end
+
+ context 'when loop reached loop limit' do
+ before do
+ stub_const("#{described_class}::LOOP_LIMIT", 1)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ second_artifact.job.pipeline.unlocked!
+ end
+
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+
+ it 'destroys one artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(1)
+ end
+ end
+
+ context 'when there are no artifacts' do
+ before do
+ artifact.destroy!
+ end
+
+ it 'does not raise error' do
+ expect { subject }.not_to raise_error
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(0)
+ end
+ end
+
+ context 'when there are artifacts more than batch sizes' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ second_artifact.job.pipeline.unlocked!
+ end
+
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+
+ it 'destroys all expired artifacts' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(2)
+ end
+ end
+
+ context 'when some artifacts are locked' do
+ before do
+ pipeline = create(:ci_pipeline, locked: :artifacts_locked)
+ job = create(:ci_build, pipeline: pipeline)
+ create(:ci_job_artifact, expire_at: 1.day.ago, job: job)
+ end
+
+ it 'destroys only unlocked artifacts' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+ end
+
+ context 'when all artifacts are locked' do
+ before do
+ pipeline = create(:ci_pipeline, locked: :artifacts_locked)
+ job = create(:ci_build, pipeline: pipeline)
+ artifact.update!(job: job)
+ end
+
+ it 'destroys no artifacts' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(0)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
new file mode 100644
index 00000000000..52aaf73d67e
--- /dev/null
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobArtifacts::DestroyBatchService do
+ include ExclusiveLeaseHelpers
+
+ let(:artifacts) { Ci::JobArtifact.all }
+ let(:service) { described_class.new(artifacts, pick_up_at: Time.current) }
+
+ describe '.execute' do
+ subject(:execute) { service.execute }
+
+ let_it_be(:artifact, refind: true) do
+ create(:ci_job_artifact)
+ end
+
+ context 'when the artifact has a file attached to it' do
+ before do
+ artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
+ artifact.save!
+ end
+
+ it 'creates a deleted object' do
+ expect { subject }.to change { Ci::DeletedObject.count }.by(1)
+ end
+
+ it 'resets project statistics' do
+ expect(ProjectStatistics).to receive(:increment_statistic).once
+ .with(artifact.project, :build_artifacts_size, -artifact.file.size)
+ .and_call_original
+
+ execute
+ end
+
+ it 'does not remove the files' do
+ expect { execute }.not_to change { artifact.file.exists? }
+ end
+
+ it 'reports metrics for destroyed artifacts' do
+ expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics|
+ expect(metrics).to receive(:increment_destroyed_artifacts).with(1).and_call_original
+ end
+
+ execute
+ end
+ end
+
+ context 'when failed to destroy artifact' do
+ context 'when the import fails' do
+ before do
+ expect(Ci::DeletedObject)
+ .to receive(:bulk_import)
+ .once
+ .and_raise(ActiveRecord::RecordNotDestroyed)
+ end
+
+ it 'raises an exception and stop destroying' do
+ expect { execute }.to raise_error(ActiveRecord::RecordNotDestroyed)
+ .and not_change { Ci::JobArtifact.count }.from(1)
+ end
+ end
+ end
+
+ context 'when there are no artifacts' do
+ let(:artifacts) { Ci::JobArtifact.none }
+
+ before do
+ artifact.destroy!
+ end
+
+ it 'does not raise error' do
+ expect { execute }.not_to raise_error
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(destroyed_artifacts_count: 0, status: :success)
+ end
+ end
+ end
+end