Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-09-19 04:45:44 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2020-09-19 04:45:44 +0300
commit85dc423f7090da0a52c73eb66faf22ddb20efff9 (patch)
tree9160f299afd8c80c038f08e1545be119f5e3f1e1 /spec/services/ci
parent15c2c8c66dbe422588e5411eee7e68f1fa440bb8 (diff)
Add latest changes from gitlab-org/gitlab@13-4-stable-ee
Diffstat (limited to 'spec/services/ci')
-rw-r--r--spec/services/ci/cancel_user_pipelines_service_spec.rb12
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb (renamed from spec/services/ci/create_cross_project_pipeline_service_spec.rb)72
-rw-r--r--spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb8
-rw-r--r--spec/services/ci/destroy_expired_job_artifacts_service_spec.rb52
-rw-r--r--spec/services/ci/destroy_pipeline_service_spec.rb2
-rw-r--r--spec/services/ci/generate_coverage_reports_service_spec.rb12
-rw-r--r--spec/services/ci/parse_dotenv_artifact_service_spec.rb11
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test.yml9
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test.yml21
-rw-r--r--spec/services/ci/pipelines/create_artifact_service_spec.rb67
-rw-r--r--spec/services/ci/register_job_service_spec.rb28
-rw-r--r--spec/services/ci/retry_build_service_spec.rb32
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb14
-rw-r--r--spec/services/ci/update_build_state_service_spec.rb238
-rw-r--r--spec/services/ci/update_runner_service_spec.rb2
-rw-r--r--spec/services/ci/web_ide_config_service_spec.rb91
17 files changed, 512 insertions, 161 deletions
diff --git a/spec/services/ci/cancel_user_pipelines_service_spec.rb b/spec/services/ci/cancel_user_pipelines_service_spec.rb
index 12117051b64..8491242dfd5 100644
--- a/spec/services/ci/cancel_user_pipelines_service_spec.rb
+++ b/spec/services/ci/cancel_user_pipelines_service_spec.rb
@@ -19,5 +19,17 @@ RSpec.describe Ci::CancelUserPipelinesService do
expect(build.reload).to be_canceled
end
end
+
+ context 'when an error occurs' do
+ it 'raises a service level error' do
+ service = double(execute: ServiceResponse.error(message: 'Error canceling pipeline'))
+ allow(::Ci::CancelUserPipelinesService).to receive(:new).and_return(service)
+
+ result = subject
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ end
+ end
end
end
diff --git a/spec/services/ci/create_cross_project_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index 1aabdb85afd..a6ea30e4703 100644
--- a/spec/services/ci/create_cross_project_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreateCrossProjectPipelineService, '#execute' do
+RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
let_it_be(:user) { create(:user) }
let(:upstream_project) { create(:project, :repository) }
let_it_be(:downstream_project) { create(:project, :repository) }
@@ -130,7 +130,7 @@ RSpec.describe Ci::CreateCrossProjectPipelineService, '#execute' do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(
- instance_of(Ci::CreateCrossProjectPipelineService::DuplicateDownstreamPipelineError),
+ instance_of(described_class::DuplicateDownstreamPipelineError),
bridge_id: bridge.id, project_id: bridge.project.id)
.and_call_original
expect(Ci::CreatePipelineService).not_to receive(:new)
@@ -179,7 +179,7 @@ RSpec.describe Ci::CreateCrossProjectPipelineService, '#execute' do
end
end
- context 'when downstream project is the same as the job project' do
+ context 'when downstream project is the same as the upstream project' do
let(:trigger) do
{ trigger: { project: upstream_project.full_path } }
end
@@ -311,24 +311,78 @@ RSpec.describe Ci::CreateCrossProjectPipelineService, '#execute' do
end
end
- context 'when upstream pipeline is a child pipeline' do
- let!(:pipeline_source) do
+ context 'when upstream pipeline has a parent pipeline' do
+ before do
create(:ci_sources_pipeline,
source_pipeline: create(:ci_pipeline, project: upstream_pipeline.project),
pipeline: upstream_pipeline
)
end
+ it 'creates the pipeline' do
+ expect { service.execute(bridge) }
+ .to change { Ci::Pipeline.count }.by(1)
+
+ expect(bridge.reload).to be_success
+ end
+
+ context 'when FF ci_child_of_child_pipeline is disabled' do
+ before do
+ stub_feature_flags(ci_child_of_child_pipeline: false)
+ end
+
+ it 'does not create a further child pipeline' do
+ expect { service.execute(bridge) }
+ .not_to change { Ci::Pipeline.count }
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq 'bridge_pipeline_is_child_pipeline'
+ end
+ end
+ end
+
+ context 'when upstream pipeline has a parent pipeline, which has a parent pipeline' do
before do
- upstream_pipeline.update!(source: :parent_pipeline)
+ parent_of_upstream_pipeline = create(:ci_pipeline, project: upstream_pipeline.project)
+
+ create(:ci_sources_pipeline,
+ source_pipeline: create(:ci_pipeline, project: upstream_pipeline.project),
+ pipeline: parent_of_upstream_pipeline
+ )
+
+ create(:ci_sources_pipeline,
+ source_pipeline: parent_of_upstream_pipeline,
+ pipeline: upstream_pipeline
+ )
end
- it 'does not create a further child pipeline' do
+ it 'does not create a second descendant pipeline' do
expect { service.execute(bridge) }
.not_to change { Ci::Pipeline.count }
expect(bridge.reload).to be_failed
- expect(bridge.failure_reason).to eq 'bridge_pipeline_is_child_pipeline'
+ expect(bridge.failure_reason).to eq 'reached_max_descendant_pipelines_depth'
+ end
+ end
+
+ context 'when upstream pipeline has two level upstream pipelines from different projects' do
+ before do
+ upstream_of_upstream_of_upstream_pipeline = create(:ci_pipeline)
+ upstream_of_upstream_pipeline = create(:ci_pipeline)
+
+ create(:ci_sources_pipeline,
+ source_pipeline: upstream_of_upstream_of_upstream_pipeline,
+ pipeline: upstream_of_upstream_pipeline
+ )
+
+ create(:ci_sources_pipeline,
+ source_pipeline: upstream_of_upstream_pipeline,
+ pipeline: upstream_pipeline
+ )
+ end
+
+ it 'creates the pipeline' do
+ expect { service.execute(bridge) }.to change { Ci::Pipeline.count }.by(1)
end
end
end
@@ -397,7 +451,7 @@ RSpec.describe Ci::CreateCrossProjectPipelineService, '#execute' do
context 'when pipeline variables are defined' do
before do
- upstream_pipeline.variables.create(key: 'PIPELINE_VARIABLE', value: 'my-value')
+ upstream_pipeline.variables.create!(key: 'PIPELINE_VARIABLE', value: 'my-value')
end
it 'does not pass pipeline variables directly downstream' do
diff --git a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
index 3be5ac1f739..b5b3832ac00 100644
--- a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
+++ b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'contains only errors' do
- error_message = 'root config contains unknown keys: invalid'
+ error_message = 'jobs invalid config should implement a script: or a trigger: keyword'
expect(pipeline.yaml_errors).to eq(error_message)
expect(pipeline.error_messages.map(&:content)).to contain_exactly(error_message)
expect(pipeline.errors.full_messages).to contain_exactly(error_message)
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index db4c2f5a047..e0893ed6de3 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -223,7 +223,7 @@ RSpec.describe Ci::CreatePipelineService do
context 'auto-cancel enabled' do
before do
- project.update(auto_cancel_pending_pipelines: 'enabled')
+ project.update!(auto_cancel_pending_pipelines: 'enabled')
end
it 'does not cancel HEAD pipeline' do
@@ -248,7 +248,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'cancel created outdated pipelines', :sidekiq_might_not_need_inline do
- pipeline_on_previous_commit.update(status: 'created')
+ pipeline_on_previous_commit.update!(status: 'created')
pipeline
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: pipeline.id)
@@ -439,7 +439,7 @@ RSpec.describe Ci::CreatePipelineService do
context 'auto-cancel disabled' do
before do
- project.update(auto_cancel_pending_pipelines: 'disabled')
+ project.update!(auto_cancel_pending_pipelines: 'disabled')
end
it 'does not auto cancel pending non-HEAD pipelines' do
@@ -513,7 +513,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'pull it from Auto-DevOps' do
pipeline = execute_service
expect(pipeline).to be_auto_devops_source
- expect(pipeline.builds.map(&:name)).to match_array(%w[build code_quality eslint-sast secret_detection_default_branch secrets-sast test])
+ expect(pipeline.builds.map(&:name)).to match_array(%w[build code_quality eslint-sast secret_detection_default_branch test])
end
end
diff --git a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
index 79443f16276..1c96be42a2f 100644
--- a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
+++ b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
@@ -11,6 +11,10 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let(:service) { described_class.new }
let!(:artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+ before do
+ artifact.job.pipeline.unlocked!
+ end
+
context 'when artifact is expired' do
context 'when artifact is not locked' do
before do
@@ -88,6 +92,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+
+ second_artifact.job.pipeline.unlocked!
end
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
@@ -102,7 +108,9 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
end
context 'when there are no artifacts' do
- let!(:artifact) { }
+ before do
+ artifact.destroy!
+ end
it 'does not raise error' do
expect { subject }.not_to raise_error
@@ -112,6 +120,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when there are artifacts more than batch sizes' do
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+
+ second_artifact.job.pipeline.unlocked!
end
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
@@ -120,5 +130,45 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
end
end
+
+ context 'when artifact is a pipeline artifact' do
+ context 'when artifacts are expired' do
+ let!(:pipeline_artifact_1) { create(:ci_pipeline_artifact, expire_at: 1.week.ago) }
+ let!(:pipeline_artifact_2) { create(:ci_pipeline_artifact, expire_at: 1.week.ago) }
+
+ before do
+ [pipeline_artifact_1, pipeline_artifact_2].each { |pipeline_artifact| pipeline_artifact.pipeline.unlocked! }
+ end
+
+ it 'destroys pipeline artifacts' do
+ expect { subject }.to change { Ci::PipelineArtifact.count }.by(-2)
+ end
+ end
+
+ context 'when artifacts are not expired' do
+ let!(:pipeline_artifact_1) { create(:ci_pipeline_artifact, expire_at: 2.days.from_now) }
+ let!(:pipeline_artifact_2) { create(:ci_pipeline_artifact, expire_at: 2.days.from_now) }
+
+ before do
+ [pipeline_artifact_1, pipeline_artifact_2].each { |pipeline_artifact| pipeline_artifact.pipeline.unlocked! }
+ end
+
+ it 'does not destroy pipeline artifacts' do
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
+ end
+ end
+ end
+
+ context 'when some artifacts are locked' do
+ before do
+ pipeline = create(:ci_pipeline, locked: :artifacts_locked)
+ job = create(:ci_build, pipeline: pipeline)
+ create(:ci_job_artifact, expire_at: 1.day.ago, job: job)
+ end
+
+ it 'destroys only unlocked artifacts' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+ end
end
end
diff --git a/spec/services/ci/destroy_pipeline_service_spec.rb b/spec/services/ci/destroy_pipeline_service_spec.rb
index 23cbe683d2f..6977c99e335 100644
--- a/spec/services/ci/destroy_pipeline_service_spec.rb
+++ b/spec/services/ci/destroy_pipeline_service_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe ::Ci::DestroyPipelineService do
end
it 'does not log an audit event' do
- expect { subject }.not_to change { SecurityEvent.count }
+ expect { subject }.not_to change { AuditEvent.count }
end
context 'when the pipeline has jobs' do
diff --git a/spec/services/ci/generate_coverage_reports_service_spec.rb b/spec/services/ci/generate_coverage_reports_service_spec.rb
index a3ed2eec713..d39053adebc 100644
--- a/spec/services/ci/generate_coverage_reports_service_spec.rb
+++ b/spec/services/ci/generate_coverage_reports_service_spec.rb
@@ -15,21 +15,25 @@ RSpec.describe Ci::GenerateCoverageReportsService do
let!(:head_pipeline) { merge_request.head_pipeline }
let!(:base_pipeline) { nil }
- it 'returns status and data' do
+ it 'returns status and data', :aggregate_failures do
+ expect_any_instance_of(Ci::PipelineArtifact) do |instance|
+ expect(instance).to receive(:present)
+ expect(instance).to receive(:for_files).with(merge_request.new_paths).and_call_original
+ end
+
expect(subject[:status]).to eq(:parsed)
expect(subject[:data]).to eq(files: {})
end
end
- context 'when head pipeline has corrupted coverage reports' do
+ context 'when head pipeline does not have a coverage report artifact' do
let!(:merge_request) { create(:merge_request, :with_coverage_reports, source_project: project) }
let!(:service) { described_class.new(project, nil, id: merge_request.id) }
let!(:head_pipeline) { merge_request.head_pipeline }
let!(:base_pipeline) { nil }
before do
- build = create(:ci_build, pipeline: head_pipeline, project: head_pipeline.project)
- create(:ci_job_artifact, :coverage_with_corrupted_data, job: build, project: project)
+ head_pipeline.pipeline_artifacts.destroy_all # rubocop: disable Cop/DestroyAll
end
it 'returns status and error message' do
diff --git a/spec/services/ci/parse_dotenv_artifact_service_spec.rb b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
index a5f01187a83..91b81af9fd1 100644
--- a/spec/services/ci/parse_dotenv_artifact_service_spec.rb
+++ b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
@@ -66,12 +66,13 @@ RSpec.describe Ci::ParseDotenvArtifactService do
end
context 'when multiple key/value pairs exist in one line' do
- let(:blob) { 'KEY1=VAR1KEY2=VAR1' }
+ let(:blob) { 'KEY=VARCONTAINING=EQLS' }
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq("Validation failed: Key can contain only letters, digits and '_'.")
- expect(subject[:http_status]).to eq(:bad_request)
+ it 'parses the dotenv data' do
+ subject
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY', 'value' => 'VARCONTAINING=EQLS'))
end
end
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test.yml
index a133023b12d..ef4ddff9b64 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test.yml
@@ -47,16 +47,13 @@ transitions:
- event: drop
jobs: [build_2]
expect:
- pipeline: running
+ pipeline: failed
stages:
build: failed
test: skipped
- deploy: pending
+ deploy: skipped
jobs:
build_1: success
build_2: failed
test: skipped
- deploy: pending
-
-# TODO: should we run deploy?
-# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test.yml b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test.yml
index f324525bd56..29c1562389c 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test.yml
@@ -34,30 +34,13 @@ transitions:
- event: success
jobs: [build_1, build_2]
expect:
- pipeline: running
- stages:
- build: success
- test: skipped
- deploy: pending
- jobs:
- build_1: success
- build_2: success
- test: skipped
- deploy: pending
-
- - event: success
- jobs: [deploy]
- expect:
pipeline: success
stages:
build: success
test: skipped
- deploy: success
+ deploy: skipped
jobs:
build_1: success
build_2: success
test: skipped
- deploy: success
-
-# TODO: should we run deploy?
-# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
+ deploy: skipped
diff --git a/spec/services/ci/pipelines/create_artifact_service_spec.rb b/spec/services/ci/pipelines/create_artifact_service_spec.rb
new file mode 100644
index 00000000000..d5e9cf83a6d
--- /dev/null
+++ b/spec/services/ci/pipelines/create_artifact_service_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Pipelines::CreateArtifactService do
+ describe '#execute' do
+ subject { described_class.new.execute(pipeline) }
+
+ context 'when pipeline has coverage reports' do
+ let(:pipeline) { create(:ci_pipeline, :with_coverage_reports) }
+
+ context 'when pipeline is finished' do
+ it 'creates a pipeline artifact' do
+ subject
+
+ expect(Ci::PipelineArtifact.count).to eq(1)
+ end
+
+ it 'persists the default file name' do
+ subject
+
+ file = Ci::PipelineArtifact.first.file
+
+ expect(file.filename).to eq('code_coverage.json')
+ end
+
+ it 'sets expire_at to 1 week' do
+ freeze_time do
+ subject
+
+ pipeline_artifact = Ci::PipelineArtifact.first
+
+ expect(pipeline_artifact.expire_at).to eq(1.week.from_now)
+ end
+ end
+ end
+
+ context 'when feature is disabled' do
+ it 'does not create a pipeline artifact' do
+ stub_feature_flags(coverage_report_view: false)
+
+ subject
+
+ expect(Ci::PipelineArtifact.count).to eq(0)
+ end
+ end
+
+ context 'when pipeline artifact has already been created' do
+ it 'does not raise an error and does not persist the same artifact twice' do
+ expect { 2.times { described_class.new.execute(pipeline) } }.not_to raise_error(ActiveRecord::RecordNotUnique)
+
+ expect(Ci::PipelineArtifact.count).to eq(1)
+ end
+ end
+ end
+
+ context 'when pipeline is running and coverage report does not exist' do
+ let(:pipeline) { create(:ci_pipeline, :running) }
+
+ it 'does not persist data' do
+ subject
+
+ expect(Ci::PipelineArtifact.count).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 921f5ba4c7e..0cdc8d2c870 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -15,14 +15,14 @@ module Ci
describe '#execute' do
context 'runner follow tag list' do
it "picks build with the same tag" do
- pending_job.update(tag_list: ["linux"])
- specific_runner.update(tag_list: ["linux"])
+ pending_job.update!(tag_list: ["linux"])
+ specific_runner.update!(tag_list: ["linux"])
expect(execute(specific_runner)).to eq(pending_job)
end
it "does not pick build with different tag" do
- pending_job.update(tag_list: ["linux"])
- specific_runner.update(tag_list: ["win32"])
+ pending_job.update!(tag_list: ["linux"])
+ specific_runner.update!(tag_list: ["win32"])
expect(execute(specific_runner)).to be_falsey
end
@@ -31,24 +31,24 @@ module Ci
end
it "does not pick build with tag" do
- pending_job.update(tag_list: ["linux"])
+ pending_job.update!(tag_list: ["linux"])
expect(execute(specific_runner)).to be_falsey
end
it "pick build without tag" do
- specific_runner.update(tag_list: ["win32"])
+ specific_runner.update!(tag_list: ["win32"])
expect(execute(specific_runner)).to eq(pending_job)
end
end
context 'deleted projects' do
before do
- project.update(pending_delete: true)
+ project.update!(pending_delete: true)
end
context 'for shared runners' do
before do
- project.update(shared_runners_enabled: true)
+ project.update!(shared_runners_enabled: true)
end
it 'does not pick a build' do
@@ -65,7 +65,7 @@ module Ci
context 'allow shared runners' do
before do
- project.update(shared_runners_enabled: true)
+ project.update!(shared_runners_enabled: true)
end
context 'for multiple builds' do
@@ -131,7 +131,7 @@ module Ci
context 'disallow shared runners' do
before do
- project.update(shared_runners_enabled: false)
+ project.update!(shared_runners_enabled: false)
end
context 'shared runner' do
@@ -152,7 +152,7 @@ module Ci
context 'disallow when builds are disabled' do
before do
- project.update(shared_runners_enabled: true, group_runners_enabled: true)
+ project.update!(shared_runners_enabled: true, group_runners_enabled: true)
project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
end
@@ -591,8 +591,8 @@ module Ci
.with(:job_queue_duration_seconds, anything, anything, anything)
.and_return(job_queue_duration_seconds)
- project.update(shared_runners_enabled: true)
- pending_job.update(created_at: current_time - 3600, queued_at: current_time - 1800)
+ project.update!(shared_runners_enabled: true)
+ pending_job.update!(created_at: current_time - 3600, queued_at: current_time - 1800)
end
shared_examples 'attempt counter collector' do
@@ -661,7 +661,7 @@ module Ci
context 'when pending job with queued_at=nil is used' do
before do
- pending_job.update(queued_at: nil)
+ pending_job.update!(queued_at: nil)
end
it_behaves_like 'attempt counter collector'
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 5a245415b32..51741440075 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Ci::RetryBuildService do
described_class.new(project, user)
end
- clone_accessors = described_class::CLONE_ACCESSORS
+ clone_accessors = described_class.clone_accessors
reject_accessors =
%i[id status user token token_encrypted coverage trace runner
@@ -50,7 +50,7 @@ RSpec.describe Ci::RetryBuildService do
metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size commands
resource resource_group_id processed security_scans author
- pipeline_id report_results].freeze
+ pipeline_id report_results pending_state pages_deployments].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
@@ -70,7 +70,7 @@ RSpec.describe Ci::RetryBuildService do
# Make sure that build has both `stage_id` and `stage` because FactoryBot
# can reset one of the fields when assigning another. We plan to deprecate
# and remove legacy `stage` column in the future.
- build.update(stage: 'test', stage_id: stage.id)
+ build.update!(stage: 'test', stage_id: stage.id)
# Make sure we have one instance for every possible job_artifact_X
# associations to check they are correctly rejected on build duplication.
@@ -143,6 +143,8 @@ RSpec.describe Ci::RetryBuildService do
Ci::Build.reflect_on_all_associations.map(&:name) +
[:tag_list, :needs_attributes]
+ current_accessors << :secrets if Gitlab.ee?
+
current_accessors.uniq!
expect(current_accessors).to include(*processed_accessors)
@@ -181,17 +183,24 @@ RSpec.describe Ci::RetryBuildService do
service.execute(build)
end
- context 'when there are subsequent builds that are skipped' do
+ context 'when there are subsequent processables that are skipped' do
let!(:subsequent_build) do
create(:ci_build, :skipped, stage_idx: 2,
pipeline: pipeline,
stage: 'deploy')
end
- it 'resumes pipeline processing in a subsequent stage' do
+ let!(:subsequent_bridge) do
+ create(:ci_bridge, :skipped, stage_idx: 2,
+ pipeline: pipeline,
+ stage: 'deploy')
+ end
+
+ it 'resumes pipeline processing in the subsequent stage' do
service.execute(build)
expect(subsequent_build.reload).to be_created
+ expect(subsequent_bridge.reload).to be_created
end
end
@@ -223,6 +232,19 @@ RSpec.describe Ci::RetryBuildService do
end
end
end
+
+ context 'when the pipeline is a child pipeline and the bridge is depended' do
+ let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:bridge) { create(:ci_bridge, :strategy_depend, pipeline: parent_pipeline, status: 'success') }
+ let!(:source_pipeline) { create(:ci_sources_pipeline, pipeline: pipeline, source_job: bridge) }
+
+ it 'marks source bridge as pending' do
+ service.execute(build)
+
+ expect(bridge.reload).to be_pending
+ end
+ end
end
context 'when user does not have ability to execute build' do
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 212c8f99865..526c2f39b46 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -280,6 +280,20 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
expect(build3.reload.scheduling_type).to eq('dag')
end
end
+
+ context 'when the pipeline is a downstream pipeline and the bridge is depended' do
+ let!(:bridge) { create(:ci_bridge, :strategy_depend, status: 'success') }
+
+ before do
+ create(:ci_sources_pipeline, pipeline: pipeline, source_job: bridge)
+ end
+
+ it 'marks source bridge as pending' do
+ service.execute(pipeline)
+
+ expect(bridge.reload).to be_pending
+ end
+ end
end
context 'when user is not allowed to retry pipeline' do
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
new file mode 100644
index 00000000000..f5ad732bf7e
--- /dev/null
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -0,0 +1,238 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UpdateBuildStateService do
+ let(:project) { create(:project) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:build) { create(:ci_build, :running, pipeline: pipeline) }
+ let(:metrics) { spy('metrics') }
+
+ subject { described_class.new(build, params) }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ end
+
+ context 'when build does not have checksum' do
+ context 'when state has changed' do
+ let(:params) { { state: 'success' } }
+
+ it 'updates a state of a running build' do
+ subject.execute
+
+ expect(build).to be_success
+ end
+
+ it 'returns 200 OK status' do
+ result = subject.execute
+
+ expect(result.status).to eq 200
+ end
+
+ it 'does not increment finalized trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_trace_operation)
+ .with(operation: :finalized)
+ end
+ end
+
+ context 'when it is a heartbeat request' do
+ let(:params) { { state: 'success' } }
+
+ it 'updates a build timestamp' do
+ expect { subject.execute }.to change { build.updated_at }
+ end
+ end
+
+ context 'when request payload carries a trace' do
+ let(:params) { { state: 'success', trace: 'overwritten' } }
+
+ it 'overwrites a trace' do
+ result = subject.execute
+
+ expect(build.trace.raw).to eq 'overwritten'
+ expect(result.status).to eq 200
+ end
+
+ it 'updates overwrite operation metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :overwrite)
+ end
+ end
+
+ context 'when state is unknown' do
+ let(:params) { { state: 'unknown' } }
+
+ it 'responds with 400 bad request' do
+ result = subject.execute
+
+ expect(result.status).to eq 400
+ expect(build).to be_running
+ end
+ end
+ end
+
+ context 'when build has a checksum' do
+ let(:params) do
+ { checksum: 'crc32:12345678', state: 'failed', failure_reason: 'script_failure' }
+ end
+
+ context 'when build trace has been migrated' do
+ before do
+ create(:ci_build_trace_chunk, :database_with_data, build: build)
+ end
+
+ it 'updates a build state' do
+ subject.execute
+
+ expect(build).to be_failed
+ end
+
+ it 'responds with 200 OK status' do
+ result = subject.execute
+
+ expect(result.status).to eq 200
+ end
+
+ it 'increments trace finalized operation metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :finalized)
+ end
+ end
+
+ context 'when build trace has not been migrated yet' do
+ before do
+ create(:ci_build_trace_chunk, :redis_with_data, build: build)
+ end
+
+ it 'does not update a build state' do
+ subject.execute
+
+ expect(build).to be_running
+ end
+
+ it 'responds with 202 accepted' do
+ result = subject.execute
+
+ expect(result.status).to eq 202
+ end
+
+ it 'schedules live chunks for migration' do
+ expect(Ci::BuildTraceChunkFlushWorker)
+ .to receive(:perform_async)
+ .with(build.trace_chunks.first.id)
+
+ subject.execute
+ end
+
+ it 'increments trace accepted operation metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :accepted)
+ end
+
+ it 'creates a pending state record' do
+ subject.execute
+
+ build.pending_state.then do |status|
+ expect(status).to be_present
+ expect(status.state).to eq 'failed'
+ expect(status.trace_checksum).to eq 'crc32:12345678'
+ expect(status.failure_reason).to eq 'script_failure'
+ end
+ end
+
+ context 'when build pending state is outdated' do
+ before do
+ build.create_pending_state(
+ state: 'failed',
+ trace_checksum: 'crc32:12345678',
+ failure_reason: 'script_failure',
+ created_at: 10.minutes.ago
+ )
+ end
+
+ it 'responds with 200 OK' do
+ result = subject.execute
+
+ expect(result.status).to eq 200
+ end
+
+ it 'updates build state' do
+ subject.execute
+
+ expect(build.reload).to be_failed
+ expect(build.failure_reason).to eq 'script_failure'
+ end
+
+ it 'increments discarded traces metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :discarded)
+ end
+
+ it 'does not increment finalized trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_trace_operation)
+ .with(operation: :finalized)
+ end
+ end
+
+ context 'when build pending state has changes' do
+ before do
+ build.create_pending_state(
+ state: 'success',
+ created_at: 10.minutes.ago
+ )
+ end
+
+ it 'uses stored state and responds with 200 OK' do
+ result = subject.execute
+
+ expect(result.status).to eq 200
+ end
+
+ it 'increments conflict trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :conflict)
+ end
+ end
+
+ context 'when live traces are disabled' do
+ before do
+ stub_feature_flags(ci_enable_live_trace: false)
+ end
+
+ it 'responds with 200 OK' do
+ result = subject.execute
+
+ expect(result.status).to eq 200
+ end
+ end
+ end
+ end
+
+ def execute_with_stubbed_metrics!
+ described_class
+ .new(build, params, metrics)
+ .execute
+ end
+end
diff --git a/spec/services/ci/update_runner_service_spec.rb b/spec/services/ci/update_runner_service_spec.rb
index cad9e893335..1c875b2f54a 100644
--- a/spec/services/ci/update_runner_service_spec.rb
+++ b/spec/services/ci/update_runner_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Ci::UpdateRunnerService do
end
def update
- described_class.new(runner).update(params)
+ described_class.new(runner).update(params) # rubocop: disable Rails/SaveBang
end
end
end
diff --git a/spec/services/ci/web_ide_config_service_spec.rb b/spec/services/ci/web_ide_config_service_spec.rb
deleted file mode 100644
index 437b468cec8..00000000000
--- a/spec/services/ci/web_ide_config_service_spec.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::WebIdeConfigService do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let(:sha) { 'sha' }
-
- describe '#execute' do
- subject { described_class.new(project, user, sha: sha).execute }
-
- context 'when insufficient permission' do
- it 'returns an error' do
- is_expected.to include(
- status: :error,
- message: 'Insufficient permissions to read configuration')
- end
- end
-
- context 'for developer' do
- before do
- project.add_developer(user)
- end
-
- context 'when file is missing' do
- it 'returns an error' do
- is_expected.to include(
- status: :error,
- message: "Failed to load Web IDE config file '.gitlab/.gitlab-webide.yml' for sha")
- end
- end
-
- context 'when file is present' do
- before do
- allow(project.repository).to receive(:blob_data_at).with('sha', anything) do
- config_content
- end
- end
-
- context 'content is not valid' do
- let(:config_content) { 'invalid content' }
-
- it 'returns an error' do
- is_expected.to include(
- status: :error,
- message: "Invalid configuration format")
- end
- end
-
- context 'content is valid, but terminal not defined' do
- let(:config_content) { '{}' }
-
- it 'returns success' do
- is_expected.to include(
- status: :success,
- terminal: nil)
- end
- end
-
- context 'content is valid, with enabled terminal' do
- let(:config_content) { 'terminal: {}' }
-
- it 'returns success' do
- is_expected.to include(
- status: :success,
- terminal: {
- tag_list: [],
- yaml_variables: [],
- options: { script: ["sleep 60"] }
- })
- end
- end
-
- context 'content is valid, with custom terminal' do
- let(:config_content) { 'terminal: { before_script: [ls] }' }
-
- it 'returns success' do
- is_expected.to include(
- status: :success,
- terminal: {
- tag_list: [],
- yaml_variables: [],
- options: { before_script: ["ls"], script: ["sleep 60"] }
- })
- end
- end
- end
- end
- end
-end