gitlab.com/gitlab-org/gitlab-foss.git

Diffstat (limited to 'spec/models/ci')
-rw-r--r--  spec/models/ci/bridge_spec.rb                            |  60
-rw-r--r--  spec/models/ci/build_spec.rb                             |  19
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb                 |  66
-rw-r--r--  spec/models/ci/build_trace_chunks/legacy_fog_spec.rb     | 164
-rw-r--r--  spec/models/ci/daily_build_group_report_result_spec.rb   |  77
-rw-r--r--  spec/models/ci/pipeline_spec.rb                          |  28
-rw-r--r--  spec/models/ci/test_case_failure_spec.rb                 |  73
-rw-r--r--  spec/models/ci/test_case_spec.rb                         |  31
8 files changed, 315 insertions(+), 203 deletions(-)
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index c464e176c17..51e82061d97 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -55,6 +55,17 @@ RSpec.describe Ci::Bridge do
expect(bridge.scoped_variables_hash.keys).to include(*variables)
end
+
+ context 'when bridge has dependency which has dotenv variable' do
+ let(:test) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
+ let(:bridge) { create(:ci_bridge, pipeline: pipeline, stage_idx: 1, options: { dependencies: [test.name] }) }
+
+ let!(:job_variable) { create(:ci_job_variable, :dotenv_source, job: test) }
+
+ it 'includes inherited variable' do
+ expect(bridge.scoped_variables_hash).to include(job_variable.key => job_variable.value)
+ end
+ end
end
describe 'state machine transitions' do
@@ -357,4 +368,53 @@ RSpec.describe Ci::Bridge do
it { is_expected.to be_falsey }
end
end
+
+ describe '#dependency_variables' do
+ subject { bridge.dependency_variables }
+
+ shared_context 'when ci_bridge_dependency_variables is disabled' do
+ before do
+ stub_feature_flags(ci_bridge_dependency_variables: false)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when downloading from previous stages' do
+ let!(:prepare1) { create(:ci_build, name: 'prepare1', pipeline: pipeline, stage_idx: 0) }
+ let!(:bridge) { create(:ci_bridge, pipeline: pipeline, stage_idx: 1) }
+
+ let!(:job_variable_1) { create(:ci_job_variable, :dotenv_source, job: prepare1) }
+ let!(:job_variable_2) { create(:ci_job_variable, job: prepare1) }
+
+ it 'inherits only dependent variables' do
+ expect(subject.to_hash).to eq(job_variable_1.key => job_variable_1.value)
+ end
+
+ it_behaves_like 'when ci_bridge_dependency_variables is disabled'
+ end
+
+ context 'when using needs' do
+ let!(:prepare1) { create(:ci_build, name: 'prepare1', pipeline: pipeline, stage_idx: 0) }
+ let!(:prepare2) { create(:ci_build, name: 'prepare2', pipeline: pipeline, stage_idx: 0) }
+ let!(:prepare3) { create(:ci_build, name: 'prepare3', pipeline: pipeline, stage_idx: 0) }
+ let!(:bridge) do
+ create(:ci_bridge, pipeline: pipeline,
+ stage_idx: 1,
+ scheduling_type: 'dag',
+ needs_attributes: [{ name: 'prepare1', artifacts: true },
+ { name: 'prepare2', artifacts: false }])
+ end
+
+ let!(:job_variable_1) { create(:ci_job_variable, :dotenv_source, job: prepare1) }
+ let!(:job_variable_2) { create(:ci_job_variable, :dotenv_source, job: prepare2) }
+ let!(:job_variable_3) { create(:ci_job_variable, :dotenv_source, job: prepare3) }
+
+ it 'inherits only needs with artifacts variables' do
+ expect(subject.to_hash).to eq(job_variable_1.key => job_variable_1.value)
+ end
+
+ it_behaves_like 'when ci_bridge_dependency_variables is disabled'
+ end
+ end
end
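
The new `#dependency_variables` examples pin down two behaviours: a bridge inherits only dotenv-sourced variables, and only from the jobs it actually depends on (previous-stage jobs, or `needs` entries with `artifacts: true`), with the `ci_bridge_dependency_variables` feature flag switching the whole thing off. A minimal plain-Ruby sketch of that filtering, with Structs standing in for the Ci:: models (all names below are illustrative, not taken from the implementation):

```ruby
# Illustration only: plain-Ruby stand-ins for Ci::Build / Ci::JobVariable.
Job      = Struct.new(:name, :variables, keyword_init: true)
Variable = Struct.new(:key, :value, :source, keyword_init: true)

# Inherit only dotenv variables, and only from the jobs the bridge depends on
# (previous-stage jobs, or `needs` entries with artifacts) -- the behaviour the
# new examples assert. The feature flag short-circuits everything.
def dependency_variables(dependencies, flag_enabled: true)
  return {} unless flag_enabled

  dependencies
    .flat_map(&:variables)
    .select { |variable| variable.source == :dotenv }
    .to_h { |variable| [variable.key, variable.value] }
end

prepare1 = Job.new(name: 'prepare1', variables: [
  Variable.new(key: 'BUILD_ID', value: '42', source: :dotenv),
  Variable.new(key: 'INTERNAL', value: 'x', source: :internal)
])

dependency_variables([prepare1])                      # => {"BUILD_ID"=>"42"}
dependency_variables([prepare1], flag_enabled: false) # => {}
```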
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index f1d51324bbf..5ff9b4dd493 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -2464,7 +2464,7 @@ RSpec.describe Ci::Build do
end
before do
- allow(Gitlab::Ci::Jwt).to receive(:for_build).with(build).and_return('ci.job.jwt')
+ allow(Gitlab::Ci::Jwt).to receive(:for_build).and_return('ci.job.jwt')
build.set_token('my-token')
build.yaml_variables = []
end
@@ -2482,12 +2482,17 @@ RSpec.describe Ci::Build do
end
context 'when CI_JOB_JWT generation fails' do
- it 'CI_JOB_JWT is not included' do
- expect(Gitlab::Ci::Jwt).to receive(:for_build).and_raise(OpenSSL::PKey::RSAError, 'Neither PUB key nor PRIV key: not enough data')
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
-
- expect { subject }.not_to raise_error
- expect(subject.pluck(:key)).not_to include('CI_JOB_JWT')
+ [
+ OpenSSL::PKey::RSAError,
+ Gitlab::Ci::Jwt::NoSigningKeyError
+ ].each do |reason_to_fail|
+ it 'CI_JOB_JWT is not included' do
+ expect(Gitlab::Ci::Jwt).to receive(:for_build).and_raise(reason_to_fail)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ expect { subject }.not_to raise_error
+ expect(subject.pluck(:key)).not_to include('CI_JOB_JWT')
+ end
end
end
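
The CI_JOB_JWT example is now parameterised over both failure modes the spec cares about: an OpenSSL RSA key error and `Gitlab::Ci::Jwt::NoSigningKeyError`. What it exercises is, roughly, a variable builder that rescues the JWT failure, reports it, and leaves CI_JOB_JWT out instead of raising. A self-contained sketch of that pattern with throwaway stubs for the GitLab classes (the `jwt_variables` helper name is an assumption, not the real method):

```ruby
require 'openssl'

# Throwaway stubs so the sketch runs on its own (illustration only).
module Gitlab
  module Ci
    module Jwt
      NoSigningKeyError = Class.new(StandardError)

      def self.for_build(_build)
        raise NoSigningKeyError, 'no signing key' # simulate one of the failure modes
      end
    end
  end

  module ErrorTracking
    def self.track_exception(error)
      warn "tracked: #{error.class}"
    end
  end
end

# The behaviour under test: any JWT generation failure is tracked and
# CI_JOB_JWT is simply omitted, rather than bubbling up to the caller.
def jwt_variables(build)
  [{ key: 'CI_JOB_JWT', value: Gitlab::Ci::Jwt.for_build(build), public: false, masked: true }]
rescue OpenSSL::PKey::RSAError, Gitlab::Ci::Jwt::NoSigningKeyError => e
  Gitlab::ErrorTracking.track_exception(e)
  []
end

jwt_variables(:some_build) # => [] (and the exception is reported)
```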
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 871f279db08..dce7b1d30ca 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -100,15 +100,15 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
subject { described_class.all_stores }
it 'returns a correctly ordered array' do
- is_expected.to eq(%w[redis database fog])
+ is_expected.to eq(%i[redis database fog])
end
it 'returns redis store as the lowest precedence' do
- expect(subject.first).to eq('redis')
+ expect(subject.first).to eq(:redis)
end
it 'returns fog store as the highest precedence' do
- expect(subject.last).to eq('fog')
+ expect(subject.last).to eq(:fog)
end
end
@@ -135,32 +135,40 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is fog' do
let(:data_store) { :fog }
- context 'when legacy Fog is enabled' do
- before do
- stub_feature_flags(ci_trace_new_fog_store: false)
- build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
- end
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
+ end
- it { is_expected.to eq('Sample data in fog') }
+ it { is_expected.to eq('Sample data in fog') }
- it 'returns a LegacyFog store' do
- expect(described_class.get_store_class(data_store)).to be_a(Ci::BuildTraceChunks::LegacyFog)
- end
+ it 'returns a new Fog store' do
+ expect(described_class.get_store_class(data_store)).to be_a(Ci::BuildTraceChunks::Fog)
end
+ end
+ end
- context 'when new Fog is enabled' do
- before do
- stub_feature_flags(ci_trace_new_fog_store: true)
- build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
- end
+ describe '#get_store_class' do
+ using RSpec::Parameterized::TableSyntax
- it { is_expected.to eq('Sample data in fog') }
+ where(:data_store, :expected_store) do
+ :redis | Ci::BuildTraceChunks::Redis
+ :database | Ci::BuildTraceChunks::Database
+ :fog | Ci::BuildTraceChunks::Fog
+ end
- it 'returns a new Fog store' do
- expect(described_class.get_store_class(data_store)).to be_a(Ci::BuildTraceChunks::Fog)
+ with_them do
+ context "with store" do
+ it 'returns an instance of the right class' do
+ expect(expected_store).to receive(:new).twice.and_call_original
+ expect(described_class.get_store_class(data_store.to_s)).to be_a(expected_store)
+ expect(described_class.get_store_class(data_store.to_sym)).to be_a(expected_store)
end
end
end
+
+ it 'raises an error' do
+ expect { described_class.get_store_class('unknown') }.to raise_error('Unknown store type: unknown')
+ end
end
describe '#append' do
@@ -614,23 +622,19 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when the chunk is being locked by a different worker' do
let(:metrics) { spy('metrics') }
- it 'does not raise an exception' do
- lock_chunk do
- expect { build_trace_chunk.persist_data! }.not_to raise_error
- end
- end
-
it 'increments stalled chunk trace metric' do
allow(build_trace_chunk)
.to receive(:metrics)
.and_return(metrics)
- lock_chunk { build_trace_chunk.persist_data! }
+ expect do
+ subject
- expect(metrics)
- .to have_received(:increment_trace_operation)
- .with(operation: :stalled)
- .once
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :stalled)
+ .once
+ end.to raise_error(described_class::FailedToPersistDataError)
end
def lock_chunk(&block)
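
The rewritten storage examples drop the `ci_trace_new_fog_store` legacy-Fog branch and instead assert that `.get_store_class` accepts a string or a symbol, builds a fresh store instance, and raises for unknown types. A self-contained sketch consistent with those assertions (stub classes stand in for `Ci::BuildTraceChunks::*`; the lookup-table shape is an assumption):

```ruby
# Stand-ins for the real store classes (illustration only).
module BuildTraceChunks
  Redis    = Class.new
  Database = Class.new
  Fog      = Class.new
end

STORE_TYPES = {
  redis: BuildTraceChunks::Redis,
  database: BuildTraceChunks::Database,
  fog: BuildTraceChunks::Fog
}.freeze

# Accepts a string or a symbol, returns a new store instance, and raises for
# anything it does not recognise -- exactly what the table-syntax examples check.
def get_store_class(store)
  store = store.to_sym
  raise "Unknown store type: #{store}" unless STORE_TYPES.key?(store)

  STORE_TYPES[store].new
end

get_store_class('redis').class       # => BuildTraceChunks::Redis
get_store_class(:fog).class          # => BuildTraceChunks::Fog
get_store_class('unknown') rescue $! # => #<RuntimeError: Unknown store type: unknown>
```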
diff --git a/spec/models/ci/build_trace_chunks/legacy_fog_spec.rb b/spec/models/ci/build_trace_chunks/legacy_fog_spec.rb
deleted file mode 100644
index ca4b414b992..00000000000
--- a/spec/models/ci/build_trace_chunks/legacy_fog_spec.rb
+++ /dev/null
@@ -1,164 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::BuildTraceChunks::LegacyFog do
- let(:data_store) { described_class.new }
-
- before do
- stub_artifacts_object_storage
- end
-
- describe '#available?' do
- subject { data_store.available? }
-
- context 'when object storage is enabled' do
- it { is_expected.to be_truthy }
- end
-
- context 'when object storage is disabled' do
- before do
- stub_artifacts_object_storage(enabled: false)
- end
-
- it { is_expected.to be_falsy }
- end
- end
-
- describe '#data' do
- subject { data_store.data(model) }
-
- context 'when data exists' do
- let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
-
- it 'returns the data' do
- is_expected.to eq('sample data in fog')
- end
- end
-
- context 'when data does not exist' do
- let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
-
- it 'returns nil' do
- expect(data_store.data(model)).to be_nil
- end
- end
- end
-
- describe '#set_data' do
- let(:new_data) { 'abc123' }
-
- context 'when data exists' do
- let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
-
- it 'overwrites data' do
- expect(data_store.data(model)).to eq('sample data in fog')
-
- data_store.set_data(model, new_data)
-
- expect(data_store.data(model)).to eq new_data
- end
- end
-
- context 'when data does not exist' do
- let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
-
- it 'sets new data' do
- expect(data_store.data(model)).to be_nil
-
- data_store.set_data(model, new_data)
-
- expect(data_store.data(model)).to eq new_data
- end
- end
- end
-
- describe '#delete_data' do
- subject { data_store.delete_data(model) }
-
- context 'when data exists' do
- let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
-
- it 'deletes data' do
- expect(data_store.data(model)).to eq('sample data in fog')
-
- subject
-
- expect(data_store.data(model)).to be_nil
- end
- end
-
- context 'when data does not exist' do
- let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
-
- it 'does nothing' do
- expect(data_store.data(model)).to be_nil
-
- subject
-
- expect(data_store.data(model)).to be_nil
- end
- end
- end
-
- describe '#size' do
- context 'when data exists' do
- let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'üabcd') }
-
- it 'returns data bytesize correctly' do
- expect(data_store.size(model)).to eq 6
- end
- end
-
- context 'when data does not exist' do
- let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
-
- it 'returns zero' do
- expect(data_store.size(model)).to be_zero
- end
- end
- end
-
- describe '#keys' do
- subject { data_store.keys(relation) }
-
- let(:build) { create(:ci_build) }
- let(:relation) { build.trace_chunks }
-
- before do
- create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
- create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
- end
-
- it 'returns keys' do
- is_expected.to eq([[build.id, 0], [build.id, 1]])
- end
- end
-
- describe '#delete_keys' do
- subject { data_store.delete_keys(keys) }
-
- let(:build) { create(:ci_build) }
- let(:relation) { build.trace_chunks }
- let(:keys) { data_store.keys(relation) }
-
- before do
- create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
- create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
- end
-
- it 'deletes multiple data' do
- ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
- expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body]).to be_present
- expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body]).to be_present
- end
-
- subject
-
- ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
- expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body] }.to raise_error(Excon::Error::NotFound)
- expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body] }.to raise_error(Excon::Error::NotFound)
- end
- end
- end
-end
diff --git a/spec/models/ci/daily_build_group_report_result_spec.rb b/spec/models/ci/daily_build_group_report_result_spec.rb
index 326366666cb..f16396d62c9 100644
--- a/spec/models/ci/daily_build_group_report_result_spec.rb
+++ b/spec/models/ci/daily_build_group_report_result_spec.rb
@@ -81,4 +81,81 @@ RSpec.describe Ci::DailyBuildGroupReportResult do
end
end
end
+
+ describe 'scopes' do
+ let_it_be(:project) { create(:project) }
+ let(:recent_build_group_report_result) { create(:ci_daily_build_group_report_result, project: project) }
+ let(:old_build_group_report_result) do
+ create(:ci_daily_build_group_report_result, date: 1.week.ago, project: project)
+ end
+
+ describe '.by_projects' do
+ subject { described_class.by_projects([project.id]) }
+
+ it 'returns records by projects' do
+ expect(subject).to contain_exactly(recent_build_group_report_result, old_build_group_report_result)
+ end
+ end
+
+ describe '.with_coverage' do
+ subject { described_class.with_coverage }
+
+ it 'returns data with coverage' do
+ expect(subject).to contain_exactly(recent_build_group_report_result, old_build_group_report_result)
+ end
+ end
+
+ describe '.with_default_branch' do
+ subject(:coverages) { described_class.with_default_branch }
+
+ context 'when coverage for the default branch exist' do
+ let!(:recent_build_group_report_result) { create(:ci_daily_build_group_report_result, project: project) }
+ let!(:coverage_feature_branch) { create(:ci_daily_build_group_report_result, :on_feature_branch, project: project) }
+
+ it 'returns coverage with the default branch' do
+ expect(coverages).to contain_exactly(recent_build_group_report_result)
+ end
+ end
+
+ context 'when coverage for the default branch does not exist' do
+ it 'returns an empty collection' do
+ expect(coverages).to be_empty
+ end
+ end
+ end
+
+ describe '.by_date' do
+ subject(:coverages) { described_class.by_date(start_date) }
+
+ let!(:coverage_1) { create(:ci_daily_build_group_report_result, date: 1.week.ago) }
+
+ context 'when project has several coverage' do
+ let!(:coverage_2) { create(:ci_daily_build_group_report_result, date: 2.weeks.ago) }
+ let(:start_date) { 1.week.ago.to_date.to_s }
+
+ it 'returns the coverage from the start_date' do
+ expect(coverages).to contain_exactly(coverage_1)
+ end
+ end
+
+ context 'when start_date is over 90 days' do
+ let!(:coverage_2) { create(:ci_daily_build_group_report_result, date: 90.days.ago) }
+ let!(:coverage_3) { create(:ci_daily_build_group_report_result, date: 91.days.ago) }
+ let(:start_date) { 1.year.ago.to_date.to_s }
+
+ it 'returns the coverage in the last 90 days' do
+ expect(coverages).to contain_exactly(coverage_1, coverage_2)
+ end
+ end
+
+ context 'when start_date is not a string' do
+ let!(:coverage_2) { create(:ci_daily_build_group_report_result, date: 90.days.ago) }
+ let(:start_date) { 1.week.ago }
+
+ it 'returns the coverage in the last 90 days' do
+ expect(coverages).to contain_exactly(coverage_1, coverage_2)
+ end
+ end
+ end
+ end
end
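
The `.by_date` examples imply a window rule rather than a raw filter: whatever the caller passes (even something that is not a string), the scope never reaches back more than 90 days. A standalone sketch of that clamping logic on plain Ruby dates (the 90-day constant and the helper name are assumptions drawn from the "over 90 days" examples, not from the model):

```ruby
require 'date'

REPORT_WINDOW_DAYS = 90 # assumed from the "start_date is over 90 days" examples

# Clamp the requested start date to the 90-day window, and fall back to the
# window for anything that cannot be parsed as a date string.
def report_start_date(start_date, today: Date.today)
  earliest  = today - REPORT_WINDOW_DAYS
  requested = start_date.is_a?(String) ? Date.parse(start_date) : earliest
  [requested, earliest].max
rescue ArgumentError
  earliest
end

report_start_date('2020-10-01', today: Date.new(2020, 10, 8))          # => 2020-10-01
report_start_date('2019-01-01', today: Date.new(2020, 10, 8))          # => 2020-07-10 (clamped)
report_start_date(Date.new(2020, 10, 1), today: Date.new(2020, 10, 8)) # => 2020-07-10 (non-string falls back to the window)
```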
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 88d08f1ec45..1ca370dc950 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -625,7 +625,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe "coverage" do
+ describe '#coverage' do
let(:project) { create(:project, build_coverage_regex: "/.*/") }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
@@ -1972,6 +1972,32 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '.latest_running_for_ref' do
+ include_context 'with some outdated pipelines'
+
+ let!(:latest_running_pipeline) do
+ create_pipeline(:running, 'ref', 'D', project)
+ end
+
+ it 'returns the latest running pipeline' do
+ expect(described_class.latest_running_for_ref('ref'))
+ .to eq(latest_running_pipeline)
+ end
+ end
+
+ describe '.latest_failed_for_ref' do
+ include_context 'with some outdated pipelines'
+
+ let!(:latest_failed_pipeline) do
+ create_pipeline(:failed, 'ref', 'D', project)
+ end
+
+ it 'returns the latest failed pipeline' do
+ expect(described_class.latest_failed_for_ref('ref'))
+ .to eq(latest_failed_pipeline)
+ end
+ end
+
describe '.latest_successful_for_sha' do
include_context 'with some outdated pipelines'
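
The two new class-method specs follow the existing `.latest_successful_for_*` pattern: newest pipeline on a ref in a given status. A plain-Ruby equivalent of what they describe (in-memory filtering instead of the real scopes):

```ruby
Pipeline = Struct.new(:id, :ref, :status, keyword_init: true)

# Newest pipeline on the ref in the given status -- the shape the new
# `.latest_running_for_ref` / `.latest_failed_for_ref` examples describe.
def latest_for_ref(pipelines, ref, status)
  pipelines.select { |p| p.ref == ref && p.status == status }.max_by(&:id)
end

pipelines = [
  Pipeline.new(id: 1, ref: 'ref', status: :running),
  Pipeline.new(id: 2, ref: 'ref', status: :failed),
  Pipeline.new(id: 3, ref: 'ref', status: :running)
]

latest_for_ref(pipelines, 'ref', :running).id # => 3
latest_for_ref(pipelines, 'ref', :failed).id  # => 2
```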
diff --git a/spec/models/ci/test_case_failure_spec.rb b/spec/models/ci/test_case_failure_spec.rb
new file mode 100644
index 00000000000..34f89b663ed
--- /dev/null
+++ b/spec/models/ci/test_case_failure_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TestCaseFailure do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:build) }
+ it { is_expected.to belong_to(:test_case) }
+ end
+
+ describe 'validations' do
+ subject { build(:ci_test_case_failure) }
+
+ it { is_expected.to validate_presence_of(:test_case) }
+ it { is_expected.to validate_presence_of(:build) }
+ it { is_expected.to validate_presence_of(:failed_at) }
+ end
+
+ describe '.recent_failures_count' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:recent_failures) do
+ described_class.recent_failures_count(
+ project: project,
+ test_case_keys: test_case_keys
+ )
+ end
+
+ context 'when test case failures are within the date range and are for the test case keys' do
+ let(:tc_1) { create(:ci_test_case, project: project) }
+ let(:tc_2) { create(:ci_test_case, project: project) }
+ let(:test_case_keys) { [tc_1.key_hash, tc_2.key_hash] }
+
+ before do
+ create_list(:ci_test_case_failure, 3, test_case: tc_1, failed_at: 1.day.ago)
+ create_list(:ci_test_case_failure, 2, test_case: tc_2, failed_at: 3.days.ago)
+ end
+
+ it 'returns the number of failures for each test case key hash for the past 14 days by default' do
+ expect(recent_failures).to eq(
+ tc_1.key_hash => 3,
+ tc_2.key_hash => 2
+ )
+ end
+ end
+
+ context 'when test case failures are within the date range but are not for the test case keys' do
+ let(:tc) { create(:ci_test_case, project: project) }
+ let(:test_case_keys) { ['some-other-key-hash'] }
+
+ before do
+ create(:ci_test_case_failure, test_case: tc, failed_at: 1.day.ago)
+ end
+
+ it 'excludes them from the count' do
+ expect(recent_failures[tc.key_hash]).to be_nil
+ end
+ end
+
+ context 'when test case failures are not within the date range but are for the test case keys' do
+ let(:tc) { create(:ci_test_case, project: project) }
+ let(:test_case_keys) { [tc.key_hash] }
+
+ before do
+ create(:ci_test_case_failure, test_case: tc, failed_at: 15.days.ago)
+ end
+
+ it 'excludes them from the count' do
+ expect(recent_failures[tc.key_hash]).to be_nil
+ end
+ end
+ end
+end
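
`.recent_failures_count` is asserted to return a per-key-hash count, limited to the given project, the given key hashes, and (per the example description) the last 14 days by default. A standalone sketch of that aggregation on plain Ruby objects, rather than the real grouped SQL query (names and the window constant are assumptions):

```ruby
require 'date'

Failure = Struct.new(:project, :key_hash, :failed_at, keyword_init: true)

# Count failures per test-case key hash, scoped to a project and a list of key
# hashes, over a 14-day window by default -- the contract the examples assert.
def recent_failures_count(failures, project:, test_case_keys:, window_days: 14, today: Date.today)
  cutoff = today - window_days

  failures
    .select { |f| f.project == project && test_case_keys.include?(f.key_hash) && f.failed_at >= cutoff }
    .group_by(&:key_hash)
    .transform_values(&:count)
end

today = Date.new(2020, 10, 8)
failures = [
  Failure.new(project: :p1, key_hash: 'tc1', failed_at: today - 1),
  Failure.new(project: :p1, key_hash: 'tc1', failed_at: today - 1),
  Failure.new(project: :p1, key_hash: 'tc2', failed_at: today - 15) # outside the window
]

recent_failures_count(failures, project: :p1, test_case_keys: %w[tc1 tc2], today: today)
# => {"tc1"=>2}
```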
diff --git a/spec/models/ci/test_case_spec.rb b/spec/models/ci/test_case_spec.rb
new file mode 100644
index 00000000000..45311e285a6
--- /dev/null
+++ b/spec/models/ci/test_case_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TestCase do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:test_case_failures) }
+ end
+
+ describe 'validations' do
+ subject { build(:ci_test_case) }
+
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:key_hash) }
+ end
+
+ describe '.find_or_create_by_batch' do
+ it 'finds or creates records for the given test case keys', :aggregate_failures do
+ project = create(:project)
+ existing_tc = create(:ci_test_case, project: project)
+ new_key = Digest::SHA256.hexdigest(SecureRandom.hex)
+ keys = [existing_tc.key_hash, new_key]
+
+ result = described_class.find_or_create_by_batch(project, keys)
+
+ expect(result.map(&:key_hash)).to match_array([existing_tc.key_hash, new_key])
+ expect(result).to all(be_persisted)
+ end
+ end
+end
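
`.find_or_create_by_batch` is expected to hand back one persisted record per key hash, reusing rows that already exist. A plain-Ruby sketch of that find-or-create-in-one-pass shape, with a hash standing in for the table (the real method presumably does this with a bulk insert; that detail is not shown here):

```ruby
require 'digest'
require 'securerandom'

TestCase = Struct.new(:project, :key_hash, keyword_init: true)

# Return one record per key hash, creating only the ones that are missing --
# the contract the spec checks with match_array and be_persisted.
def find_or_create_by_batch(store, project, key_hashes)
  key_hashes.map do |key_hash|
    store[[project, key_hash]] ||= TestCase.new(project: project, key_hash: key_hash)
  end
end

store    = {}
existing = TestCase.new(project: :p1, key_hash: Digest::SHA256.hexdigest('known'))
store[[:p1, existing.key_hash]] = existing

new_key = Digest::SHA256.hexdigest(SecureRandom.hex)
result  = find_or_create_by_batch(store, :p1, [existing.key_hash, new_key])

result.map(&:key_hash)        # => [existing.key_hash, new_key]
result.first.equal?(existing) # => true (the existing row is reused, not recreated)
```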