gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2021-10-20 11:43:02 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-10-20 11:43:02 +0300
commit     d9ab72d6080f594d0b3cae15f14b3ef2c6c638cb (patch)
tree       2341ef426af70ad1e289c38036737e04b0aa5007 /spec/services
parent     d6e514dd13db8947884cd58fe2a9c2a063400a9b (diff)

Add latest changes from gitlab-org/gitlab@14-4-stable-ee (tag: v14.4.0-rc42)
Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/application_settings/update_service_spec.rb | 26
-rw-r--r--  spec/services/boards/issues/list_service_spec.rb | 21
-rw-r--r--  spec/services/bulk_imports/create_service_spec.rb (renamed from spec/services/bulk_import_service_spec.rb) | 19
-rw-r--r--  spec/services/bulk_imports/file_export_service_spec.rb | 37
-rw-r--r--  spec/services/bulk_imports/get_importable_data_service_spec.rb | 46
-rw-r--r--  spec/services/bulk_imports/relation_export_service_spec.rb | 18
-rw-r--r--  spec/services/bulk_imports/tree_export_service_spec.rb | 35
-rw-r--r--  spec/services/ci/archive_trace_service_spec.rb | 26
-rw-r--r--  spec/services/ci/create_pipeline_service/include_spec.rb | 22
-rw-r--r--  spec/services/ci/drop_pipeline_service_spec.rb | 3
-rw-r--r--  spec/services/ci/pipelines/add_job_service_spec.rb | 13
-rw-r--r--  spec/services/ci/pipelines/hook_service_spec.rb | 47
-rw-r--r--  spec/services/ci/play_bridge_service_spec.rb | 12
-rw-r--r--  spec/services/ci/process_pipeline_service_spec.rb | 68
-rw-r--r--  spec/services/ci/register_job_service_spec.rb | 50
-rw-r--r--  spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb | 86
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 13
-rw-r--r--  spec/services/ci/retry_pipeline_service_spec.rb | 39
-rw-r--r--  spec/services/ci/stuck_builds/drop_pending_service_spec.rb (renamed from spec/services/ci/stuck_builds/drop_service_spec.rb) | 97
-rw-r--r--  spec/services/ci/stuck_builds/drop_running_service_spec.rb | 72
-rw-r--r--  spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb | 53
-rw-r--r--  spec/services/ci/update_build_state_service_spec.rb | 52
-rw-r--r--  spec/services/ci/update_pending_build_service_spec.rb | 44
-rw-r--r--  spec/services/clusters/agent_tokens/create_service_spec.rb | 64
-rw-r--r--  spec/services/clusters/agents/create_service_spec.rb | 52
-rw-r--r--  spec/services/clusters/agents/delete_service_spec.rb | 35
-rw-r--r--  spec/services/concerns/rate_limited_service_spec.rb | 196
-rw-r--r--  spec/services/container_expiration_policies/cleanup_service_spec.rb | 6
-rw-r--r--  spec/services/customer_relations/contacts/create_service_spec.rb | 61
-rw-r--r--  spec/services/customer_relations/contacts/update_service_spec.rb | 56
-rw-r--r--  spec/services/customer_relations/organizations/create_service_spec.rb | 10
-rw-r--r--  spec/services/customer_relations/organizations/update_service_spec.rb | 4
-rw-r--r--  spec/services/dependency_proxy/auth_token_service_spec.rb | 75
-rw-r--r--  spec/services/dependency_proxy/find_or_create_blob_service_spec.rb | 28
-rw-r--r--  spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb | 32
-rw-r--r--  spec/services/dependency_proxy/group_settings/update_service_spec.rb | 60
-rw-r--r--  spec/services/deployments/older_deployments_drop_service_spec.rb | 2
-rw-r--r--  spec/services/deployments/update_service_spec.rb | 8
-rw-r--r--  spec/services/error_tracking/list_issues_service_spec.rb | 90
-rw-r--r--  spec/services/feature_flags/hook_service_spec.rb | 31
-rw-r--r--  spec/services/groups/transfer_service_spec.rb | 93
-rw-r--r--  spec/services/groups/update_service_spec.rb | 2
-rw-r--r--  spec/services/groups/update_shared_runners_service_spec.rb | 8
-rw-r--r--  spec/services/import/validate_remote_git_endpoint_service_spec.rb | 96
-rw-r--r--  spec/services/issues/close_service_spec.rb | 14
-rw-r--r--  spec/services/issues/create_service_spec.rb | 19
-rw-r--r--  spec/services/issues/relative_position_rebalancing_service_spec.rb | 15
-rw-r--r--  spec/services/issues/reopen_service_spec.rb | 20
-rw-r--r--  spec/services/members/create_service_spec.rb | 2
-rw-r--r--  spec/services/members/invite_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/assign_issues_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/build_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/mergeability/check_base_service_spec.rb | 40
-rw-r--r--  spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb | 57
-rw-r--r--  spec/services/merge_requests/mergeability/run_checks_service_spec.rb | 104
-rw-r--r--  spec/services/merge_requests/push_options_handler_service_spec.rb | 2
-rw-r--r--  spec/services/notes/quick_actions_service_spec.rb | 2
-rw-r--r--  spec/services/notification_service_spec.rb | 2
-rw-r--r--  spec/services/packages/composer/create_package_service_spec.rb | 25
-rw-r--r--  spec/services/packages/debian/process_changes_service_spec.rb | 2
-rw-r--r--  spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb | 133
-rw-r--r--  spec/services/projects/container_repository/cleanup_tags_service_spec.rb | 541
-rw-r--r--  spec/services/projects/create_service_spec.rb | 40
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 109
-rw-r--r--  spec/services/projects/group_links/update_service_spec.rb | 90
-rw-r--r--  spec/services/projects/import_service_spec.rb | 6
-rw-r--r--  spec/services/projects/move_access_service_spec.rb | 2
-rw-r--r--  spec/services/projects/operations/update_service_spec.rb | 8
-rw-r--r--  spec/services/projects/participants_service_spec.rb | 146
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 185
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 16
-rw-r--r--  spec/services/projects/update_service_spec.rb | 62
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 15
-rw-r--r--  spec/services/security/ci_configuration/sast_create_service_spec.rb | 23
-rw-r--r--  spec/services/service_ping/submit_service_ping_service_spec.rb | 24
-rw-r--r--  spec/services/user_project_access_changed_service_spec.rb | 4
-rw-r--r--  spec/services/users/activity_service_spec.rb | 4
-rw-r--r--  spec/services/users/update_service_spec.rb | 70
-rw-r--r--  spec/services/users/upsert_credit_card_validation_service_spec.rb | 36
-rw-r--r--  spec/services/web_hook_service_spec.rb | 2
80 files changed, 2880 insertions, 854 deletions
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index a1fd89bcad7..5c9d2c5e680 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -413,6 +413,32 @@ RSpec.describe ApplicationSettings::UpdateService do
end
end
+ context 'when deprecated API rate limits are passed' do
+ let(:params) do
+ {
+ throttle_unauthenticated_deprecated_api_enabled: 1,
+ throttle_unauthenticated_deprecated_api_period_in_seconds: 500,
+ throttle_unauthenticated_deprecated_api_requests_per_period: 20,
+ throttle_authenticated_deprecated_api_enabled: 1,
+ throttle_authenticated_deprecated_api_period_in_seconds: 600,
+ throttle_authenticated_deprecated_api_requests_per_period: 10
+ }
+ end
+
+ it 'updates deprecated API throttle settings' do
+ subject.execute
+
+ application_settings.reload
+
+ expect(application_settings.throttle_unauthenticated_deprecated_api_enabled).to be_truthy
+ expect(application_settings.throttle_unauthenticated_deprecated_api_period_in_seconds).to eq(500)
+ expect(application_settings.throttle_unauthenticated_deprecated_api_requests_per_period).to eq(20)
+ expect(application_settings.throttle_authenticated_deprecated_api_enabled).to be_truthy
+ expect(application_settings.throttle_authenticated_deprecated_api_period_in_seconds).to eq(600)
+ expect(application_settings.throttle_authenticated_deprecated_api_requests_per_period).to eq(10)
+ end
+ end
+
context 'when git lfs rate limits are passed' do
let(:params) do
{
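
Note: for context, a minimal usage sketch of the service exercised above, assuming the constructor takes the settings record, the acting user, and the params hash; the `settings` and `admin_user` variables are illustrative, not taken from the spec.

# Hedged usage sketch, not the application's own call site.
params = {
  throttle_unauthenticated_deprecated_api_enabled: true,
  throttle_unauthenticated_deprecated_api_period_in_seconds: 500,
  throttle_unauthenticated_deprecated_api_requests_per_period: 20,
  throttle_authenticated_deprecated_api_enabled: true,
  throttle_authenticated_deprecated_api_period_in_seconds: 600,
  throttle_authenticated_deprecated_api_requests_per_period: 10
}

ApplicationSettings::UpdateService.new(settings, admin_user, params).execute
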
diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb
index d1f854f72bc..72027911e51 100644
--- a/spec/services/boards/issues/list_service_spec.rb
+++ b/spec/services/boards/issues/list_service_spec.rb
@@ -56,12 +56,23 @@ RSpec.describe Boards::Issues::ListService do
it_behaves_like 'issues list service'
end
- context 'when filtering by type' do
- it 'only returns the specified type' do
- issue = create(:labeled_issue, project: project, milestone: m1, labels: [development, p1], issue_type: 'incident')
- params = { board_id: board.id, id: list1.id, issue_types: 'incident' }
+ context 'when filtering' do
+ let_it_be(:incident) { create(:labeled_issue, project: project, milestone: m1, labels: [development, p1], issue_type: 'incident') }
- expect(described_class.new(parent, user, params).execute).to eq [issue]
+ context 'when filtering by type' do
+ it 'only returns the specified type' do
+ params = { board_id: board.id, id: list1.id, issue_types: 'incident' }
+
+ expect(described_class.new(parent, user, params).execute).to eq [incident]
+ end
+ end
+
+ context 'when filtering by negated type' do
+ it 'only returns the specified type' do
+ params = { board_id: board.id, id: list1.id, not: { issue_types: ['issue'] } }
+
+ expect(described_class.new(parent, user, params).execute).to contain_exactly(incident)
+ end
end
end
end
diff --git a/spec/services/bulk_import_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index 1b60a5cb0f8..67ec6fee1ae 100644
--- a/spec/services/bulk_import_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImportService do
+RSpec.describe BulkImports::CreateService do
let(:user) { create(:user) }
let(:credentials) { { url: 'http://gitlab.example', access_token: 'token' } }
let(:params) do
@@ -31,8 +31,25 @@ RSpec.describe BulkImportService do
subject { described_class.new(user, params, credentials) }
describe '#execute' do
+ let_it_be(:source_version) do
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ end
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:instance_version).and_return(source_version)
+ end
+ end
+
it 'creates bulk import' do
expect { subject.execute }.to change { BulkImport.count }.by(1)
+
+ last_bulk_import = BulkImport.last
+
+ expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_version).to eq(source_version.to_s)
+ expect(last_bulk_import.user).to eq(user)
end
it 'creates bulk import entities' do
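
Note: the new setup stubs #instance_version on BulkImports::Clients::HTTP and expects the created BulkImport to record that version. A hedged sketch of how the version relates to the minimum needed for project migration; `client` stands in for the stubbed client instance and is not defined in the spec.

# Sketch only; everything outside the spec's own constants is an assumption.
min_version = Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
                                      ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)

source_version = client.instance_version # stubbed in the spec setup
source_version >= min_version            # VersionInfo instances are comparable
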
diff --git a/spec/services/bulk_imports/file_export_service_spec.rb b/spec/services/bulk_imports/file_export_service_spec.rb
new file mode 100644
index 00000000000..0d129c75384
--- /dev/null
+++ b/spec/services/bulk_imports/file_export_service_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::FileExportService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:export_path) { Dir.mktmpdir }
+ let_it_be(:relation) { 'uploads' }
+
+ subject(:service) { described_class.new(project, export_path, relation) }
+
+ describe '#execute' do
+ it 'executes export service and archives exported data' do
+ expect_next_instance_of(BulkImports::UploadsExportService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ expect(subject).to receive(:tar_cf).with(archive: File.join(export_path, 'uploads.tar'), dir: export_path)
+
+ subject.execute
+ end
+
+ context 'when unsupported relation is passed' do
+ it 'raises an error' do
+ service = described_class.new(project, export_path, 'unsupported')
+
+ expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
+ end
+ end
+ end
+
+ describe '#exported_filename' do
+ it 'returns filename of the exported file' do
+ expect(subject.exported_filename).to eq('uploads.tar')
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/get_importable_data_service_spec.rb b/spec/services/bulk_imports/get_importable_data_service_spec.rb
new file mode 100644
index 00000000000..eccd3e5f49d
--- /dev/null
+++ b/spec/services/bulk_imports/get_importable_data_service_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::GetImportableDataService do
+ describe '#execute' do
+ include_context 'bulk imports requests context', 'https://gitlab.example.com'
+
+ let_it_be(:params) { { per_page: 20, page: 1 } }
+ let_it_be(:query_params) { { top_level_only: true, min_access_level: 50, search: '' } }
+ let_it_be(:credentials) { { url: 'https://gitlab.example.com', access_token: 'demo-pat' } }
+ let_it_be(:expected_version_validation) do
+ {
+ features: {
+ project_migration: {
+ available: true,
+ min_version: BulkImport.min_gl_version_for_project_migration.to_s
+ },
+ 'source_instance_version': BulkImport.min_gl_version_for_project_migration.to_s
+ }
+ }
+ end
+
+ let_it_be(:expected_parsed_response) do
+ [
+ {
+ 'id' => 2595438,
+ 'web_url' => 'https://gitlab.com/groups/auto-breakfast',
+ 'name' => 'Stub',
+ 'path' => 'stub-group',
+ 'full_name' => 'Stub',
+ 'full_path' => 'stub-group'
+ }
+ ]
+ end
+
+ subject do
+ described_class.new(params, query_params, credentials).execute
+ end
+
+ it 'returns version_validation and a response' do
+ expect(subject[:version_validation]).to eq(expected_version_validation)
+ expect(subject[:response].parsed_response).to eq(expected_parsed_response)
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/relation_export_service_spec.rb b/spec/services/bulk_imports/relation_export_service_spec.rb
index 333cd9201d8..27a6ca60515 100644
--- a/spec/services/bulk_imports/relation_export_service_spec.rb
+++ b/spec/services/bulk_imports/relation_export_service_spec.rb
@@ -7,12 +7,14 @@ RSpec.describe BulkImports::RelationExportService do
let_it_be(:relation) { 'labels' }
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
let_it_be(:label) { create(:group_label, group: group) }
let_it_be(:export_path) { "#{Dir.tmpdir}/relation_export_service_spec/tree" }
let_it_be_with_reload(:export) { create(:bulk_import_export, group: group, relation: relation) }
before do
group.add_owner(user)
+ project.add_maintainer(user)
allow(export).to receive(:export_path).and_return(export_path)
end
@@ -25,6 +27,10 @@ RSpec.describe BulkImports::RelationExportService do
describe '#execute' do
it 'exports specified relation and marks export as finished' do
+ expect_next_instance_of(BulkImports::TreeExportService) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
subject.execute
expect(export.reload.upload.export_file).to be_present
@@ -43,6 +49,18 @@ RSpec.describe BulkImports::RelationExportService do
expect(export.upload.export_file).to be_present
end
+ context 'when exporting a file relation' do
+ it 'uses file export service' do
+ service = described_class.new(user, project, 'uploads', jid)
+
+ expect_next_instance_of(BulkImports::FileExportService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ service.execute
+ end
+ end
+
context 'when export record does not exist' do
let(:another_group) { create(:group) }
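
Note: based on the expectations above (TreeExportService for the 'labels' relation, FileExportService for 'uploads'), a hedged sketch of the dispatch the new example covers; the FILE_RELATIONS constant, the method name, and the `portable` variable are illustrative assumptions, not the service's real implementation.

FILE_RELATIONS = %w[uploads].freeze

def export_service
  # Both export services take (portable, export_path, relation), per their specs.
  if FILE_RELATIONS.include?(relation)
    BulkImports::FileExportService.new(portable, export_path, relation)
  else
    BulkImports::TreeExportService.new(portable, export_path, relation)
  end
end
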
diff --git a/spec/services/bulk_imports/tree_export_service_spec.rb b/spec/services/bulk_imports/tree_export_service_spec.rb
new file mode 100644
index 00000000000..f2ed747b64e
--- /dev/null
+++ b/spec/services/bulk_imports/tree_export_service_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::TreeExportService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:export_path) { Dir.mktmpdir }
+ let_it_be(:relation) { 'issues' }
+
+ subject(:service) { described_class.new(project, export_path, relation) }
+
+ describe '#execute' do
+ it 'executes export service and archives exported data' do
+ expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
+ expect(serializer).to receive(:serialize_relation)
+ end
+
+ subject.execute
+ end
+
+ context 'when unsupported relation is passed' do
+ it 'raises an error' do
+ service = described_class.new(project, export_path, 'unsupported')
+
+ expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
+ end
+ end
+ end
+
+ describe '#exported_filename' do
+ it 'returns filename of the exported file' do
+ expect(subject.exported_filename).to eq('issues.ndjson')
+ end
+ end
+end
diff --git a/spec/services/ci/archive_trace_service_spec.rb b/spec/services/ci/archive_trace_service_spec.rb
index 071b5c3b2f9..b08ba6fd5e5 100644
--- a/spec/services/ci/archive_trace_service_spec.rb
+++ b/spec/services/ci/archive_trace_service_spec.rb
@@ -88,6 +88,32 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
subject
end
+
+ context 'job has archive and chunks' do
+ let(:job) { create(:ci_build, :success, :trace_artifact) }
+
+ before do
+ create(:ci_build_trace_chunk, build: job, chunk_index: 0)
+ end
+
+ context 'archive is not completed' do
+ before do
+ job.job_artifacts_trace.file.remove!
+ end
+
+ it 'cleanups any stale archive data' do
+ expect(job.job_artifacts_trace).to be_present
+
+ subject
+
+ expect(job.reload.job_artifacts_trace).to be_nil
+ end
+ end
+
+ it 'removes trace chunks' do
+ expect { subject }.to change { job.trace_chunks.count }.to(0)
+ end
+ end
end
context 'when the archival process is backed off' do
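
Note: a hedged sketch of the cleanup behaviour the new examples assert: when the trace artifact record exists but its backing file is gone, the stale record is removed, and leftover trace chunks are erased. The method below is grounded only in those expectations, not in the service's code.

def clean_up_stale_trace_data!(job)
  trace_artifact = job.job_artifacts_trace

  # Incomplete archive: artifact row present, backing file missing.
  trace_artifact.destroy! if trace_artifact && !trace_artifact.file&.exists?

  # A completed archive should leave no live chunks behind.
  job.trace_chunks.destroy_all if job.trace_chunks.any?
end
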
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
index 46271ee36c0..5e7dace8e15 100644
--- a/spec/services/ci/create_pipeline_service/include_spec.rb
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -58,17 +58,6 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline).to be_created_successfully
expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
end
-
- context 'when the FF ci_include_rules is disabled' do
- before do
- stub_feature_flags(ci_include_rules: false)
- end
-
- it 'includes the job in the file' do
- expect(pipeline).to be_created_successfully
- expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
- end
- end
end
context 'when the rules does not match' do
@@ -78,17 +67,6 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline).to be_created_successfully
expect(pipeline.processables.pluck(:name)).to contain_exactly('job')
end
-
- context 'when the FF ci_include_rules is disabled' do
- before do
- stub_feature_flags(ci_include_rules: false)
- end
-
- it 'includes the job in the file' do
- expect(pipeline).to be_created_successfully
- expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
- end
- end
end
end
end
diff --git a/spec/services/ci/drop_pipeline_service_spec.rb b/spec/services/ci/drop_pipeline_service_spec.rb
index c6a118c6083..ddb53712d9c 100644
--- a/spec/services/ci/drop_pipeline_service_spec.rb
+++ b/spec/services/ci/drop_pipeline_service_spec.rb
@@ -50,13 +50,14 @@ RSpec.describe Ci::DropPipelineService do
end.count
writes_per_build = 2
+ load_balancer_queries = 3
expected_reads_count = control_count - writes_per_build
create_list(:ci_build, 5, :running, pipeline: cancelable_pipeline)
expect do
drop_pipeline!(cancelable_pipeline)
- end.not_to exceed_query_limit(expected_reads_count + (5 * writes_per_build))
+ end.not_to exceed_query_limit(expected_reads_count + (5 * writes_per_build) + load_balancer_queries)
end
end
end
diff --git a/spec/services/ci/pipelines/add_job_service_spec.rb b/spec/services/ci/pipelines/add_job_service_spec.rb
index 3a77d26dd9e..709a840c644 100644
--- a/spec/services/ci/pipelines/add_job_service_spec.rb
+++ b/spec/services/ci/pipelines/add_job_service_spec.rb
@@ -77,19 +77,6 @@ RSpec.describe Ci::Pipelines::AddJobService do
expect(execute).to be_success
expect(execute.payload[:job]).to eq(job)
end
-
- context 'when the FF ci_pipeline_add_job_with_lock is disabled' do
- before do
- stub_feature_flags(ci_pipeline_add_job_with_lock: false)
- end
-
- it 'does not use exclusive lock' do
- expect(Gitlab::ExclusiveLease).not_to receive(:new).with(lock_key, timeout: lock_timeout)
-
- expect(execute).to be_success
- expect(execute.payload[:job]).to eq(job)
- end
- end
end
end
end
diff --git a/spec/services/ci/pipelines/hook_service_spec.rb b/spec/services/ci/pipelines/hook_service_spec.rb
new file mode 100644
index 00000000000..0e1ef6afd0d
--- /dev/null
+++ b/spec/services/ci/pipelines/hook_service_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Pipelines::HookService do
+ describe '#execute_hooks' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, :repository, namespace: namespace) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, :created, project: project) }
+
+ let(:hook_enabled) { true }
+ let!(:hook) { create(:project_hook, project: project, pipeline_events: hook_enabled) }
+ let(:hook_data) { double }
+
+ subject(:service) { described_class.new(pipeline) }
+
+ describe 'HOOK_NAME' do
+ specify { expect(described_class::HOOK_NAME).to eq(:pipeline_hooks) }
+ end
+
+ context 'with pipeline hooks enabled' do
+ before do
+ allow(Gitlab::DataBuilder::Pipeline).to receive(:build).with(pipeline).once.and_return(hook_data)
+ end
+
+ it 'calls pipeline.project.execute_hooks and pipeline.project.execute_integrations' do
+ create(:pipelines_email_integration, project: project)
+
+ expect(pipeline.project).to receive(:execute_hooks).with(hook_data, described_class::HOOK_NAME)
+ expect(pipeline.project).to receive(:execute_integrations).with(hook_data, described_class::HOOK_NAME)
+
+ service.execute
+ end
+ end
+
+ context 'with pipeline hooks and integrations disabled' do
+ let(:hook_enabled) { false }
+
+ it 'does not call pipeline.project.execute_hooks and pipeline.project.execute_integrations' do
+ expect(pipeline.project).not_to receive(:execute_hooks)
+ expect(pipeline.project).not_to receive(:execute_integrations)
+
+ service.execute
+ end
+ end
+ end
+end
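
Note: from the expectations above, a hedged reconstruction of what the service's #execute presumably does: build the pipeline hook payload once, then fan it out to project hooks and integrations under the :pipeline_hooks hook name. The class below is a sketch (hence the name), and the has_active_* guard predicates are assumptions inferred from the "disabled" example.

module Ci
  module Pipelines
    class HookServiceSketch
      HOOK_NAME = :pipeline_hooks

      def initialize(pipeline)
        @pipeline = pipeline
      end

      def execute
        project = @pipeline.project

        # Skip everything when no pipeline hooks or integrations are active.
        return unless project.has_active_hooks?(HOOK_NAME) || project.has_active_integrations?(HOOK_NAME)

        data = Gitlab::DataBuilder::Pipeline.build(@pipeline)

        project.execute_hooks(data, HOOK_NAME) if project.has_active_hooks?(HOOK_NAME)
        project.execute_integrations(data, HOOK_NAME) if project.has_active_integrations?(HOOK_NAME)
      end
    end
  end
end
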
diff --git a/spec/services/ci/play_bridge_service_spec.rb b/spec/services/ci/play_bridge_service_spec.rb
index 3f97bfdf5ae..56b1615a56d 100644
--- a/spec/services/ci/play_bridge_service_spec.rb
+++ b/spec/services/ci/play_bridge_service_spec.rb
@@ -23,18 +23,18 @@ RSpec.describe Ci::PlayBridgeService, '#execute' do
expect(bridge.reload).to be_pending
end
- it 'enqueues Ci::CreateCrossProjectPipelineWorker' do
- expect(::Ci::CreateCrossProjectPipelineWorker).to receive(:perform_async).with(bridge.id)
-
- execute_service
- end
-
it "updates bridge's user" do
execute_service
expect(bridge.reload.user).to eq(user)
end
+ it 'enqueues Ci::CreateDownstreamPipelineWorker' do
+ expect(::Ci::CreateDownstreamPipelineWorker).to receive(:perform_async).with(bridge.id)
+
+ execute_service
+ end
+
context 'when a subsequent job is skipped' do
let!(:job) { create(:ci_build, :skipped, pipeline: pipeline, stage_idx: bridge.stage_idx + 1) }
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index b5bf0adadaf..404e1bf7c87 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -10,11 +10,9 @@ RSpec.describe Ci::ProcessPipelineService do
end
let(:pipeline_processing_events_counter) { double(increment: true) }
- let(:legacy_update_jobs_counter) { double(increment: true) }
let(:metrics) do
- double(pipeline_processing_events_counter: pipeline_processing_events_counter,
- legacy_update_jobs_counter: legacy_update_jobs_counter)
+ double(pipeline_processing_events_counter: pipeline_processing_events_counter)
end
subject { described_class.new(pipeline) }
@@ -33,68 +31,4 @@ RSpec.describe Ci::ProcessPipelineService do
subject.execute
end
end
-
- describe 'updating a list of retried builds' do
- let!(:build_retried) { create_build('build') }
- let!(:build) { create_build('build') }
- let!(:test) { create_build('test') }
-
- context 'when FF ci_remove_update_retried_from_process_pipeline is enabled' do
- it 'does not update older builds as retried' do
- subject.execute
-
- expect(all_builds.latest).to contain_exactly(build, build_retried, test)
- expect(all_builds.retried).to be_empty
- end
- end
-
- context 'when FF ci_remove_update_retried_from_process_pipeline is disabled' do
- before do
- stub_feature_flags(ci_remove_update_retried_from_process_pipeline: false)
- end
-
- it 'returns unique statuses' do
- subject.execute
-
- expect(all_builds.latest).to contain_exactly(build, test)
- expect(all_builds.retried).to contain_exactly(build_retried)
- end
-
- it 'increments the counter' do
- expect(legacy_update_jobs_counter).to receive(:increment)
-
- subject.execute
- end
-
- it 'logs the project and pipeline id' do
- expect(Gitlab::AppJsonLogger).to receive(:info).with(event: 'update_retried_is_used',
- project_id: project.id,
- pipeline_id: pipeline.id)
-
- subject.execute
- end
-
- context 'when the previous build has already retried column true' do
- before do
- build_retried.update_columns(retried: true)
- end
-
- it 'does not increment the counter' do
- expect(legacy_update_jobs_counter).not_to receive(:increment)
-
- subject.execute
- end
- end
- end
-
- private
-
- def create_build(name, **opts)
- create(:ci_build, :created, pipeline: pipeline, name: name, **opts)
- end
-
- def all_builds
- pipeline.builds.order(:stage_idx, :id)
- end
- end
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 73ff15ec393..650353eb751 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -14,7 +14,7 @@ module Ci
let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
describe '#execute' do
- context 'checks database loadbalancing stickiness', :db_load_balancing do
+ context 'checks database loadbalancing stickiness' do
subject { described_class.new(shared_runner).execute }
before do
@@ -22,14 +22,14 @@ module Ci
end
it 'result is valid if replica did caught-up' do
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?)
+ expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
.with(:runner, shared_runner.id) { true }
expect(subject).to be_valid
end
it 'result is invalid if replica did not caught-up' do
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?)
+ expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
.with(:runner, shared_runner.id) { false }
expect(subject).not_to be_valid
@@ -87,19 +87,25 @@ module Ci
end
context 'for specific runner' do
- context 'with FF disabled' do
+ context 'with tables decoupling disabled' do
before do
stub_feature_flags(
ci_pending_builds_project_runners_decoupling: false,
ci_queueing_builds_enabled_checks: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
it 'does not pick a build' do
expect(execute(specific_runner)).to be_nil
end
end
- context 'with FF enabled' do
+ context 'with tables decoupling enabled' do
before do
stub_feature_flags(
ci_pending_builds_project_runners_decoupling: true,
@@ -266,17 +272,23 @@ module Ci
context 'and uses project runner' do
let(:build) { execute(specific_runner) }
- context 'with FF disabled' do
+ context 'with tables decoupling disabled' do
before do
stub_feature_flags(
ci_pending_builds_project_runners_decoupling: false,
ci_queueing_builds_enabled_checks: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
it { expect(build).to be_nil }
end
- context 'with FF enabled' do
+ context 'with tables decoupling enabled' do
before do
stub_feature_flags(
ci_pending_builds_project_runners_decoupling: true,
@@ -791,6 +803,12 @@ module Ci
stub_feature_flags(ci_queueing_denormalize_shared_runners_information: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
include_examples 'handles runner assignment'
end
@@ -807,6 +825,12 @@ module Ci
stub_feature_flags(ci_queueing_denormalize_tags_information: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
include_examples 'handles runner assignment'
end
@@ -815,6 +839,12 @@ module Ci
stub_feature_flags(ci_queueing_denormalize_namespace_traversal_ids: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
include_examples 'handles runner assignment'
end
end
@@ -824,6 +854,12 @@ module Ci
stub_feature_flags(ci_pending_builds_queue_source: false)
end
+ around do |example|
+ allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
+ example.run
+ end
+ end
+
include_examples 'handles runner assignment'
end
end
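
Note: a hedged sketch of the load-balancing stickiness check the updated expectations reference: a register result is treated as valid only when all database replicas have caught up for the requesting runner. ApplicationRecord.sticking.all_caught_up? is taken from the spec; the wrapping method is an illustrative assumption.

def replica_caught_up_for?(runner)
  # Mirrors the stubbed call in the examples above.
  ApplicationRecord.sticking.all_caught_up?(:runner, runner.id)
end
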
diff --git a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
index 53aa842bc28..194203a422c 100644
--- a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
+++ b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
@@ -48,6 +48,92 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService do
expect(build).to be_pending
end
end
+
+ context 'when process mode is oldest_first' do
+ let(:resource_group) { create(:ci_resource_group, process_mode: :oldest_first, project: project) }
+
+ it 'requests resource' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ end
+
+ context 'when the other job exists in the newer pipeline' do
+ let!(:build_2) { create(:ci_build, :waiting_for_resource, project: project, user: user, resource_group: resource_group) }
+
+ it 'requests resource for the job in the oldest pipeline' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ expect(build_2.reload).to be_waiting_for_resource
+ expect(build_2.resource).to be_nil
+ end
+ end
+
+ context 'when build is not `waiting_for_resource` state' do
+ let!(:build) { create(:ci_build, :created, project: project, user: user, resource_group: resource_group) }
+
+ it 'attempts to request a resource' do
+ expect_next_found_instance_of(Ci::Build) do |job|
+ expect(job).to receive(:enqueue_waiting_for_resource).and_call_original
+ end
+
+ subject
+ end
+
+ it 'does not change the job status' do
+ subject
+
+ expect(build.reload).to be_created
+ expect(build.resource).to be_nil
+ end
+ end
+ end
+
+ context 'when process mode is newest_first' do
+ let(:resource_group) { create(:ci_resource_group, process_mode: :newest_first, project: project) }
+
+ it 'requests resource' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ end
+
+ context 'when the other job exists in the newer pipeline' do
+ let!(:build_2) { create(:ci_build, :waiting_for_resource, project: project, user: user, resource_group: resource_group) }
+
+ it 'requests resource for the job in the newest pipeline' do
+ subject
+
+ expect(build.reload).to be_waiting_for_resource
+ expect(build.resource).to be_nil
+ expect(build_2.reload).to be_pending
+ expect(build_2.resource).to be_present
+ end
+ end
+
+ context 'when build is not `waiting_for_resource` state' do
+ let!(:build) { create(:ci_build, :created, project: project, user: user, resource_group: resource_group) }
+
+ it 'attempts to request a resource' do
+ expect_next_found_instance_of(Ci::Build) do |job|
+ expect(job).to receive(:enqueue_waiting_for_resource).and_call_original
+ end
+
+ subject
+ end
+
+ it 'does not change the job status' do
+ subject
+
+ expect(build.reload).to be_created
+ expect(build.resource).to be_nil
+ end
+ end
+ end
end
context 'when there are no available resources' do
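
Note: a hedged sketch of the ordering the two process modes imply, based purely on which waiting build the examples above expect to receive the resource (oldest pipeline first vs. newest pipeline first); the scope and method names are illustrative assumptions.

def upcoming_processables(resource_group)
  waiting = resource_group.processables.waiting_for_resource

  case resource_group.process_mode.to_s
  when 'oldest_first' then waiting.order(id: :asc)
  when 'newest_first' then waiting.order(id: :desc)
  else waiting # unordered: any waiting job may be picked
  end
end
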
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index ce2e6ba5e15..15c88c9f657 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe Ci::RetryBuildService do
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
- job_artifacts_api_fuzzing].freeze
+ job_artifacts_api_fuzzing terraform_state_versions].freeze
ignore_accessors =
%i[type lock_version target_url base_tags trace_sections
@@ -88,6 +88,7 @@ RSpec.describe Ci::RetryBuildService do
create(:ci_job_variable, job: build)
create(:ci_build_need, build: build)
+ create(:terraform_state_version, build: build)
end
describe 'clone accessors' do
@@ -276,13 +277,17 @@ RSpec.describe Ci::RetryBuildService do
end
end
- describe '#reprocess' do
+ describe '#clone!' do
let(:new_build) do
travel_to(1.second.from_now) do
- service.reprocess!(build)
+ service.clone!(build)
end
end
+ it 'raises an error when an unexpected class is passed' do
+ expect { service.clone!(create(:ci_build).present) }.to raise_error(TypeError)
+ end
+
context 'when user has ability to execute build' do
before do
stub_not_protect_default_branch
@@ -338,7 +343,7 @@ RSpec.describe Ci::RetryBuildService do
let(:user) { reporter }
it 'raises an error' do
- expect { service.reprocess!(build) }
+ expect { service.clone!(build) }
.to raise_error Gitlab::Access::AccessDeniedError
end
end
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 3e2e9f07723..12106b70969 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -316,6 +316,26 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
expect(bridge.reload).to be_pending
end
end
+
+ context 'when there are skipped jobs in later stages' do
+ before do
+ create_build('build 1', :success, 0)
+ create_build('test 2', :failed, 1)
+ create_build('report 3', :skipped, 2)
+ create_bridge('deploy 4', :skipped, 2)
+ end
+
+ it 'retries failed jobs and processes skipped jobs' do
+ service.execute(pipeline)
+
+ expect(build('build 1')).to be_success
+ expect(build('test 2')).to be_pending
+ expect(build('report 3')).to be_created
+ expect(build('deploy 4')).to be_created
+
+ expect(pipeline.reload).to be_running
+ end
+ end
end
context 'when user is not allowed to retry pipeline' do
@@ -390,16 +410,25 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
pipeline.reload.statuses
end
+ # The method name can be confusing because this can actually return both Ci::Build and Ci::Bridge
def build(name)
statuses.latest.find_by(name: name)
end
def create_build(name, status, stage_num, **opts)
- create(:ci_build, name: name,
- status: status,
- stage: "stage_#{stage_num}",
- stage_idx: stage_num,
- pipeline: pipeline, **opts) do |build|
+ create_processable(:ci_build, name, status, stage_num, **opts)
+ end
+
+ def create_bridge(name, status, stage_num, **opts)
+ create_processable(:ci_bridge, name, status, stage_num, **opts)
+ end
+
+ def create_processable(type, name, status, stage_num, **opts)
+ create(type, name: name,
+ status: status,
+ stage: "stage_#{stage_num}",
+ stage_idx: stage_num,
+ pipeline: pipeline, **opts) do |_job|
::Ci::ProcessPipelineService.new(pipeline).execute
end
end
diff --git a/spec/services/ci/stuck_builds/drop_service_spec.rb b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
index 8dfd1bc1b3d..aa0526edf57 100644
--- a/spec/services/ci/stuck_builds/drop_service_spec.rb
+++ b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::StuckBuilds::DropService do
+RSpec.describe Ci::StuckBuilds::DropPendingService do
let!(:runner) { create :ci_runner }
let!(:job) { create :ci_build, runner: runner }
let(:created_at) { }
@@ -17,48 +17,6 @@ RSpec.describe Ci::StuckBuilds::DropService do
job.update!(job_attributes)
end
- shared_examples 'job is dropped' do
- it 'changes status' do
- expect(service).to receive(:drop).exactly(3).times.and_call_original
- expect(service).to receive(:drop_stuck).exactly(:once).and_call_original
-
- service.execute
- job.reload
-
- expect(job).to be_failed
- expect(job).to be_stuck_or_timeout_failure
- end
-
- context 'when job have data integrity problem' do
- it "does drop the job and logs the reason" do
- job.update_columns(yaml_variables: '[{"key" => "value"}]')
-
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(anything, a_hash_including(build_id: job.id))
- .once
- .and_call_original
-
- service.execute
- job.reload
-
- expect(job).to be_failed
- expect(job).to be_data_integrity_failure
- end
- end
- end
-
- shared_examples 'job is unchanged' do
- it 'does not change status' do
- expect(service).to receive(:drop).exactly(3).times.and_call_original
- expect(service).to receive(:drop_stuck).exactly(:once).and_call_original
-
- service.execute
- job.reload
-
- expect(job.status).to eq(status)
- end
- end
-
context 'when job is pending' do
let(:status) { 'pending' }
@@ -75,13 +33,13 @@ RSpec.describe Ci::StuckBuilds::DropService do
context 'when created_at is the same as updated_at' do
let(:created_at) { 1.5.days.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
context 'when created_at is before updated_at' do
let(:created_at) { 3.days.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
context 'when created_at is outside lookback window' do
@@ -149,13 +107,13 @@ RSpec.describe Ci::StuckBuilds::DropService do
context 'when created_at is the same as updated_at' do
let(:created_at) { 1.5.hours.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
context 'when created_at is before updated_at' do
let(:created_at) { 3.days.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
context 'when created_at is outside lookback window' do
@@ -195,7 +153,7 @@ RSpec.describe Ci::StuckBuilds::DropService do
context 'when job was updated_at more than an hour ago' do
let(:updated_at) { 2.hours.ago }
- it_behaves_like 'job is dropped'
+ it_behaves_like 'job is unchanged'
end
context 'when job was updated in less than 1 hour ago' do
@@ -238,47 +196,6 @@ RSpec.describe Ci::StuckBuilds::DropService do
job.project.update!(pending_delete: true)
end
- it_behaves_like 'job is dropped'
- end
-
- describe 'drop stale scheduled builds' do
- let(:status) { 'scheduled' }
- let(:updated_at) { }
-
- context 'when scheduled at 2 hours ago but it is not executed yet' do
- let!(:job) { create(:ci_build, :scheduled, scheduled_at: 2.hours.ago) }
-
- it 'drops the stale scheduled build' do
- expect(Ci::Build.scheduled.count).to eq(1)
- expect(job).to be_scheduled
-
- service.execute
- job.reload
-
- expect(Ci::Build.scheduled.count).to eq(0)
- expect(job).to be_failed
- expect(job).to be_stale_schedule
- end
- end
-
- context 'when scheduled at 30 minutes ago but it is not executed yet' do
- let!(:job) { create(:ci_build, :scheduled, scheduled_at: 30.minutes.ago) }
-
- it 'does not drop the stale scheduled build yet' do
- expect(Ci::Build.scheduled.count).to eq(1)
- expect(job).to be_scheduled
-
- service.execute
-
- expect(Ci::Build.scheduled.count).to eq(1)
- expect(job).to be_scheduled
- end
- end
-
- context 'when there are no stale scheduled builds' do
- it 'does not drop the stale scheduled build yet' do
- expect { service.execute }.not_to raise_error
- end
- end
+ it_behaves_like 'job is unchanged'
end
end
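
Note: the renamed spec above and the two new stuck-build specs below rely on 'job is dropped with failure reason' and 'job is unchanged' shared examples that no longer live in this file. A hedged reconstruction of what they presumably look like, based on the examples removed above; the exact assertions are assumptions.

RSpec.shared_examples 'job is dropped with failure reason' do |failure_reason|
  it 'changes status' do
    service.execute
    job.reload

    expect(job).to be_failed
    expect(job.failure_reason).to eq(failure_reason)
  end
end

RSpec.shared_examples 'job is unchanged' do
  it 'does not change status' do
    expect { service.execute }.not_to change { job.reload.status }
  end
end
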
diff --git a/spec/services/ci/stuck_builds/drop_running_service_spec.rb b/spec/services/ci/stuck_builds/drop_running_service_spec.rb
new file mode 100644
index 00000000000..c1c92c2b8e2
--- /dev/null
+++ b/spec/services/ci/stuck_builds/drop_running_service_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropRunningService do
+ let!(:runner) { create :ci_runner }
+ let!(:job) { create(:ci_build, runner: runner, created_at: created_at, updated_at: updated_at, status: status) }
+
+ subject(:service) { described_class.new }
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ shared_examples 'running builds' do
+ context 'when job is running' do
+ let(:status) { 'running' }
+ let(:outdated_time) { described_class::BUILD_RUNNING_OUTDATED_TIMEOUT.ago - 30.minutes }
+ let(:fresh_time) { described_class::BUILD_RUNNING_OUTDATED_TIMEOUT.ago + 30.minutes }
+
+ context 'when job is outdated' do
+ let(:created_at) { outdated_time }
+ let(:updated_at) { outdated_time }
+
+ it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
+ end
+
+ context 'when job is fresh' do
+ let(:created_at) { fresh_time }
+ let(:updated_at) { fresh_time }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when job freshly updated' do
+ let(:created_at) { outdated_time }
+ let(:updated_at) { fresh_time }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+
+ include_examples 'running builds'
+
+ context 'when new query flag is disabled' do
+ before do
+ stub_feature_flags(ci_new_query_for_running_stuck_jobs: false)
+ end
+
+ include_examples 'running builds'
+ end
+
+ %w(success skipped failed canceled scheduled pending).each do |status|
+ context "when job is #{status}" do
+ let(:status) { status }
+ let(:updated_at) { 2.days.ago }
+
+ context 'when created_at is the same as updated_at' do
+ let(:created_at) { 2.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is before updated_at' do
+ let(:created_at) { 3.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb b/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
new file mode 100644
index 00000000000..1416fab3d25
--- /dev/null
+++ b/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropScheduledService do
+ let_it_be(:runner) { create :ci_runner }
+
+ let!(:job) { create :ci_build, :scheduled, scheduled_at: scheduled_at, runner: runner }
+
+ subject(:service) { described_class.new }
+
+ context 'when job is scheduled' do
+ context 'for more than an hour ago' do
+ let(:scheduled_at) { 2.hours.ago }
+
+ it_behaves_like 'job is dropped with failure reason', 'stale_schedule'
+ end
+
+ context 'for less than 1 hour ago' do
+ let(:scheduled_at) { 30.minutes.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+
+ %w(success skipped failed canceled running pending).each do |status|
+ context "when job is #{status}" do
+ before do
+ job.update!(status: status)
+ end
+
+ context 'and scheduled for more than an hour ago' do
+ let(:scheduled_at) { 2.hours.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'and scheduled for less than 1 hour ago' do
+ let(:scheduled_at) { 30.minutes.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+
+ context 'when there are no stale scheduled builds' do
+ let(:job) { }
+
+ it 'does not drop the stale scheduled build yet' do
+ expect { service.execute }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
index 5bb3843da8f..e4dd3d0500f 100644
--- a/spec/services/ci/update_build_state_service_spec.rb
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -112,6 +112,14 @@ RSpec.describe Ci::UpdateBuildStateService do
.not_to have_received(:increment_trace_operation)
.with(operation: :invalid)
end
+
+ it 'does not increment chunks_invalid_checksum trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+ end
end
context 'when build trace has been migrated' do
@@ -174,6 +182,14 @@ RSpec.describe Ci::UpdateBuildStateService do
.to have_received(:increment_trace_operation)
.with(operation: :invalid)
end
+
+ it 'increments chunks_invalid_checksum trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+ end
end
context 'when trace checksum is valid' do
@@ -191,6 +207,14 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_trace_operation)
.with(operation: :corrupted)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_size)
end
context 'when using deprecated parameters' do
@@ -208,6 +232,14 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_trace_operation)
.with(operation: :corrupted)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_size)
end
end
end
@@ -227,6 +259,14 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.to have_received(:increment_trace_operation)
.with(operation: :corrupted)
+
+ expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+
+ expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_size)
end
end
@@ -243,8 +283,16 @@ RSpec.describe Ci::UpdateBuildStateService do
.with(operation: :invalid)
expect(metrics)
+ .to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
+
+ expect(metrics)
.not_to have_received(:increment_trace_operation)
.with(operation: :corrupted)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_size)
end
end
@@ -325,6 +373,10 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_trace_operation)
.with(operation: :invalid)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(type: :chunks_invalid_checksum)
end
context 'when build pending state is outdated' do
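
Note: a hedged sketch of the metric calls the new expectations assert on; whether the counters are incremented at exactly this point, and the checksum_valid?/chunks_size_valid? helpers, are assumptions.

# Counter usage implied by the expectations above.
metrics.increment_error_counter(type: :chunks_invalid_checksum) unless checksum_valid?
metrics.increment_error_counter(type: :chunks_invalid_size) unless chunks_size_valid?
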
diff --git a/spec/services/ci/update_pending_build_service_spec.rb b/spec/services/ci/update_pending_build_service_spec.rb
index d842042de40..d36564938c8 100644
--- a/spec/services/ci/update_pending_build_service_spec.rb
+++ b/spec/services/ci/update_pending_build_service_spec.rb
@@ -3,21 +3,23 @@
require 'spec_helper'
RSpec.describe Ci::UpdatePendingBuildService do
- describe '#execute' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, namespace: group) }
- let_it_be(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
- let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
- let_it_be(:update_params) { { instance_runners_enabled: true } }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be_with_reload(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
+ let_it_be_with_reload(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
+ let_it_be(:update_params) { { instance_runners_enabled: true } }
+
+ let(:service) { described_class.new(model, update_params) }
- subject(:service) { described_class.new(model, update_params).execute }
+ describe '#execute' do
+ subject(:update_pending_builds) { service.execute }
context 'validations' do
context 'when model is invalid' do
let(:model) { pending_build_1 }
it 'raises an error' do
- expect { service }.to raise_error(described_class::InvalidModelError)
+ expect { update_pending_builds }.to raise_error(described_class::InvalidModelError)
end
end
@@ -26,7 +28,7 @@ RSpec.describe Ci::UpdatePendingBuildService do
let(:update_params) { { minutes_exceeded: true } }
it 'raises an error' do
- expect { service }.to raise_error(described_class::InvalidParamsError)
+ expect { update_pending_builds }.to raise_error(described_class::InvalidParamsError)
end
end
end
@@ -35,10 +37,10 @@ RSpec.describe Ci::UpdatePendingBuildService do
let(:model) { group }
it 'updates all pending builds', :aggregate_failures do
- service
+ update_pending_builds
- expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
- expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_1.instance_runners_enabled).to be_truthy
+ expect(pending_build_2.instance_runners_enabled).to be_truthy
end
context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
@@ -47,10 +49,10 @@ RSpec.describe Ci::UpdatePendingBuildService do
end
it 'does not update all pending builds', :aggregate_failures do
- service
+ update_pending_builds
- expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
- expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_1.instance_runners_enabled).to be_falsey
+ expect(pending_build_2.instance_runners_enabled).to be_truthy
end
end
end
@@ -59,10 +61,10 @@ RSpec.describe Ci::UpdatePendingBuildService do
let(:model) { project }
it 'updates all pending builds', :aggregate_failures do
- service
+ update_pending_builds
- expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
- expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_1.instance_runners_enabled).to be_truthy
+ expect(pending_build_2.instance_runners_enabled).to be_truthy
end
context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
@@ -71,10 +73,10 @@ RSpec.describe Ci::UpdatePendingBuildService do
end
it 'does not update all pending builds', :aggregate_failures do
- service
+ update_pending_builds
- expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
- expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_1.instance_runners_enabled).to be_falsey
+ expect(pending_build_2.instance_runners_enabled).to be_truthy
end
end
end
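
Note: a hedged usage sketch matching the spec above: the service takes a group or project as the model plus an attribute hash, and raises for invalid models or non-allowed attributes. The rescue below is illustrative.

Ci::UpdatePendingBuildService.new(project, { instance_runners_enabled: true }).execute

begin
  Ci::UpdatePendingBuildService.new(project, { minutes_exceeded: true }).execute
rescue Ci::UpdatePendingBuildService::InvalidParamsError
  # minutes_exceeded is not an allowed attribute here, per the spec.
end
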
diff --git a/spec/services/clusters/agent_tokens/create_service_spec.rb b/spec/services/clusters/agent_tokens/create_service_spec.rb
new file mode 100644
index 00000000000..92629af06c8
--- /dev/null
+++ b/spec/services/clusters/agent_tokens/create_service_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::AgentTokens::CreateService do
+ subject(:service) { described_class.new(container: project, current_user: user, params: params) }
+
+ let_it_be(:user) { create(:user) }
+
+ let(:cluster_agent) { create(:cluster_agent) }
+ let(:project) { cluster_agent.project }
+ let(:params) { { agent_id: cluster_agent.id, description: 'token description', name: 'token name' } }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ it 'does not create a new token due to user permissions' do
+ expect { subject }.not_to change(::Clusters::AgentToken, :count)
+ end
+
+ it 'returns permission errors', :aggregate_failures do
+ expect(subject.status).to eq(:error)
+ expect(subject.message).to eq('User has insufficient permissions to create a token for this project')
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'creates a new token' do
+ expect { subject }.to change { ::Clusters::AgentToken.count }.by(1)
+ end
+
+ it 'returns success status', :aggregate_failures do
+ expect(subject.status).to eq(:success)
+ expect(subject.message).to be_nil
+ end
+
+ it 'returns token information', :aggregate_failures do
+ token = subject.payload[:token]
+
+ expect(subject.payload[:secret]).not_to be_nil
+
+ expect(token.created_by_user).to eq(user)
+ expect(token.description).to eq(params[:description])
+ expect(token.name).to eq(params[:name])
+ end
+
+ context 'when params are invalid' do
+ let(:params) { { agent_id: 'bad_id' } }
+
+ it 'does not create a new token' do
+ expect { subject }.not_to change(::Clusters::AgentToken, :count)
+ end
+
+ it 'returns validation errors', :aggregate_failures do
+ expect(subject.status).to eq(:error)
+ expect(subject.message).to eq(["Agent must exist", "Name can't be blank"])
+ end
+ end
+ end
+ end
+end
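
Note: a hedged usage sketch mirroring the constructor and the response object the spec asserts on (status, message, payload[:token], payload[:secret]); the agent, project, and user variables are illustrative.

response = Clusters::AgentTokens::CreateService
  .new(container: project, current_user: user, params: { agent_id: agent.id, name: 'ci token', description: 'token used by CI' })
  .execute

if response.status == :success
  token  = response.payload[:token]   # the persisted Clusters::AgentToken
  secret = response.payload[:secret]  # one-time secret to hand to the agent
else
  errors = response.message           # permission or validation errors
end
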
diff --git a/spec/services/clusters/agents/create_service_spec.rb b/spec/services/clusters/agents/create_service_spec.rb
new file mode 100644
index 00000000000..2b3bbcae13c
--- /dev/null
+++ b/spec/services/clusters/agents/create_service_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agents::CreateService do
+ subject(:service) { described_class.new(project, user) }
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { create(:user) }
+
+ describe '#execute' do
+ context 'without user permissions' do
+ it 'returns errors when user does not have permissions' do
+ expect(service.execute(name: 'missing-permissions')).to eq({
+ status: :error,
+ message: 'You have insufficient permissions to create a cluster agent for this project'
+ })
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'creates a new clusters_agent' do
+ expect { service.execute(name: 'with-user') }.to change { ::Clusters::Agent.count }.by(1)
+ end
+
+ it 'returns success status', :aggregate_failures do
+ result = service.execute(name: 'success')
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:message]).to be_nil
+ end
+
+ it 'returns agent values', :aggregate_failures do
+ new_agent = service.execute(name: 'new-agent')[:cluster_agent]
+
+ expect(new_agent.name).to eq('new-agent')
+ expect(new_agent.created_by_user).to eq(user)
+ end
+
+ it 'generates an error message when name is invalid' do
+ expect(service.execute(name: '@bad_agent_name!')).to eq({
+ status: :error,
+ message: ["Name can contain only lowercase letters, digits, and '-', but cannot start or end with '-'"]
+ })
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/agents/delete_service_spec.rb b/spec/services/clusters/agents/delete_service_spec.rb
new file mode 100644
index 00000000000..1d6bc9618dd
--- /dev/null
+++ b/spec/services/clusters/agents/delete_service_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agents::DeleteService do
+ subject(:service) { described_class.new(container: project, current_user: user) }
+
+ let(:cluster_agent) { create(:cluster_agent) }
+ let(:project) { cluster_agent.project }
+ let(:user) { create(:user) }
+
+ describe '#execute' do
+ context 'without user permissions' do
+ it 'fails to delete when the user has no permissions', :aggregate_failures do
+ response = service.execute(cluster_agent)
+
+ expect(response.status).to eq(:error)
+ expect(response.message).to eq('You have insufficient permissions to delete this cluster agent')
+
+ expect { cluster_agent.reload }.not_to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with user permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'deletes a cluster agent', :aggregate_failures do
+ expect { service.execute(cluster_agent) }.to change { ::Clusters::Agent.count }.by(-1)
+ expect { cluster_agent.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/services/concerns/rate_limited_service_spec.rb b/spec/services/concerns/rate_limited_service_spec.rb
new file mode 100644
index 00000000000..f73871b7e44
--- /dev/null
+++ b/spec/services/concerns/rate_limited_service_spec.rb
@@ -0,0 +1,196 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RateLimitedService do
+ let(:key) { :issues_create }
+ let(:scope) { [:project, :current_user] }
+ let(:opts) { { scope: scope, users_allowlist: -> { [User.support_bot.username] } } }
+ let(:rate_limiter_klass) { ::Gitlab::ApplicationRateLimiter }
+ let(:rate_limiter_instance) { rate_limiter_klass.new(key, **opts) }
+
+ describe 'RateLimitedError' do
+ subject { described_class::RateLimitedError.new(key: key, rate_limiter: rate_limiter_instance) }
+
+ describe '#headers' do
+ it 'returns a Hash of HTTP headers' do
+ # TODO: This will be fleshed out in https://gitlab.com/gitlab-org/gitlab/-/issues/342370
+ expected_headers = {}
+
+ expect(subject.headers).to eq(expected_headers)
+ end
+ end
+
+ describe '#log_request' do
+ it 'logs the request' do
+ request = instance_double(Grape::Request)
+ user = instance_double(User)
+
+ expect(rate_limiter_klass).to receive(:log_request).with(request, "#{key}_request_limit".to_sym, user)
+
+ subject.log_request(request, user)
+ end
+ end
+ end
+
+ describe 'RateLimiterScopedAndKeyed' do
+ subject { described_class::RateLimiterScopedAndKeyed.new(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass) }
+
+ describe '#rate_limit!' do
+ let(:project_with_feature_enabled) { create(:project) }
+ let(:project_without_feature_enabled) { create(:project) }
+
+ let(:project) { nil }
+
+ let(:current_user) { create(:user) }
+ let(:service) { instance_double(Issues::CreateService, project: project, current_user: current_user) }
+ let(:evaluated_scope) { [project, current_user] }
+ let(:evaluated_opts) { { scope: evaluated_scope, users_allowlist: %w[support-bot] } }
+ let(:rate_limited_service_issues_create_feature_enabled) { nil }
+
+ before do
+ allow(rate_limiter_klass).to receive(:new).with(key, **evaluated_opts).and_return(rate_limiter_instance)
+ stub_feature_flags(rate_limited_service_issues_create: rate_limited_service_issues_create_feature_enabled)
+ end
+
+ shared_examples 'a service that does not attempt to throttle' do
+ it 'does not attempt to throttle' do
+ expect(rate_limiter_instance).not_to receive(:throttled?)
+
+ expect(subject.rate_limit!(service)).to be_nil
+ end
+ end
+
+ shared_examples 'a service that does attempt to throttle' do
+ before do
+ allow(rate_limiter_instance).to receive(:throttled?).and_return(throttled)
+ end
+
+ context 'when rate limiting is not in effect' do
+ let(:throttled) { false }
+
+ it 'does not raise an exception' do
+ expect(subject.rate_limit!(service)).to be_nil
+ end
+ end
+
+ context 'when rate limiting is in effect' do
+ let(:throttled) { true }
+
+ it 'raises a RateLimitedError exception' do
+ expect { subject.rate_limit!(service) }.to raise_error(described_class::RateLimitedError, 'This endpoint has been requested too many times. Try again later.')
+ end
+ end
+ end
+
+ context 'when :rate_limited_service_issues_create feature is globally disabled' do
+ let(:rate_limited_service_issues_create_feature_enabled) { false }
+
+ it_behaves_like 'a service that does not attempt to throttle'
+ end
+
+ context 'when :rate_limited_service_issues_create feature is globally enabled' do
+ let(:throttled) { nil }
+ let(:rate_limited_service_issues_create_feature_enabled) { true }
+ let(:project) { project_without_feature_enabled }
+
+ it_behaves_like 'a service that does attempt to throttle'
+ end
+
+ context 'when :rate_limited_service_issues_create feature is enabled for project_with_feature_enabled' do
+ let(:throttled) { nil }
+ let(:rate_limited_service_issues_create_feature_enabled) { project_with_feature_enabled }
+
+ context 'for project_without_feature_enabled' do
+ let(:project) { project_without_feature_enabled }
+
+ it_behaves_like 'a service that does not attempt to throttle'
+ end
+
+ context 'for project_with_feature_enabled' do
+ let(:project) { project_with_feature_enabled }
+
+ it_behaves_like 'a service that does attempt to throttle'
+ end
+ end
+ end
+ end
+
+ describe '#execute_without_rate_limiting' do
+ let(:rate_limiter_scoped_and_keyed) { instance_double(RateLimitedService::RateLimiterScopedAndKeyed) }
+ let(:subject) do
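+      # Capture the let values in locals; RSpec let helpers are not in scope inside the Class.new block below.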
+ local_key = key
+ local_opts = opts
+
+ Class.new do
+ prepend RateLimitedService
+
+ rate_limit key: local_key, opts: local_opts
+
+ def execute(*args, **kwargs)
+ 'main logic here'
+ end
+ end.new
+ end
+
+ before do
+ allow(RateLimitedService::RateLimiterScopedAndKeyed).to receive(:new).with(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass).and_return(rate_limiter_scoped_and_keyed)
+ end
+
+ context 'bypasses rate limiting' do
+ it 'calls super' do
+ expect(rate_limiter_scoped_and_keyed).not_to receive(:rate_limit!).with(subject)
+
+ expect(subject.execute_without_rate_limiting).to eq('main logic here')
+ end
+ end
+ end
+
+ describe '#execute' do
+ context 'when rate_limit has not been called' do
+ let(:subject) { Class.new { prepend RateLimitedService }.new }
+
+      it 'raises a RateLimitedNotSetupError exception' do
+ expect { subject.execute }.to raise_error(described_class::RateLimitedNotSetupError)
+ end
+ end
+
+ context 'when rate_limit has been called' do
+ let(:rate_limiter_scoped_and_keyed) { instance_double(RateLimitedService::RateLimiterScopedAndKeyed) }
+ let(:subject) do
+ local_key = key
+ local_opts = opts
+
+ Class.new do
+ prepend RateLimitedService
+
+ rate_limit key: local_key, opts: local_opts
+
+ def execute(*args, **kwargs)
+ 'main logic here'
+ end
+ end.new
+ end
+
+ before do
+ allow(RateLimitedService::RateLimiterScopedAndKeyed).to receive(:new).with(key: key, opts: opts, rate_limiter_klass: rate_limiter_klass).and_return(rate_limiter_scoped_and_keyed)
+ end
+
+ context 'and applies rate limiting' do
+        it 'raises a RateLimitedService::RateLimitedError exception' do
+ expect(rate_limiter_scoped_and_keyed).to receive(:rate_limit!).with(subject).and_raise(RateLimitedService::RateLimitedError.new(key: key, rate_limiter: rate_limiter_instance))
+
+ expect { subject.execute }.to raise_error(RateLimitedService::RateLimitedError)
+ end
+ end
+
+ context 'but does not apply rate limiting' do
+ it 'calls super' do
+ expect(rate_limiter_scoped_and_keyed).to receive(:rate_limit!).with(subject).and_return(nil)
+
+ expect(subject.execute).to eq('main logic here')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/container_expiration_policies/cleanup_service_spec.rb b/spec/services/container_expiration_policies/cleanup_service_spec.rb
index 5f284b9dd8b..a1f76e5e5dd 100644
--- a/spec/services/container_expiration_policies/cleanup_service_spec.rb
+++ b/spec/services/container_expiration_policies/cleanup_service_spec.rb
@@ -24,8 +24,8 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
it 'completely cleans up the repository' do
expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).with(project, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
- expect(cleanup_tags_service).to receive(:execute).with(repository).and_return(status: :success)
+ .to receive(:new).with(repository, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
+ expect(cleanup_tags_service).to receive(:execute).and_return(status: :success)
response = subject
@@ -69,6 +69,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
before_truncate_size: 800,
after_truncate_size: 200,
before_delete_size: 100,
+ cached_tags_count: 0,
deleted_size: 100
}
end
@@ -86,6 +87,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
cleanup_tags_service_before_truncate_size: 800,
cleanup_tags_service_after_truncate_size: 200,
cleanup_tags_service_before_delete_size: 100,
+ cleanup_tags_service_cached_tags_count: 0,
cleanup_tags_service_deleted_size: 100
)
expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
diff --git a/spec/services/customer_relations/contacts/create_service_spec.rb b/spec/services/customer_relations/contacts/create_service_spec.rb
new file mode 100644
index 00000000000..71eb447055e
--- /dev/null
+++ b/spec/services/customer_relations/contacts/create_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::Contacts::CreateService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:not_found_or_does_not_belong) { 'The specified organization was not found or does not belong to this group' }
+
+ let(:params) { attributes_for(:contact, group: group) }
+
+ subject(:response) { described_class.new(group: group, current_user: user, params: params).execute }
+
+ context 'when user does not have permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_reporter(user)
+ end
+
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(response.message).to match_array(['You have insufficient permissions to create a contact for this group'])
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_developer(user)
+ end
+
+ it 'creates a contact' do
+ expect(response).to be_success
+ end
+
+ it 'returns an error when the contact is not persisted' do
+ params[:last_name] = nil
+
+ expect(response).to be_error
+ expect(response.message).to match_array(["Last name can't be blank"])
+ end
+
+ it 'returns an error when the organization_id is invalid' do
+ params[:organization_id] = non_existing_record_id
+
+ expect(response).to be_error
+ expect(response.message).to match_array([not_found_or_does_not_belong])
+ end
+
+ it 'returns an error when the organization belongs to a different group' do
+ organization = create(:organization)
+ params[:organization_id] = organization.id
+
+ expect(response).to be_error
+ expect(response.message).to match_array([not_found_or_does_not_belong])
+ end
+ end
+ end
+end
diff --git a/spec/services/customer_relations/contacts/update_service_spec.rb b/spec/services/customer_relations/contacts/update_service_spec.rb
new file mode 100644
index 00000000000..7c5fbabb600
--- /dev/null
+++ b/spec/services/customer_relations/contacts/update_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::Contacts::UpdateService do
+ let_it_be(:user) { create(:user) }
+
+ let(:contact) { create(:contact, first_name: 'Mark', group: group) }
+
+ subject(:update) { described_class.new(group: group, current_user: user, params: params).execute(contact) }
+
+ describe '#execute' do
+ context 'when the user has no permission' do
+ let_it_be(:group) { create(:group) }
+
+ let(:params) { { first_name: 'Gary' } }
+
+ it 'returns an error' do
+ response = update
+
+ expect(response).to be_error
+ expect(response.message).to match_array(['You have insufficient permissions to update a contact for this group'])
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_developer(user)
+ end
+
+ context 'when first_name is changed' do
+ let(:params) { { first_name: 'Gary' } }
+
+ it 'updates the contact' do
+ response = update
+
+ expect(response).to be_success
+ expect(response.payload.first_name).to eq('Gary')
+ end
+ end
+
+ context 'when the contact is invalid' do
+ let(:params) { { first_name: nil } }
+
+ it 'returns an error' do
+ response = update
+
+ expect(response).to be_error
+ expect(response.message).to match_array(["First name can't be blank"])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/customer_relations/organizations/create_service_spec.rb b/spec/services/customer_relations/organizations/create_service_spec.rb
index b4764f6b97a..d8985d8d90b 100644
--- a/spec/services/customer_relations/organizations/create_service_spec.rb
+++ b/spec/services/customer_relations/organizations/create_service_spec.rb
@@ -12,22 +12,24 @@ RSpec.describe CustomerRelations::Organizations::CreateService do
subject(:response) { described_class.new(group: group, current_user: user, params: params).execute }
it 'creates an organization' do
- group.add_reporter(user)
+ group.add_developer(user)
expect(response).to be_success
end
it 'returns an error when user does not have permission' do
+ group.add_reporter(user)
+
expect(response).to be_error
- expect(response.message).to eq('You have insufficient permissions to create an organization for this group')
+ expect(response.message).to match_array(['You have insufficient permissions to create an organization for this group'])
end
it 'returns an error when the organization is not persisted' do
- group.add_reporter(user)
+ group.add_developer(user)
params[:name] = nil
expect(response).to be_error
- expect(response.message).to eq(["Name can't be blank"])
+ expect(response.message).to match_array(["Name can't be blank"])
end
end
end
diff --git a/spec/services/customer_relations/organizations/update_service_spec.rb b/spec/services/customer_relations/organizations/update_service_spec.rb
index eb253540863..bc40cb3e8e7 100644
--- a/spec/services/customer_relations/organizations/update_service_spec.rb
+++ b/spec/services/customer_relations/organizations/update_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe CustomerRelations::Organizations::UpdateService do
response = update
expect(response).to be_error
- expect(response.message).to eq('You have insufficient permissions to update an organization for this group')
+ expect(response.message).to eq(['You have insufficient permissions to update an organization for this group'])
end
end
@@ -27,7 +27,7 @@ RSpec.describe CustomerRelations::Organizations::UpdateService do
let_it_be(:group) { create(:group) }
before_all do
- group.add_reporter(user)
+ group.add_developer(user)
end
context 'when name is changed' do
diff --git a/spec/services/dependency_proxy/auth_token_service_spec.rb b/spec/services/dependency_proxy/auth_token_service_spec.rb
index 6214d75dfa0..c686f57c5cb 100644
--- a/spec/services/dependency_proxy/auth_token_service_spec.rb
+++ b/spec/services/dependency_proxy/auth_token_service_spec.rb
@@ -4,47 +4,72 @@ require 'spec_helper'
RSpec.describe DependencyProxy::AuthTokenService do
include DependencyProxyHelpers
- describe '.decoded_token_payload' do
- let_it_be(:user) { create(:user) }
- let_it_be(:token) { build_jwt(user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:deploy_token) { create(:deploy_token) }
- subject { described_class.decoded_token_payload(token.encoded) }
+ describe '.user_or_deploy_token_from_jwt' do
+ subject { described_class.user_or_deploy_token_from_jwt(token.encoded) }
- it 'returns the user' do
- result = subject
+ shared_examples 'handling token errors' do
+ context 'with a decoding error' do
+ before do
+ allow(JWT).to receive(:decode).and_raise(JWT::DecodeError)
+ end
- expect(result['user_id']).to eq(user.id)
- expect(result['deploy_token']).to be_nil
- end
+ it { is_expected.to eq(nil) }
+ end
- context 'with a deploy token' do
- let_it_be(:deploy_token) { create(:deploy_token) }
- let_it_be(:token) { build_jwt(deploy_token) }
+ context 'with an immature signature error' do
+ before do
+ allow(JWT).to receive(:decode).and_raise(JWT::ImmatureSignature)
+ end
- it 'returns the deploy token' do
- result = subject
+ it { is_expected.to eq(nil) }
+ end
- expect(result['deploy_token']).to eq(deploy_token.token)
- expect(result['user_id']).to be_nil
+ context 'with an expired signature error' do
+ it 'returns nil' do
+ travel_to(Time.zone.now + Auth::DependencyProxyAuthenticationService.token_expire_at + 1.minute) do
+ expect(subject).to eq(nil)
+ end
+ end
end
end
- it 'raises an error if the token is expired' do
- travel_to(Time.zone.now + Auth::DependencyProxyAuthenticationService.token_expire_at + 1.minute) do
- expect { subject }.to raise_error(JWT::ExpiredSignature)
+ context 'with a user' do
+ let_it_be(:token) { build_jwt(user) }
+
+ it { is_expected.to eq(user) }
+
+ context 'with an invalid user id' do
+ let_it_be(:token) { build_jwt { |jwt| jwt['user_id'] = 'this_is_not_a_user_id' } }
+
+        it 'raises a not found error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ end
end
+
+ it_behaves_like 'handling token errors'
end
- it 'raises an error if decoding fails' do
- allow(JWT).to receive(:decode).and_raise(JWT::DecodeError)
+ context 'with a deploy token' do
+ let_it_be(:token) { build_jwt(deploy_token) }
+
+ it { is_expected.to eq(deploy_token) }
+
+ context 'with an invalid token' do
+ let_it_be(:token) { build_jwt { |jwt| jwt['deploy_token'] = 'this_is_not_a_token' } }
+
+ it { is_expected.to eq(nil) }
+ end
- expect { subject }.to raise_error(JWT::DecodeError)
+ it_behaves_like 'handling token errors'
end
- it 'raises an error if signature is immature' do
- allow(JWT).to receive(:decode).and_raise(JWT::ImmatureSignature)
+ context 'with an empty token payload' do
+ let_it_be(:token) { build_jwt(nil) }
- expect { subject }.to raise_error(JWT::ImmatureSignature)
+ it { is_expected.to eq(nil) }
end
end
end
diff --git a/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb b/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
index 3fac749be29..20b0546effa 100644
--- a/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe DependencyProxy::FindOrCreateBlobService do
include DependencyProxyHelpers
- let(:blob) { create(:dependency_proxy_blob) }
+ let_it_be_with_reload(:blob) { create(:dependency_proxy_blob) }
+
let(:group) { blob.group }
let(:image) { 'alpine' }
let(:tag) { '3.9' }
@@ -17,11 +18,7 @@ RSpec.describe DependencyProxy::FindOrCreateBlobService do
stub_registry_auth(image, token)
end
- context 'no cache' do
- before do
- stub_blob_download(image, blob_sha)
- end
-
+ shared_examples 'downloads the remote blob' do
it 'downloads blob from remote registry if there is no cached one' do
expect(subject[:status]).to eq(:success)
expect(subject[:blob]).to be_a(DependencyProxy::Blob)
@@ -30,15 +27,34 @@ RSpec.describe DependencyProxy::FindOrCreateBlobService do
end
end
+ context 'no cache' do
+ before do
+ stub_blob_download(image, blob_sha)
+ end
+
+ it_behaves_like 'downloads the remote blob'
+ end
+
context 'cached blob' do
let(:blob_sha) { blob.file_name.sub('.gz', '') }
it 'uses cached blob instead of downloading one' do
+ expect { subject }.to change { blob.reload.updated_at }
+
expect(subject[:status]).to eq(:success)
expect(subject[:blob]).to be_a(DependencyProxy::Blob)
expect(subject[:blob]).to eq(blob)
expect(subject[:from_cache]).to eq true
end
+
+ context 'when the cached blob is expired' do
+ before do
+ blob.update_column(:status, DependencyProxy::Blob.statuses[:expired])
+ stub_blob_download(image, blob_sha)
+ end
+
+ it_behaves_like 'downloads the remote blob'
+ end
end
context 'no such blob exists remotely' do
diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
index 5896aa255f0..b3f88f91289 100644
--- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
@@ -21,19 +21,19 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
describe '#execute' do
subject { described_class.new(group, image, tag, token).execute }
+ shared_examples 'downloading the manifest' do
+ it 'downloads manifest from remote registry if there is no cached one', :aggregate_failures do
+ expect { subject }.to change { group.dependency_proxy_manifests.count }.by(1)
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
+ expect(subject[:manifest]).to be_persisted
+ expect(subject[:from_cache]).to eq false
+ end
+ end
+
context 'when no manifest exists' do
let_it_be(:image) { 'new-image' }
- shared_examples 'downloading the manifest' do
- it 'downloads manifest from remote registry if there is no cached one', :aggregate_failures do
- expect { subject }.to change { group.dependency_proxy_manifests.count }.by(1)
- expect(subject[:status]).to eq(:success)
- expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
- expect(subject[:manifest]).to be_persisted
- expect(subject[:from_cache]).to eq false
- end
- end
-
context 'successful head request' do
before do
stub_manifest_head(image, tag, headers: headers)
@@ -60,6 +60,8 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
shared_examples 'using the cached manifest' do
it 'uses cached manifest instead of downloading one', :aggregate_failures do
+ expect { subject }.to change { dependency_proxy_manifest.reload.updated_at }
+
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
expect(subject[:manifest]).to eq(dependency_proxy_manifest)
@@ -87,6 +89,16 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
end
end
+ context 'when the cached manifest is expired' do
+ before do
+ dependency_proxy_manifest.update_column(:status, DependencyProxy::Manifest.statuses[:expired])
+ stub_manifest_head(image, tag, headers: headers)
+ stub_manifest_download(image, tag, headers: headers)
+ end
+
+ it_behaves_like 'downloading the manifest'
+ end
+
context 'failed connection' do
before do
expect(DependencyProxy::HeadManifestService).to receive(:new).and_raise(Net::OpenTimeout)
diff --git a/spec/services/dependency_proxy/group_settings/update_service_spec.rb b/spec/services/dependency_proxy/group_settings/update_service_spec.rb
new file mode 100644
index 00000000000..6f8c55daa8d
--- /dev/null
+++ b/spec/services/dependency_proxy/group_settings/update_service_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::DependencyProxy::GroupSettings::UpdateService do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:group_settings) { create(:dependency_proxy_group_setting, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:params) { { enabled: false } }
+
+ describe '#execute' do
+ subject { described_class.new(container: group, current_user: user, params: params).execute }
+
+ shared_examples 'updating the dependency proxy group settings' do
+ it_behaves_like 'updating the dependency proxy group settings attributes',
+ from: { enabled: true },
+ to: { enabled: false }
+
+ it 'returns a success' do
+ result = subject
+
+ expect(result.payload[:dependency_proxy_setting]).to be_present
+ expect(result).to be_success
+ end
+ end
+
+ shared_examples 'denying access to dependency proxy group settings' do
+ context 'with existing dependency proxy group settings' do
+ it 'returns an error' do
+ result = subject
+
+ expect(result).to have_attributes(
+ message: 'Access Denied',
+ status: :error,
+ http_status: 403
+ )
+ end
+ end
+ end
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'updating the dependency proxy group settings'
+ :developer | 'updating the dependency proxy group settings'
+ :reporter | 'denying access to dependency proxy group settings'
+ :guest | 'denying access to dependency proxy group settings'
+ :anonymous | 'denying access to dependency proxy group settings'
+ end
+
+ with_them do
+ before do
+ stub_config(dependency_proxy: { enabled: true })
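+        # :anonymous rows get no membership; every other row grants the role under test.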
+ group.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+end
diff --git a/spec/services/deployments/older_deployments_drop_service_spec.rb b/spec/services/deployments/older_deployments_drop_service_spec.rb
index 6152a95cc3c..e6fd6725d7d 100644
--- a/spec/services/deployments/older_deployments_drop_service_spec.rb
+++ b/spec/services/deployments/older_deployments_drop_service_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Deployments::OlderDeploymentsDropService do
it 'does not drop an older deployment and tracks the exception' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(kind_of(RuntimeError), subject_id: deployment.id, deployment_id: older_deployment.id)
+ .with(kind_of(RuntimeError), subject_id: deployment.id, build_id: older_deployment.deployable_id)
expect { subject }.not_to change { Ci::Build.failed.count }
end
diff --git a/spec/services/deployments/update_service_spec.rb b/spec/services/deployments/update_service_spec.rb
index 16b24d0dee8..d3840189ba4 100644
--- a/spec/services/deployments/update_service_spec.rb
+++ b/spec/services/deployments/update_service_spec.rb
@@ -34,9 +34,11 @@ RSpec.describe Deployments::UpdateService do
expect(deploy).to be_canceled
end
- it 'raises ArgumentError if the status is invalid' do
- expect { described_class.new(deploy, status: 'kittens').execute }
- .to raise_error(ArgumentError)
+ it 'does not change the state if the status is invalid' do
+ expect(described_class.new(deploy, status: 'kittens').execute)
+ .to be_falsy
+
+ expect(deploy).to be_created
end
it 'links merge requests when changing the status to success', :sidekiq_inline do
diff --git a/spec/services/error_tracking/list_issues_service_spec.rb b/spec/services/error_tracking/list_issues_service_spec.rb
index b49095ab8b9..a7bd6c75df5 100644
--- a/spec/services/error_tracking/list_issues_service_spec.rb
+++ b/spec/services/error_tracking/list_issues_service_spec.rb
@@ -5,56 +5,71 @@ require 'spec_helper'
RSpec.describe ErrorTracking::ListIssuesService do
include_context 'sentry error tracking context'
- let(:params) { { search_term: 'something', sort: 'last_seen', cursor: 'some-cursor' } }
- let(:list_sentry_issues_args) do
- {
- issue_status: 'unresolved',
- limit: 20,
- search_term: 'something',
- sort: 'last_seen',
- cursor: 'some-cursor'
- }
- end
+ let(:params) { {} }
subject { described_class.new(project, user, params) }
describe '#execute' do
- context 'with authorized user' do
- let(:issues) { [] }
+ context 'Sentry backend' do
+ let(:params) { { search_term: 'something', sort: 'last_seen', cursor: 'some-cursor' } }
+
+ let(:list_sentry_issues_args) do
+ {
+ issue_status: 'unresolved',
+ limit: 20,
+ search_term: 'something',
+ sort: 'last_seen',
+ cursor: 'some-cursor'
+ }
+ end
+
+ context 'with authorized user' do
+ let(:issues) { [] }
+
+ described_class::ISSUE_STATUS_VALUES.each do |status|
+ it "returns the issues with #{status} issue_status" do
+ params[:issue_status] = status
+ list_sentry_issues_args[:issue_status] = status
+ expect_list_sentry_issues_with(list_sentry_issues_args)
+
+ expect(result).to eq(status: :success, pagination: {}, issues: issues)
+ end
+ end
- described_class::ISSUE_STATUS_VALUES.each do |status|
- it "returns the issues with #{status} issue_status" do
- params[:issue_status] = status
- list_sentry_issues_args[:issue_status] = status
+ it 'returns the issues with no issue_status' do
expect_list_sentry_issues_with(list_sentry_issues_args)
expect(result).to eq(status: :success, pagination: {}, issues: issues)
end
- end
- it 'returns the issues with no issue_status' do
- expect_list_sentry_issues_with(list_sentry_issues_args)
+ it 'returns bad request for an issue_status not on the whitelist' do
+ params[:issue_status] = 'assigned'
+
+ expect(error_tracking_setting).not_to receive(:list_sentry_issues)
+ expect(result).to eq(message: "Bad Request: Invalid issue_status", status: :error, http_status: :bad_request)
+ end
- expect(result).to eq(status: :success, pagination: {}, issues: issues)
+ include_examples 'error tracking service data not ready', :list_sentry_issues
+ include_examples 'error tracking service sentry error handling', :list_sentry_issues
+ include_examples 'error tracking service http status handling', :list_sentry_issues
end
- it 'returns bad request for an issue_status not on the whitelist' do
- params[:issue_status] = 'assigned'
+ include_examples 'error tracking service unauthorized user'
+ include_examples 'error tracking service disabled'
- expect(error_tracking_setting).not_to receive(:list_sentry_issues)
- expect(result).to eq(message: "Bad Request: Invalid issue_status", status: :error, http_status: :bad_request)
+ def expect_list_sentry_issues_with(list_sentry_issues_args)
+ expect(error_tracking_setting)
+ .to receive(:list_sentry_issues)
+ .with(list_sentry_issues_args)
+ .and_return(issues: [], pagination: {})
end
-
- include_examples 'error tracking service data not ready', :list_sentry_issues
- include_examples 'error tracking service sentry error handling', :list_sentry_issues
- include_examples 'error tracking service http status handling', :list_sentry_issues
end
- include_examples 'error tracking service unauthorized user'
- include_examples 'error tracking service disabled'
+ context 'GitLab backend' do
+ let_it_be(:error1) { create(:error_tracking_error, name: 'foo', project: project) }
+ let_it_be(:error2) { create(:error_tracking_error, name: 'bar', project: project) }
- context 'integrated error tracking' do
- let_it_be(:error) { create(:error_tracking_error, project: project) }
+ let(:params) { { limit: '1' } }
before do
error_tracking_setting.update!(integrated: true)
@@ -63,7 +78,9 @@ RSpec.describe ErrorTracking::ListIssuesService do
it 'returns the error in expected format' do
expect(result[:status]).to eq(:success)
expect(result[:issues].size).to eq(1)
- expect(result[:issues].first.to_json).to eq(error.to_sentry_error.to_json)
+ expect(result[:issues].first.to_json).to eq(error2.to_sentry_error.to_json)
+ expect(result[:pagination][:next][:cursor]).to be_present
+ expect(result[:pagination][:previous]).to be_nil
end
end
end
@@ -76,10 +93,3 @@ RSpec.describe ErrorTracking::ListIssuesService do
end
end
end
-
-def expect_list_sentry_issues_with(list_sentry_issues_args)
- expect(error_tracking_setting)
- .to receive(:list_sentry_issues)
- .with(list_sentry_issues_args)
- .and_return(issues: [], pagination: {})
-end
diff --git a/spec/services/feature_flags/hook_service_spec.rb b/spec/services/feature_flags/hook_service_spec.rb
new file mode 100644
index 00000000000..02cdbbd86ac
--- /dev/null
+++ b/spec/services/feature_flags/hook_service_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::HookService do
+ describe '#execute_hooks' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, :repository, namespace: namespace) }
+ let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let_it_be(:user) { namespace.owner }
+
+ let!(:hook) { create(:project_hook, project: project) }
+ let(:hook_data) { double }
+
+ subject(:service) { described_class.new(feature_flag, user) }
+
+ describe 'HOOK_NAME' do
+ specify { expect(described_class::HOOK_NAME).to eq(:feature_flag_hooks) }
+ end
+
+ before do
+ allow(Gitlab::DataBuilder::FeatureFlag).to receive(:build).with(feature_flag, user).once.and_return(hook_data)
+ end
+
+ it 'calls feature_flag.project.execute_hooks' do
+ expect(feature_flag.project).to receive(:execute_hooks).with(hook_data, described_class::HOOK_NAME)
+
+ service.execute
+ end
+ end
+end
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 889b5551746..ee38c0fbb44 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -3,6 +3,19 @@
require 'spec_helper'
RSpec.describe Groups::TransferService do
+ shared_examples 'project namespace path is in sync with project path' do
+ it 'keeps project and project namespace attributes in sync' do
+ projects_with_project_namespace.each do |project|
+ project.reload
+
+ expect(project.full_path).to eq("#{group_full_path}/#{project.path}")
+ expect(project.project_namespace.full_path).to eq(project.full_path)
+ expect(project.project_namespace.parent).to eq(project.namespace)
+ expect(project.project_namespace.visibility_level).to eq(project.visibility_level)
+ end
+ end
+ end
+
let_it_be(:user) { create(:user) }
let_it_be(:new_parent_group) { create(:group, :public) }
@@ -169,6 +182,18 @@ RSpec.describe Groups::TransferService do
expect(project.full_path).to eq("#{group.path}/#{project.path}")
end
end
+
+ context 'when projects have project namespaces' do
+ let_it_be(:project1) { create(:project, :private, namespace: group) }
+ let_it_be(:project_namespace1) { create(:project_namespace, project: project1) }
+ let_it_be(:project2) { create(:project, :private, namespace: group) }
+ let_it_be(:project_namespace2) { create(:project_namespace, project: project2) }
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{group.path}" }
+ let(:projects_with_project_namespace) { [project1, project2] }
+ end
+ end
end
end
@@ -222,10 +247,10 @@ RSpec.describe Groups::TransferService do
context 'when the parent group has a project with the same path' do
let_it_be_with_reload(:group) { create(:group, :public, :nested, path: 'foo') }
+ let_it_be(:membership) { create(:group_member, :owner, group: new_parent_group, user: user) }
+ let_it_be(:project) { create(:project, path: 'foo', namespace: new_parent_group) }
before do
- create(:group_member, :owner, group: new_parent_group, user: user)
- create(:project, path: 'foo', namespace: new_parent_group)
group.update_attribute(:path, 'foo')
end
@@ -237,6 +262,19 @@ RSpec.describe Groups::TransferService do
transfer_service.execute(new_parent_group)
expect(transfer_service.error).to eq('Transfer failed: Validation failed: Group URL has already been taken')
end
+
+ context 'when projects have project namespaces' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ before do
+ transfer_service.execute(new_parent_group)
+ end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.full_path}" }
+ let(:projects_with_project_namespace) { [project] }
+ end
+ end
end
context 'when the group is allowed to be transferred' do
@@ -324,7 +362,7 @@ RSpec.describe Groups::TransferService do
let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true) }
it 'calls update service' do
- expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: 'disabled_with_override' }).and_call_original
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE }).and_call_original
transfer_service.execute(new_parent_group)
end
@@ -334,7 +372,7 @@ RSpec.describe Groups::TransferService do
let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false) }
it 'calls update service' do
- expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: 'disabled_and_unoverridable' }).and_call_original
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE }).and_call_original
transfer_service.execute(new_parent_group)
end
@@ -407,6 +445,8 @@ RSpec.describe Groups::TransferService do
context 'when transferring a group with project descendants' do
let!(:project1) { create(:project, :repository, :private, namespace: group) }
let!(:project2) { create(:project, :repository, :internal, namespace: group) }
+ let!(:project_namespace1) { create(:project_namespace, project: project1) }
+ let!(:project_namespace2) { create(:project_namespace, project: project2) }
before do
TestEnv.clean_test_path
@@ -432,18 +472,30 @@ RSpec.describe Groups::TransferService do
expect(project1.private?).to be_truthy
expect(project2.internal?).to be_truthy
end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
+ let(:projects_with_project_namespace) { [project1, project2] }
+ end
end
context 'when the new parent has a lower visibility than the projects' do
let!(:project1) { create(:project, :repository, :public, namespace: group) }
let!(:project2) { create(:project, :repository, :public, namespace: group) }
- let(:new_parent_group) { create(:group, :private) }
+ let!(:new_parent_group) { create(:group, :private) }
+ let!(:project_namespace1) { create(:project_namespace, project: project1) }
+ let!(:project_namespace2) { create(:project_namespace, project: project2) }
it 'updates projects visibility to match the new parent' do
group.projects.each do |project|
expect(project.private?).to be_truthy
end
end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
+ let(:projects_with_project_namespace) { [project1, project2] }
+ end
end
end
@@ -452,6 +504,8 @@ RSpec.describe Groups::TransferService do
let!(:project2) { create(:project, :repository, :internal, namespace: group) }
let!(:subgroup1) { create(:group, :private, parent: group) }
let!(:subgroup2) { create(:group, :internal, parent: group) }
+ let!(:project_namespace1) { create(:project_namespace, project: project1) }
+ let!(:project_namespace2) { create(:project_namespace, project: project2) }
before do
TestEnv.clean_test_path
@@ -480,6 +534,11 @@ RSpec.describe Groups::TransferService do
expect(project1.redirect_routes.count).to eq(1)
expect(project2.redirect_routes.count).to eq(1)
end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
+ let(:projects_with_project_namespace) { [project1, project2] }
+ end
end
context 'when transferring a group with nested groups and projects' do
@@ -651,6 +710,30 @@ RSpec.describe Groups::TransferService do
expect(project1.public?).to be_truthy
end
end
+
+ context 'when group has pending builds' do
+ let_it_be(:project) { create(:project, :public, namespace: group.reload) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:pending_build) { create(:ci_pending_build, project: project) }
+ let_it_be(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
+
+ before do
+ group.add_owner(user)
+ new_parent_group.add_owner(user)
+ end
+
+ it 'updates pending builds for the group', :aggregate_failures do
+ transfer_service.execute(new_parent_group)
+
+ pending_build.reload
+ unrelated_pending_build.reload
+
+ expect(pending_build.namespace_id).to eq(group.id)
+ expect(pending_build.namespace_traversal_ids).to eq(group.traversal_ids)
+ expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
+ expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
+ end
+ end
end
context 'when transferring a subgroup into root group' do
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index bc7c066fa04..e1bd3732820 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -287,7 +287,7 @@ RSpec.describe Groups::UpdateService do
let(:group) { create(:group) }
let(:project) { create(:project, shared_runners_enabled: true, group: group) }
- subject { described_class.new(group, user, shared_runners_setting: 'disabled_and_unoverridable').execute }
+ subject { described_class.new(group, user, shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE).execute }
before do
group.add_owner(user)
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index fe18277b5cd..53870e810b1 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -85,10 +85,10 @@ RSpec.describe Groups::UpdateSharedRunnersService do
context 'disable shared Runners' do
let_it_be(:group) { create(:group) }
- let(:params) { { shared_runners_setting: 'disabled_and_unoverridable' } }
+ let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE } }
it 'receives correct method and succeeds' do
- expect(group).to receive(:update_shared_runners_setting!).with('disabled_and_unoverridable')
+ expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_AND_UNOVERRIDABLE)
expect(subject[:status]).to eq(:success)
end
@@ -108,13 +108,13 @@ RSpec.describe Groups::UpdateSharedRunnersService do
end
context 'allow descendants to override' do
- let(:params) { { shared_runners_setting: 'disabled_with_override' } }
+ let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE } }
context 'top level group' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
it 'receives correct method and succeeds' do
- expect(group).to receive(:update_shared_runners_setting!).with('disabled_with_override')
+ expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_WITH_OVERRIDE)
expect(subject[:status]).to eq(:success)
end
diff --git a/spec/services/import/validate_remote_git_endpoint_service_spec.rb b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
new file mode 100644
index 00000000000..fbd8a3cb323
--- /dev/null
+++ b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::ValidateRemoteGitEndpointService do
+ include StubRequests
+
+ let_it_be(:base_url) { 'http://demo.host/path' }
+ let_it_be(:endpoint_url) { "#{base_url}/info/refs?service=git-upload-pack" }
+ let_it_be(:error_message) { "#{base_url} is not a valid HTTP Git repository" }
+
+ describe '#execute' do
+ let(:valid_response) do
+ { status: 200,
+ body: '001e# service=git-upload-pack',
+ headers: { 'Content-Type': 'application/x-git-upload-pack-advertisement' } }
+ end
+
+ it 'correctly handles URLs with fragment' do
+ allow(Gitlab::HTTP).to receive(:get)
+
+ described_class.new(url: "#{base_url}#somehash").execute
+
+ expect(Gitlab::HTTP).to have_received(:get).with(endpoint_url, basic_auth: nil, stream_body: true, follow_redirects: false)
+ end
+
+ context 'when receiving HTTP response' do
+ subject { described_class.new(url: base_url) }
+
+ it 'returns success when HTTP response is valid and contains correct payload' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response)
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.success?).to be(true)
+ end
+
+ it 'reports error when status code is not 200' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ status: 301 }))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(error_message)
+ end
+
+ it 'reports error when invalid URL is provided' do
+ result = described_class.new(url: 1).execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq('1 is not a valid URL')
+ end
+
+ it 'reports error when required header is missing' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ headers: nil }))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(error_message)
+ end
+
+ it 'reports error when body is in invalid format' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid content' }))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(error_message)
+ end
+
+ it 'reports error when exception is raised' do
+ stub_full_request(endpoint_url, method: :get).to_raise(SocketError.new('dummy message'))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(error_message)
+ end
+ end
+
+ it 'passes basic auth when credentials are provided' do
+ allow(Gitlab::HTTP).to receive(:get)
+
+ described_class.new(url: "#{base_url}#somehash", user: 'user', password: 'password').execute
+
+ expect(Gitlab::HTTP).to have_received(:get).with(endpoint_url, basic_auth: { username: 'user', password: 'password' }, stream_body: true, follow_redirects: false)
+ end
+ end
+end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 14e6b44f7b0..93ef046a632 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -22,6 +22,18 @@ RSpec.describe Issues::CloseService do
describe '#execute' do
let(:service) { described_class.new(project: project, current_user: user) }
+ context 'when skip_authorization is true' do
+ it 'does close the issue even if user is not authorized' do
+ non_authorized_user = create(:user)
+
+ service = described_class.new(project: project, current_user: non_authorized_user)
+
+ expect do
+ service.execute(issue, skip_authorization: true)
+ end.to change { issue.reload.state }.from('opened').to('closed')
+ end
+ end
+
it 'checks if the user is authorized to update the issue' do
expect(service).to receive(:can?).with(user, :update_issue, issue)
.and_call_original
@@ -156,7 +168,7 @@ RSpec.describe Issues::CloseService do
context 'updating `metrics.first_mentioned_in_commit_at`' do
context 'when `metrics.first_mentioned_in_commit_at` is not set' do
it 'uses the first commit authored timestamp' do
- expected = closing_merge_request.commits.first.authored_date
+ expected = closing_merge_request.commits.take(100).last.authored_date
close_issue
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 3988069d83a..1887be4896e 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -10,6 +10,25 @@ RSpec.describe Issues::CreateService do
let(:spam_params) { double }
+ describe '.rate_limiter_scoped_and_keyed' do
+ it 'is set via the rate_limit call' do
+ expect(described_class.rate_limiter_scoped_and_keyed).to be_a(RateLimitedService::RateLimiterScopedAndKeyed)
+
+ expect(described_class.rate_limiter_scoped_and_keyed.key).to eq(:issues_create)
+ expect(described_class.rate_limiter_scoped_and_keyed.opts[:scope]).to eq(%i[project current_user])
+ expect(described_class.rate_limiter_scoped_and_keyed.opts[:users_allowlist].call).to eq(%w[support-bot])
+ expect(described_class.rate_limiter_scoped_and_keyed.rate_limiter_klass).to eq(Gitlab::ApplicationRateLimiter)
+ end
+ end
+
+ describe '#rate_limiter_bypassed' do
+ let(:subject) { described_class.new(project: project, spam_params: {}) }
+
+ it 'is nil by default' do
+ expect(subject.rate_limiter_bypassed).to be_nil
+ end
+ end
+
describe '#execute' do
let_it_be(:assignee) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
diff --git a/spec/services/issues/relative_position_rebalancing_service_spec.rb b/spec/services/issues/relative_position_rebalancing_service_spec.rb
index d5d81770817..20064bd7e4b 100644
--- a/spec/services/issues/relative_position_rebalancing_service_spec.rb
+++ b/spec/services/issues/relative_position_rebalancing_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_shared_state do
- let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project, :repository_disabled, skip_disk_validation: true) }
let_it_be(:user) { project.creator }
let_it_be(:start) { RelativePositioning::START_POSITION }
let_it_be(:max_pos) { RelativePositioning::MAX_POSITION }
@@ -28,12 +28,18 @@ RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_s
end
end
+ let_it_be(:nil_clump, reload: true) do
+ (1..100).to_a.map do |i|
+ create(:issue, project: project, author: user, relative_position: nil)
+ end
+ end
+
before do
stub_feature_flags(issue_rebalancing_with_retry: false)
end
def issues_in_position_order
- project.reload.issues.reorder(relative_position: :asc).to_a
+ project.reload.issues.order_by_relative_position.to_a
end
subject(:service) { described_class.new(Project.id_in(project)) }
@@ -44,16 +50,19 @@ RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_s
expect { service.execute }.not_to change { issues_in_position_order.map(&:id) }
+ caching = service.send(:caching)
all_issues.each(&:reset)
gaps = all_issues.take(all_issues.count - 1).zip(all_issues.drop(1)).map do |a, b|
b.relative_position - a.relative_position
end
+ expect(caching.issue_count).to eq(900)
expect(gaps).to all(be > RelativePositioning::MIN_GAP)
expect(all_issues.first.relative_position).to be > (RelativePositioning::MIN_POSITION * 0.9999)
expect(all_issues.last.relative_position).to be < (RelativePositioning::MAX_POSITION * 0.9999)
expect(project.root_namespace.issue_repositioning_disabled?).to be false
+ expect(project.issues.with_null_relative_position.count).to eq(100)
end
it 'is idempotent' do
@@ -111,7 +120,7 @@ RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_s
allow(caching).to receive(:concurrent_running_rebalances_count).and_return(10)
allow(service).to receive(:caching).and_return(caching)
- expect { service.execute }.not_to raise_error(Issues::RelativePositionRebalancingService::TooManyConcurrentRebalances)
+ expect { service.execute }.not_to raise_error
end
context 're-balancing is retried on statement timeout exceptions' do
diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb
index 86190c4e475..c9469b861ac 100644
--- a/spec/services/issues/reopen_service_spec.rb
+++ b/spec/services/issues/reopen_service_spec.rb
@@ -8,18 +8,26 @@ RSpec.describe Issues::ReopenService do
describe '#execute' do
context 'when user is not authorized to reopen issue' do
- before do
+ it 'does not reopen the issue' do
guest = create(:user)
project.add_guest(guest)
- perform_enqueued_jobs do
- described_class.new(project: project, current_user: guest).execute(issue)
- end
- end
+ described_class.new(project: project, current_user: guest).execute(issue)
- it 'does not reopen the issue' do
expect(issue).to be_closed
end
+
+ context 'when skip_authorization is true' do
+        it 'does reopen the issue even if user is not authorized' do
+ non_authorized_user = create(:user)
+
+ service = described_class.new(project: project, current_user: non_authorized_user)
+
+ expect do
+ service.execute(issue, skip_authorization: true)
+ end.to change { issue.reload.state }.from('closed').to('opened')
+ end
+ end
end
context 'when user is authorized to reopen issue' do
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 15ed5c5a33f..2e6e6041fc3 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -80,7 +80,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'does not add a member' do
expect(execute_service[:status]).to eq(:error)
- expect(execute_service[:message]).to eq('Invite email has already been taken')
+ expect(execute_service[:message]).to eq("The member's email address has already been taken")
expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
end
end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index dd82facaf14..478733e8aa0 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -150,7 +150,7 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
expect_to_create_members(count: 1)
expect(result[:status]).to eq(:error)
expect(result[:message][invited_member.invite_email])
- .to eq("Invite email has already been taken")
+ .to eq("The member's email address has already been taken")
expect(project.users).to include project_user
end
end
diff --git a/spec/services/merge_requests/assign_issues_service_spec.rb b/spec/services/merge_requests/assign_issues_service_spec.rb
index b857f26c052..cf405c0102e 100644
--- a/spec/services/merge_requests/assign_issues_service_spec.rb
+++ b/spec/services/merge_requests/assign_issues_service_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe MergeRequests::AssignIssuesService do
expect(service.assignable_issues.map(&:id)).to include(issue.id)
end
- it 'ignores issues the user cannot update assignee on' do
+ it 'ignores issues the user cannot update assignee on', :sidekiq_inline do
project.team.truncate
expect(service.assignable_issues).to be_empty
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 0f282384661..ab3d9880d29 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -440,7 +440,7 @@ RSpec.describe MergeRequests::BuildService do
expect(merge_request.title).to eq('Closes #1234 Second commit')
end
- it 'adds the remaining lines of the first multi-line commit message as the description' do
+ it 'adds the remaining lines of the first multi-line commit message as the description', :sidekiq_inline do
expect(merge_request.description).to eq('Create the app')
end
end
diff --git a/spec/services/merge_requests/mergeability/check_base_service_spec.rb b/spec/services/merge_requests/mergeability/check_base_service_spec.rb
new file mode 100644
index 00000000000..f07522b43cb
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_base_service_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckBaseService do
+ subject(:check_base_service) { described_class.new(merge_request: merge_request, params: params) }
+
+ let(:merge_request) { double }
+ let(:params) { double }
+
+ describe '#merge_request' do
+ it 'returns the merge_request' do
+ expect(check_base_service.merge_request).to eq merge_request
+ end
+ end
+
+ describe '#params' do
+ it 'returns the params' do
+ expect(check_base_service.params).to eq params
+ end
+ end
+
+ describe '#skip?' do
+ it 'raises NotImplementedError' do
+ expect { check_base_service.skip? }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'raises NotImplementedError' do
+      expect { check_base_service.cacheable? }.to raise_error(NotImplementedError)
+ end
+ end
+
+  describe '#cache_key' do
+    it 'raises NotImplementedError' do
+      expect { check_base_service.cache_key }.to raise_error(NotImplementedError)
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
new file mode 100644
index 00000000000..6fbbecd7c0e
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckCiStatusService do
+ subject(:check_ci_status) { described_class.new(merge_request: merge_request, params: params) }
+
+ let(:merge_request) { build(:merge_request) }
+ let(:params) { { skip_ci_check: skip_check } }
+ let(:skip_check) { false }
+
+ describe '#execute' do
+ before do
+ expect(merge_request).to receive(:mergeable_ci_state?).and_return(mergeable)
+ end
+
+    context 'when the merge request is in a mergeable state' do
+ let(:mergeable) { true }
+
+ it 'returns a check result with status success' do
+ expect(check_ci_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+
+ context 'when the merge request is not in a mergeable state' do
+ let(:mergeable) { false }
+
+ it 'returns a check result with status failed' do
+ expect(check_ci_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ end
+ end
+ end
+
+ describe '#skip?' do
+ context 'when skip check is true' do
+ let(:skip_check) { true }
+
+ it 'returns true' do
+ expect(check_ci_status.skip?).to eq true
+ end
+ end
+
+ context 'when skip check is false' do
+ let(:skip_check) { false }
+
+ it 'returns false' do
+ expect(check_ci_status.skip?).to eq false
+ end
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_ci_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
new file mode 100644
index 00000000000..170d99f4642
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::RunChecksService do
+ subject(:run_checks) { described_class.new(merge_request: merge_request, params: {}) }
+
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ describe '#CHECKS' do
+ it 'contains every subclass of the base checks service' do
+ expect(described_class::CHECKS).to contain_exactly(*MergeRequests::Mergeability::CheckBaseService.subclasses)
+ end
+ end
+
+ describe '#execute' do
+ subject(:execute) { run_checks.execute }
+
+ let(:params) { {} }
+ let(:success_result) { Gitlab::MergeRequests::Mergeability::CheckResult.success }
+
+ context 'when every check is skipped' do
+ before do
+ MergeRequests::Mergeability::CheckBaseService.subclasses.each do |subclass|
+ expect_next_instance_of(subclass) do |service|
+ expect(service).to receive(:skip?).and_return(true)
+ end
+ end
+ end
+
+ it 'is still a success' do
+ expect(execute.all?(&:success?)).to eq(true)
+ end
+ end
+
+ context 'when a check is skipped' do
+ it 'does not execute the check' do
+ expect_next_instance_of(MergeRequests::Mergeability::CheckCiStatusService) do |service|
+ expect(service).to receive(:skip?).and_return(true)
+ expect(service).not_to receive(:execute)
+ end
+
+ expect(execute).to match_array([])
+ end
+ end
+
+ context 'when a check is not skipped' do
+ let(:cacheable) { true }
+ let(:merge_check) { instance_double(MergeRequests::Mergeability::CheckCiStatusService) }
+
+ before do
+ expect(MergeRequests::Mergeability::CheckCiStatusService).to receive(:new).and_return(merge_check)
+ expect(merge_check).to receive(:skip?).and_return(false)
+ allow(merge_check).to receive(:cacheable?).and_return(cacheable)
+ allow(merge_check).to receive(:execute).and_return(success_result)
+ end
+
+ context 'when the check is cacheable' do
+ context 'when the check is cached' do
+ it 'returns the cached result' do
+ expect_next_instance_of(Gitlab::MergeRequests::Mergeability::ResultsStore) do |service|
+ expect(service).to receive(:read).with(merge_check: merge_check).and_return(success_result)
+ end
+
+ expect(execute).to match_array([success_result])
+ end
+ end
+
+ context 'when the check is not cached' do
+ it 'writes and returns the result' do
+ expect_next_instance_of(Gitlab::MergeRequests::Mergeability::ResultsStore) do |service|
+ expect(service).to receive(:read).with(merge_check: merge_check).and_return(nil)
+ expect(service).to receive(:write).with(merge_check: merge_check, result_hash: success_result.to_hash).and_return(true)
+ end
+
+ expect(execute).to match_array([success_result])
+ end
+ end
+ end
+
+ context 'when check is not cacheable' do
+ let(:cacheable) { false }
+
+ it 'does not call the results store' do
+ expect(Gitlab::MergeRequests::Mergeability::ResultsStore).not_to receive(:new)
+
+ expect(execute).to match_array([success_result])
+ end
+ end
+
+ context 'when mergeability_caching is turned off' do
+ before do
+ stub_feature_flags(mergeability_caching: false)
+ end
+
+ it 'does not call the results store' do
+ expect(Gitlab::MergeRequests::Mergeability::ResultsStore).not_to receive(:new)
+
+ expect(execute).to match_array([success_result])
+ end
+ end
+ end
+ end
+end
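Read together, the examples above suggest an execute loop along these lines. This is a sketch under assumptions: the ResultsStore constructor arguments, the Feature.enabled? guard, and the private helpers are not taken from the spec.

# Illustrative sketch of the orchestration the spec exercises.
module MergeRequests
  module Mergeability
    class RunChecksService
      CHECKS = CheckBaseService.subclasses

      def initialize(merge_request:, params:)
        @merge_request = merge_request
        @params = params
      end

      def execute
        CHECKS.each_with_object([]) do |check_class, results|
          check = check_class.new(merge_request: @merge_request, params: @params)

          next if check.skip?

          results << if caching_allowed?(check)
                       run_with_cache(check)
                     else
                       check.execute
                     end
        end
      end

      private

      def caching_allowed?(check)
        check.cacheable? && Feature.enabled?(:mergeability_caching, @merge_request.project)
      end

      def run_with_cache(check)
        # Constructor arguments are assumed; the spec only stubs the next instance.
        store = Gitlab::MergeRequests::Mergeability::ResultsStore.new(merge_request: @merge_request)
        cached = store.read(merge_check: check)
        return cached if cached

        check.execute.tap { |result| store.write(merge_check: check, result_hash: result.to_hash) }
      end
    end
  end
end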
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index f00a8928109..348ea9ad7d4 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -701,7 +701,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }
- it 'records an error' do
+ it 'records an error', :sidekiq_inline do
Members::DestroyService.new(user1).execute(ProjectMember.find_by!(user_id: user1.id))
service.execute
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index 0a56f01ebba..bca954c3959 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Notes::QuickActionsService do
let(:note_text) { "/relate #{other_issue.to_reference}" }
let(:note) { create(:note_on_issue, noteable: issue, project: project, note: note_text) }
- context 'user cannot relate issues' do
+ context 'user cannot relate issues', :sidekiq_inline do
before do
project.team.find_member(maintainer.id).destroy!
project.update!(visibility: Gitlab::VisibilityLevel::PUBLIC)
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index a03f1f17b39..48718cbc24a 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -3155,7 +3155,7 @@ RSpec.describe NotificationService, :mailer do
notification.pipeline_finished(pipeline)
end
- it 'does not send emails' do
+ it 'does not send emails', :sidekiq_inline do
should_not_email_anyone
end
end
diff --git a/spec/services/packages/composer/create_package_service_spec.rb b/spec/services/packages/composer/create_package_service_spec.rb
index 2ffd0a269f2..593777faa55 100644
--- a/spec/services/packages/composer/create_package_service_spec.rb
+++ b/spec/services/packages/composer/create_package_service_spec.rb
@@ -24,25 +24,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
let(:created_package) { Packages::Package.composer.last }
- shared_examples 'using the cache update worker' do
- context 'with remove_composer_v1_cache_code enabled' do
- it 'does not enqueue a cache update job' do
- expect(::Packages::Composer::CacheUpdateWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
- context 'with remove_composer_v1_cache_code disabled' do
- it 'enqueues a cache update job' do
- stub_feature_flags(remove_composer_v1_cache_code: true)
- expect(::Packages::Composer::CacheUpdateWorker).not_to receive(:perform_async)
-
- subject
- end
- end
- end
-
context 'without an existing package' do
context 'with a branch' do
let(:branch) { project.repository.find_branch('master') }
@@ -64,7 +45,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
it_behaves_like 'assigns build to package'
it_behaves_like 'assigns status to package'
- it_behaves_like 'using the cache update worker'
end
context 'with a tag' do
@@ -89,7 +69,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
it_behaves_like 'assigns build to package'
it_behaves_like 'assigns status to package'
- it_behaves_like 'using the cache update worker'
end
end
@@ -106,8 +85,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
.to change { Packages::Package.composer.count }.by(0)
.and change { Packages::Composer::Metadatum.count }.by(0)
end
-
- it_behaves_like 'using the cache update worker'
end
context 'belonging to another project' do
@@ -129,8 +106,6 @@ RSpec.describe Packages::Composer::CreatePackageService do
.to change { Packages::Package.composer.count }.by(1)
.and change { Packages::Composer::Metadatum.count }.by(1)
end
-
- it_behaves_like 'using the cache update worker'
end
end
end
diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb
index 3069a2806b2..8e5e36cdbcb 100644
--- a/spec/services/packages/debian/process_changes_service_spec.rb
+++ b/spec/services/packages/debian/process_changes_service_spec.rb
@@ -36,7 +36,6 @@ RSpec.describe Packages::Debian::ProcessChangesService do
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and not_change { incoming.package_files.count }
- .and not_change { distribution.reload.needs_update? }
.and raise_error(Packages::Debian::ExtractChangesMetadataService::ExtractionError, 'is not a changes file')
end
end
@@ -54,7 +53,6 @@ RSpec.describe Packages::Debian::ProcessChangesService do
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and not_change { incoming.package_files.count }
- .and not_change { distribution.reload.needs_update? }
.and raise_error(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
end
end
diff --git a/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb b/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb
new file mode 100644
index 00000000000..dfe2ff9e57c
--- /dev/null
+++ b/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Projects::ContainerRepository::CacheTagsCreatedAtService, :clean_gitlab_redis_cache do
+ let_it_be(:dummy_tag_class) { Struct.new(:name, :created_at) }
+ let_it_be(:repository) { create(:container_repository) }
+
+ let(:tags) { create_tags(5) }
+ let(:service) { described_class.new(repository) }
+
+ shared_examples 'not interacting with redis' do
+ it 'does not interact with redis' do
+ expect(::Gitlab::Redis::Cache).not_to receive(:with)
+
+ subject
+ end
+ end
+
+ describe '#populate' do
+ subject { service.populate(tags) }
+
+ context 'with tags' do
+ it 'gets values from redis' do
+ expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original
+
+ expect(subject).to eq(0)
+
+ tags.each { |t| expect(t.created_at).to eq(nil) }
+ end
+
+ context 'with cached values' do
+ let(:cached_tags) { tags.first(2) }
+
+ before do
+ ::Gitlab::Redis::Cache.with do |redis|
+ cached_tags.each do |tag|
+ redis.set(cache_key(tag), rfc3339(10.days.ago))
+ end
+ end
+ end
+
+ it 'gets values from redis' do
+ expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original
+
+ expect(subject).to eq(2)
+
+ cached_tags.each { |t| expect(t.created_at).not_to eq(nil) }
+ (tags - cached_tags).each { |t| expect(t.created_at).to eq(nil) }
+ end
+ end
+ end
+
+ context 'with no tags' do
+ let(:tags) { [] }
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ describe '#insert' do
+ let(:max_ttl) { 90.days }
+
+ subject { service.insert(tags, max_ttl) }
+
+ context 'with tags' do
+ let(:tag) { tags.first }
+ let(:ttl) { 90.days - 3.days }
+
+ before do
+ travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0))
+
+ tag.created_at = DateTime.rfc3339(3.days.ago.rfc3339)
+ end
+
+ after do
+ travel_back
+ end
+
+ it 'inserts values in redis' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect(redis)
+ .to receive(:set)
+ .with(cache_key(tag), rfc3339(tag.created_at), ex: ttl.to_i)
+ .and_call_original
+ end
+
+ subject
+ end
+
+ context 'with some of them already cached' do
+ let(:tag) { tags.first }
+
+ before do
+ ::Gitlab::Redis::Cache.with do |redis|
+ redis.set(cache_key(tag), rfc3339(10.days.ago))
+ end
+ service.populate(tags)
+ end
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ context 'with no tags' do
+ let(:tags) { [] }
+
+ it_behaves_like 'not interacting with redis'
+ end
+
+ context 'with no expires_in' do
+ let(:max_ttl) { nil }
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ def create_tags(size)
+ Array.new(size) do |i|
+ dummy_tag_class.new("Tag #{i}", nil)
+ end
+ end
+
+ def cache_key(tag)
+ "container_repository:{#{repository.id}}:tag:#{tag.name}:created_at"
+ end
+
+ def rfc3339(date_time)
+ # DateTime#rfc3339 formats differently from ActiveSupport::TimeWithZone#rfc3339.
+ # The cache stores the DateTime#rfc3339 representation, so normalize to that.
+ DateTime.rfc3339(date_time.rfc3339).rfc3339
+ end
+end
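A sketch of the Redis interaction these examples cover. The cache key format and the RFC 3339 round-trip are lifted from the helpers above; the hit tracking shared between populate and insert is an assumption made to keep the sketch self-contained.

# Illustrative sketch of the caching behaviour exercised above.
require 'set'

module Projects
  module ContainerRepository
    class CacheTagsCreatedAtService
      def initialize(container_repository)
        @container_repository = container_repository
        @cached_tag_names = Set.new
      end

      # Fills tag.created_at from Redis where possible and returns the number of hits.
      def populate(tags)
        return 0 if tags.empty?

        keys = tags.map { |tag| cache_key(tag) }
        values = ::Gitlab::Redis::Cache.with { |redis| redis.mget(keys) }

        tags.zip(values).count do |tag, value|
          next false unless value

          tag.created_at = DateTime.rfc3339(value)
          @cached_tag_names << tag.name
          true
        end
      end

      # Caches created_at for tags not already cached, expiring entries within max_ttl.
      def insert(tags, max_ttl)
        return if max_ttl.nil?

        missing = tags.select { |tag| tag.created_at && !@cached_tag_names.include?(tag.name) }
        return if missing.empty?

        ::Gitlab::Redis::Cache.with do |redis|
          missing.each do |tag|
            ttl = max_ttl.to_i - (Time.zone.now - tag.created_at).to_i
            redis.set(cache_key(tag), tag.created_at.rfc3339, ex: ttl) if ttl > 0
          end
        end
      end

      private

      def cache_key(tag)
        "container_repository:{#{@container_repository.id}}:tag:#{tag.name}:created_at"
      end
    end
  end
end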
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index eed22416868..289bbf4540e 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
-RSpec.describe Projects::ContainerRepository::CleanupTagsService do
+RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_redis_cache do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :private) }
- let_it_be(:repository) { create(:container_repository, :root, project: project) }
- let(:service) { described_class.new(project, user, params) }
+ let(:repository) { create(:container_repository, :root, project: project) }
+ let(:service) { described_class.new(repository, user, params) }
let(:tags) { %w[latest A Ba Bb C D E] }
before do
@@ -39,291 +39,442 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
end
describe '#execute' do
- subject { service.execute(repository) }
+ subject { service.execute }
- context 'when no params are specified' do
- let(:params) { {} }
+ shared_examples 'reading and removing tags' do |caching_enabled: true|
+ context 'when no params are specified' do
+ let(:params) { {} }
- it 'does not remove anything' do
- expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:execute)
+ it 'does not remove anything' do
+ expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
+ .not_to receive(:execute)
+ expect_no_caching
- is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
- end
- end
-
- context 'when regex matching everything is specified' do
- shared_examples 'removes all matches' do
- it 'does remove all tags except latest' do
- expect_delete(%w(A Ba Bb C D E))
-
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
+ is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
end
end
- let(:params) do
- { 'name_regex_delete' => '.*' }
- end
+ context 'when regex matching everything is specified' do
+ shared_examples 'removes all matches' do
+ it 'does remove all tags except latest' do
+ expect_no_caching
- it_behaves_like 'removes all matches'
+ expect_delete(%w(A Ba Bb C D E))
+
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
+ end
+ end
- context 'with deprecated name_regex param' do
let(:params) do
- { 'name_regex' => '.*' }
+ { 'name_regex_delete' => '.*' }
end
it_behaves_like 'removes all matches'
+
+ context 'with deprecated name_regex param' do
+ let(:params) do
+ { 'name_regex' => '.*' }
+ end
+
+ it_behaves_like 'removes all matches'
+ end
end
- end
- context 'with invalid regular expressions' do
- RSpec.shared_examples 'handling an invalid regex' do
- it 'keeps all tags' do
- expect(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:new)
- subject
+ context 'with invalid regular expressions' do
+ shared_examples 'handling an invalid regex' do
+ it 'keeps all tags' do
+ expect_no_caching
+
+ expect(Projects::ContainerRepository::DeleteTagsService)
+ .not_to receive(:new)
+
+ subject
+ end
+
+ it { is_expected.to eq(status: :error, message: 'invalid regex') }
+
+ it 'calls error tracking service' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
+
+ subject
+ end
end
- it { is_expected.to eq(status: :error, message: 'invalid regex') }
+ context 'when name_regex_delete is invalid' do
+ let(:params) { { 'name_regex_delete' => '*test*' } }
+
+ it_behaves_like 'handling an invalid regex'
+ end
- it 'calls error tracking service' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
+ context 'when name_regex is invalid' do
+ let(:params) { { 'name_regex' => '*test*' } }
- subject
+ it_behaves_like 'handling an invalid regex'
end
- end
- context 'when name_regex_delete is invalid' do
- let(:params) { { 'name_regex_delete' => '*test*' } }
+ context 'when name_regex_keep is invalid' do
+ let(:params) { { 'name_regex_keep' => '*test*' } }
- it_behaves_like 'handling an invalid regex'
+ it_behaves_like 'handling an invalid regex'
+ end
end
- context 'when name_regex is invalid' do
- let(:params) { { 'name_regex' => '*test*' } }
+ context 'when delete regex matching specific tags is used' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D' }
+ end
- it_behaves_like 'handling an invalid regex'
- end
+ it 'does remove C and D' do
+ expect_delete(%w(C D))
- context 'when name_regex_keep is invalid' do
- let(:params) { { 'name_regex_keep' => '*test*' } }
+ expect_no_caching
- it_behaves_like 'handling an invalid regex'
- end
- end
+ is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
+ end
- context 'when delete regex matching specific tags is used' do
- let(:params) do
- { 'name_regex_delete' => 'C|D' }
- end
+ context 'with overriding allow regex' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D',
+ 'name_regex_keep' => 'C' }
+ end
- it 'does remove C and D' do
- expect_delete(%w(C D))
+ it 'does not remove C' do
+ expect_delete(%w(D))
- is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
- end
+ expect_no_caching
- context 'with overriding allow regex' do
- let(:params) do
- { 'name_regex_delete' => 'C|D',
- 'name_regex_keep' => 'C' }
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ end
end
- it 'does not remove C' do
- expect_delete(%w(D))
+ context 'with name_regex_delete overriding deprecated name_regex' do
+ let(:params) do
+ { 'name_regex' => 'C|D',
+ 'name_regex_delete' => 'D' }
+ end
+
+ it 'does not remove C' do
+ expect_delete(%w(D))
+
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ end
end
end
- context 'with name_regex_delete overriding deprecated name_regex' do
+ context 'with allow regex value' do
let(:params) do
- { 'name_regex' => 'C|D',
- 'name_regex_delete' => 'D' }
+ { 'name_regex_delete' => '.*',
+ 'name_regex_keep' => 'B.*' }
end
- it 'does not remove C' do
- expect_delete(%w(D))
+ it 'does not remove B*' do
+ expect_delete(%w(A C D E))
+
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
end
end
- end
- context 'with allow regex value' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'name_regex_keep' => 'B.*' }
- end
+ context 'when keeping only N tags' do
+ let(:params) do
+ { 'name_regex' => 'A|B.*|C',
+ 'keep_n' => 1 }
+ end
- it 'does not remove B*' do
- expect_delete(%w(A C D E))
+ it 'sorts tags by date' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
- end
+ expect_no_caching
- context 'when keeping only N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C',
- 'keep_n' => 1 }
+ expect(service).to receive(:order_by_date).and_call_original
+
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
+ end
end
- it 'sorts tags by date' do
- expect_delete(%w(Bb Ba C))
+ context 'when not keeping N tags' do
+ let(:params) do
+ { 'name_regex' => 'A|B.*|C' }
+ end
+
+ it 'does not sort tags by date' do
+ expect_delete(%w(A Ba Bb C))
- expect(service).to receive(:order_by_date).and_call_original
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
- end
- end
+ expect(service).not_to receive(:order_by_date)
- context 'when not keeping N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C' }
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
+ end
end
- it 'does not sort tags by date' do
- expect_delete(%w(A Ba Bb C))
+ context 'when removing keeping only 3' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 3 }
+ end
- expect(service).not_to receive(:order_by_date)
+ it 'does remove B* and C as they are the oldest' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
- end
+ expect_no_caching
- context 'when removing keeping only 3' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 3 }
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C as they are the oldest' do
- expect_delete(%w(Bb Ba C))
+ context 'when removing older than 1 day' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'older_than' => '1 day' }
+ end
+
+ it 'does remove B* and C as they are older than 1 day' do
+ expect_delete(%w(Ba Bb C))
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
- end
+ expect_no_caching
- context 'when removing older than 1 day' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'older_than' => '1 day' }
+ is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C as they are older than 1 day' do
- expect_delete(%w(Ba Bb C))
+ context 'when combining all parameters' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
+ end
+
+ it 'does remove B* and C' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
- end
- end
+ expect_no_caching
- context 'when combining all parameters' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day' }
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C' do
- expect_delete(%w(Bb Ba C))
+ context 'when running a container_expiration_policy' do
+ let(:user) { nil }
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
- end
+ context 'with valid container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true }
+ end
- context 'when running a container_expiration_policy' do
- let(:user) { nil }
+ it 'succeeds without a user' do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- context 'with valid container_expiration_policy param' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true }
+ caching_enabled ? expect_caching : expect_no_caching
+
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'succeeds without a user' do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ context 'without container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
+ end
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ it 'fails' do
+ is_expected.to eq(status: :error, message: 'access denied')
+ end
end
end
- context 'without container_expiration_policy param' do
+ context 'truncating the tags list' do
let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day' }
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1
+ }
+ end
+
+ shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
+ it 'returns the response' do
+ expect_no_caching
+
+ result = subject
+
+ service_response = expected_service_response(
+ status: status,
+ original_size: original_size,
+ before_truncate_size: before_truncate_size,
+ after_truncate_size: after_truncate_size,
+ before_delete_size: before_delete_size,
+ deleted: nil
+ )
+
+ expect(result).to eq(service_response)
+ end
+ end
+
+ where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
+ false | 10 | :success | :success | false
+ false | 10 | :error | :error | false
+ false | 3 | :success | :success | false
+ false | 3 | :error | :error | false
+ false | 0 | :success | :success | false
+ false | 0 | :error | :error | false
+ true | 10 | :success | :success | false
+ true | 10 | :error | :error | false
+ true | 3 | :success | :error | true
+ true | 3 | :error | :error | true
+ true | 0 | :success | :success | false
+ true | 0 | :error | :error | false
end
- it 'fails' do
- is_expected.to eq(status: :error, message: 'access denied')
+ with_them do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
+ stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
+ allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
+ expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ end
+ end
+
+ original_size = 7
+ keep_n = 1
+
+ it_behaves_like(
+ 'returning the response',
+ status: params[:expected_status],
+ original_size: original_size,
+ before_truncate_size: original_size - keep_n,
+ after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
+ before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
+ )
end
end
end
- context 'truncating the tags list' do
+ context 'caching' do
let(:params) do
{
'name_regex_delete' => '.*',
- 'keep_n' => 1
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true
+ }
+ end
+
+ let(:tags_and_created_ats) do
+ {
+ 'A' => 1.hour.ago,
+ 'Ba' => 5.days.ago,
+ 'Bb' => 5.days.ago,
+ 'C' => 1.month.ago,
+ 'D' => nil,
+ 'E' => nil
}
end
- shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
- it 'returns the response' do
- result = subject
+ let(:cacheable_tags) { tags_and_created_ats.reject { |_, value| value.nil? } }
- service_response = expected_service_response(
- status: status,
- original_size: original_size,
- before_truncate_size: before_truncate_size,
- after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size,
- deleted: nil
- )
+ before do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0))
+ # We froze time so we need to set the created_at stubs again
+ stub_digest_config('sha256:configA', 1.hour.ago)
+ stub_digest_config('sha256:configB', 5.days.ago)
+ stub_digest_config('sha256:configC', 1.month.ago)
+ end
- expect(result).to eq(service_response)
- end
+ after do
+ travel_back
end
- where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
- false | 10 | :success | :success | false
- false | 10 | :error | :error | false
- false | 3 | :success | :success | false
- false | 3 | :error | :error | false
- false | 0 | :success | :success | false
- false | 0 | :error | :error | false
- true | 10 | :success | :success | false
- true | 10 | :error | :error | false
- true | 3 | :success | :error | true
- true | 3 | :error | :error | true
- true | 0 | :success | :success | false
- true | 0 | :error | :error | false
+ it 'caches the created_at values' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect_mget(redis, tags_and_created_ats.keys)
+
+ expect_set(redis, cacheable_tags)
+ end
+
+ expect(subject).to include(cached_tags_count: 0)
end
- with_them do
+ context 'with cached values' do
before do
- stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
- stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
- allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
- expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ ::Gitlab::Redis::Cache.with do |redis|
+ redis.set(cache_key('C'), rfc3339(1.month.ago))
+ end
+ end
+
+ it 'uses them' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect_mget(redis, tags_and_created_ats.keys)
+
+ # because C is already in cache, it should not be cached again
+ expect_set(redis, cacheable_tags.except('C'))
+ end
+
+ # We will ping the container registry for all tags *except* for C because it's cached
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configA").and_call_original
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configB").twice.and_call_original
+ expect(ContainerRegistry::Blob).not_to receive(:new).with(repository, "digest" => "sha256:configC")
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configD").and_call_original
+
+ expect(subject).to include(cached_tags_count: 1)
+ end
+ end
+
+ def expect_mget(redis, keys)
+ expect(redis).to receive(:mget).with(keys.map(&method(:cache_key))).and_call_original
+ end
+
+ def expect_set(redis, tags)
+ tags.each do |tag_name, created_at|
+ ex = 1.day.seconds - (Time.zone.now - created_at).seconds
+ if ex > 0
+ expect(redis).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex.to_i)
end
end
+ end
+
+ def cache_key(tag_name)
+ "container_repository:{#{repository.id}}:tag:#{tag_name}:created_at"
+ end
+
+ def rfc3339(date_time)
+ # DateTime#rfc3339 formats differently from ActiveSupport::TimeWithZone#rfc3339.
+ # The cache stores the DateTime#rfc3339 representation, so normalize to that.
+ DateTime.rfc3339(date_time.rfc3339).rfc3339
+ end
+ end
+
+ context 'with container_registry_expiration_policies_caching enabled for the project' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: project)
+ end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: true
+ end
- original_size = 7
- keep_n = 1
+ context 'with container_registry_expiration_policies_caching disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: false)
+ end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: false
+ end
- it_behaves_like(
- 'returning the response',
- status: params[:expected_status],
- original_size: original_size,
- before_truncate_size: original_size - keep_n,
- after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
- before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
- )
+ context 'with container_registry_expiration_policies_caching not enabled for the project' do
+ let_it_be(:another_project) { create(:project) }
+
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: another_project)
end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: false
end
end
@@ -368,7 +519,19 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
original_size: original_size,
before_truncate_size: before_truncate_size,
after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size
+ before_delete_size: before_delete_size,
+ cached_tags_count: 0
}.compact.merge(deleted_size: deleted&.size)
end
+
+ def expect_no_caching
+ expect(::Gitlab::Redis::Cache).not_to receive(:with)
+ end
+
+ def expect_caching
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:mget).and_call_original
+ expect(redis).to receive(:set).and_call_original
+ end
+ end
end
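The caching contexts above imply roughly this wiring inside the cleanup service: populate created_at from the cache before filtering by age, then write back whatever had to be fetched from the registry. Apart from the feature-flag and container_expiration_policy conditions asserted by the spec, the helper names and the filtering step here are assumptions.

# Illustrative sketch of how the cleanup flow could use the created_at cache.
def filter_by_older_than(tags)
  return tags unless older_than_in_seconds # helper assumed to parse params['older_than']

  cache = ::Projects::ContainerRepository::CacheTagsCreatedAtService.new(@container_repository)
  @cached_tags_count = caching_enabled? ? cache.populate(tags) : 0

  cutoff = older_than_in_seconds.ago
  to_delete = tags.select { |tag| tag.created_at && tag.created_at < cutoff }

  cache.insert(tags, older_than_in_seconds) if caching_enabled?

  to_delete
end

def caching_enabled?
  # Only when running an expiration policy and the flag is on for this project, per the spec.
  params['container_expiration_policy'] &&
    Feature.enabled?(:container_registry_expiration_policies_caching, @container_repository.project)
end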
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index e15d9341fd1..d7c43ac676e 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -622,6 +622,22 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
+ context 'when SAST initialization is requested' do
+ let(:project) { create_project(user, opts) }
+
+ before do
+ opts[:initialize_with_sast] = '1'
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
+ it 'creates a commit for SAST', :aggregate_failures do
+ expect(project.repository.commit_count).to be(1)
+ expect(project.repository.commit.message).to eq(
+ 'Configure SAST in `.gitlab-ci.yml`, creating this file if it does not already exist'
+ )
+ end
+ end
+
describe 'create integration for the project' do
subject(:project) { create_project(user, opts) }
@@ -823,25 +839,23 @@ RSpec.describe Projects::CreateService, '#execute' do
let_it_be(:user) { create :user }
context 'when parent group is present' do
- let_it_be(:group) do
+ let_it_be(:group, reload: true) do
create(:group) do |group|
group.add_owner(user)
end
end
before do
- allow_next_found_instance_of(Group) do |group|
- allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
- end
+ group.update_shared_runners_setting!(shared_runners_setting)
user.refresh_authorized_projects # Ensure cache is warm
end
context 'default value based on parent group setting' do
where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
- 'enabled' | nil | true
- 'disabled_with_override' | nil | false
- 'disabled_and_unoverridable' | nil | false
+ Namespace::SR_ENABLED | nil | true
+ Namespace::SR_DISABLED_WITH_OVERRIDE | nil | false
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | nil | false
end
with_them do
@@ -858,11 +872,11 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'parent group is present and allows desired config' do
where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
- 'enabled' | true | true
- 'enabled' | false | false
- 'disabled_with_override' | false | false
- 'disabled_with_override' | true | true
- 'disabled_and_unoverridable' | false | false
+ Namespace::SR_ENABLED | true | true
+ Namespace::SR_ENABLED | false | false
+ Namespace::SR_DISABLED_WITH_OVERRIDE | false | false
+ Namespace::SR_DISABLED_WITH_OVERRIDE | true | true
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | false | false
end
with_them do
@@ -878,7 +892,7 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'parent group is present and disallows desired config' do
where(:shared_runners_setting, :desired_config_for_new_project) do
- 'disabled_and_unoverridable' | true
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | true
end
with_them do
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 27688d8c966..9bdd9800fcc 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -39,26 +39,64 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
let!(:job_variables) { create(:ci_job_variable, job: build) }
let!(:report_result) { create(:ci_build_report_result, build: build) }
let!(:pending_state) { create(:ci_build_pending_state, build: build) }
+ let!(:pipeline_artifact) { create(:ci_pipeline_artifact, pipeline: pipeline) }
- it 'deletes build related records' do
- expect { destroy_project(project, user, {}) }.to change { Ci::Build.count }.by(-1)
+ it 'deletes build and pipeline related records' do
+ expect { destroy_project(project, user, {}) }
+ .to change { Ci::Build.count }.by(-1)
.and change { Ci::BuildTraceChunk.count }.by(-1)
.and change { Ci::JobArtifact.count }.by(-2)
+ .and change { Ci::DeletedObject.count }.by(2)
+ .and change { Ci::PipelineArtifact.count }.by(-1)
.and change { Ci::JobVariable.count }.by(-1)
.and change { Ci::BuildPendingState.count }.by(-1)
.and change { Ci::BuildReportResult.count }.by(-1)
.and change { Ci::BuildRunnerSession.count }.by(-1)
+ .and change { Ci::Pipeline.count }.by(-1)
end
- it 'avoids N+1 queries', skip: 'skipped until fixed in https://gitlab.com/gitlab-org/gitlab/-/issues/24644' do
- recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+ context 'with abort_deleted_project_pipelines disabled' do
+ before do
+ stub_feature_flags(abort_deleted_project_pipelines: false)
+ end
- project = create(:project, :repository, namespace: user.namespace)
- pipeline = create(:ci_pipeline, project: project)
- builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
- create_list(:ci_build_trace_chunk, 3, build: builds[0])
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
- expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create(:ci_pipeline_artifact, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction disabled' do
+ before do
+ stub_feature_flags(ci_optimize_project_records_destruction: false)
+ end
+
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction and abort_deleted_project_pipelines enabled' do
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
end
it_behaves_like 'deleting the project'
@@ -95,24 +133,63 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
context 'with abort_deleted_project_pipelines feature disabled' do
- it 'does not cancel project ci pipelines' do
+ before do
stub_feature_flags(abort_deleted_project_pipelines: false)
+ end
+ it 'does not bulk-fail project ci pipelines' do
expect(::Ci::AbortPipelinesService).not_to receive(:new)
destroy_project(project, user, {})
end
+
+ it 'does not destroy CI records via DestroyPipelineService' do
+ expect(::Ci::DestroyPipelineService).not_to receive(:new)
+
+ destroy_project(project, user, {})
+ end
end
context 'with abort_deleted_project_pipelines feature enabled' do
- it 'performs cancel for project ci pipelines' do
- stub_feature_flags(abort_deleted_project_pipelines: true)
- pipelines = build_list(:ci_pipeline, 3, :running)
- allow(project).to receive(:all_pipelines).and_return(pipelines)
+ let!(:pipelines) { create_list(:ci_pipeline, 3, :running, project: project) }
+ let(:destroy_pipeline_service) { double('DestroyPipelineService', execute: nil) }
- expect(::Ci::AbortPipelinesService).to receive_message_chain(:new, :execute).with(pipelines, :project_deleted)
+ context 'with ci_optimize_project_records_destruction disabled' do
+ before do
+ stub_feature_flags(ci_optimize_project_records_destruction: false)
+ end
- destroy_project(project, user, {})
+ it 'bulk-fails project ci pipelines' do
+ expect(::Ci::AbortPipelinesService)
+ .to receive_message_chain(:new, :execute)
+ .with(project.all_pipelines, :project_deleted)
+
+ destroy_project(project, user, {})
+ end
+
+ it 'does not destroy CI records via DestroyPipelineService' do
+ expect(::Ci::DestroyPipelineService).not_to receive(:new)
+
+ destroy_project(project, user, {})
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction enabled' do
+ it 'executes DestroyPipelineService for project ci pipelines' do
+ allow(::Ci::DestroyPipelineService).to receive(:new).and_return(destroy_pipeline_service)
+
+ expect(::Ci::AbortPipelinesService)
+ .to receive_message_chain(:new, :execute)
+ .with(project.all_pipelines, :project_deleted)
+
+ pipelines.each do |pipeline|
+ expect(destroy_pipeline_service)
+ .to receive(:execute)
+ .with(pipeline)
+ end
+
+ destroy_project(project, user, {})
+ end
end
end
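Taken together, these contexts describe roughly the following branching during project destruction. A sketch only: the method name, the feature-flag actor arguments, and the DestroyPipelineService constructor arguments are assumptions; the call shapes come from the expectations above.

# Illustrative sketch of the CI cleanup branch these examples exercise.
def destroy_ci_records!(project, current_user)
  return unless Feature.enabled?(:abort_deleted_project_pipelines, project)

  # Bulk-fail running pipelines first.
  ::Ci::AbortPipelinesService.new.execute(project.all_pipelines, :project_deleted)

  return unless Feature.enabled?(:ci_optimize_project_records_destruction, project)

  # Then destroy pipelines (and their dependent CI records) one by one.
  project.all_pipelines.find_each do |pipeline|
    ::Ci::DestroyPipelineService.new(project, current_user).execute(pipeline)
  end
end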
diff --git a/spec/services/projects/group_links/update_service_spec.rb b/spec/services/projects/group_links/update_service_spec.rb
index 4a38fb0c7d9..ff1618c3bbe 100644
--- a/spec/services/projects/group_links/update_service_spec.rb
+++ b/spec/services/projects/group_links/update_service_spec.rb
@@ -34,86 +34,40 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute' do
end
context 'project authorizations update' do
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: true)
- end
-
- it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
- .to receive(:perform_async).with(link.project.id)
-
- subject
- end
-
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- [[user.id]],
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- subject
- end
-
- it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
- group.add_maintainer(user)
-
- expect { subject }.to(
- change { Ability.allowed?(user, :create_release, project) }
- .from(true).to(false))
- end
- end
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(link.project.id)
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: false)
- end
+ subject
+ end
- it 'calls UserProjectAccessChangedService to update project authorizations' do
- expect_next_instance_of(UserProjectAccessChangedService, [user.id]) do |service|
- expect(service).to receive(:execute)
- end
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- subject
- end
+ subject
+ end
- it 'updates project authorizations of users who had access to the project via the group share' do
- group.add_maintainer(user)
+ it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
+ group.add_maintainer(user)
- expect { subject }.to(
- change { Ability.allowed?(user, :create_release, project) }
- .from(true).to(false))
- end
+ expect { subject }.to(
+ change { Ability.allowed?(user, :create_release, project) }
+ .from(true).to(false))
end
end
context 'with only param not requiring authorization refresh' do
let(:group_link_params) { { expires_at: Date.tomorrow } }
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: true)
- end
-
- it 'does not perform any project authorizations update using `AuthorizedProjectUpdate::ProjectRecalculateWorker`' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: false)
- end
-
- it 'does not perform any project authorizations update using `UserProjectAccessChangedService`' do
- expect(UserProjectAccessChangedService).not_to receive(:new)
+ it 'does not perform any project authorizations update using `AuthorizedProjectUpdate::ProjectRecalculateWorker`' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).not_to receive(:perform_async)
- subject
- end
+ subject
end
end
end
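The remaining expectations describe a two-step authorization refresh after a group share is updated; here is a sketch, with the user-id collection assumed (the spec only checks the [[user.id]] shape and the scheduling arguments).

# Illustrative sketch of the refresh the spec asserts.
def refresh_authorizations(group_link)
  # Recalculate this project's authorizations right away.
  AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(group_link.project.id)

  # Schedule a delayed, batched consistency pass against the replica.
  user_ids = group_link.group.user_ids.map { |id| [id] } # collection assumed
  AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.bulk_perform_in(
    1.hour,
    user_ids,
    batch_delay: 30.seconds,
    batch_size: 100
  )
end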
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index 92e18b6cb46..1d63f72ec38 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -86,6 +86,12 @@ RSpec.describe Projects::ImportService do
end
context 'with a Github repository' do
+ it 'tracks the start of import' do
+ expect(Gitlab::GithubImport::ParallelImporter).to receive(:track_start_import)
+
+ subject.execute
+ end
+
it 'succeeds if repository import was scheduled' do
expect_any_instance_of(Gitlab::GithubImport::ParallelImporter)
.to receive(:execute)
diff --git a/spec/services/projects/move_access_service_spec.rb b/spec/services/projects/move_access_service_spec.rb
index 90167ffebed..45e10c3ca84 100644
--- a/spec/services/projects/move_access_service_spec.rb
+++ b/spec/services/projects/move_access_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Projects::MoveAccessService do
describe '#execute' do
shared_examples 'move the accesses' do
- it do
+ it 'moves the accesses', :sidekiq_inline do
expect(project_with_access.project_members.count).to eq 4
expect(project_with_access.project_group_links.count).to eq 3
expect(project_with_access.authorized_users.count).to eq 4
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index a71fafb2121..b64f2d1e7d6 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -294,10 +294,10 @@ RSpec.describe Projects::Operations::UpdateService do
end
context 'without setting' do
- it 'does not create a setting' do
- expect(result[:status]).to eq(:error)
-
- expect(project.reload.error_tracking_setting).to be_nil
+ it 'creates setting with default values' do
+ expect(result[:status]).to eq(:success)
+ expect(project.error_tracking_setting.enabled).to be_truthy
+ expect(project.error_tracking_setting.integrated).to be_truthy
end
end
end
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index b84e28314f2..eab7228307a 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -104,104 +104,116 @@ RSpec.describe Projects::ParticipantsService do
describe '#project_members' do
subject(:usernames) { service.project_members.map { |member| member[:username] } }
- context 'when there is a project in group namespace' do
- let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
+ shared_examples 'return project members' do
+ context 'when there is a project in group namespace' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
- let(:service) { described_class.new(public_project, create(:user)) }
+ let(:service) { described_class.new(public_project, create(:user)) }
- before do
- public_group.add_owner(public_group_owner)
- end
+ before do
+ public_group.add_owner(public_group_owner)
+ end
- it 'returns members of a group' do
- expect(usernames).to include(public_group_owner.username)
+ it 'returns members of a group' do
+ expect(usernames).to include(public_group_owner.username)
+ end
end
- end
-
- context 'when there is a private group and a public project' do
- let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:private_group) { create(:group, :private, :nested) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:project_issue) { create(:issue, project: public_project)}
+ context 'when there is a private group and a public project' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:private_group) { create(:group, :private, :nested) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:public_group_owner) { create(:user) }
- let_it_be(:private_group_member) { create(:user) }
- let_it_be(:public_project_maintainer) { create(:user) }
- let_it_be(:private_group_owner) { create(:user) }
+ let_it_be(:project_issue) { create(:issue, project: public_project)}
- let_it_be(:group_ancestor_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:private_group_member) { create(:user) }
+ let_it_be(:public_project_maintainer) { create(:user) }
+ let_it_be(:private_group_owner) { create(:user) }
- before_all do
- public_group.add_owner public_group_owner
- private_group.add_developer private_group_member
- public_project.add_maintainer public_project_maintainer
+ let_it_be(:group_ancestor_owner) { create(:user) }
- private_group.add_owner private_group_owner
- private_group.parent.add_owner group_ancestor_owner
- end
-
- context 'when the private group is invited to the public project' do
before_all do
- create(:project_group_link, group: private_group, project: public_project)
- end
+ public_group.add_owner public_group_owner
+ private_group.add_developer private_group_member
+ public_project.add_maintainer public_project_maintainer
- context 'when a user who is outside the public project and the private group is signed in' do
- let(:service) { described_class.new(public_project, create(:user)) }
+ private_group.add_owner private_group_owner
+ private_group.parent.add_owner group_ancestor_owner
+ end
- it 'does not return the private group' do
- expect(usernames).not_to include(private_group.name)
+ context 'when the private group is invited to the public project' do
+ before_all do
+ create(:project_group_link, group: private_group, project: public_project)
end
- it 'does not return private group members' do
- expect(usernames).not_to include(private_group_member.username)
- end
+ context 'when a user who is outside the public project and the private group is signed in' do
+ let(:service) { described_class.new(public_project, create(:user)) }
- it 'returns the project maintainer' do
- expect(usernames).to include(public_project_maintainer.username)
- end
+ it 'does not return the private group' do
+ expect(usernames).not_to include(private_group.name)
+ end
- it 'returns project members from an invited public group' do
- invited_public_group = create(:group, :public)
- invited_public_group.add_owner create(:user)
+ it 'does not return private group members' do
+ expect(usernames).not_to include(private_group_member.username)
+ end
- create(:project_group_link, group: invited_public_group, project: public_project)
+ it 'returns the project maintainer' do
+ expect(usernames).to include(public_project_maintainer.username)
+ end
- expect(usernames).to include(invited_public_group.users.first.username)
- end
+ it 'returns project members from an invited public group' do
+ invited_public_group = create(:group, :public)
+ invited_public_group.add_owner create(:user)
- it 'does not return ancestors of the private group' do
- expect(usernames).not_to include(group_ancestor_owner.username)
- end
- end
+ create(:project_group_link, group: invited_public_group, project: public_project)
- context 'when private group owner is signed in' do
- let(:service) { described_class.new(public_project, private_group_owner) }
+ expect(usernames).to include(invited_public_group.users.first.username)
+ end
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
+ it 'does not return ancestors of the private group' do
+ expect(usernames).not_to include(group_ancestor_owner.username)
+ end
end
- it 'returns ancestors of the the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
- end
- end
+ context 'when private group owner is signed in' do
+ let(:service) { described_class.new(public_project, private_group_owner) }
- context 'when the namespace owner of the public project is signed in' do
- let(:service) { described_class.new(public_project, public_group_owner) }
+ it 'returns private group members' do
+ expect(usernames).to include(private_group_member.username)
+ end
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
+ it 'returns ancestors of the private group' do
+ expect(usernames).to include(group_ancestor_owner.username)
+ end
end
- it 'does not return members of the ancestral groups of the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
+ context 'when the namespace owner of the public project is signed in' do
+ let(:service) { described_class.new(public_project, public_group_owner) }
+
+ it 'returns private group members' do
+ expect(usernames).to include(private_group_member.username)
+ end
+
+ it 'does not return members of the ancestral groups of the private group' do
+ expect(usernames).to include(group_ancestor_owner.username)
+ end
end
end
end
end
+
+ it_behaves_like 'return project members'
+
+ context 'when feature flag :linear_participants_service_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_participants_service_ancestor_scopes: false)
+ end
+
+ it_behaves_like 'return project members'
+ end
end
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index d96573e26af..b539b01066e 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -64,6 +64,33 @@ RSpec.describe Projects::TransferService do
expect(transfer_result).to be_truthy
expect(project.namespace).to eq(group)
end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be_truthy
+
+ project_namespace_in_sync(group)
+ end
+
+ context 'when project is transferred to a deeper nested group' do
+ let(:parent_group) { create(:group) }
+ let(:sub_group) { create(:group, parent: parent_group) }
+ let(:sub_sub_group) { create(:group, parent: sub_group) }
+ let(:group) { sub_sub_group }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be_truthy
+
+ project_namespace_in_sync(sub_sub_group)
+ end
+ end
+ end
end
context 'when transfer succeeds' do
@@ -143,6 +170,28 @@ RSpec.describe Projects::TransferService do
end
end
end
+
+ context 'when project has pending builds' do
+ let!(:other_project) { create(:project) }
+ let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
+ let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
+
+ before do
+ group.reload
+ end
+
+ it 'updates pending builds for the project', :aggregate_failures do
+ execute_transfer
+
+ pending_build.reload
+ unrelated_pending_build.reload
+
+ expect(pending_build.namespace_id).to eq(group.id)
+ expect(pending_build.namespace_traversal_ids).to eq(group.traversal_ids)
+ expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
+ expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
+ end
+ end
end
context 'when transfer fails' do
@@ -203,6 +252,34 @@ RSpec.describe Projects::TransferService do
shard_name: project.repository_storage
)
end
+
+ context 'when project has pending builds' do
+ let!(:other_project) { create(:project) }
+ let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
+ let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
+
+ it 'does not update pending builds for the project', :aggregate_failures do
+ attempt_project_transfer
+
+ pending_build.reload
+ unrelated_pending_build.reload
+
+ expect(pending_build.namespace_id).to eq(project.namespace_id)
+ expect(pending_build.namespace_traversal_ids).to eq(project.namespace.traversal_ids)
+ expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
+ expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
+ end
+ end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ attempt_project_transfer
+
+ project_namespace_in_sync(user.namespace)
+ end
+ end
end
context 'namespace -> no namespace' do
@@ -215,6 +292,18 @@ RSpec.describe Projects::TransferService do
expect(project.namespace).to eq(user.namespace)
expect(project.errors.messages[:new_namespace].first).to eq 'Please select a new namespace for your project.'
end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be false
+
+ project_namespace_in_sync(user.namespace)
+ end
+ end
end
context 'disallow transferring of project with tags' do
@@ -369,28 +458,23 @@ RSpec.describe Projects::TransferService do
using RSpec::Parameterized::TableSyntax
where(:project_shared_runners_enabled, :shared_runners_setting, :expected_shared_runners_enabled) do
- true | 'disabled_and_unoverridable' | false
- false | 'disabled_and_unoverridable' | false
- true | 'disabled_with_override' | true
- false | 'disabled_with_override' | false
- true | 'enabled' | true
- false | 'enabled' | false
+ true | :disabled_and_unoverridable | false
+ false | :disabled_and_unoverridable | false
+ true | :disabled_with_override | true
+ false | :disabled_with_override | false
+ true | :shared_runners_enabled | true
+ false | :shared_runners_enabled | false
end
with_them do
let(:project) { create(:project, :public, :repository, namespace: user.namespace, shared_runners_enabled: project_shared_runners_enabled) }
- let(:group) { create(:group) }
+ let(:group) { create(:group, shared_runners_setting) }
- before do
+ it 'updates shared runners based on the parent group' do
group.add_owner(user)
- expect_next_found_instance_of(Group) do |group|
- expect(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
- end
- execute_transfer
- end
+ expect(execute_transfer).to eq(true)
- it 'updates shared runners based on the parent group' do
expect(project.shared_runners_enabled).to eq(expected_shared_runners_enabled)
end
end
@@ -478,58 +562,30 @@ RSpec.describe Projects::TransferService do
group.add_owner(user)
end
- context 'when the feature flag `specialized_worker_for_project_transfer_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_transfer_auth_recalculation: true)
- end
-
- it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
- .to receive(:perform_async).with(project.id)
-
- execute_transfer
- end
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(project.id)
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
- user_ids = [user.id, member_of_old_group.id, member_of_new_group.id].map { |id| [id] }
-
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- user_ids,
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- subject
- end
-
- it 'refreshes the permissions of the members of the old and new namespace', :sidekiq_inline do
- expect { execute_transfer }
- .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
- .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
- end
+ execute_transfer
end
- context 'when the feature flag `specialized_worker_for_project_transfer_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_transfer_auth_recalculation: false)
- end
-
- it 'calls UserProjectAccessChangedService to update project authorizations' do
- user_ids = [user.id, member_of_old_group.id, member_of_new_group.id]
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ user_ids = [user.id, member_of_old_group.id, member_of_new_group.id].map { |id| [id] }
- expect_next_instance_of(UserProjectAccessChangedService, user_ids) do |service|
- expect(service).to receive(:execute)
- end
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ user_ids,
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- execute_transfer
- end
+ subject
+ end
- it 'refreshes the permissions of the members of the old and new namespace' do
- expect { execute_transfer }
- .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
- .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
- end
+ it 'refreshes the permissions of the members of the old and new namespace', :sidekiq_inline do
+ expect { execute_transfer }
+ .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
+ .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
end
end
@@ -643,4 +699,13 @@ RSpec.describe Projects::TransferService do
def rugged_config
rugged_repo(project.repository).config
end
+
+ def project_namespace_in_sync(group)
+ project.reload
+ expect(project.namespace).to eq(group)
+ expect(project.project_namespace.visibility_level).to eq(project.visibility_level)
+ expect(project.project_namespace.path).to eq(project.path)
+ expect(project.project_namespace.parent).to eq(project.namespace)
+ expect(project.project_namespace.traversal_ids).to eq([*project.namespace.traversal_ids, project.project_namespace.id])
+ end
end
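The shared-runners table above drives one example through six input combinations via RSpec::Parameterized::TableSyntax. A stripped-down sketch of the same mechanism, assuming the rspec-parameterized gem and inlining the expected resolution as plain Ruby rather than calling the real transfer service:

# table_syntax_sketch_spec.rb -- assumes the rspec-parameterized gem; values mirror the table above
require 'rspec'
require 'rspec-parameterized'

RSpec.describe 'shared runner resolution (sketch)' do
  using RSpec::Parameterized::TableSyntax

  where(:project_enabled, :group_setting, :expected) do
    true  | :disabled_and_unoverridable | false
    false | :disabled_and_unoverridable | false
    true  | :disabled_with_override     | true
    false | :disabled_with_override     | false
    true  | :shared_runners_enabled     | true
    false | :shared_runners_enabled     | false
  end

  with_them do
    it 'resolves the flag from the project setting and the target group' do
      # Inlined stand-in for what the transfer ends up persisting: an
      # unoverridable group disables runners outright, otherwise the project
      # keeps its own setting.
      resolved =
        if group_setting == :disabled_and_unoverridable
          false
        else
          project_enabled
        end

      expect(resolved).to eq(expected)
    end
  end
end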
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 6d0b75e0c95..5810024a1ef 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -173,14 +173,6 @@ RSpec.describe Projects::UpdatePagesService do
include_examples 'successfully deploys'
- context 'when pages_smart_check_outdated_sha feature flag is disabled' do
- before do
- stub_feature_flags(pages_smart_check_outdated_sha: false)
- end
-
- include_examples 'fails with outdated reference message'
- end
-
context 'when old deployment present' do
before do
old_build = create(:ci_build, pipeline: old_pipeline, ref: 'HEAD')
@@ -189,14 +181,6 @@ RSpec.describe Projects::UpdatePagesService do
end
include_examples 'successfully deploys'
-
- context 'when pages_smart_check_outdated_sha feature flag is disabled' do
- before do
- stub_feature_flags(pages_smart_check_outdated_sha: false)
- end
-
- include_examples 'fails with outdated reference message'
- end
end
context 'when newer deployment present' do
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 115f3098185..4923ef169e8 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -374,7 +374,7 @@ RSpec.describe Projects::UpdateService do
expect(result).to eq({
status: :error,
- message: "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'."
+ message: "Name can contain only letters, digits, emojis, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'."
})
end
end
@@ -441,26 +441,62 @@ RSpec.describe Projects::UpdateService do
end
end
- context 'when updating #shared_runners', :https_pages_enabled do
- let!(:pending_build) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
+ context 'when updating runners settings' do
+ let(:settings) do
+ { instance_runners_enabled: true, namespace_traversal_ids: [123] }
+ end
- subject(:call_service) do
- update_project(project, admin, shared_runners_enabled: shared_runners_enabled)
+ let!(:pending_build) do
+ create(:ci_pending_build, project: project, **settings)
+ end
+
+ context 'when project has shared runners enabled' do
+ let(:project) { create(:project, shared_runners_enabled: true) }
+
+ it 'updates builds queue when shared runners get disabled' do
+ expect { update_project(project, admin, shared_runners_enabled: false) }
+ .to change { pending_build.reload.instance_runners_enabled }.to(false)
+
+ expect(pending_build.reload.instance_runners_enabled).to be false
+ end
+ end
+
+ context 'when project has shared runners disabled' do
+ let(:project) { create(:project, shared_runners_enabled: false) }
+
+ it 'updates builds queue when shared runners get enabled' do
+ expect { update_project(project, admin, shared_runners_enabled: true) }
+ .to not_change { pending_build.reload.instance_runners_enabled }
+
+ expect(pending_build.reload.instance_runners_enabled).to be true
+ end
end
- context 'when shared runners is toggled' do
- let(:shared_runners_enabled) { false }
+ context 'when project has group runners enabled' do
+ let(:project) { create(:project, group_runners_enabled: true) }
+
+ before do
+ project.ci_cd_settings.update!(group_runners_enabled: true)
+ end
+
+ it 'updates builds queue when group runners get disabled' do
+ update_project(project, admin, group_runners_enabled: false)
- it 'updates ci pending builds' do
- expect { call_service }.to change { pending_build.reload.instance_runners_enabled }.to(false)
+ expect(pending_build.reload.namespace_traversal_ids).to be_empty
end
end
- context 'when shared runners is not toggled' do
- let(:shared_runners_enabled) { true }
+ context 'when project has group runners disabled' do
+ let(:project) { create(:project, :in_subgroup, group_runners_enabled: false) }
+
+ before do
+ project.reload.ci_cd_settings.update!(group_runners_enabled: false)
+ end
+
+ it 'updates builds queue when group runners get enabled' do
+ update_project(project, admin, group_runners_enabled: true)
- it 'updates ci pending builds' do
- expect { call_service }.to not_change { pending_build.reload.instance_runners_enabled }
+ expect(pending_build.reload.namespace_traversal_ids).to include(project.namespace.id)
end
end
end
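The `.to not_change { ... }` calls in this hunk rely on a negated matcher rather than anything built into RSpec: it is declared once from `change` via define_negated_matcher (GitLab does this in spec support). A self-contained sketch of how that reads, with a plain hash standing in for the Ci::PendingBuild record:

# negated_matcher_sketch_spec.rb -- the hash-based pending build is a stand-in
require 'rspec'

# `not_change` is the negated form of the built-in `change` matcher.
RSpec::Matchers.define_negated_matcher :not_change, :change

RSpec.describe 'pending build flag updates (sketch)' do
  let(:pending_build) { { instance_runners_enabled: true } }

  def toggle_shared_runners(enabled)
    pending_build[:instance_runners_enabled] = enabled
  end

  it 'changes the flag when shared runners get disabled' do
    expect { toggle_shared_runners(false) }
      .to change { pending_build[:instance_runners_enabled] }.to(false)
  end

  it 'leaves the flag alone when the setting already matches' do
    expect { toggle_shared_runners(true) }
      .to not_change { pending_build[:instance_runners_enabled] }
  end
end

The negated form also composes with other matchers via `.and`, which a bare `not_to change` cannot do.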
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 02997096021..d67b189f90e 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -1935,6 +1935,21 @@ RSpec.describe QuickActions::InterpretService do
it_behaves_like 'relate command'
end
+ context 'when quick action target is unpersisted' do
+ let(:issue) { build(:issue, project: project) }
+ let(:other_issue) { create(:issue, project: project) }
+ let(:issues_related) { [other_issue] }
+ let(:content) { "/relate #{other_issue.to_reference}" }
+
+ it 'relates the issues after the issue is persisted' do
+ service.execute(content, issue)
+
+ issue.save!
+
+ expect(IssueLink.where(source: issue).map(&:target)).to match_array(issues_related)
+ end
+ end
+
context 'empty relate command' do
let(:issues_related) { [] }
let(:content) { '/relate' }
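The new example above exercises a deferred side effect: the /relate action is recorded against an unpersisted issue and only takes effect after save!. A rough, self-contained model of that pattern, with stand-in classes rather than the GitLab models:

# deferred_relate_sketch_spec.rb -- stand-in classes, not the GitLab models
require 'rspec'

class SketchIssue
  attr_reader :links

  def initialize
    @persisted = false
    @links = []
    @pending_links = []
  end

  def persisted?
    @persisted
  end

  def relate_to(other)
    if persisted?
      @links << other
    else
      # Not saved yet: queue the link instead of creating it.
      @pending_links << other
    end
  end

  def save!
    @persisted = true
    @links.concat(@pending_links)
    @pending_links.clear
  end
end

RSpec.describe SketchIssue do
  it 'creates the relation only after the record is persisted' do
    issue = described_class.new
    other = described_class.new

    issue.relate_to(other)
    expect(issue.links).to be_empty

    issue.save!
    expect(issue.links).to contain_exactly(other)
  end
end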
diff --git a/spec/services/security/ci_configuration/sast_create_service_spec.rb b/spec/services/security/ci_configuration/sast_create_service_spec.rb
index 44f8f07a5be..c7e732dc79a 100644
--- a/spec/services/security/ci_configuration/sast_create_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_create_service_spec.rb
@@ -23,4 +23,27 @@ RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow do
end
include_examples 'services security ci configuration create service'
+
+ context "when committing to the default branch", :aggregate_failures do
+ subject(:result) { described_class.new(project, user, params, commit_on_default: true).execute }
+
+ let(:params) { {} }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it "doesn't try to remove that branch on raised exceptions" do
+ expect(Files::MultiService).to receive(:new).and_raise(StandardError, '_exception_')
+ expect(project.repository).not_to receive(:rm_branch)
+
+ expect { result }.to raise_error(StandardError, '_exception_')
+ end
+
+ it "commits directly to the default branch" do
+ expect(result.status).to eq(:success)
+ expect(result.payload[:success_path]).to match(/#{Gitlab::Routing.url_helpers.project_new_merge_request_url(project, {})}(.*)description(.*)source_branch/)
+ expect(result.payload[:branch]).to eq('master')
+ end
+ end
end
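The first new SAST example stubs the commit step to raise and asserts that branch cleanup is never attempted. A small double-based sketch of that stub-and-negative-message pattern; the repository and commit-step doubles below are stand-ins, not Files::MultiService or the project repository:

# error_path_sketch_spec.rb -- illustrative doubles only
require 'rspec'

RSpec.describe 'commit-on-default error handling (sketch)' do
  it 'lets the error bubble up without removing the branch' do
    repository  = double('repository')
    commit_step = double('commit step') # stand-in for the real commit collaborator

    # Tiny stand-in for the service body: it commits first and only cleans up
    # on a rescued path, so a raised error must not reach rm_branch.
    service = -> { commit_step.new.execute }

    allow(commit_step).to receive(:new).and_raise(StandardError, '_exception_')
    expect(repository).not_to receive(:rm_branch)

    expect { service.call }.to raise_error(StandardError, '_exception_')
  end
end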
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index c2fe565938a..d8672eec682 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -50,6 +50,7 @@ RSpec.describe ServicePing::SubmitService do
let(:with_dev_ops_score_params) { { dev_ops_score: score_params[:score] } }
let(:with_conv_index_params) { { conv_index: score_params[:score] } }
+ let(:with_usage_data_id_params) { { conv_index: { usage_data_id: usage_data_id } } }
shared_examples 'does not run' do
it do
@@ -173,6 +174,29 @@ RSpec.describe ServicePing::SubmitService do
end
end
+ context 'when only usage_data_id is passed in response' do
+ before do
+ stub_response(body: with_usage_data_id_params)
+ end
+
+ it 'does not save DevOps report data' do
+ expect { subject.execute }.not_to change { DevOpsReport::Metric.count }
+ end
+
+ it 'saves usage_data_id to version_usage_data_id_value' do
+ recorded_at = Time.current
+ usage_data = { uuid: 'uuid', recorded_at: recorded_at }
+
+ expect(Gitlab::UsageData).to receive(:data).with(force_refresh: true).and_return(usage_data)
+
+ subject.execute
+
+ raw_usage_data = RawUsageData.find_by(recorded_at: recorded_at)
+
+ expect(raw_usage_data.version_usage_data_id_value).to eq(31643)
+ end
+ end
+
context 'when version app usage_data_id is invalid' do
let(:usage_data_id) { -1000 }
diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb
index f8835fefc84..438db6b987b 100644
--- a/spec/services/user_project_access_changed_service_spec.rb
+++ b/spec/services/user_project_access_changed_service_spec.rb
@@ -47,15 +47,13 @@ RSpec.describe UserProjectAccessChangedService do
let(:service) { UserProjectAccessChangedService.new([1, 2]) }
before do
- allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
-
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_and_wait)
.with([[1], [2]])
.and_return(10)
end
it 'sticks all the updated users and returns the original result', :aggregate_failures do
- expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:bulk_stick).with(:user, [1, 2])
+ expect(ApplicationRecord.sticking).to receive(:bulk_stick).with(:user, [1, 2])
expect(service.execute).to eq(10)
end
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index 6c1df5c745f..092c5cd3e5e 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -91,9 +91,9 @@ RSpec.describe Users::ActivityService do
context 'when last activity is in the past' do
let(:user) { create(:user, last_activity_on: Date.today - 1.week) }
- context 'database load balancing is configured', :db_load_balancing do
+ context 'database load balancing is configured' do
before do
- allow(ActiveRecord::Base).to receive(:connection).and_return(::Gitlab::Database::LoadBalancing.proxy)
+ ::Gitlab::Database::LoadBalancing::Session.clear_session
end
let(:service) do
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index b30b7e6eb56..3244db4c1fb 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Users::UpdateService do
- let(:user) { create(:user) }
+ let(:password) { 'longsecret987!' }
+ let(:user) { create(:user, password: password, password_confirmation: password) }
describe '#execute' do
it 'updates time preferences' do
@@ -18,7 +19,7 @@ RSpec.describe Users::UpdateService do
it 'returns an error result when record cannot be updated' do
result = {}
expect do
- result = update_user(user, { email: 'invalid' })
+ result = update_user(user, { email: 'invalid', validation_password: password })
end.not_to change { user.reload.email }
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Email is invalid')
@@ -65,7 +66,7 @@ RSpec.describe Users::UpdateService do
context 'updating canonical email' do
context 'if email was changed' do
subject do
- update_user(user, email: 'user+extrastuff@example.com')
+ update_user(user, email: 'user+extrastuff@example.com', validation_password: password)
end
it 'calls canonicalize_email' do
@@ -75,15 +76,68 @@ RSpec.describe Users::UpdateService do
subject
end
+
+ context 'when check_password is true' do
+ def update_user(user, opts)
+ described_class.new(user, opts.merge(user: user)).execute(check_password: true)
+ end
+
+ it 'returns error if no password confirmation was passed', :aggregate_failures do
+ result = {}
+
+ expect do
+ result = update_user(user, { email: 'example@example.com' })
+ end.not_to change { user.reload.unconfirmed_email }
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid password')
+ end
+
+ it 'returns error if wrong password confirmation was passed', :aggregate_failures do
+ result = {}
+
+ expect do
+ result = update_user(user, { email: 'example@example.com', validation_password: 'wrongpassword' })
+ end.not_to change { user.reload.unconfirmed_email }
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid password')
+ end
+
+ it 'does not require password if it was automatically set', :aggregate_failures do
+ user.update!(password_automatically_set: true)
+ result = {}
+
+ expect do
+ result = update_user(user, { email: 'example@example.com' })
+ end.to change { user.reload.unconfirmed_email }
+ expect(result[:status]).to eq(:success)
+ end
+
+ it 'does not require a password if the changed attribute does not require it' do
+ result = {}
+
+ expect do
+ result = update_user(user, { job_title: 'supreme leader of the universe' })
+ end.to change { user.reload.job_title }
+ expect(result[:status]).to eq(:success)
+ end
+ end
end
- context 'if email was NOT changed' do
- subject do
- update_user(user, job_title: 'supreme leader of the universe')
+ context 'when check_password is left as false' do
+ it 'does not require a password check', :aggregate_failures do
+ result = {}
+ expect do
+ result = update_user(user, { email: 'example@example.com' })
+ end.to change { user.reload.unconfirmed_email }
+ expect(result[:status]).to eq(:success)
end
+ end
+ context 'if email was NOT changed' do
it 'skips update canonicalize email service call' do
- expect { subject }.not_to change { user.user_canonical_email }
+ expect do
+ update_user(user, job_title: 'supreme leader of the universe')
+ end.not_to change { user.user_canonical_email }
end
end
end
@@ -106,7 +160,7 @@ RSpec.describe Users::UpdateService do
it 'raises an error when record cannot be updated' do
expect do
- update_user(user, email: 'invalid')
+ update_user(user, email: 'invalid', validation_password: password)
end.to raise_error(ActiveRecord::RecordInvalid)
end
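Several of the new examples above carry the :aggregate_failures tag, which is built-in RSpec metadata that reports every failed expectation in an example instead of stopping at the first one. A minimal sketch with an illustrative result hash:

# aggregate_failures_sketch_spec.rb -- illustrative only
require 'rspec'

RSpec.describe 'password-checked email update (sketch)' do
  it 'reports status and message together', :aggregate_failures do
    result = { status: :error, message: 'Invalid password' }

    # Both expectations are evaluated even if the first fails.
    expect(result[:status]).to eq(:error)
    expect(result[:message]).to eq('Invalid password')
  end
end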
diff --git a/spec/services/users/upsert_credit_card_validation_service_spec.rb b/spec/services/users/upsert_credit_card_validation_service_spec.rb
index 148638fe5e7..bede30e1898 100644
--- a/spec/services/users/upsert_credit_card_validation_service_spec.rb
+++ b/spec/services/users/upsert_credit_card_validation_service_spec.rb
@@ -7,7 +7,17 @@ RSpec.describe Users::UpsertCreditCardValidationService do
let(:user_id) { user.id }
let(:credit_card_validated_time) { Time.utc(2020, 1, 1) }
- let(:params) { { user_id: user_id, credit_card_validated_at: credit_card_validated_time } }
+ let(:expiration_year) { Date.today.year + 10 }
+ let(:params) do
+ {
+ user_id: user_id,
+ credit_card_validated_at: credit_card_validated_time,
+ credit_card_expiration_year: expiration_year,
+ credit_card_expiration_month: 1,
+ credit_card_holder_name: 'John Smith',
+ credit_card_mask_number: '1111'
+ }
+ end
describe '#execute' do
subject(:service) { described_class.new(params) }
@@ -52,6 +62,16 @@ RSpec.describe Users::UpsertCreditCardValidationService do
end
end
+ shared_examples 'returns an error, tracking the exception' do
+ it do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ result = service.execute
+
+ expect(result.status).to eq(:error)
+ end
+ end
+
context 'when user id does not exist' do
let(:user_id) { non_existing_record_id }
@@ -61,19 +81,27 @@ RSpec.describe Users::UpsertCreditCardValidationService do
context 'when missing credit_card_validated_at' do
let(:params) { { user_id: user_id } }
- it_behaves_like 'returns an error without tracking the exception'
+ it_behaves_like 'returns an error, tracking the exception'
end
context 'when missing user id' do
let(:params) { { credit_card_validated_at: credit_card_validated_time } }
- it_behaves_like 'returns an error without tracking the exception'
+ it_behaves_like 'returns an error, tracking the exception'
end
context 'when an unexpected exception happens' do
it 'tracks the exception and returns an error' do
+ logged_params = {
+ credit_card_validated_at: credit_card_validated_time,
+ expiration_date: Date.new(expiration_year, 1, 31),
+ holder_name: "John Smith",
+ last_digits: 1111,
+ user_id: user_id
+ }
+
expect(::Users::CreditCardValidation).to receive(:upsert).and_raise(e = StandardError.new('My exception!'))
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, class: described_class.to_s, params: params)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, class: described_class.to_s, params: logged_params)
result = service.execute
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index f9fa46a4fc8..2aebd2adab9 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -392,7 +392,7 @@ RSpec.describe WebHookService do
end
end
- context 'when the hook is throttled (via Redis)', :clean_gitlab_redis_cache do
+ context 'when the hook is throttled (via Redis)', :clean_gitlab_redis_rate_limiting do
before do
# Set a high interval to avoid intermittent failures in CI
allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits).and_return(