Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'spec/workers')
-rw-r--r--spec/workers/bulk_imports/entity_worker_spec.rb25
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb95
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb54
-rw-r--r--spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb23
-rw-r--r--spec/workers/cluster_configure_istio_worker_spec.rb41
-rw-r--r--spec/workers/cluster_update_app_worker_spec.rb112
-rw-r--r--spec/workers/cluster_wait_for_app_update_worker_spec.rb27
-rw-r--r--spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb32
-rw-r--r--spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb32
-rw-r--r--spec/workers/concerns/reenqueuer_spec.rb21
-rw-r--r--spec/workers/container_registry/cleanup_worker_spec.rb81
-rw-r--r--spec/workers/container_registry/delete_container_repository_worker_spec.rb146
-rw-r--r--spec/workers/database/batched_background_migration/execution_worker_spec.rb141
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb5
-rw-r--r--spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb46
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb31
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb3
-rw-r--r--spec/workers/gitlab_shell_worker_spec.rb50
-rw-r--r--spec/workers/incident_management/add_severity_system_note_worker_spec.rb25
-rw-r--r--spec/workers/loose_foreign_keys/cleanup_worker_spec.rb36
-rw-r--r--spec/workers/mail_scheduler/notification_service_worker_spec.rb39
-rw-r--r--spec/workers/merge_requests/delete_branch_worker_spec.rb65
-rw-r--r--spec/workers/merge_requests/delete_source_branch_worker_spec.rb120
-rw-r--r--spec/workers/namespaces/root_statistics_worker_spec.rb13
-rw-r--r--spec/workers/pages/invalidate_domain_cache_worker_spec.rb61
-rw-r--r--spec/workers/pages_worker_spec.rb24
-rw-r--r--spec/workers/projects/after_import_worker_spec.rb2
-rw-r--r--spec/workers/projects/post_creation_worker_spec.rb34
-rw-r--r--spec/workers/remove_expired_members_worker_spec.rb26
-rw-r--r--spec/workers/repository_check/single_repository_worker_spec.rb62
-rw-r--r--spec/workers/repository_import_worker_spec.rb32
-rw-r--r--spec/workers/run_pipeline_schedule_worker_spec.rb53
-rw-r--r--spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb12
34 files changed, 989 insertions, 582 deletions
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index 0fcdbccc304..e3f0ee65205 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -39,8 +39,11 @@ RSpec.describe BulkImports::EntityWorker do
hash_including(
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
'current_stage' => nil,
'message' => 'Stage starting',
+ 'source_version' => entity.bulk_import.source_version_info.to_s,
'importer' => 'gitlab_migration'
)
)
@@ -71,7 +74,10 @@ RSpec.describe BulkImports::EntityWorker do
hash_including(
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
'current_stage' => nil,
+ 'source_version' => entity.bulk_import.source_version_info.to_s,
'importer' => 'gitlab_migration'
)
)
@@ -82,9 +88,15 @@ RSpec.describe BulkImports::EntityWorker do
hash_including(
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
'current_stage' => nil,
- 'message' => 'Error!',
- 'importer' => 'gitlab_migration'
+ 'message' => 'Entity failed',
+ 'exception.backtrace' => anything,
+ 'exception.class' => 'StandardError',
+ 'exception.message' => 'Error!',
+ 'importer' => 'gitlab_migration',
+ 'source_version' => entity.bulk_import.source_version_info.to_s
)
)
end
@@ -95,6 +107,9 @@ RSpec.describe BulkImports::EntityWorker do
exception,
bulk_import_entity_id: entity.id,
bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ source_version: entity.bulk_import.source_version_info.to_s,
importer: 'gitlab_migration'
)
@@ -112,8 +127,11 @@ RSpec.describe BulkImports::EntityWorker do
hash_including(
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
'current_stage' => 0,
'message' => 'Stage running',
+ 'source_version' => entity.bulk_import.source_version_info.to_s,
'importer' => 'gitlab_migration'
)
)
@@ -142,7 +160,10 @@ RSpec.describe BulkImports::EntityWorker do
hash_including(
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
'current_stage' => 0,
+ 'source_version' => entity.bulk_import.source_version_info.to_s,
'importer' => 'gitlab_migration'
)
)
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index 597eed3a9b9..7eb8150fb2e 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe BulkImports::ExportRequestWorker do
end
shared_examples 'requests relations export for api resource' do
- include_examples 'an idempotent worker' do
+ it_behaves_like 'an idempotent worker' do
it 'requests relations export & schedules entity worker' do
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
expect(client).to receive(:post).with(expected).twice
@@ -44,18 +44,22 @@ RSpec.describe BulkImports::ExportRequestWorker do
it 'logs retry request and reenqueues' do
allow(exception).to receive(:retriable?).twice.and_return(true)
- expect(Gitlab::Import::Logger).to receive(:error).with(
- hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'pipeline_class' => 'ExportRequestWorker',
- 'exception_class' => 'BulkImports::NetworkError',
- 'exception_message' => 'Export error',
- 'bulk_import_id' => bulk_import.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'importer' => 'gitlab_migration',
- 'message' => 'Retrying export request'
- )
- ).twice
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:error).with(
+ a_hash_including(
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'exception.backtrace' => anything,
+ 'exception.class' => 'BulkImports::NetworkError',
+ 'exception.message' => 'Export error',
+ 'message' => 'Retrying export request',
+ 'importer' => 'gitlab_migration',
+ 'source_version' => entity.bulk_import.source_version_info.to_s
+ )
+ ).twice
+ end
expect(described_class).to receive(:perform_in).twice.with(2.seconds, entity.id)
@@ -65,18 +69,24 @@ RSpec.describe BulkImports::ExportRequestWorker do
context 'when error is not retriable' do
it 'logs export failure and marks entity as failed' do
- expect(Gitlab::Import::Logger).to receive(:error).with(
- hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'pipeline_class' => 'ExportRequestWorker',
- 'exception_class' => 'BulkImports::NetworkError',
- 'exception_message' => 'Export error',
- 'correlation_id_value' => anything,
- 'bulk_import_id' => bulk_import.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'importer' => 'gitlab_migration'
- )
- ).twice
+ allow(exception).to receive(:retriable?).twice.and_return(false)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:error).with(
+ a_hash_including(
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'exception.backtrace' => anything,
+ 'exception.class' => 'BulkImports::NetworkError',
+ 'exception.message' => 'Export error',
+ 'message' => "Request to export #{entity.source_type} failed",
+ 'importer' => 'gitlab_migration',
+ 'source_version' => entity.bulk_import.source_version_info.to_s
+ )
+ ).twice
+ end
perform_multiple(job_args)
@@ -119,25 +129,30 @@ RSpec.describe BulkImports::ExportRequestWorker do
let(:entity_source_id) { 'invalid' }
it 'logs the error & requests relations export using full path url' do
+ allow(BulkImports::EntityWorker).to receive(:perform_async)
+
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
expect(client).to receive(:post).with(full_path_url).twice
end
entity.update!(source_xid: nil)
- expect(Gitlab::Import::Logger).to receive(:error).with(
- a_hash_including(
- 'message' => 'Failed to fetch source entity id',
- 'bulk_import_entity_id' => entity.id,
- 'pipeline_class' => 'ExportRequestWorker',
- 'exception_class' => 'NoMethodError',
- 'exception_message' => "undefined method `model_id' for nil:NilClass",
- 'correlation_id_value' => anything,
- 'bulk_import_id' => bulk_import.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'importer' => 'gitlab_migration'
- )
- ).twice
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:error).with(
+ a_hash_including(
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'exception.backtrace' => anything,
+ 'exception.class' => 'NoMethodError',
+ 'exception.message' => "undefined method `model_id' for nil:NilClass",
+ 'message' => 'Failed to fetch source entity id',
+ 'importer' => 'gitlab_migration',
+ 'source_version' => entity.bulk_import.source_version_info.to_s
+ )
+ ).twice
+ end
perform_multiple(job_args)
@@ -153,7 +168,7 @@ RSpec.describe BulkImports::ExportRequestWorker do
let(:expected) { "/groups/#{entity.source_xid}/export_relations" }
let(:full_path_url) { '/groups/foo%2Fbar/export_relations' }
- include_examples 'requests relations export for api resource'
+ it_behaves_like 'requests relations export for api resource'
end
context 'when entity is project' do
@@ -161,7 +176,7 @@ RSpec.describe BulkImports::ExportRequestWorker do
let(:expected) { "/projects/#{entity.source_xid}/export_relations" }
let(:full_path_url) { '/projects/foo%2Fbar/export_relations' }
- include_examples 'requests relations export for api resource'
+ it_behaves_like 'requests relations export for api resource'
end
end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index ee65775f170..23fbc5688ec 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -37,9 +37,10 @@ RSpec.describe BulkImports::PipelineWorker do
.with(
hash_including(
'pipeline_name' => 'FakePipeline',
- 'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
- 'importer' => 'gitlab_migration'
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path
)
)
end
@@ -87,8 +88,10 @@ RSpec.describe BulkImports::PipelineWorker do
'pipeline_tracker_id' => pipeline_tracker.id,
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
- 'message' => 'Unstarted pipeline not found',
- 'importer' => 'gitlab_migration'
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'source_version' => entity.bulk_import.source_version_info.to_s,
+ 'message' => 'Unstarted pipeline not found'
)
)
end
@@ -126,7 +129,13 @@ RSpec.describe BulkImports::PipelineWorker do
'pipeline_name' => 'FakePipeline',
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
- 'message' => 'Error!',
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'class' => 'BulkImports::PipelineWorker',
+ 'exception.backtrace' => anything,
+ 'exception.message' => 'Error!',
+ 'message' => 'Pipeline failed',
+ 'source_version' => entity.bulk_import.source_version_info.to_s,
'importer' => 'gitlab_migration'
)
)
@@ -137,9 +146,12 @@ RSpec.describe BulkImports::PipelineWorker do
.with(
instance_of(StandardError),
bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
+ bulk_import_id: entity.bulk_import.id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
pipeline_name: pipeline_tracker.pipeline_name,
- importer: 'gitlab_migration'
+ importer: 'gitlab_migration',
+ source_version: entity.bulk_import.source_version_info.to_s
)
expect(BulkImports::EntityWorker)
@@ -188,8 +200,9 @@ RSpec.describe BulkImports::PipelineWorker do
'pipeline_name' => 'FakePipeline',
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
- 'message' => 'Skipping pipeline due to failed entity',
- 'importer' => 'gitlab_migration'
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'message' => 'Skipping pipeline due to failed entity'
)
)
end
@@ -237,7 +250,8 @@ RSpec.describe BulkImports::PipelineWorker do
'pipeline_name' => 'FakePipeline',
'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
- 'importer' => 'gitlab_migration'
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path
)
)
end
@@ -361,9 +375,16 @@ RSpec.describe BulkImports::PipelineWorker do
hash_including(
'pipeline_name' => 'NdjsonPipeline',
'bulk_import_entity_id' => entity.id,
- 'message' => 'Pipeline timeout',
'bulk_import_id' => entity.bulk_import_id,
- 'importer' => 'gitlab_migration'
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'class' => 'BulkImports::PipelineWorker',
+ 'exception.backtrace' => anything,
+ 'exception.class' => 'BulkImports::Pipeline::ExpiredError',
+ 'exception.message' => 'Pipeline timeout',
+ 'importer' => 'gitlab_migration',
+ 'message' => 'Pipeline failed',
+ 'source_version' => entity.bulk_import.source_version_info.to_s
)
)
end
@@ -390,9 +411,14 @@ RSpec.describe BulkImports::PipelineWorker do
hash_including(
'pipeline_name' => 'NdjsonPipeline',
'bulk_import_entity_id' => entity.id,
- 'message' => 'Export from source instance failed: Error!',
'bulk_import_id' => entity.bulk_import_id,
- 'importer' => 'gitlab_migration'
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'exception.backtrace' => anything,
+ 'exception.class' => 'BulkImports::Pipeline::FailedError',
+ 'exception.message' => 'Export from source instance failed: Error!',
+ 'importer' => 'gitlab_migration',
+ 'source_version' => entity.bulk_import.source_version_info.to_s
)
)
end
diff --git a/spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb b/spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb
index e18539cc6e3..0d4b8243050 100644
--- a/spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb
+++ b/spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb
@@ -8,7 +8,10 @@ RSpec.describe Ci::JobArtifacts::TrackArtifactReportWorker do
let_it_be(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
- let_it_be(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project, user: user) }
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, :with_test_reports, :with_coverage_reports,
+ project: project, user: user)
+ end
subject(:perform) { described_class.new.perform(pipeline_id) }
@@ -25,17 +28,29 @@ RSpec.describe Ci::JobArtifacts::TrackArtifactReportWorker do
it_behaves_like 'an idempotent worker' do
let(:job_args) { pipeline_id }
- let(:test_event_name) { 'i_testing_test_report_uploaded' }
+ let(:test_event_name_1) { 'i_testing_test_report_uploaded' }
+ let(:test_event_name_2) { 'i_testing_coverage_report_uploaded' }
let(:start_time) { 1.week.ago }
let(:end_time) { 1.week.from_now }
subject(:idempotent_perform) { perform_multiple(pipeline_id, exec_times: 2) }
- it 'does not try to increment again' do
+ it 'does not try to increment again for the test event' do
+ idempotent_perform
+
+ unique_pipeline_pass = Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: test_event_name_1,
+ start_date: start_time,
+ end_date: end_time
+ )
+ expect(unique_pipeline_pass).to eq(1)
+ end
+
+ it 'does not try to increment again for the coverage event' do
idempotent_perform
unique_pipeline_pass = Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
- event_names: test_event_name,
+ event_names: test_event_name_2,
start_date: start_time,
end_date: end_time
)
diff --git a/spec/workers/cluster_configure_istio_worker_spec.rb b/spec/workers/cluster_configure_istio_worker_spec.rb
deleted file mode 100644
index 5d949fde973..00000000000
--- a/spec/workers/cluster_configure_istio_worker_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ClusterConfigureIstioWorker do
- describe '#perform' do
- shared_examples 'configure istio service' do
- it 'configures istio' do
- expect_any_instance_of(Clusters::Kubernetes::ConfigureIstioIngressService).to receive(:execute)
-
- described_class.new.perform(cluster.id)
- end
- end
-
- context 'when provider type is gcp' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
-
- it_behaves_like 'configure istio service'
- end
-
- context 'when provider type is aws' do
- let(:cluster) { create(:cluster, :project, :provided_by_aws) }
-
- it_behaves_like 'configure istio service'
- end
-
- context 'when provider type is user' do
- let(:cluster) { create(:cluster, :project, :provided_by_user) }
-
- it_behaves_like 'configure istio service'
- end
-
- context 'when cluster does not exist' do
- it 'does not provision a cluster' do
- expect_any_instance_of(Clusters::Kubernetes::ConfigureIstioIngressService).not_to receive(:execute)
-
- described_class.new.perform(123)
- end
- end
- end
-end
diff --git a/spec/workers/cluster_update_app_worker_spec.rb b/spec/workers/cluster_update_app_worker_spec.rb
deleted file mode 100644
index 8f61ee17162..00000000000
--- a/spec/workers/cluster_update_app_worker_spec.rb
+++ /dev/null
@@ -1,112 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ClusterUpdateAppWorker do
- include ExclusiveLeaseHelpers
-
- let_it_be(:project) { create(:project) }
-
- let(:prometheus_update_service) { spy }
-
- subject { described_class.new }
-
- around do |example|
- freeze_time { example.run }
- end
-
- before do
- allow(::Clusters::Applications::PrometheusUpdateService).to receive(:new).and_return(prometheus_update_service)
- end
-
- describe '#perform' do
- context 'when the application last_update_started_at is higher than the time the job was scheduled in' do
- it 'does nothing' do
- application = create(:clusters_applications_prometheus, :updated, last_update_started_at: Time.current)
-
- expect(prometheus_update_service).not_to receive(:execute)
-
- expect(subject.perform(application.name, application.id, project.id, Time.current - 5.minutes)).to be_nil
- end
- end
-
- context 'when another worker is already running' do
- it 'returns nil' do
- application = create(:clusters_applications_prometheus, :updating)
-
- expect(subject.perform(application.name, application.id, project.id, Time.current)).to be_nil
- end
- end
-
- it 'executes PrometheusUpdateService' do
- application = create(:clusters_applications_prometheus, :installed)
-
- expect(prometheus_update_service).to receive(:execute)
-
- subject.perform(application.name, application.id, project.id, Time.current)
- end
-
- context 'application is externally installed' do
- it 'does not execute PrometheusUpdateService' do
- application = create(:clusters_applications_prometheus, :externally_installed)
-
- expect(prometheus_update_service).not_to receive(:execute)
-
- subject.perform(application.name, application.id, project.id, Time.current)
- end
- end
-
- context 'with exclusive lease' do
- let_it_be(:user) { create(:user) }
-
- let(:application) { create(:clusters_applications_prometheus, :installed) }
- let(:lease_key) { "#{described_class.name.underscore}-#{application.id}" }
-
- before do
- # update_highest_role uses exclusive key too:
- allow(Gitlab::ExclusiveLease).to receive(:new).and_call_original
- stub_exclusive_lease_taken(lease_key)
- end
-
- it 'does not allow same app to be updated concurrently by same project' do
- expect(Clusters::Applications::PrometheusUpdateService).not_to receive(:new)
-
- subject.perform(application.name, application.id, project.id, Time.current)
- end
-
- it 'does not allow same app to be updated concurrently by different project', :aggregate_failures do
- project1 = create(:project, namespace: create(:namespace, owner: user))
-
- expect(Clusters::Applications::PrometheusUpdateService).not_to receive(:new)
-
- subject.perform(application.name, application.id, project1.id, Time.current)
- end
-
- it 'allows different app to be updated concurrently by same project' do
- application2 = create(:clusters_applications_prometheus, :installed)
- lease_key2 = "#{described_class.name.underscore}-#{application2.id}"
-
- stub_exclusive_lease(lease_key2)
-
- expect(Clusters::Applications::PrometheusUpdateService).to receive(:new)
- .with(application2, project)
-
- subject.perform(application2.name, application2.id, project.id, Time.current)
- end
-
- it 'allows different app to be updated by different project', :aggregate_failures do
- application2 = create(:clusters_applications_prometheus, :installed)
- lease_key2 = "#{described_class.name.underscore}-#{application2.id}"
-
- project2 = create(:project, namespace: create(:namespace, owner: user))
-
- stub_exclusive_lease(lease_key2)
-
- expect(Clusters::Applications::PrometheusUpdateService).to receive(:new)
- .with(application2, project2)
-
- subject.perform(application2.name, application2.id, project2.id, Time.current)
- end
- end
- end
-end
diff --git a/spec/workers/cluster_wait_for_app_update_worker_spec.rb b/spec/workers/cluster_wait_for_app_update_worker_spec.rb
deleted file mode 100644
index b7f7622a0e6..00000000000
--- a/spec/workers/cluster_wait_for_app_update_worker_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ClusterWaitForAppUpdateWorker do
- let(:check_upgrade_progress_service) { spy }
-
- before do
- allow(::Clusters::Applications::CheckUpgradeProgressService).to receive(:new).and_return(check_upgrade_progress_service)
- end
-
- it 'runs CheckUpgradeProgressService when application is found' do
- application = create(:clusters_applications_prometheus)
-
- expect(check_upgrade_progress_service).to receive(:execute)
-
- subject.perform(application.name, application.id)
- end
-
- it 'does not run CheckUpgradeProgressService when application is not found' do
- expect(check_upgrade_progress_service).not_to receive(:execute)
-
- expect do
- subject.perform("prometheus", -1)
- end.to raise_error(ActiveRecord::RecordNotFound)
- end
-end
diff --git a/spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb b/spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb
deleted file mode 100644
index 7a42c988a92..00000000000
--- a/spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ClusterWaitForIngressIpAddressWorker do
- describe '#perform' do
- let(:service) { instance_double(Clusters::Applications::CheckIngressIpAddressService, execute: true) }
- let(:application) { instance_double(Clusters::Applications::Ingress) }
- let(:worker) { described_class.new }
-
- before do
- allow(worker)
- .to receive(:find_application)
- .with('ingress', 117)
- .and_yield(application)
-
- allow(Clusters::Applications::CheckIngressIpAddressService)
- .to receive(:new)
- .with(application)
- .and_return(service)
-
- allow(described_class)
- .to receive(:perform_in)
- end
-
- it 'finds the application and calls CheckIngressIpAddressService#execute' do
- worker.perform('ingress', 117)
-
- expect(service).to have_received(:execute)
- end
- end
-end
diff --git a/spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb b/spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb
deleted file mode 100644
index d1dd1cd738b..00000000000
--- a/spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::WaitForUninstallAppWorker, '#perform' do
- let(:app) { create(:clusters_applications_helm) }
- let(:app_name) { app.name }
- let(:app_id) { app.id }
-
- subject { described_class.new.perform(app_name, app_id) }
-
- context 'when app exists' do
- let(:service) { instance_double(Clusters::Applications::CheckUninstallProgressService) }
-
- it 'calls the check service' do
- expect(Clusters::Applications::CheckUninstallProgressService).to receive(:new).with(app).and_return(service)
- expect(service).to receive(:execute).once
-
- subject
- end
- end
-
- context 'when app does not exist' do
- let(:app_id) { 0 }
-
- it 'does not call the check service' do
- expect(Clusters::Applications::CheckUninstallProgressService).not_to receive(:new)
-
- expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
- end
- end
-end
diff --git a/spec/workers/concerns/reenqueuer_spec.rb b/spec/workers/concerns/reenqueuer_spec.rb
index 56db2239bb1..e7287b55af2 100644
--- a/spec/workers/concerns/reenqueuer_spec.rb
+++ b/spec/workers/concerns/reenqueuer_spec.rb
@@ -121,14 +121,7 @@ RSpec.describe Reenqueuer::ReenqueuerSleeper do
# Unit test ensure_minimum_duration
describe '#ensure_minimum_duration' do
around do |example|
- # Allow Timecop.travel without the block form
- Timecop.safe_mode = false
-
- Timecop.freeze do
- example.run
- end
-
- Timecop.safe_mode = true
+ freeze_time { example.run }
end
let(:minimum_duration) { 4.seconds }
@@ -140,31 +133,31 @@ RSpec.describe Reenqueuer::ReenqueuerSleeper do
expect(dummy).to receive(:sleep).with(a_value_within(0.01).of(time_left))
dummy.ensure_minimum_duration(minimum_duration) do
- Timecop.travel(minimum_duration - time_left)
+ travel(minimum_duration - time_left)
end
end
end
context 'when the block completes just before the minimum duration' do
- let(:time_left) { 0.1.seconds }
+ let(:time_left) { 1.second }
it 'sleeps until the minimum duration' do
expect(dummy).to receive(:sleep).with(a_value_within(0.01).of(time_left))
dummy.ensure_minimum_duration(minimum_duration) do
- Timecop.travel(minimum_duration - time_left)
+ travel(minimum_duration - time_left)
end
end
end
context 'when the block completes just after the minimum duration' do
- let(:time_over) { 0.1.seconds }
+ let(:time_over) { 1.second }
it 'does not sleep' do
expect(dummy).not_to receive(:sleep)
dummy.ensure_minimum_duration(minimum_duration) do
- Timecop.travel(minimum_duration + time_over)
+ travel(minimum_duration + time_over)
end
end
end
@@ -176,7 +169,7 @@ RSpec.describe Reenqueuer::ReenqueuerSleeper do
expect(dummy).not_to receive(:sleep)
dummy.ensure_minimum_duration(minimum_duration) do
- Timecop.travel(minimum_duration + time_over)
+ travel(minimum_duration + time_over)
end
end
end
diff --git a/spec/workers/container_registry/cleanup_worker_spec.rb b/spec/workers/container_registry/cleanup_worker_spec.rb
new file mode 100644
index 00000000000..ffcb421ce1e
--- /dev/null
+++ b/spec/workers/container_registry/cleanup_worker_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ContainerRegistry::CleanupWorker, :aggregate_failures do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ let_it_be_with_reload(:container_repository) { create(:container_repository) }
+
+ subject(:perform) { worker.perform }
+
+ context 'with no delete scheduled container repositories' do
+ it "doesn't enqueue delete container repository jobs" do
+ expect(ContainerRegistry::DeleteContainerRepositoryWorker).not_to receive(:perform_with_capacity)
+
+ perform
+ end
+ end
+
+ context 'with delete scheduled container repositories' do
+ before do
+ container_repository.delete_scheduled!
+ end
+
+ it 'enqueues delete container repository jobs' do
+ expect(ContainerRegistry::DeleteContainerRepositoryWorker).to receive(:perform_with_capacity)
+
+ perform
+ end
+ end
+
+ context 'with stale delete ongoing container repositories' do
+ let(:delete_started_at) { (described_class::STALE_DELETE_THRESHOLD + 5.minutes).ago }
+
+ before do
+ container_repository.update!(status: :delete_ongoing, delete_started_at: delete_started_at)
+ end
+
+ it 'resets them and enqueue delete container repository jobs' do
+ expect(ContainerRegistry::DeleteContainerRepositoryWorker).to receive(:perform_with_capacity)
+
+ expect { perform }
+ .to change { container_repository.reload.status }.from('delete_ongoing').to('delete_scheduled')
+ .and change { container_repository.reload.delete_started_at }.to(nil)
+ end
+ end
+
+ context 'for counts logging' do
+ let_it_be(:delete_started_at) { (described_class::STALE_DELETE_THRESHOLD + 5.minutes).ago }
+ let_it_be(:stale_delete_container_repository) do
+ create(:container_repository, :status_delete_ongoing, delete_started_at: delete_started_at)
+ end
+
+ before do
+ container_repository.delete_scheduled!
+ end
+
+ it 'logs the counts' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:delete_scheduled_container_repositories_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:stale_delete_container_repositories_count, 1)
+
+ perform
+ end
+ end
+
+ context 'with container_registry_delete_repository_with_cron_worker disabled' do
+ before do
+ stub_feature_flags(container_registry_delete_repository_with_cron_worker: false)
+ end
+
+ it 'does not run' do
+ expect(worker).not_to receive(:reset_stale_deletes)
+ expect(worker).not_to receive(:enqueue_delete_container_repository_jobs)
+ expect(worker).not_to receive(:log_counts)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/container_registry/delete_container_repository_worker_spec.rb b/spec/workers/container_registry/delete_container_repository_worker_spec.rb
new file mode 100644
index 00000000000..381e0cc164c
--- /dev/null
+++ b/spec/workers/container_registry/delete_container_repository_worker_spec.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ContainerRegistry::DeleteContainerRepositoryWorker, :aggregate_failures do
+ let_it_be_with_reload(:container_repository) { create(:container_repository) }
+ let_it_be(:second_container_repository) { create(:container_repository) }
+
+ let(:worker) { described_class.new }
+
+ describe '#perform_work' do
+ subject(:perform_work) { worker.perform_work }
+
+ context 'with no work to do - no container repositories pending deletion' do
+ it 'will not delete any container repository' do
+ expect(::Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+
+ expect { perform_work }.to not_change { ContainerRepository.count }
+ end
+ end
+
+ context 'with work to do' do
+ let(:tags_count) { 0 }
+ let(:cleanup_tags_service_response) { { status: :success, original_size: 100, deleted_size: 0 } }
+ let(:cleanup_tags_service_double) do
+ instance_double('Projects::ContainerRepository::CleanupTagsService', execute: cleanup_tags_service_response)
+ end
+
+ before do
+ container_repository.delete_scheduled!
+ allow(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new)
+ .with(container_repository: container_repository, params: described_class::CLEANUP_TAGS_SERVICE_PARAMS)
+ .and_return(cleanup_tags_service_double)
+ end
+
+ it 'picks and destroys the delete scheduled container repository' do
+ expect_next_pending_destruction_container_repository do |repo|
+ expect_logs_on(repo, tags_size_before_delete: 100, deleted_tags_size: 0)
+ expect(repo).to receive(:destroy!).and_call_original
+ end
+ perform_work
+ expect(ContainerRepository.all).to contain_exactly(second_container_repository)
+ end
+
+ context 'with an error during the tags cleanup' do
+ let(:cleanup_tags_service_response) { { status: :error, original_size: 100, deleted_size: 0 } }
+
+ it 'does not delete the container repository' do
+ expect_next_pending_destruction_container_repository do |repo|
+ expect_logs_on(repo, tags_size_before_delete: 100, deleted_tags_size: 0)
+ expect(repo).to receive(:set_delete_scheduled_status).and_call_original
+ expect(repo).not_to receive(:destroy!)
+ end
+ expect { perform_work }.to not_change(ContainerRepository, :count)
+ .and not_change { container_repository.reload.status }
+ expect(container_repository.delete_started_at).to eq(nil)
+ end
+ end
+
+ context 'with an error during the destroy' do
+ it 'does not delete the container repository' do
+ expect_next_pending_destruction_container_repository do |repo|
+ expect_logs_on(repo, tags_size_before_delete: 100, deleted_tags_size: 0)
+ expect(repo).to receive(:destroy!).and_raise('Error!')
+ expect(repo).to receive(:set_delete_scheduled_status).and_call_original
+ end
+
+ expect(::Gitlab::ErrorTracking).to receive(:log_exception)
+ .with(instance_of(RuntimeError), class: described_class.name)
+ expect { perform_work }.to not_change(ContainerRepository, :count)
+ .and not_change { container_repository.reload.status }
+ expect(container_repository.delete_started_at).to eq(nil)
+ end
+ end
+
+ context 'with tags left to destroy' do
+ let(:tags_count) { 10 }
+
+ it 'does not delete the container repository' do
+ expect_next_pending_destruction_container_repository do |repo|
+ expect(repo).not_to receive(:destroy!)
+ expect(repo).to receive(:set_delete_scheduled_status).and_call_original
+ end
+
+ expect { perform_work }.to not_change(ContainerRepository, :count)
+ .and not_change { container_repository.reload.status }
+ expect(container_repository.delete_started_at).to eq(nil)
+ end
+ end
+
+ context 'with no tags on the container repository' do
+ let(:tags_count) { 0 }
+ let(:cleanup_tags_service_response) { { status: :success, original_size: 0, deleted_size: 0 } }
+
+ it 'picks and destroys the delete scheduled container repository' do
+ expect_next_pending_destruction_container_repository do |repo|
+ expect_logs_on(repo, tags_size_before_delete: 0, deleted_tags_size: 0)
+ expect(repo).to receive(:destroy!).and_call_original
+ end
+ perform_work
+ expect(ContainerRepository.all).to contain_exactly(second_container_repository)
+ end
+ end
+
+ def expect_next_pending_destruction_container_repository
+ original_method = ContainerRepository.method(:next_pending_destruction)
+ expect(ContainerRepository).to receive(:next_pending_destruction).with(order_by: nil) do
+ original_method.call(order_by: nil).tap do |repo|
+ allow(repo).to receive(:tags_count).and_return(tags_count)
+ expect(repo).to receive(:set_delete_ongoing_status).and_call_original
+ yield repo
+ end
+ end
+ end
+
+ def expect_logs_on(container_repository, tags_size_before_delete:, deleted_tags_size:)
+ payload = {
+ project_id: container_repository.project.id,
+ container_repository_id: container_repository.id,
+ container_repository_path: container_repository.path,
+ tags_size_before_delete: tags_size_before_delete,
+ deleted_tags_size: deleted_tags_size
+ }
+ expect(worker.logger).to receive(:info).with(worker.structured_payload(payload))
+ .and_call_original
+ end
+ end
+ end
+
+ describe '#max_running_jobs' do
+ subject { worker.max_running_jobs }
+
+ it { is_expected.to eq(described_class::MAX_CAPACITY) }
+ end
+
+ describe '#remaining_work_count' do
+ let_it_be(:delete_scheduled_container_repositories) do
+ create_list(:container_repository, described_class::MAX_CAPACITY + 2, :status_delete_scheduled)
+ end
+
+ subject { worker.remaining_work_count }
+
+ it { is_expected.to eq(described_class::MAX_CAPACITY + 1) }
+ end
+end
diff --git a/spec/workers/database/batched_background_migration/execution_worker_spec.rb b/spec/workers/database/batched_background_migration/execution_worker_spec.rb
new file mode 100644
index 00000000000..9a850a98f2f
--- /dev/null
+++ b/spec/workers/database/batched_background_migration/execution_worker_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::BatchedBackgroundMigration::ExecutionWorker, :clean_gitlab_redis_shared_state do
+ include ExclusiveLeaseHelpers
+
+ describe '#perform' do
+ let(:database_name) { Gitlab::Database::MAIN_DATABASE_NAME.to_sym }
+ let(:base_model) { Gitlab::Database.database_base_models[database_name] }
+ let(:table_name) { :events }
+ let(:job_interval) { 5.minutes }
+ let(:lease_timeout) { job_interval * described_class::LEASE_TIMEOUT_MULTIPLIER }
+ let(:interval_variance) { described_class::INTERVAL_VARIANCE }
+
+ subject(:worker) { described_class.new }
+
+ context 'when the feature flag is disabled' do
+ let(:migration) do
+ create(:batched_background_migration, :active, interval: job_interval, table_name: table_name)
+ end
+
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: false)
+ end
+
+ it 'does nothing' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).not_to receive(:find_executable)
+ expect(worker).not_to receive(:run_migration_job)
+
+ worker.perform(database_name, migration.id)
+ end
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: true)
+ end
+
+ context 'when the provided database is sharing config' do
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ it 'does nothing' do
+ ci_model = Gitlab::Database.database_base_models['ci']
+ expect(Gitlab::Database).to receive(:db_config_share_with)
+ .with(ci_model.connection_db_config).and_return('main')
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).not_to receive(:find_executable)
+ expect(worker).not_to receive(:run_migration_job)
+
+ worker.perform(:ci, 123)
+ end
+ end
+
+ context 'when migration does not exist' do
+ it 'does nothing' do
+ expect(worker).not_to receive(:run_migration_job)
+
+ worker.perform(database_name, non_existing_record_id)
+ end
+ end
+
+ context 'when migration exists' do
+ let(:migration) do
+ create(:batched_background_migration, :active, interval: job_interval, table_name: table_name)
+ end
+
+ before do
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_executable)
+ .with(migration.id, connection: base_model.connection)
+ .and_return(migration)
+ end
+
+ context 'when the migration is no longer active' do
+ it 'does not run the migration' do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(base_model.connection).and_yield
+
+ expect(migration).to receive(:active?).and_return(false)
+
+ expect(worker).not_to receive(:run_migration_job)
+
+ worker.perform(database_name, migration.id)
+ end
+ end
+
+ context 'when the interval has not elapsed' do
+ it 'does not run the migration' do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(base_model.connection).and_yield
+ expect(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(false)
+ expect(worker).not_to receive(:run_migration_job)
+
+ worker.perform(database_name, migration.id)
+ end
+ end
+
+ context 'when the migration is still active and the interval has elapsed' do
+ let(:table_name_lease_key) do
+ "#{described_class.name.underscore}:database_name:#{database_name}:" \
+ "table_name:#{table_name}"
+ end
+
+ context 'when can not obtain lease on the table name' do
+ it 'does nothing' do
+ stub_exclusive_lease_taken(table_name_lease_key, timeout: lease_timeout)
+
+ expect(worker).not_to receive(:run_migration_job)
+
+ worker.perform(database_name, migration.id)
+ end
+ end
+
+ it 'always cleans up the exclusive lease' do
+ expect_to_obtain_exclusive_lease(table_name_lease_key, 'uuid-table-name', timeout: lease_timeout)
+ expect_to_cancel_exclusive_lease(table_name_lease_key, 'uuid-table-name')
+
+ expect(worker).to receive(:run_migration_job).and_raise(RuntimeError, 'I broke')
+
+ expect { worker.perform(database_name, migration.id) }.to raise_error(RuntimeError, 'I broke')
+ end
+
+ it 'runs the migration' do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(base_model.connection).and_yield
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
+ expect(instance).to receive(:run_migration_job).with(migration)
+ end
+
+ expect_to_obtain_exclusive_lease(table_name_lease_key, 'uuid-table-name', timeout: lease_timeout)
+ expect_to_cancel_exclusive_lease(table_name_lease_key, 'uuid-table-name')
+
+ expect(worker).to receive(:run_migration_job).and_call_original
+
+ worker.perform(database_name, migration.id)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 322f516fbeb..e705ca28e54 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -188,6 +188,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Clusters::Cleanup::ProjectNamespaceWorker' => 3,
'Clusters::Cleanup::ServiceAccountWorker' => 3,
'ContainerExpirationPolicies::CleanupContainerRepositoryWorker' => 0,
+ 'ContainerRegistry::DeleteContainerRepositoryWorker' => 0,
'CreateCommitSignatureWorker' => 3,
'CreateGithubWebhookWorker' => 3,
'CreateNoteDiffFileWorker' => 3,
@@ -269,6 +270,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Gitlab::GithubImport::ImportProtectedBranchWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestMergedByWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestReviewWorker' => 5,
+ 'Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
'Gitlab::GithubImport::RefreshImportJidWorker' => 5,
'Gitlab::GithubImport::Stage::FinishImportWorker' => 5,
@@ -280,6 +282,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker' => 5,
'Gitlab::GithubImport::Stage::ImportNotesWorker' => 5,
'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewRequestsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportPullRequestsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportRepositoryWorker' => 5,
@@ -339,6 +342,7 @@ RSpec.describe 'Every Sidekiq worker' do
'MergeRequests::AssigneesChangeWorker' => 3,
'MergeRequests::CreatePipelineWorker' => 3,
'MergeRequests::DeleteSourceBranchWorker' => 3,
+ 'MergeRequests::FetchSuggestedReviewersWorker' => 3,
'MergeRequests::HandleAssigneesChangeWorker' => 3,
'MergeRequests::ResolveTodosWorker' => 3,
'MergeRequests::SyncCodeOwnerApprovalRulesWorker' => 3,
@@ -398,6 +402,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Projects::ScheduleBulkRepositoryShardMovesWorker' => 3,
'Projects::UpdateRepositoryStorageWorker' => 3,
'Projects::RefreshBuildArtifactsSizeStatisticsWorker' => 0,
+ 'Projects::RegisterSuggestedReviewersProjectWorker' => 3,
'PropagateIntegrationGroupWorker' => 3,
'PropagateIntegrationInheritDescendantWorker' => 3,
'PropagateIntegrationInheritWorker' => 3,
diff --git a/spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb b/spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb
new file mode 100644
index 00000000000..fdcbfb18beb
--- /dev/null
+++ b/spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#import' do
+ let(:import_state) { build_stubbed(:import_state, :started) }
+
+ let(:project) do
+ instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
+ end
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:importer) { instance_double('Gitlab::GithubImport::Importer::IssueEventImporter') }
+
+ let(:review_request_hash) do
+ {
+ 'merge_request_id' => 6501124486,
+ 'users' => [
+ { 'id' => 4, 'login' => 'alice' },
+ { 'id' => 5, 'login' => 'bob' }
+ ]
+ }
+ end
+
+ it 'imports pull request review requests' do
+ expect(Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests),
+ project,
+ client
+ )
+ .and_return(importer)
+
+ expect(importer).to receive(:execute)
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment).with(project, :pull_request_review_request, :imported)
+
+ worker.import(project, client, review_request_hash)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
index 6fcb5db2a54..5d6dcdc10ee 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker do
expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async)
- .with(project.id, { '123' => 2 }, :pull_request_reviews)
+ .with(project.id, { '123' => 2 }, :pull_request_review_requests)
worker.import(client, project)
end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb
new file mode 100644
index 00000000000..151de9bdffc
--- /dev/null
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsReviewRequestsWorker do
+ subject(:worker) { described_class.new }
+
+ let(:project) { instance_double(Project, id: 1, import_state: import_state) }
+ let(:import_state) { instance_double(ProjectImportState, refresh_jid_expiration: true) }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+ let(:importer) { instance_double(Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImporter) }
+ let(:waiter) { Gitlab::JobWaiter.new(2, '123') }
+
+ describe '#import' do
+ it 'imports all PR review requests' do
+ expect(Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer).to receive(:execute).and_return(waiter)
+ expect(import_state).to receive(:refresh_jid_expiration)
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :pull_request_reviews)
+
+ worker.import(client, project)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
index 75d4d2dff2e..18a70273219 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
@@ -23,8 +23,7 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker do
.to receive(:execute)
.and_return(waiter)
- expect(import_state)
- .to receive(:refresh_jid_expiration)
+ expect(import_state).to receive(:refresh_jid_expiration)
expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async)
diff --git a/spec/workers/gitlab_shell_worker_spec.rb b/spec/workers/gitlab_shell_worker_spec.rb
index c46ef87333a..a5419291d35 100644
--- a/spec/workers/gitlab_shell_worker_spec.rb
+++ b/spec/workers/gitlab_shell_worker_spec.rb
@@ -2,37 +2,45 @@
require 'spec_helper'
-RSpec.describe GitlabShellWorker do
- let(:worker) { described_class.new }
-
+RSpec.describe GitlabShellWorker, :sidekiq_inline do
describe '#perform' do
- describe '#add_key' do
- it 'delegates to Gitlab::AuthorizedKeys' do
- expect_next_instance_of(Gitlab::AuthorizedKeys) do |instance|
- expect(instance).to receive(:add_key).with('foo', 'bar')
+ Gitlab::Shell::PERMITTED_ACTIONS.each do |action|
+ describe "with the #{action} action" do
+ it 'forwards the message to Gitlab::Shell' do
+ expect_next_instance_of(Gitlab::Shell) do |instance|
+ expect(instance).to respond_to(action)
+ expect(instance).to receive(action).with('foo', 'bar')
+ end
+
+ described_class.perform_async(action, 'foo', 'bar')
end
-
- worker.perform('add_key', 'foo', 'bar')
end
end
- describe '#remove_key' do
- it 'delegates to Gitlab::AuthorizedKeys' do
- expect_next_instance_of(Gitlab::AuthorizedKeys) do |instance|
- expect(instance).to receive(:remove_key).with('foo', 'bar')
+ describe 'all other commands' do
+ context 'when verify_gitlab_shell_worker_method_names is enabled' do
+ it 'raises ArgumentError' do
+ allow_next_instance_of(described_class) do |job_instance|
+ expect(job_instance).not_to receive(:gitlab_shell)
+ end
+
+ expect { described_class.perform_async('foo', 'bar', 'baz') }
+ .to raise_error(ArgumentError, 'foo not allowed for GitlabShellWorker')
end
-
- worker.perform('remove_key', 'foo', 'bar')
end
- end
- describe 'all other commands' do
- it 'delegates them to Gitlab::Shell' do
- expect_next_instance_of(Gitlab::Shell) do |instance|
- expect(instance).to receive(:foo).with('bar', 'baz')
+ context 'when verify_gitlab_shell_worker_method_names is disabled' do
+ before do
+ stub_feature_flags(verify_gitlab_shell_worker_method_names: false)
end
- worker.perform('foo', 'bar', 'baz')
+ it 'forwards the message to Gitlab::Shell' do
+ expect_next_instance_of(Gitlab::Shell) do |instance|
+ expect(instance).to receive('foo').with('bar', 'baz')
+ end
+
+ described_class.perform_async('foo', 'bar', 'baz')
+ end
end
end
end
diff --git a/spec/workers/incident_management/add_severity_system_note_worker_spec.rb b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
index bda6f729759..4d6e6610a92 100644
--- a/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
+++ b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
@@ -14,28 +14,41 @@ RSpec.describe IncidentManagement::AddSeveritySystemNoteWorker do
subject(:perform) { described_class.new.perform(incident_id, user_id) }
- shared_examples 'does not add a system note' do
+ shared_examples 'does not add anything' do
it 'does not change incident notes count' do
expect { perform }.not_to change { incident.notes.count }
end
+
+ it 'does not create a timeline event' do
+ expect(IncidentManagement::TimelineEvents::CreateService).not_to receive(:change_severity)
+ perform
+ end
end
context 'when incident and user exist' do
it 'creates a system note' do
expect { perform }.to change { incident.notes.where(author: user).count }.by(1)
end
+
+ it 'creates a timeline event' do
+ expect(IncidentManagement::TimelineEvents::CreateService)
+ .to receive(:change_severity)
+ .with(incident, user)
+ .and_call_original
+ perform
+ end
end
context 'when incident does not exist' do
let(:incident_id) { -1 }
- it_behaves_like 'does not add a system note'
+ it_behaves_like 'does not add anything'
end
context 'when incident_id is nil' do
let(:incident_id) { nil }
- it_behaves_like 'does not add a system note'
+ it_behaves_like 'does not add anything'
end
context 'when issue is not an incident' do
@@ -43,19 +56,19 @@ RSpec.describe IncidentManagement::AddSeveritySystemNoteWorker do
let(:incident_id) { issue.id }
- it_behaves_like 'does not add a system note'
+ it_behaves_like 'does not add anything'
end
context 'when user does not exist' do
let(:user_id) { -1 }
- it_behaves_like 'does not add a system note'
+ it_behaves_like 'does not add anything'
end
context 'when user_id is nil' do
let(:user_id) { nil }
- it_behaves_like 'does not add a system note'
+ it_behaves_like 'does not add anything'
end
end
end
diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
index 77190dc49d9..09d58a1189e 100644
--- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
+++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
@@ -105,9 +105,10 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
def perform_for(db:)
time = Time.current.midnight
- if db == :main
+ case db
+ when :main
time += 2.minutes
- elsif db == :ci
+ when :ci
time += 3.minutes
end
@@ -124,37 +125,6 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
expect(loose_fk_child_table_2_1.count).to eq(0)
end
- context 'when deleting in batches' do
- before do
- stub_const('LooseForeignKeys::CleanupWorker::BATCH_SIZE', 2)
- end
-
- it 'cleans up all rows' do
- expect(LooseForeignKeys::BatchCleanerService).to receive(:new).exactly(:twice).and_call_original
-
- perform_for(db: :main)
-
- expect(loose_fk_child_table_1_1.count).to eq(0)
- expect(loose_fk_child_table_1_2.where(parent_id_with_different_column: nil).count).to eq(4)
- expect(loose_fk_child_table_2_1.count).to eq(0)
- end
- end
-
- context 'when the deleted rows count limit have been reached' do
- def count_deletable_rows
- loose_fk_child_table_1_1.count + loose_fk_child_table_2_1.count
- end
-
- before do
- stub_const('LooseForeignKeys::ModificationTracker::MAX_DELETES', 2)
- stub_const('LooseForeignKeys::CleanerService::DELETE_LIMIT', 1)
- end
-
- it 'cleans up 2 rows' do
- expect { perform_for(db: :main) }.to change { count_deletable_rows }.by(-2)
- end
- end
-
describe 'multi-database support' do
where(:current_minute, :configured_base_models, :expected_connection_model) do
2 | { main: 'ActiveRecord::Base', ci: 'Ci::ApplicationRecord' } | 'ActiveRecord::Base'
diff --git a/spec/workers/mail_scheduler/notification_service_worker_spec.rb b/spec/workers/mail_scheduler/notification_service_worker_spec.rb
index ff4a1646d09..3c17025c152 100644
--- a/spec/workers/mail_scheduler/notification_service_worker_spec.rb
+++ b/spec/workers/mail_scheduler/notification_service_worker_spec.rb
@@ -42,9 +42,42 @@ RSpec.describe MailScheduler::NotificationServiceWorker do
end
end
- context 'when the method is not a public method' do
- it 'raises NoMethodError' do
- expect { worker.perform('notifiable?', *serialize(key)) }.to raise_error(NoMethodError)
+ context 'when the method is allowed' do
+ it 'calls the method on NotificationService' do
+ NotificationService.permitted_actions.each do |action|
+ expect(worker.notification_service).to receive(action).with(key)
+
+ worker.perform(action, *serialize(key))
+ end
+ end
+ end
+
+ context 'when the method is not allowed' do
+ context 'when verify_mail_scheduler_notification_service_worker_method_names is enabled' do
+ it 'raises ArgumentError' do
+ expect(worker.notification_service).not_to receive(:async)
+ expect(worker.notification_service).not_to receive(:foo)
+
+ expect { worker.perform('async', *serialize(key)) }
+ .to raise_error(ArgumentError, 'async not allowed for MailScheduler::NotificationServiceWorker')
+
+ expect { worker.perform('foo', *serialize(key)) }
+ .to raise_error(ArgumentError, 'foo not allowed for MailScheduler::NotificationServiceWorker')
+ end
+ end
+
+ context 'when verify_mail_scheduler_notification_service_worker_method_names is disabled' do
+ before do
+ stub_feature_flags(verify_mail_scheduler_notification_service_worker_method_names: false)
+ end
+
+ it 'forwards the argument to the service' do
+ expect(worker.notification_service).to receive(:async)
+ expect(worker.notification_service).to receive(:foo)
+
+ worker.perform('async', *serialize(key))
+ worker.perform('foo', *serialize(key))
+ end
end
end
end
diff --git a/spec/workers/merge_requests/delete_branch_worker_spec.rb b/spec/workers/merge_requests/delete_branch_worker_spec.rb
new file mode 100644
index 00000000000..80ca8c061f5
--- /dev/null
+++ b/spec/workers/merge_requests/delete_branch_worker_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::DeleteBranchWorker do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+
+ let(:branch) { merge_request.source_branch }
+ let(:sha) { merge_request.source_branch_sha }
+ let(:retarget_branch) { true }
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ context 'with a non-existing merge request' do
+ it 'does nothing' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+ worker.perform(non_existing_record_id, user.id, branch, retarget_branch)
+ end
+ end
+
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+
+ worker.perform(merge_request.id, non_existing_record_id, branch, retarget_branch)
+ end
+ end
+
+ context 'with existing user and merge request' do
+ it 'calls service to delete source branch' do
+ expect_next_instance_of(::Branches::DeleteService) do |instance|
+ expect(instance).to receive(:execute).with(branch)
+ end
+
+ worker.perform(merge_request.id, user.id, branch, retarget_branch)
+ end
+
+ context 'when retarget branch param is true' do
+ it 'calls the retarget chain service' do
+ expect_next_instance_of(::MergeRequests::RetargetChainService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request)
+ end
+
+ worker.perform(merge_request.id, user.id, branch, retarget_branch)
+ end
+ end
+
+ context 'when retarget branch param is false' do
+ let(:retarget_branch) { false }
+
+ it 'does not call the retarget chain service' do
+ expect(::MergeRequests::RetargetChainService).not_to receive(:new)
+
+ worker.perform(merge_request.id, user.id, branch, retarget_branch)
+ end
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:merge_request) { create(:merge_request) }
+ let(:job_args) { [merge_request.id, sha, user.id, true] }
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/delete_source_branch_worker_spec.rb b/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
index fe677103fd0..2935d3ef5dc 100644
--- a/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
+++ b/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
@@ -10,96 +10,116 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker do
let(:worker) { described_class.new }
describe '#perform' do
- context 'with a non-existing merge request' do
- it 'does nothing' do
- expect(::Branches::DeleteService).not_to receive(:new)
- expect(::MergeRequests::RetargetChainService).not_to receive(:new)
+ context 'when the add_delete_branch_worker feature flag is enabled' do
+ context 'with a non-existing merge request' do
+ it 'does nothing' do
+ expect(::MergeRequests::DeleteBranchWorker).not_to receive(:perform_async)
- worker.perform(non_existing_record_id, sha, user.id)
+ worker.perform(non_existing_record_id, sha, user.id)
+ end
end
- end
- context 'with a non-existing user' do
- it 'does nothing' do
- expect(::Branches::DeleteService).not_to receive(:new)
- expect(::MergeRequests::RetargetChainService).not_to receive(:new)
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::MergeRequests::DeleteBranchWorker).not_to receive(:perform_async)
- worker.perform(merge_request.id, sha, non_existing_record_id)
+ worker.perform(merge_request.id, sha, non_existing_record_id)
+ end
end
- end
- context 'with existing user and merge request' do
- it 'calls service to delete source branch' do
- expect_next_instance_of(::Branches::DeleteService) do |instance|
- expect(instance).to receive(:execute).with(merge_request.source_branch)
+ context 'with existing user and merge request' do
+ it 'creates a new delete branch worker async' do
+ expect(::MergeRequests::DeleteBranchWorker).to receive(:perform_async).with(merge_request.id, user.id,
+ merge_request.source_branch, true)
+
+ worker.perform(merge_request.id, sha, user.id)
end
- worker.perform(merge_request.id, sha, user.id)
- end
+ context 'source branch sha does not match' do
+ it 'does nothing' do
+ expect(::MergeRequests::DeleteBranchWorker).not_to receive(:perform_async)
- it 'calls service to try retarget merge requests' do
- expect_next_instance_of(::MergeRequests::RetargetChainService) do |instance|
- expect(instance).to receive(:execute).with(merge_request)
+ worker.perform(merge_request.id, 'new-source-branch-sha', user.id)
+ end
end
+ end
- worker.perform(merge_request.id, sha, user.id)
+ it_behaves_like 'an idempotent worker' do
+ let(:merge_request) { create(:merge_request) }
+ let(:job_args) { [merge_request.id, sha, user.id] }
+ end
+ end
+
+ context 'when the add_delete_branch_worker feature flag is disabled' do
+ before do
+ stub_feature_flags(add_delete_branch_worker: false)
end
- context 'source branch sha does not match' do
+ context 'with a non-existing merge request' do
it 'does nothing' do
expect(::Branches::DeleteService).not_to receive(:new)
expect(::MergeRequests::RetargetChainService).not_to receive(:new)
- worker.perform(merge_request.id, 'new-source-branch-sha', user.id)
+ worker.perform(non_existing_record_id, sha, user.id)
end
end
- context 'when delete service returns an error' do
- let(:service_result) { ServiceResponse.error(message: 'placeholder') }
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+ expect(::MergeRequests::RetargetChainService).not_to receive(:new)
+
+ worker.perform(merge_request.id, sha, non_existing_record_id)
+ end
+ end
- it 'tracks the exception' do
+ context 'with existing user and merge request' do
+ it 'calls service to delete source branch' do
expect_next_instance_of(::Branches::DeleteService) do |instance|
- expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
+ expect(instance).to receive(:execute).with(merge_request.source_branch)
end
- expect(service_result).to receive(:track_exception).and_call_original
+ worker.perform(merge_request.id, sha, user.id)
+ end
+
+ it 'calls service to try retarget merge requests' do
+ expect_next_instance_of(::MergeRequests::RetargetChainService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request)
+ end
worker.perform(merge_request.id, sha, user.id)
end
- context 'when track_delete_source_errors is disabled' do
- before do
- stub_feature_flags(track_delete_source_errors: false)
+ context 'source branch sha does not match' do
+ it 'does nothing' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+ expect(::MergeRequests::RetargetChainService).not_to receive(:new)
+
+ worker.perform(merge_request.id, 'new-source-branch-sha', user.id)
end
+ end
+
+ context 'when delete service returns an error' do
+ let(:service_result) { ServiceResponse.error(message: 'placeholder') }
- it 'does not track the exception' do
+ it 'still retargets the merge request' do
expect_next_instance_of(::Branches::DeleteService) do |instance|
expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
end
- expect(service_result).not_to receive(:track_exception)
+ expect_next_instance_of(::MergeRequests::RetargetChainService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request)
+ end
worker.perform(merge_request.id, sha, user.id)
end
end
-
- it 'still retargets the merge request' do
- expect_next_instance_of(::Branches::DeleteService) do |instance|
- expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
- end
-
- expect_next_instance_of(::MergeRequests::RetargetChainService) do |instance|
- expect(instance).to receive(:execute).with(merge_request)
- end
-
- worker.perform(merge_request.id, sha, user.id)
- end
end
- end
- it_behaves_like 'an idempotent worker' do
- let(:merge_request) { create(:merge_request) }
- let(:job_args) { [merge_request.id, sha, user.id] }
+ it_behaves_like 'an idempotent worker' do
+ let(:merge_request) { create(:merge_request) }
+ let(:job_args) { [merge_request.id, sha, user.id] }
+ end
end
end
end
diff --git a/spec/workers/namespaces/root_statistics_worker_spec.rb b/spec/workers/namespaces/root_statistics_worker_spec.rb
index 7b774da0bdc..30854415405 100644
--- a/spec/workers/namespaces/root_statistics_worker_spec.rb
+++ b/spec/workers/namespaces/root_statistics_worker_spec.rb
@@ -89,4 +89,17 @@ RSpec.describe Namespaces::RootStatisticsWorker, '#perform' do
.not_to change { Namespace::AggregationSchedule.count }
end
end
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ feature_flag: :root_statistics_worker_read_replica,
+ data_consistency: :sticky
+
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ end
+
+ it 'has an option to reschedule once if deduplicated' do
+ expect(described_class.get_deduplication_options).to include({ if_deduplicated: :reschedule_once })
+ end
end
diff --git a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
index b9c27c54fa1..c786d4658d4 100644
--- a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
+++ b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
@@ -4,9 +4,9 @@ require 'spec_helper'
RSpec.describe Pages::InvalidateDomainCacheWorker do
shared_examples 'clears caches with' do |event_class:, event_data:, caches:|
- let(:event) do
- event_class.new(data: event_data)
- end
+ include AfterNextHelpers
+
+ let(:event) { event_class.new(data: event_data) }
subject { consume_event(subscriber: described_class, event: event) }
@@ -14,9 +14,8 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
it 'clears the cache with Gitlab::Pages::CacheControl' do
caches.each do |cache|
- expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache[:type], id: cache[:id]) do |cache_control|
- expect(cache_control).to receive(:clear_cache)
- end
+ expect_next(Gitlab::Pages::CacheControl, type: cache[:type], id: cache[:id])
+ .to receive(:clear_cache)
end
subject
@@ -181,19 +180,17 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
]
end
- it 'does not clear the cache when the attributes is not pages related' do
- event = Projects::ProjectAttributesChangedEvent.new(
- data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- attributes: ['unknown']
- }
- )
-
- expect(described_class).not_to receive(:clear_cache)
-
- ::Gitlab::EventStore.publish(event)
+ it_behaves_like 'ignores the published event' do
+ let(:event) do
+ Projects::ProjectAttributesChangedEvent.new(
+ data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ attributes: ['unknown']
+ }
+ )
+ end
end
end
@@ -204,26 +201,24 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
project_id: 1,
namespace_id: 2,
root_namespace_id: 3,
- features: ["pages_access_level"]
+ features: ['pages_access_level']
},
caches: [
{ type: :project, id: 1 },
{ type: :namespace, id: 3 }
]
- it 'does not clear the cache when the features is not pages related' do
- event = Projects::ProjectFeaturesChangedEvent.new(
- data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- features: ['unknown']
- }
- )
-
- expect(described_class).not_to receive(:clear_cache)
-
- ::Gitlab::EventStore.publish(event)
+ it_behaves_like 'ignores the published event' do
+ let(:event) do
+ Projects::ProjectFeaturesChangedEvent.new(
+ data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ features: ['unknown']
+ }
+ )
+ end
end
end
diff --git a/spec/workers/pages_worker_spec.rb b/spec/workers/pages_worker_spec.rb
index ad714d8d11e..f0d29037fa4 100644
--- a/spec/workers/pages_worker_spec.rb
+++ b/spec/workers/pages_worker_spec.rb
@@ -3,14 +3,26 @@
require 'spec_helper'
RSpec.describe PagesWorker, :sidekiq_inline do
- let(:project) { create(:project) }
- let(:ci_build) { create(:ci_build, project: project) }
+ let_it_be(:ci_build) { create(:ci_build) }
- it 'calls UpdatePagesService' do
- expect_next_instance_of(Projects::UpdatePagesService, project, ci_build) do |service|
- expect(service).to receive(:execute)
+ context 'when called with the deploy action' do
+ it 'calls UpdatePagesService' do
+ expect_next_instance_of(Projects::UpdatePagesService, ci_build.project, ci_build) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ described_class.perform_async(:deploy, ci_build.id)
end
+ end
- described_class.perform_async(:deploy, ci_build.id)
+ context 'when called with any other action' do
+ it 'does nothing' do
+ expect_next_instance_of(described_class) do |job_class|
+ expect(job_class).not_to receive(:foo)
+ expect(job_class).not_to receive(:deploy)
+ end
+
+ described_class.perform_async(:foo)
+ end
end
end
diff --git a/spec/workers/projects/after_import_worker_spec.rb b/spec/workers/projects/after_import_worker_spec.rb
index a14b2443173..85d15c89b0a 100644
--- a/spec/workers/projects/after_import_worker_spec.rb
+++ b/spec/workers/projects/after_import_worker_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Projects::AfterImportWorker do
- include GitHelpers
-
subject { worker.perform(project.id) }
let(:worker) { described_class.new }
diff --git a/spec/workers/projects/post_creation_worker_spec.rb b/spec/workers/projects/post_creation_worker_spec.rb
index 3158ac9fa27..732dc540fb7 100644
--- a/spec/workers/projects/post_creation_worker_spec.rb
+++ b/spec/workers/projects/post_creation_worker_spec.rb
@@ -81,6 +81,40 @@ RSpec.describe Projects::PostCreationWorker do
end
end
end
+
+ describe 'Incident timeline event tags' do
+ context 'when project is nil' do
+ let(:job_args) { [nil] }
+
+ it 'does not create event tags' do
+ expect { subject }.not_to change { IncidentManagement::TimelineEventTag.count }
+ end
+ end
+
+ context 'when project is created', :aggregate_failures do
+ it 'creates tags for the project' do
+ expect { subject }.to change { IncidentManagement::TimelineEventTag.count }.by(2)
+
+ expect(project.incident_management_timeline_event_tags.pluck_names).to match_array(
+ [
+ ::IncidentManagement::TimelineEventTag::START_TIME_TAG_NAME,
+ ::IncidentManagement::TimelineEventTag::END_TIME_TAG_NAME
+ ]
+ )
+ end
+
+ it 'raises error if record creation fails' do
+ allow_next_instance_of(IncidentManagement::TimelineEventTag) do |tag|
+ allow(tag).to receive(:valid?).and_return(false)
+ end
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), include(extra: { project_id: a_kind_of(Integer) })).twice
+ subject
+
+ expect(project.incident_management_timeline_event_tags).to be_empty
+ end
+ end
+ end
end
end
end
diff --git a/spec/workers/remove_expired_members_worker_spec.rb b/spec/workers/remove_expired_members_worker_spec.rb
index 44b8fa21be4..062a9bcfa83 100644
--- a/spec/workers/remove_expired_members_worker_spec.rb
+++ b/spec/workers/remove_expired_members_worker_spec.rb
@@ -56,27 +56,13 @@ RSpec.describe RemoveExpiredMembersWorker do
expect(Member.find_by(user_id: expired_project_bot.id)).to be_nil
end
- context 'when user_destroy_with_limited_execution_time_worker is enabled' do
- it 'initiates project bot removal' do
- worker.perform
-
- expect(
- Users::GhostUserMigration.where(user: expired_project_bot,
- initiator_user: nil)
- ).to be_exists
- end
- end
-
- context 'when user_destroy_with_limited_execution_time_worker is disabled' do
- before do
- stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
- end
-
- it 'deletes expired project bot' do
- worker.perform
+ it 'initiates project bot removal' do
+ worker.perform
- expect(User.exists?(expired_project_bot.id)).to be(false)
- end
+ expect(
+ Users::GhostUserMigration.where(user: expired_project_bot,
+ initiator_user: nil)
+ ).to be_exists
end
end
diff --git a/spec/workers/repository_check/single_repository_worker_spec.rb b/spec/workers/repository_check/single_repository_worker_spec.rb
index dbb24cc047e..b8db262598b 100644
--- a/spec/workers/repository_check/single_repository_worker_spec.rb
+++ b/spec/workers/repository_check/single_repository_worker_spec.rb
@@ -6,12 +6,18 @@ require 'fileutils'
RSpec.describe RepositoryCheck::SingleRepositoryWorker do
subject(:worker) { described_class.new }
+ before do
+ allow(::Gitlab::Git::Repository).to receive(:new).and_call_original
+ end
+
it 'skips when the project has no push events' do
project = create(:project, :repository, :wiki_disabled)
project.events.destroy_all # rubocop: disable Cop/DestroyAll
- break_project(project)
- expect(worker).not_to receive(:git_fsck)
+ repository = instance_double(::Gitlab::Git::Repository)
+ allow(::Gitlab::Git::Repository).to receive(:new)
+ .with(project.repository_storage, "#{project.disk_path}.git", anything, anything, container: project)
+ .and_return(repository)
worker.perform(project.id)
@@ -21,7 +27,12 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
it 'fails when the project has push events and a broken repository' do
project = create(:project, :repository)
create_push_event(project)
- break_project(project)
+
+ repository = project.repository.raw
+ expect(repository).to receive(:fsck).and_raise(::Gitlab::Git::Repository::GitError)
+ expect(::Gitlab::Git::Repository).to receive(:new)
+ .with(project.repository_storage, "#{project.disk_path}.git", anything, anything, container: project)
+ .and_return(repository)
worker.perform(project.id)
@@ -32,7 +43,11 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
project = create(:project, :repository, :wiki_disabled)
create_push_event(project)
- expect(worker).to receive(:git_fsck).and_call_original
+ repository = project.repository.raw
+ expect(repository).to receive(:fsck).and_call_original
+ expect(::Gitlab::Git::Repository).to receive(:new)
+ .with(project.repository_storage, "#{project.disk_path}.git", anything, anything, container: project)
+ .and_return(repository)
expect do
worker.perform(project.id)
@@ -50,7 +65,12 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
worker.perform(project.id)
expect(project.reload.last_repository_check_failed).to eq(false)
- break_wiki(project)
+ repository = project.wiki.repository.raw
+ expect(repository).to receive(:fsck).and_raise(::Gitlab::Git::Repository::GitError)
+ expect(::Gitlab::Git::Repository).to receive(:new)
+ .with(project.repository_storage, "#{project.disk_path}.wiki.git", anything, anything, container: project.wiki)
+ .and_return(repository)
+
worker.perform(project.id)
expect(project.reload.last_repository_check_failed).to eq(true)
@@ -59,7 +79,10 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
it 'skips wikis when disabled' do
project = create(:project, :wiki_disabled)
# Make sure the test would fail if the wiki repo was checked
- break_wiki(project)
+ repository = instance_double(::Gitlab::Git::Repository)
+ allow(::Gitlab::Git::Repository).to receive(:new)
+ .with(project.repository_storage, "#{project.disk_path}.wiki.git", anything, anything, container: project)
+ .and_return(repository)
subject.perform(project.id)
@@ -88,31 +111,4 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
def create_push_event(project)
project.events.create!(action: :pushed, author_id: create(:user).id)
end
-
- def break_wiki(project)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- break_repo(wiki_path(project))
- end
- end
-
- def wiki_path(project)
- project.wiki.repository.path_to_repo
- end
-
- def break_project(project)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- break_repo(project.repository.path_to_repo)
- end
- end
-
- def break_repo(repo)
- # Create or replace blob ffffffffffffffffffffffffffffffffffffffff with an empty file
- # This will make the repo invalid, _and_ 'git init' cannot fix it.
- path = File.join(repo, 'objects', 'ff')
- file = File.join(path, 'ffffffffffffffffffffffffffffffffffffff')
-
- FileUtils.mkdir_p(path)
- FileUtils.rm_f(file)
- FileUtils.touch(file)
- end
end
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index 82d975cb85a..1dc77fbf83f 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -46,36 +46,24 @@ RSpec.describe RepositoryImportWorker do
end
context 'when the import has failed' do
- it 'hide the credentials that were used in the import URL' do
- error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
+ it 'updates the error on Import/Export & hides credentials from import URL' do
+ import_url = 'https://user:pass@test.com/root/repoC.git/'
+ error = "#{import_url} not found"
import_state.update!(jid: '123')
- expect_next_instance_of(Projects::ImportService) do |instance|
- expect(instance).to receive(:execute).and_return({ status: :error, message: error })
- end
-
- expect do
- subject.perform(project.id)
- end.to raise_error(RuntimeError, error)
- expect(import_state.reload.jid).not_to be_nil
- expect(import_state.status).to eq('failed')
- end
-
- it 'updates the error on Import/Export' do
- error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
-
project.update!(import_type: 'gitlab_project')
- import_state.update!(jid: '123')
+
expect_next_instance_of(Projects::ImportService) do |instance|
- expect(instance).to receive(:execute).and_return({ status: :error, message: error })
+ expect(instance).to receive(:track_start_import).and_raise(StandardError, error)
end
- expect do
- subject.perform(project.id)
- end.to raise_error(RuntimeError, error)
+ expect { subject.perform(project.id) }.not_to raise_error
- expect(import_state.reload.last_error).not_to be_nil
+ import_state.reload
+ expect(import_state.jid).to eq('123')
expect(import_state.status).to eq('failed')
+ expect(import_state.last_error).to include("[FILTERED] not found")
+ expect(import_state.last_error).not_to include(import_url)
end
end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 10c22b736d2..5fa7c5d64db 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -42,14 +42,42 @@ RSpec.describe RunPipelineScheduleWorker do
end
end
- context 'when everything is ok' do
- let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService) }
+ describe "#run_pipeline_schedule" do
+ let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService, execute: service_response) }
+ let(:service_response) { instance_double(ServiceResponse, payload: pipeline, error?: false) }
- it 'calls the Service' do
+ before do
expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: pipeline_schedule.ref).and_return(create_pipeline_service)
- expect(create_pipeline_service).to receive(:execute!).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule)
- worker.perform(pipeline_schedule.id, user.id)
+ expect(create_pipeline_service).to receive(:execute).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule).and_return(service_response)
+ end
+
+ context "when pipeline is persisted" do
+ let(:pipeline) { instance_double(Ci::Pipeline, persisted?: true) }
+
+ it "returns the service response" do
+ expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
+ end
+
+ it "does not log errors" do
+ expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+ expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
+ end
+ end
+
+ context "when pipeline was not persisted" do
+ let(:service_response) { instance_double(ServiceResponse, error?: true, message: "Error", payload: pipeline) }
+ let(:pipeline) { instance_double(Ci::Pipeline, persisted?: false) }
+
+ it "logs a pipeline creation error" do
+ expect(worker)
+ .to receive(:log_extra_metadata_on_done)
+ .with(:pipeline_creation_error, service_response.message)
+ .and_call_original
+
+ expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response.message)
+ end
end
end
@@ -82,20 +110,5 @@ RSpec.describe RunPipelineScheduleWorker do
worker.perform(pipeline_schedule.id, user.id)
end
end
-
- context 'when pipeline cannot be created' do
- before do
- allow(Ci::CreatePipelineService).to receive(:new) { raise Ci::CreatePipelineService::CreateError }
- end
-
- it 'logging a pipeline error' do
- expect(worker)
- .to receive(:log_extra_metadata_on_done)
- .with(:pipeline_creation_error, an_instance_of(Ci::CreatePipelineService::CreateError))
- .and_call_original
-
- worker.perform(pipeline_schedule.id, user.id)
- end
- end
end
end
diff --git a/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb b/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb
index f42033fdb9c..7c585542e30 100644
--- a/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb
+++ b/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb
@@ -38,16 +38,4 @@ RSpec.describe Users::MigrateRecordsToGhostUserInBatchesWorker do
expect(issue.last_edited_by).to eq(User.ghost)
end
end
-
- context 'when user_destroy_with_limited_execution_time_worker is disabled' do
- before do
- stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
- end
-
- it 'does not execute the service' do
- expect(Users::MigrateRecordsToGhostUserInBatchesService).not_to receive(:new)
-
- worker.perform
- end
- end
end