gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2021-05-19 18:44:42 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-05-19 18:44:42 +0300
commit     4555e1b21c365ed8303ffb7a3325d773c9b8bf31 (patch)
tree       5423a1c7516cffe36384133ade12572cf709398d /spec/workers
parent     e570267f2f6b326480d284e0164a6464ba4081bc (diff)

Add latest changes from gitlab-org/gitlab@13-12-stable-ee (v13.12.0-rc42)

Diffstat (limited to 'spec/workers')
-rw-r--r--  spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb | 2
-rw-r--r--  spec/workers/build_finished_worker_spec.rb | 1
-rw-r--r--  spec/workers/build_hooks_worker_spec.rb | 18
-rw-r--r--  spec/workers/bulk_import_worker_spec.rb | 3
-rw-r--r--  spec/workers/bulk_imports/export_request_worker_spec.rb | 30
-rw-r--r--  spec/workers/bulk_imports/relation_export_worker_spec.rb | 47
-rw-r--r--  spec/workers/ci/create_cross_project_pipeline_worker_spec.rb | 1
-rw-r--r--  spec/workers/ci/delete_unit_tests_worker_spec.rb | 33
-rw-r--r--  spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb | 2
-rw-r--r--  spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb | 4
-rw-r--r--  spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb | 2
-rw-r--r--  spec/workers/ci/retry_pipeline_worker_spec.rb | 51
-rw-r--r--  spec/workers/cluster_update_app_worker_spec.rb | 11
-rw-r--r--  spec/workers/concerns/application_worker_spec.rb | 95
-rw-r--r--  spec/workers/concerns/gitlab/github_import/object_importer_spec.rb | 100
-rw-r--r--  spec/workers/concerns/limited_capacity/job_tracker_spec.rb | 48
-rw-r--r--  spec/workers/concerns/limited_capacity/worker_spec.rb | 137
-rw-r--r--  spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb | 496
-rw-r--r--  spec/workers/container_expiration_policy_worker_spec.rb | 12
-rw-r--r--  spec/workers/deployments/hooks_worker_spec.rb | 53
-rw-r--r--  spec/workers/email_receiver_worker_spec.rb | 21
-rw-r--r--  spec/workers/environments/canary_ingress/update_worker_spec.rb | 1
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb | 370
-rw-r--r--  spec/workers/expire_job_cache_worker_spec.rb | 1
-rw-r--r--  spec/workers/git_garbage_collect_worker_spec.rb | 26
-rw-r--r--  spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb | 1
-rw-r--r--  spec/workers/gitlab/github_import/import_issue_worker_spec.rb | 1
-rw-r--r--  spec/workers/gitlab/github_import/import_note_worker_spec.rb | 1
-rw-r--r--  spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb | 1
-rw-r--r--  spec/workers/gitlab/jira_import/import_issue_worker_spec.rb | 1
-rw-r--r--  spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb | 1
-rw-r--r--  spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb | 1
-rw-r--r--  spec/workers/import_issues_csv_worker_spec.rb | 1
-rw-r--r--  spec/workers/incident_management/add_severity_system_note_worker_spec.rb | 1
-rw-r--r--  spec/workers/incident_management/process_alert_worker_spec.rb | 1
-rw-r--r--  spec/workers/incident_management/process_alert_worker_v2_spec.rb | 96
-rw-r--r--  spec/workers/incident_management/process_prometheus_alert_worker_spec.rb | 1
-rw-r--r--  spec/workers/issuable/label_links_destroy_worker_spec.rb | 17
-rw-r--r--  spec/workers/issuables/clear_groups_issue_counter_worker_spec.rb | 42
-rw-r--r--  spec/workers/issue_placement_worker_spec.rb | 21
-rw-r--r--  spec/workers/issue_rebalancing_worker_spec.rb | 16
-rw-r--r--  spec/workers/jira_connect/sync_project_worker_spec.rb | 1
-rw-r--r--  spec/workers/merge_requests/create_pipeline_worker_spec.rb | 2
-rw-r--r--  spec/workers/merge_worker_spec.rb | 18
-rw-r--r--  spec/workers/namespaces/onboarding_issue_created_worker_spec.rb | 1
-rw-r--r--  spec/workers/packages/composer/cache_update_worker_spec.rb | 1
-rw-r--r--  spec/workers/packages/debian/process_changes_worker_spec.rb | 113
-rw-r--r--  spec/workers/packages/nuget/extraction_worker_spec.rb | 18
-rw-r--r--  spec/workers/packages/rubygems/extraction_worker_spec.rb | 36
-rw-r--r--  spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb | 1
-rw-r--r--  spec/workers/pipeline_process_worker_spec.rb | 5
-rw-r--r--  spec/workers/post_receive_spec.rb | 24
-rw-r--r--  spec/workers/process_commit_worker_spec.rb | 2
-rw-r--r--  spec/workers/project_service_worker_spec.rb | 2
-rw-r--r--  spec/workers/projects/git_garbage_collect_worker_spec.rb | 1
-rw-r--r--  spec/workers/projects/post_creation_worker_spec.rb | 2
-rw-r--r--  spec/workers/prometheus/create_default_alerts_worker_spec.rb | 1
-rw-r--r--  spec/workers/propagate_integration_group_worker_spec.rb | 1
-rw-r--r--  spec/workers/propagate_integration_project_worker_spec.rb | 1
-rw-r--r--  spec/workers/rebase_worker_spec.rb | 2
-rw-r--r--  spec/workers/run_pipeline_schedule_worker_spec.rb | 1
-rw-r--r--  spec/workers/service_desk_email_receiver_worker_spec.rb | 6
-rw-r--r--  spec/workers/update_external_pull_requests_worker_spec.rb | 1
-rw-r--r--  spec/workers/update_merge_requests_worker_spec.rb | 2
-rw-r--r--  spec/workers/users/deactivate_dormant_users_worker_spec.rb | 61
-rw-r--r--  spec/workers/users/update_open_issue_count_worker_spec.rb | 65
-rw-r--r--  spec/workers/web_hook_worker_spec.rb | 18
67 files changed, 1808 insertions(+), 346 deletions(-)
diff --git a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
index 0501fc3b8cf..832d5afd957 100644
--- a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker do
it_behaves_like 'worker with data consistency',
described_class,
- feature_flag: :periodic_project_authorization_update_via_replica,
+ feature_flag: :delayed_consistency_for_user_refresh_over_range_worker,
data_consistency: :delayed
describe '#perform' do
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 5aca5d68677..3434980341b 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -22,7 +22,6 @@ RSpec.describe BuildFinishedWorker do
end
expect(BuildHooksWorker).to receive(:perform_async)
- expect(ExpirePipelineCacheWorker).to receive(:perform_async)
expect(ChatNotificationWorker).not_to receive(:perform_async)
expect(ArchiveTraceWorker).to receive(:perform_in)
diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb
index 7e469958a84..8395d8fb0e7 100644
--- a/spec/workers/build_hooks_worker_spec.rb
+++ b/spec/workers/build_hooks_worker_spec.rb
@@ -23,6 +23,24 @@ RSpec.describe BuildHooksWorker do
end
end
+ describe '.perform_async' do
+ context 'when delayed_perform_for_build_hooks_worker feature flag is disabled' do
+ before do
+ stub_feature_flags(delayed_perform_for_build_hooks_worker: false)
+ end
+
+ it 'does not call perform_in' do
+ expect(described_class).not_to receive(:perform_in)
+ end
+ end
+
+ it 'delays scheduling a job by calling perform_in' do
+ expect(described_class).to receive(:perform_in).with(described_class::DATA_CONSISTENCY_DELAY.second, 123)
+
+ described_class.perform_async(123)
+ end
+ end
+
it_behaves_like 'worker with data consistency',
described_class,
feature_flag: :load_balancing_for_build_hooks_worker,
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index 5964ec45563..9119394f250 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe BulkImportWorker do
end
context 'when there are created entities to process' do
- it 'marks a batch of entities as started, enqueues BulkImports::EntityWorker and reenqueues' do
+ it 'marks a batch of entities as started, enqueues EntityWorker, ExportRequestWorker and reenqueues' do
stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
bulk_import = create(:bulk_import, :created)
@@ -78,6 +78,7 @@ RSpec.describe BulkImportWorker do
expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
expect(BulkImports::EntityWorker).to receive(:perform_async)
+ expect(BulkImports::ExportRequestWorker).to receive(:perform_async)
subject.perform(bulk_import.id)
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
new file mode 100644
index 00000000000..f7838279212
--- /dev/null
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::ExportRequestWorker do
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+ let_it_be(:entity) { create(:bulk_import_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
+
+ let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
+ let(:job_args) { [entity.id] }
+
+ describe '#perform' do
+ before do
+ allow(Gitlab::HTTP).to receive(:post).and_return(response_double)
+ end
+
+ include_examples 'an idempotent worker' do
+ it 'requests relations export' do
+ expected = "/groups/foo%2Fbar/export_relations"
+
+ expect_next_instance_of(BulkImports::Clients::Http) do |client|
+ expect(client).to receive(:post).with(expected).twice
+ end
+
+ perform_multiple(job_args)
+ end
+ end
+ end
+end
diff --git a/spec/workers/bulk_imports/relation_export_worker_spec.rb b/spec/workers/bulk_imports/relation_export_worker_spec.rb
new file mode 100644
index 00000000000..63f1992d186
--- /dev/null
+++ b/spec/workers/bulk_imports/relation_export_worker_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::RelationExportWorker do
+ let_it_be(:jid) { 'jid' }
+ let_it_be(:relation) { 'labels' }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:job_args) { [user.id, group.id, group.class.name, relation] }
+
+ describe '#perform' do
+ include_examples 'an idempotent worker' do
+ context 'when export record does not exist' do
+ let(:another_group) { create(:group) }
+ let(:job_args) { [user.id, another_group.id, another_group.class.name, relation] }
+
+ it 'creates export record' do
+ another_group.add_owner(user)
+
+ expect { perform_multiple(job_args) }
+ .to change { another_group.bulk_import_exports.count }
+ .from(0)
+ .to(1)
+ end
+ end
+
+ it 'executes RelationExportService' do
+ group.add_owner(user)
+
+ service = instance_double(BulkImports::RelationExportService)
+
+ expect(BulkImports::RelationExportService)
+ .to receive(:new)
+ .with(user, group, relation, anything)
+ .twice
+ .and_return(service)
+ expect(service)
+ .to receive(:execute)
+ .twice
+
+ perform_multiple(job_args)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb b/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb
index 116e6878281..372b0de1b54 100644
--- a/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb
+++ b/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Ci::CreateCrossProjectPipelineWorker do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
let(:bridge) { create(:ci_bridge, user: user, pipeline: pipeline) }
let(:service) { double('pipeline creation service') }
diff --git a/spec/workers/ci/delete_unit_tests_worker_spec.rb b/spec/workers/ci/delete_unit_tests_worker_spec.rb
new file mode 100644
index 00000000000..ff2575b19c1
--- /dev/null
+++ b/spec/workers/ci/delete_unit_tests_worker_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DeleteUnitTestsWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'executes a service' do
+ expect_next_instance_of(Ci::DeleteUnitTestsService) do |instance|
+ expect(instance).to receive(:execute)
+ end
+
+ worker.perform
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let!(:unit_test_1) { create(:ci_unit_test) }
+ let!(:unit_test_2) { create(:ci_unit_test) }
+ let!(:unit_test_1_recent_failure) { create(:ci_unit_test_failure, unit_test: unit_test_1) }
+ let!(:unit_test_2_old_failure) { create(:ci_unit_test_failure, unit_test: unit_test_2, failed_at: 15.days.ago) }
+
+ it 'only deletes old unit tests and their failures' do
+ subject
+
+ expect(unit_test_1.reload).to be_persisted
+ expect(unit_test_1_recent_failure.reload).to be_persisted
+ expect(Ci::UnitTest.find_by(id: unit_test_2.id)).to be_nil
+ expect(Ci::UnitTestFailure.find_by(id: unit_test_2_old_failure.id)).to be_nil
+ end
+ end
+end
diff --git a/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb b/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
index 4690c73d121..e5de0ba0143 100644
--- a/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
+++ b/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Ci::MergeRequests::AddTodoWhenBuildFailsWorker do
include_examples 'an idempotent worker' do
it 'executes todo service' do
service = double
- expect(::MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).with(project, nil).and_return(service).twice
+ expect(::MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).with(project: project).and_return(service).twice
expect(service).to receive(:execute).with(job).twice
perform_twice
diff --git a/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
index be351032b58..5096691270a 100644
--- a/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
@@ -21,8 +21,8 @@ RSpec.describe ::Ci::PipelineArtifacts::CreateQualityReportWorker do
it_behaves_like 'an idempotent worker' do
let(:job_args) { pipeline_id }
- it 'creates a pipeline artifact' do
- expect { subject }.to change { pipeline.pipeline_artifacts.count }.by(1)
+ it 'does not create another pipeline artifact if already has one' do
+ expect { subject }.not_to change { pipeline.pipeline_artifacts.count }
end
end
end
diff --git a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
index ad9c08d02cb..274f848ad88 100644
--- a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
describe '#perform' do
let_it_be(:pipeline_artifact) do
- create(:ci_pipeline_artifact, :with_coverage_report, expire_at: 1.week.ago)
+ create(:ci_pipeline_artifact, :with_coverage_report, :unlocked, expire_at: 1.week.ago)
end
it 'executes a service' do
diff --git a/spec/workers/ci/retry_pipeline_worker_spec.rb b/spec/workers/ci/retry_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..c7600a24280
--- /dev/null
+++ b/spec/workers/ci/retry_pipeline_worker_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RetryPipelineWorker do
+ describe '#perform' do
+ subject(:perform) { described_class.new.perform(pipeline_id, user_id) }
+
+ let(:pipeline) { create(:ci_pipeline) }
+
+ context 'when pipeline exists' do
+ let(:pipeline_id) { pipeline.id }
+
+ context 'when user exists' do
+ let(:user) { create(:user) }
+ let(:user_id) { user.id }
+
+ before do
+ pipeline.project.add_maintainer(user)
+ end
+
+ it 'retries the pipeline' do
+ expect(::Ci::Pipeline).to receive(:find_by_id).with(pipeline.id).and_return(pipeline)
+ expect(pipeline).to receive(:retry_failed).with(having_attributes(id: user_id))
+
+ perform
+ end
+ end
+
+ context 'when user does not exist' do
+ let(:user_id) { 1234 }
+
+ it 'does not retry the pipeline' do
+ expect(::Ci::Pipeline).to receive(:find_by_id).with(pipeline_id).and_return(pipeline)
+ expect(pipeline).not_to receive(:retry_failed).with(having_attributes(id: user_id))
+
+ perform
+ end
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ let(:pipeline_id) { 1234 }
+ let(:user_id) { 1234 }
+
+ it 'returns nil' do
+ expect(perform).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/workers/cluster_update_app_worker_spec.rb b/spec/workers/cluster_update_app_worker_spec.rb
index 8b8c1c82099..8f61ee17162 100644
--- a/spec/workers/cluster_update_app_worker_spec.rb
+++ b/spec/workers/cluster_update_app_worker_spec.rb
@@ -46,8 +46,19 @@ RSpec.describe ClusterUpdateAppWorker do
subject.perform(application.name, application.id, project.id, Time.current)
end
+ context 'application is externally installed' do
+ it 'does not execute PrometheusUpdateService' do
+ application = create(:clusters_applications_prometheus, :externally_installed)
+
+ expect(prometheus_update_service).not_to receive(:execute)
+
+ subject.perform(application.name, application.id, project.id, Time.current)
+ end
+ end
+
context 'with exclusive lease' do
let_it_be(:user) { create(:user) }
+
let(:application) { create(:clusters_applications_prometheus, :installed) }
let(:lease_key) { "#{described_class.name.underscore}-#{application.id}" }
diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb
index 07e11f014c3..5c1a1d3ae8f 100644
--- a/spec/workers/concerns/application_worker_spec.rb
+++ b/spec/workers/concerns/application_worker_spec.rb
@@ -3,7 +3,14 @@
require 'spec_helper'
RSpec.describe ApplicationWorker do
- let_it_be(:worker) do
+ # We depend on the lazy-load characteristic of rspec. If the worker is loaded
+ # before setting up, it's likely to go wrong. Consider this gotcha:
+ # before do
+ # allow(router).to receive(:route).with(worker).and_return('queue_1')
+ # end
+ # As worker is triggered, it includes ApplicationWorker, and the router is
+ # called before it is stubbed. That makes the stubbing useless.
+ let(:worker) do
Class.new do
def self.name
'Gitlab::Foo::Bar::DummyWorker'
@@ -14,10 +21,77 @@ RSpec.describe ApplicationWorker do
end
let(:instance) { worker.new }
+ let(:router) { double(:router) }
- describe 'Sidekiq options' do
- it 'sets the queue name based on the class name' do
+ before do
+ allow(::Gitlab::SidekiqConfig::WorkerRouter).to receive(:global).and_return(router)
+ allow(router).to receive(:route).and_return('foo_bar_dummy')
+ end
+
+ describe 'Sidekiq attributes' do
+ it 'sets the queue name based on the output of the router' do
expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
+ expect(router).to have_received(:route).with(worker).at_least(:once)
+ end
+
+ context 'when a worker attribute is updated' do
+ before do
+ counter = 0
+ allow(router).to receive(:route) do
+ counter += 1
+ "queue_#{counter}"
+ end
+ end
+
+ it 'updates the queue name afterward' do
+ expect(worker.sidekiq_options['queue']).to eq('queue_1')
+
+ worker.feature_category :pages
+ expect(worker.sidekiq_options['queue']).to eq('queue_2')
+
+ worker.feature_category_not_owned!
+ expect(worker.sidekiq_options['queue']).to eq('queue_3')
+
+ worker.urgency :high
+ expect(worker.sidekiq_options['queue']).to eq('queue_4')
+
+ worker.worker_has_external_dependencies!
+ expect(worker.sidekiq_options['queue']).to eq('queue_5')
+
+ worker.worker_resource_boundary :cpu
+ expect(worker.sidekiq_options['queue']).to eq('queue_6')
+
+ worker.idempotent!
+ expect(worker.sidekiq_options['queue']).to eq('queue_7')
+
+ worker.weight 3
+ expect(worker.sidekiq_options['queue']).to eq('queue_8')
+
+ worker.tags :hello
+ expect(worker.sidekiq_options['queue']).to eq('queue_9')
+
+ worker.big_payload!
+ expect(worker.sidekiq_options['queue']).to eq('queue_10')
+
+ expect(router).to have_received(:route).with(worker).at_least(10).times
+ end
+ end
+
+ context 'when the worker is inherited' do
+ let(:sub_worker) { Class.new(worker) }
+
+ before do
+ allow(router).to receive(:route).and_return('queue_1')
+ worker # Force loading worker 1 to update its queue
+
+ allow(router).to receive(:route).and_return('queue_2')
+ end
+
+ it 'sets the queue name for the inherited worker' do
+ expect(sub_worker.sidekiq_options['queue']).to eq('queue_2')
+
+ expect(router).to have_received(:route).with(sub_worker).at_least(:once)
+ end
end
end
@@ -74,11 +148,24 @@ RSpec.describe ApplicationWorker do
end
describe '.queue_namespace' do
- it 'sets the queue name based on the class name' do
+ before do
+ allow(router).to receive(:route).and_return('foo_bar_dummy', 'some_namespace:foo_bar_dummy')
+ end
+
+ it 'updates the queue name from the router again' do
+ expect(worker.queue).to eq('foo_bar_dummy')
+
worker.queue_namespace :some_namespace
expect(worker.queue).to eq('some_namespace:foo_bar_dummy')
end
+
+ it 'updates the queue_namespace options of the worker' do
+ worker.queue_namespace :some_namespace
+
+ expect(worker.queue_namespace).to eql('some_namespace')
+ expect(worker.sidekiq_options['queue_namespace']).to be(:some_namespace)
+ end
end
describe '.queue' do
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 75f2c7922de..85e1721461f 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -18,39 +18,49 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
def counter_description
'This is a counter'
end
+
+ def representation_class
+ MockRepresantation
+ end
end.new
end
+ before do
+ stub_const('MockRepresantation', Class.new do
+ include Gitlab::GithubImport::Representation::ToHash
+ include Gitlab::GithubImport::Representation::ExposeAttribute
+
+ def self.from_json_hash(raw_hash)
+ new(Gitlab::GithubImport::Representation.symbolize_hash(raw_hash))
+ end
+
+ attr_reader :attributes
+
+ def initialize(attributes)
+ @attributes = attributes
+ end
+ end)
+ end
+
describe '#import' do
- let(:representation_class) { double(:representation_class) }
let(:importer_class) { double(:importer_class, name: 'klass_name') }
let(:importer_instance) { double(:importer_instance) }
- let(:representation) { double(:representation) }
let(:project) { double(:project, full_path: 'foo/bar', id: 1) }
let(:client) { double(:client) }
before do
expect(worker)
- .to receive(:representation_class)
- .and_return(representation_class)
-
- expect(worker)
.to receive(:importer_class)
.at_least(:once)
.and_return(importer_class)
+ end
- expect(representation_class)
- .to receive(:from_json_hash)
- .with(an_instance_of(Hash))
- .and_return(representation)
-
+ it 'imports the object' do
expect(importer_class)
.to receive(:new)
- .with(representation, project, client)
+ .with(instance_of(MockRepresantation), project, client)
.and_return(importer_instance)
- end
- it 'imports the object' do
expect(importer_instance)
.to receive(:execute)
@@ -62,6 +72,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(logger)
.to receive(:info)
.with(
+ github_id: 1,
message: 'starting importer',
import_source: :github,
project_id: 1,
@@ -70,6 +81,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(logger)
.to receive(:info)
.with(
+ github_id: 1,
message: 'importer finished',
import_source: :github,
project_id: 1,
@@ -77,10 +89,15 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
)
end
- worker.import(project, client, { 'number' => 10 })
+ worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
end
it 'logs error when the import fails' do
+ expect(importer_class)
+ .to receive(:new)
+ .with(instance_of(MockRepresantation), project, client)
+ .and_return(importer_instance)
+
exception = StandardError.new('some error')
expect(importer_instance)
.to receive(:execute)
@@ -90,6 +107,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(logger)
.to receive(:info)
.with(
+ github_id: 1,
message: 'starting importer',
import_source: :github,
project_id: project.id,
@@ -98,20 +116,64 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(logger)
.to receive(:error)
.with(
+ github_id: 1,
message: 'importer failed',
import_source: :github,
project_id: project.id,
importer: 'klass_name',
- 'error.message': 'some error'
+ 'error.message': 'some error',
+ 'github.data': {
+ 'github_id' => 1,
+ 'number' => 10
+ }
)
end
expect(Gitlab::ErrorTracking)
.to receive(:track_and_raise_exception)
- .with(exception, import_source: :github, project_id: 1, importer: 'klass_name')
- .and_call_original
+ .with(
+ exception,
+ import_source: :github,
+ github_id: 1,
+ project_id: 1,
+ importer: 'klass_name'
+ ).and_call_original
+
+ expect { worker.import(project, client, { 'number' => 10, 'github_id' => 1 }) }
+ .to raise_error(exception)
+ end
+
+ it 'logs error when representation does not have a github_id' do
+ expect(importer_class).not_to receive(:new)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ github_id: nil,
+ message: 'importer failed',
+ import_source: :github,
+ project_id: project.id,
+ importer: 'klass_name',
+ 'error.message': 'key not found: :github_id',
+ 'github.data': {
+ 'number' => 10
+ }
+ )
+ end
- expect { worker.import(project, client, { 'number' => 10 }) }.to raise_error(exception)
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_exception)
+ .with(
+ an_instance_of(KeyError),
+ import_source: :github,
+ github_id: nil,
+ project_id: 1,
+ importer: 'klass_name'
+ ).and_call_original
+
+ expect { worker.import(project, client, { 'number' => 10 }) }
+ .to raise_error(KeyError, 'key not found: :github_id')
end
end
diff --git a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
index 2c79f347903..f141a1ad7ad 100644
--- a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
@@ -7,30 +7,30 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
described_class.new('namespace')
end
+ let(:max_jids) { 10 }
+
describe '#register' do
it 'adds jid to the set' do
- job_tracker.register('a-job-id')
-
+ expect(job_tracker.register('a-job-id', max_jids)).to be true
expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end
- it 'updates the counter' do
- expect { job_tracker.register('a-job-id') }
- .to change { job_tracker.count }
- .from(0)
- .to(1)
- end
-
- it 'does it in only one Redis call' do
- expect(job_tracker).to receive(:with_redis).once.and_call_original
+ it 'returns false if the jid was not added' do
+ max_jids = 2
+ %w[jid1 jid2].each do |jid|
+ expect(job_tracker.register(jid, max_jids)).to be true
+ end
- job_tracker.register('a-job-id')
+ expect(job_tracker.register('jid3', max_jids)).to be false
+ expect(job_tracker.running_jids).to contain_exactly(*%w[jid1 jid2])
end
end
describe '#remove' do
before do
- job_tracker.register(%w[a-job-id other-job-id])
+ %w[a-job-id other-job-id].each do |jid|
+ job_tracker.register(jid, max_jids)
+ end
end
it 'removes jid from the set' do
@@ -38,24 +38,11 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end
-
- it 'updates the counter' do
- expect { job_tracker.remove('other-job-id') }
- .to change { job_tracker.count }
- .from(2)
- .to(1)
- end
-
- it 'does it in only one Redis call' do
- expect(job_tracker).to receive(:with_redis).once.and_call_original
-
- job_tracker.remove('other-job-id')
- end
end
describe '#clean_up' do
before do
- job_tracker.register('a-job-id')
+ job_tracker.register('a-job-id', max_jids)
end
context 'with running jobs' do
@@ -83,13 +70,6 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
.to change { job_tracker.running_jids.include?('a-job-id') }
end
- it 'updates the counter' do
- expect { job_tracker.clean_up }
- .to change { job_tracker.count }
- .from(1)
- .to(0)
- end
-
it 'gets the job ids, removes them, and updates the counter with only two Redis calls' do
expect(job_tracker).to receive(:with_redis).twice.and_call_original
diff --git a/spec/workers/concerns/limited_capacity/worker_spec.rb b/spec/workers/concerns/limited_capacity/worker_spec.rb
index 2c33c8666ec..790b5c3544d 100644
--- a/spec/workers/concerns/limited_capacity/worker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/worker_spec.rb
@@ -44,40 +44,22 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
describe '.perform_with_capacity' do
subject(:perform_with_capacity) { worker_class.perform_with_capacity(:arg) }
+ let(:max_running_jobs) { 3 }
+
before do
expect_next_instance_of(worker_class) do |instance|
expect(instance).to receive(:remove_failed_jobs)
- expect(instance).to receive(:report_prometheus_metrics)
-
- allow(instance).to receive(:remaining_work_count).and_return(remaining_work_count)
- allow(instance).to receive(:remaining_capacity).and_return(remaining_capacity)
- end
- end
-
- context 'when capacity is larger than work' do
- let(:remaining_work_count) { 2 }
- let(:remaining_capacity) { 3 }
- it 'enqueues jobs for remaining work' do
- expect(worker_class)
- .to receive(:bulk_perform_async)
- .with([[:arg], [:arg]])
-
- perform_with_capacity
+ allow(instance).to receive(:max_running_jobs).and_return(max_running_jobs)
end
end
- context 'when capacity is lower than work' do
- let(:remaining_work_count) { 5 }
- let(:remaining_capacity) { 3 }
-
- it 'enqueues jobs for remaining work' do
- expect(worker_class)
- .to receive(:bulk_perform_async)
- .with([[:arg], [:arg], [:arg]])
+ it 'enqueues jobs' do
+ expect(worker_class)
+ .to receive(:bulk_perform_async)
+ .with([[:arg], [:arg], [:arg]])
- perform_with_capacity
- end
+ perform_with_capacity
end
end
@@ -104,34 +86,27 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform
end
- it 'registers itself in the running set' do
+ it 'reports prometheus metrics' do
allow(worker).to receive(:perform_work)
- expect(job_tracker).to receive(:register).with('my-jid')
+ expect(worker).to receive(:report_prometheus_metrics).once.and_call_original
+ expect(worker).to receive(:report_running_jobs_metrics).twice.and_call_original
perform
end
- it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove).with('my-jid')
-
+ it 'updates the running set' do
+ expect(job_tracker.running_jids).to be_empty
allow(worker).to receive(:perform_work)
perform
- end
- it 'reports prometheus metrics' do
- allow(worker).to receive(:perform_work)
- expect(worker).to receive(:report_prometheus_metrics).once.and_call_original
- expect(worker).to receive(:report_running_jobs_metrics).twice.and_call_original
-
- perform
+ expect(job_tracker.running_jids).to be_empty
end
end
context 'with capacity and without work' do
before do
allow(worker).to receive(:max_running_jobs).and_return(10)
- allow(worker).to receive(:running_jobs_count).and_return(0)
allow(worker).to receive(:remaining_work_count).and_return(0)
allow(worker).to receive(:perform_work)
end
@@ -146,7 +121,7 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
context 'without capacity' do
before do
allow(worker).to receive(:max_running_jobs).and_return(10)
- allow(worker).to receive(:running_jobs_count).and_return(15)
+ allow(job_tracker).to receive(:register).and_return(false)
allow(worker).to receive(:remaining_work_count).and_return(10)
end
@@ -161,27 +136,14 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform
end
-
- it 'does not register in the running set' do
- expect(job_tracker).not_to receive(:register)
-
- perform
- end
-
- it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove).with('my-jid')
-
- perform
- end
-
- it 'reports prometheus metrics' do
- expect(worker).to receive(:report_prometheus_metrics)
-
- perform
- end
end
context 'when perform_work fails' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(job_tracker).to receive(:register).and_return(true)
+ end
+
it 'does not re-enqueue itself' do
expect(worker).not_to receive(:re_enqueue)
@@ -189,7 +151,7 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
end
it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove)
+ expect(job_tracker).to receive(:remove).with('my-jid')
expect { perform }.to raise_error(NotImplementedError)
end
@@ -202,65 +164,14 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
end
end
- describe '#remaining_capacity' do
- subject(:remaining_capacity) { worker.remaining_capacity }
-
- before do
- expect(worker).to receive(:max_running_jobs).and_return(max_capacity)
- end
-
- context 'when changing the capacity to a lower value' do
- let(:max_capacity) { -1 }
-
- it { expect(remaining_capacity).to eq(0) }
- end
-
- context 'when registering new jobs' do
- let(:max_capacity) { 2 }
-
- before do
- job_tracker.register('a-job-id')
- end
-
- it { expect(remaining_capacity).to eq(1) }
- end
-
- context 'with jobs in the queue' do
- let(:max_capacity) { 2 }
-
- before do
- expect(worker_class).to receive(:queue_size).and_return(1)
- end
-
- it { expect(remaining_capacity).to eq(1) }
- end
-
- context 'with both running jobs and queued jobs' do
- let(:max_capacity) { 10 }
-
- before do
- expect(worker_class).to receive(:queue_size).and_return(5)
- expect(worker).to receive(:running_jobs_count).and_return(3)
- end
-
- it { expect(remaining_capacity).to eq(2) }
- end
- end
-
describe '#remove_failed_jobs' do
subject(:remove_failed_jobs) { worker.remove_failed_jobs }
- before do
- job_tracker.register('a-job-id')
- allow(worker).to receive(:max_running_jobs).and_return(2)
+ it 'removes failed jobs' do
+ job_tracker.register('a-job-id', 10)
expect(job_tracker).to receive(:clean_up).and_call_original
- end
-
- context 'with failed jobs' do
- it 'update the available capacity' do
- expect { remove_failed_jobs }.to change { worker.remaining_capacity }.by(1)
- end
+ expect { remove_failed_jobs }.to change { job_tracker.running_jids.size }.by(-1)
end
end
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index eb4faaed769..04f568515ed 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
using RSpec::Parameterized::TableSyntax
- let_it_be(:repository, reload: true) { create(:container_repository, :cleanup_scheduled) }
- let_it_be(:project) { repository.project }
- let_it_be(:policy) { project.container_expiration_policy }
- let_it_be(:other_repository) { create(:container_repository) }
+ let_it_be(:repository, refind: true) { create(:container_repository, :cleanup_scheduled, expiration_policy_started_at: 1.month.ago) }
+ let_it_be(:other_repository, refind: true) { create(:container_repository, expiration_policy_started_at: 15.days.ago) }
+ let(:project) { repository.project }
+ let(:policy) { project.container_expiration_policy }
let(:worker) { described_class.new }
describe '#perform_work' do
@@ -19,7 +19,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
policy.update_column(:enabled, true)
end
- RSpec.shared_examples 'handling all repository conditions' do
+ shared_examples 'handling all repository conditions' do
it 'sends the repository for cleaning' do
service_response = cleanup_service_response(repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
@@ -72,11 +72,21 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
end
+ context 'with an erroneous cleanup' do
+ it 'logs an error' do
+ service_response = ServiceResponse.error(message: 'cleanup in an error')
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :error)
+
+ subject
+ end
+ end
+
context 'with policy running shortly' do
before do
- repository.project
- .container_expiration_policy
- .update_column(:next_run_at, 1.minute.from_now)
+ repository.cleanup_unfinished! if loopless_enabled?
+ policy.update_column(:next_run_at, 1.minute.from_now)
end
it 'skips the repository' do
@@ -84,118 +94,385 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:project_id, repository.project.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :skipped)
-
expect { subject }.to change { ContainerRepository.waiting_for_cleanup.count }.from(1).to(0)
+
expect(repository.reload.cleanup_unscheduled?).to be_truthy
end
end
context 'with disabled policy' do
before do
- repository.project
- .container_expiration_policy
- .disable!
+ policy.disable!
end
it 'skips the repository' do
expect(ContainerExpirationPolicies::CleanupService).not_to receive(:new)
- expect { subject }.to change { ContainerRepository.waiting_for_cleanup.count }.from(1).to(0)
- expect(repository.reload.cleanup_unscheduled?).to be_truthy
+ if loopless_enabled?
+ expect { subject }
+ .to not_change { ContainerRepository.waiting_for_cleanup.count }
+ .and not_change { repository.reload.expiration_policy_cleanup_status }
+ else
+ expect { subject }.to change { ContainerRepository.waiting_for_cleanup.count }.from(1).to(0)
+ expect(repository.reload.cleanup_unscheduled?).to be_truthy
+ end
end
end
end
- context 'with repository in cleanup scheduled state' do
- it_behaves_like 'handling all repository conditions'
- end
-
- context 'with repository in cleanup unfinished state' do
+ context 'with loopless enabled' do
before do
- repository.cleanup_unfinished!
+ stub_feature_flags(container_registry_expiration_policies_loopless: true)
end
- it_behaves_like 'handling all repository conditions'
- end
+ context 'with repository in cleanup unscheduled state' do
+ before do
+ policy.update_column(:next_run_at, 5.minutes.ago)
+ end
- context 'with another repository in cleanup unfinished state' do
- let_it_be(:another_repository) { create(:container_repository, :cleanup_unfinished) }
+ it_behaves_like 'handling all repository conditions'
+ end
- it 'process the cleanup scheduled repository first' do
- service_response = cleanup_service_response(repository: repository)
- expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: service_response))
- expect_log_extra_metadata(service_response: service_response)
+ context 'with repository in cleanup unfinished state' do
+ before do
+ repository.cleanup_unfinished!
+ end
- subject
+ it_behaves_like 'handling all repository conditions'
end
- end
- context 'with multiple repositories in cleanup unfinished state' do
- let_it_be(:repository2) { create(:container_repository, :cleanup_unfinished, expiration_policy_started_at: 20.minutes.ago) }
- let_it_be(:repository3) { create(:container_repository, :cleanup_unfinished, expiration_policy_started_at: 10.minutes.ago) }
+ context 'container repository selection' do
+ where(:repository_cleanup_status, :repository_policy_status, :other_repository_cleanup_status, :other_repository_policy_status, :expected_selected_repository) do
+ :unscheduled | :disabled | :unscheduled | :disabled | :none
+ :unscheduled | :disabled | :unscheduled | :runnable | :other_repository
+ :unscheduled | :disabled | :unscheduled | :not_runnable | :none
- before do
- repository.update!(expiration_policy_cleanup_status: :cleanup_unfinished, expiration_policy_started_at: 30.minutes.ago)
+ :unscheduled | :disabled | :scheduled | :disabled | :none
+ :unscheduled | :disabled | :scheduled | :runnable | :other_repository
+ :unscheduled | :disabled | :scheduled | :not_runnable | :none
+
+ :unscheduled | :disabled | :unfinished | :disabled | :none
+ :unscheduled | :disabled | :unfinished | :runnable | :other_repository
+ :unscheduled | :disabled | :unfinished | :not_runnable | :other_repository
+
+ :unscheduled | :disabled | :ongoing | :disabled | :none
+ :unscheduled | :disabled | :ongoing | :runnable | :none
+ :unscheduled | :disabled | :ongoing | :not_runnable | :none
+
+ :unscheduled | :runnable | :unscheduled | :disabled | :repository
+ :unscheduled | :runnable | :unscheduled | :runnable | :repository
+ :unscheduled | :runnable | :unscheduled | :not_runnable | :repository
+
+ :unscheduled | :runnable | :scheduled | :disabled | :repository
+ :unscheduled | :runnable | :scheduled | :runnable | :repository
+ :unscheduled | :runnable | :scheduled | :not_runnable | :repository
+
+ :unscheduled | :runnable | :unfinished | :disabled | :repository
+ :unscheduled | :runnable | :unfinished | :runnable | :repository
+ :unscheduled | :runnable | :unfinished | :not_runnable | :repository
+
+ :unscheduled | :runnable | :ongoing | :disabled | :repository
+ :unscheduled | :runnable | :ongoing | :runnable | :repository
+ :unscheduled | :runnable | :ongoing | :not_runnable | :repository
+
+ :scheduled | :disabled | :unscheduled | :disabled | :none
+ :scheduled | :disabled | :unscheduled | :runnable | :other_repository
+ :scheduled | :disabled | :unscheduled | :not_runnable | :none
+
+ :scheduled | :disabled | :scheduled | :disabled | :none
+ :scheduled | :disabled | :scheduled | :runnable | :other_repository
+ :scheduled | :disabled | :scheduled | :not_runnable | :none
+
+ :scheduled | :disabled | :unfinished | :disabled | :none
+ :scheduled | :disabled | :unfinished | :runnable | :other_repository
+ :scheduled | :disabled | :unfinished | :not_runnable | :other_repository
+
+ :scheduled | :disabled | :ongoing | :disabled | :none
+ :scheduled | :disabled | :ongoing | :runnable | :none
+ :scheduled | :disabled | :ongoing | :not_runnable | :none
+
+ :scheduled | :runnable | :unscheduled | :disabled | :repository
+ :scheduled | :runnable | :unscheduled | :runnable | :other_repository
+ :scheduled | :runnable | :unscheduled | :not_runnable | :repository
+
+ :scheduled | :runnable | :scheduled | :disabled | :repository
+ :scheduled | :runnable | :scheduled | :runnable | :repository
+ :scheduled | :runnable | :scheduled | :not_runnable | :repository
+
+ :scheduled | :runnable | :unfinished | :disabled | :repository
+ :scheduled | :runnable | :unfinished | :runnable | :repository
+ :scheduled | :runnable | :unfinished | :not_runnable | :repository
+
+ :scheduled | :runnable | :ongoing | :disabled | :repository
+ :scheduled | :runnable | :ongoing | :runnable | :repository
+ :scheduled | :runnable | :ongoing | :not_runnable | :repository
+
+ :scheduled | :not_runnable | :unscheduled | :disabled | :none
+ :scheduled | :not_runnable | :unscheduled | :runnable | :other_repository
+ :scheduled | :not_runnable | :unscheduled | :not_runnable | :none
+
+ :scheduled | :not_runnable | :scheduled | :disabled | :none
+ :scheduled | :not_runnable | :scheduled | :runnable | :other_repository
+ :scheduled | :not_runnable | :scheduled | :not_runnable | :none
+
+ :scheduled | :not_runnable | :unfinished | :disabled | :none
+ :scheduled | :not_runnable | :unfinished | :runnable | :other_repository
+ :scheduled | :not_runnable | :unfinished | :not_runnable | :other_repository
+
+ :scheduled | :not_runnable | :ongoing | :disabled | :none
+ :scheduled | :not_runnable | :ongoing | :runnable | :none
+ :scheduled | :not_runnable | :ongoing | :not_runnable | :none
+
+ :unfinished | :disabled | :unscheduled | :disabled | :none
+ :unfinished | :disabled | :unscheduled | :runnable | :other_repository
+ :unfinished | :disabled | :unscheduled | :not_runnable | :none
+
+ :unfinished | :disabled | :scheduled | :disabled | :none
+ :unfinished | :disabled | :scheduled | :runnable | :other_repository
+ :unfinished | :disabled | :scheduled | :not_runnable | :none
+
+ :unfinished | :disabled | :unfinished | :disabled | :none
+ :unfinished | :disabled | :unfinished | :runnable | :other_repository
+ :unfinished | :disabled | :unfinished | :not_runnable | :other_repository
+
+ :unfinished | :disabled | :ongoing | :disabled | :none
+ :unfinished | :disabled | :ongoing | :runnable | :none
+ :unfinished | :disabled | :ongoing | :not_runnable | :none
+
+ :unfinished | :runnable | :unscheduled | :disabled | :repository
+ :unfinished | :runnable | :unscheduled | :runnable | :other_repository
+ :unfinished | :runnable | :unscheduled | :not_runnable | :repository
+
+ :unfinished | :runnable | :scheduled | :disabled | :repository
+ :unfinished | :runnable | :scheduled | :runnable | :other_repository
+ :unfinished | :runnable | :scheduled | :not_runnable | :repository
+
+ :unfinished | :runnable | :unfinished | :disabled | :repository
+ :unfinished | :runnable | :unfinished | :runnable | :repository
+ :unfinished | :runnable | :unfinished | :not_runnable | :repository
+
+ :unfinished | :runnable | :ongoing | :disabled | :repository
+ :unfinished | :runnable | :ongoing | :runnable | :repository
+ :unfinished | :runnable | :ongoing | :not_runnable | :repository
+
+ :unfinished | :not_runnable | :unscheduled | :disabled | :repository
+ :unfinished | :not_runnable | :unscheduled | :runnable | :other_repository
+ :unfinished | :not_runnable | :unscheduled | :not_runnable | :repository
+
+ :unfinished | :not_runnable | :scheduled | :disabled | :repository
+ :unfinished | :not_runnable | :scheduled | :runnable | :other_repository
+ :unfinished | :not_runnable | :scheduled | :not_runnable | :repository
+
+ :unfinished | :not_runnable | :unfinished | :disabled | :repository
+ :unfinished | :not_runnable | :unfinished | :runnable | :repository
+ :unfinished | :not_runnable | :unfinished | :not_runnable | :repository
+
+ :unfinished | :not_runnable | :ongoing | :disabled | :repository
+ :unfinished | :not_runnable | :ongoing | :runnable | :repository
+ :unfinished | :not_runnable | :ongoing | :not_runnable | :repository
+
+ :ongoing | :disabled | :unscheduled | :disabled | :none
+ :ongoing | :disabled | :unscheduled | :runnable | :other_repository
+ :ongoing | :disabled | :unscheduled | :not_runnable | :none
+
+ :ongoing | :disabled | :scheduled | :disabled | :none
+ :ongoing | :disabled | :scheduled | :runnable | :other_repository
+ :ongoing | :disabled | :scheduled | :not_runnable | :none
+
+ :ongoing | :disabled | :unfinished | :disabled | :none
+ :ongoing | :disabled | :unfinished | :runnable | :other_repository
+ :ongoing | :disabled | :unfinished | :not_runnable | :other_repository
+
+ :ongoing | :disabled | :ongoing | :disabled | :none
+ :ongoing | :disabled | :ongoing | :runnable | :none
+ :ongoing | :disabled | :ongoing | :not_runnable | :none
+
+ :ongoing | :runnable | :unscheduled | :disabled | :none
+ :ongoing | :runnable | :unscheduled | :runnable | :other_repository
+ :ongoing | :runnable | :unscheduled | :not_runnable | :none
+
+ :ongoing | :runnable | :scheduled | :disabled | :none
+ :ongoing | :runnable | :scheduled | :runnable | :other_repository
+ :ongoing | :runnable | :scheduled | :not_runnable | :none
+
+ :ongoing | :runnable | :unfinished | :disabled | :none
+ :ongoing | :runnable | :unfinished | :runnable | :other_repository
+ :ongoing | :runnable | :unfinished | :not_runnable | :other_repository
+
+ :ongoing | :runnable | :ongoing | :disabled | :none
+ :ongoing | :runnable | :ongoing | :runnable | :none
+ :ongoing | :runnable | :ongoing | :not_runnable | :none
+
+ :ongoing | :not_runnable | :unscheduled | :disabled | :none
+ :ongoing | :not_runnable | :unscheduled | :runnable | :other_repository
+ :ongoing | :not_runnable | :unscheduled | :not_runnable | :none
+
+ :ongoing | :not_runnable | :scheduled | :disabled | :none
+ :ongoing | :not_runnable | :scheduled | :runnable | :other_repository
+ :ongoing | :not_runnable | :scheduled | :not_runnable | :none
+
+ :ongoing | :not_runnable | :unfinished | :disabled | :none
+ :ongoing | :not_runnable | :unfinished | :runnable | :other_repository
+ :ongoing | :not_runnable | :unfinished | :not_runnable | :other_repository
+
+ :ongoing | :not_runnable | :ongoing | :disabled | :none
+ :ongoing | :not_runnable | :ongoing | :runnable | :none
+ :ongoing | :not_runnable | :ongoing | :not_runnable | :none
+ end
+
+ with_them do
+ before do
+ update_container_repository(repository, repository_cleanup_status, repository_policy_status)
+ update_container_repository(other_repository, other_repository_cleanup_status, other_repository_policy_status)
+ end
+
+ subject { worker.send(:container_repository) }
+
+ if params[:expected_selected_repository] == :none
+ it 'does not select any repository' do
+ expect(subject).to eq(nil)
+ end
+ else
+ it 'does select a repository' do
+ selected_repository = expected_selected_repository == :repository ? repository : other_repository
+
+ expect(subject).to eq(selected_repository)
+ end
+ end
+
+ def update_container_repository(container_repository, cleanup_status, policy_status)
+ container_repository.update_column(:expiration_policy_cleanup_status, "cleanup_#{cleanup_status}")
+
+ policy = container_repository.project.container_expiration_policy
+
+ case policy_status
+ when :disabled
+ policy.update!(enabled: false)
+ when :runnable
+ policy.update!(enabled: true)
+ policy.update_column(:next_run_at, 5.minutes.ago)
+ when :not_runnable
+ policy.update!(enabled: true)
+ policy.update_column(:next_run_at, 5.minutes.from_now)
+ end
+ end
+ end
end
- it 'process the repository with the oldest expiration_policy_started_at' do
- service_response = cleanup_service_response(repository: repository)
- expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: service_response))
- expect_log_extra_metadata(service_response: service_response)
+ context 'with another repository in cleanup unfinished state' do
+ let_it_be(:another_repository) { create(:container_repository, :cleanup_unfinished) }
- subject
+ before do
+ policy.update_column(:next_run_at, 5.minutes.ago)
+ end
+
+ it 'process the cleanup scheduled repository first' do
+ service_response = cleanup_service_response(repository: repository)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
+
+ subject
+ end
end
end
- context 'with repository in cleanup ongoing state' do
+ context 'with loopless disabled' do
before do
- repository.cleanup_ongoing!
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
end
- it 'does not process it' do
- expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+ context 'with repository in cleanup scheduled state' do
+ it_behaves_like 'handling all repository conditions'
+ end
+
+ context 'with repository in cleanup unfinished state' do
+ before do
+ repository.cleanup_unfinished!
+ end
- expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
- expect(repository.cleanup_ongoing?).to be_truthy
+ it_behaves_like 'handling all repository conditions'
end
- end
- context 'with no repository in any cleanup state' do
- before do
- repository.cleanup_unscheduled!
+ context 'with another repository in cleanup unfinished state' do
+ let_it_be(:another_repository) { create(:container_repository, :cleanup_unfinished) }
+
+ it 'process the cleanup scheduled repository first' do
+ service_response = cleanup_service_response(repository: repository)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
+
+ subject
+ end
end
- it 'does not process it' do
- expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+ context 'with multiple repositories in cleanup unfinished state' do
+ let_it_be(:repository2) { create(:container_repository, :cleanup_unfinished, expiration_policy_started_at: 20.minutes.ago) }
+ let_it_be(:repository3) { create(:container_repository, :cleanup_unfinished, expiration_policy_started_at: 10.minutes.ago) }
+
+ before do
+ repository.update!(expiration_policy_cleanup_status: :cleanup_unfinished, expiration_policy_started_at: 30.minutes.ago)
+ end
+
+ it 'process the repository with the oldest expiration_policy_started_at' do
+ service_response = cleanup_service_response(repository: repository)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
- expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
- expect(repository.cleanup_unscheduled?).to be_truthy
+ subject
+ end
end
- end
- context 'with no container repository waiting' do
- before do
- repository.destroy!
+ context 'with repository in cleanup ongoing state' do
+ before do
+ repository.cleanup_ongoing!
+ end
+
+ it 'does not process it' do
+ expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+
+ expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ expect(repository.cleanup_ongoing?).to be_truthy
+ end
end
- it 'does not execute the cleanup tags service' do
- expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+ context 'with no repository in any cleanup state' do
+ before do
+ repository.cleanup_unscheduled!
+ end
+
+ it 'does not process it' do
+ expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
- expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ expect(repository.cleanup_unscheduled?).to be_truthy
+ end
end
- end
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(container_registry_expiration_policies_throttling: false)
+ context 'with no container repository waiting' do
+ before do
+ repository.destroy!
+ end
+
+ it 'does not execute the cleanup tags service' do
+ expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+
+ expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ end
end
- it 'is a no-op' do
- expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: false)
+ end
- expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ it 'is a no-op' do
+ expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+
+ expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ end
end
end
@@ -224,44 +501,77 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_tags_service_truncated, truncated)
expect(worker).to receive(:log_extra_metadata_on_done).with(:running_jobs_count, 0)
+
+ if service_response.error?
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_error_message, service_response.message)
+ end
end
end
describe '#remaining_work_count' do
subject { worker.remaining_work_count }
- context 'with container repositoires waiting for cleanup' do
- let_it_be(:unfinished_repositories) { create_list(:container_repository, 2, :cleanup_unfinished) }
+ shared_examples 'handling all conditions' do
+ context 'with container repositories waiting for cleanup' do
+ let_it_be(:unfinished_repositories) { create_list(:container_repository, 2, :cleanup_unfinished) }
- it { is_expected.to eq(3) }
+ it { is_expected.to eq(3) }
- it 'logs the work count' do
- expect_log_info(
- cleanup_scheduled_count: 1,
- cleanup_unfinished_count: 2,
- cleanup_total_count: 3
- )
+ it 'logs the work count' do
+ expect_log_info(
+ cleanup_scheduled_count: 1,
+ cleanup_unfinished_count: 2,
+ cleanup_total_count: 3
+ )
- subject
+ subject
+ end
+ end
+
+ context 'with no container repositories waiting for cleanup' do
+ before do
+ repository.cleanup_ongoing!
+ policy.update_column(:next_run_at, 5.minutes.from_now)
+ end
+
+ it { is_expected.to eq(0) }
+
+ it 'logs 0 work count' do
+ expect_log_info(
+ cleanup_scheduled_count: 0,
+ cleanup_unfinished_count: 0,
+ cleanup_total_count: 0
+ )
+
+ subject
+ end
end
end
- context 'with no container repositories waiting for cleanup' do
+ context 'with loopless enabled' do
+ let_it_be(:disabled_repository) { create(:container_repository, :cleanup_scheduled) }
+
+ let(:capacity) { 10 }
+
before do
- repository.cleanup_ongoing!
- end
+ stub_feature_flags(container_registry_expiration_policies_loopless: true)
+ stub_application_setting(container_registry_expiration_policies_worker_capacity: capacity)
- it { is_expected.to eq(0) }
+ # loopless mode is more accurate than non-loopless: policies need to be enabled
+ ContainerExpirationPolicy.update_all(enabled: true)
+ repository.project.container_expiration_policy.update_column(:next_run_at, 5.minutes.ago)
+ disabled_repository.project.container_expiration_policy.update_column(:enabled, false)
+ end
- it 'logs 0 work count' do
- expect_log_info(
- cleanup_scheduled_count: 0,
- cleanup_unfinished_count: 0,
- cleanup_total_count: 0
- )
+ it_behaves_like 'handling all conditions'
+ end
- subject
+ context 'with loopless disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
end
+
+ it_behaves_like 'handling all conditions'
end
end
@@ -289,4 +599,8 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(worker.logger)
.to receive(:info).with(worker.structured_payload(structure))
end
+
+ def loopless_enabled?
+ Feature.enabled?(:container_registry_expiration_policies_loopless)
+ end
end
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index 2d5176e874d..e8f9a972f10 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -35,10 +35,16 @@ RSpec.describe ContainerExpirationPolicyWorker do
end
context 'With no container expiration policies' do
- it 'does not execute any policies' do
- expect(ContainerRepository).not_to receive(:for_project_id)
+ context 'with loopless disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
+ end
- expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ it 'does not execute any policies' do
+ expect(ContainerRepository).not_to receive(:for_project_id)
+
+ expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ end
end
end
diff --git a/spec/workers/deployments/hooks_worker_spec.rb b/spec/workers/deployments/hooks_worker_spec.rb
new file mode 100644
index 00000000000..f1fe7b0fc5d
--- /dev/null
+++ b/spec/workers/deployments/hooks_worker_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deployments::HooksWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ before do
+ allow(ProjectServiceWorker).to receive(:perform_async)
+ end
+
+ it 'executes project services for deployment_hooks' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ service = create(:service, type: 'SlackService', project: project, deployment_events: true, active: true)
+
+ expect(ProjectServiceWorker).to receive(:perform_async).with(service.id, an_instance_of(Hash))
+
+ worker.perform(deployment_id: deployment.id, status_changed_at: Time.current)
+ end
+
+ it 'does not execute an inactive service' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ create(:service, type: 'SlackService', project: project, deployment_events: true, active: false)
+
+ expect(ProjectServiceWorker).not_to receive(:perform_async)
+
+ worker.perform(deployment_id: deployment.id, status_changed_at: Time.current)
+ end
+
+ it 'does not execute if a deployment does not exist' do
+ expect(ProjectServiceWorker).not_to receive(:perform_async)
+
+ worker.perform(deployment_id: non_existing_record_id, status_changed_at: Time.current)
+ end
+
+ it 'executes webhooks' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ web_hook = create(:project_hook, deployment_events: true, project: project)
+
+ status_changed_at = Time.current
+
+ expect_next_instance_of(WebHookService, web_hook, hash_including(status_changed_at: status_changed_at), "deployment_hooks") do |service|
+ expect(service).to receive(:async_execute)
+ end
+
+ worker.perform(deployment_id: deployment.id, status_changed_at: status_changed_at)
+ end
+ end
+end
diff --git a/spec/workers/email_receiver_worker_spec.rb b/spec/workers/email_receiver_worker_spec.rb
index 8bf7f3f552d..d26c08fb221 100644
--- a/spec/workers/email_receiver_worker_spec.rb
+++ b/spec/workers/email_receiver_worker_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe EmailReceiverWorker, :mailer do
it "calls the email receiver" do
expect(Gitlab::Email::Receiver).to receive(:new).with(raw_message).and_call_original
expect_any_instance_of(Gitlab::Email::Receiver).to receive(:execute)
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(message: "Successfully processed message")).and_call_original
described_class.new.perform(raw_message)
end
@@ -20,10 +21,11 @@ RSpec.describe EmailReceiverWorker, :mailer do
context "when an error occurs" do
before do
allow_any_instance_of(Gitlab::Email::Receiver).to receive(:execute).and_raise(error)
+ expect(Sidekiq.logger).to receive(:error).with(hash_including('exception.class' => error.class.name)).and_call_original
end
context 'when the error is Gitlab::Email::EmptyEmailError' do
- let(:error) { Gitlab::Email::EmptyEmailError }
+ let(:error) { Gitlab::Email::EmptyEmailError.new }
it 'sends out a rejection email' do
perform_enqueued_jobs do
@@ -38,7 +40,7 @@ RSpec.describe EmailReceiverWorker, :mailer do
end
context 'when the error is Gitlab::Email::AutoGeneratedEmailError' do
- let(:error) { Gitlab::Email::AutoGeneratedEmailError }
+ let(:error) { Gitlab::Email::AutoGeneratedEmailError.new }
it 'does not send out any rejection email' do
perform_enqueued_jobs do
@@ -63,6 +65,21 @@ RSpec.describe EmailReceiverWorker, :mailer do
expect(email.body.parts.last.to_s).to include("Could not deal with that")
end
end
+
+ context 'when the error is ActiveRecord::StatementTimeout' do
+ let(:error) { ActiveRecord::StatementTimeout.new("Statement timeout") }
+
+ it 'does not report the error to the sender' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(error).and_call_original
+
+ perform_enqueued_jobs do
+ described_class.new.perform(raw_message)
+ end
+
+ email = ActionMailer::Base.deliveries.last
+ expect(email).to be_nil
+ end
+ end
end
end
diff --git a/spec/workers/environments/canary_ingress/update_worker_spec.rb b/spec/workers/environments/canary_ingress/update_worker_spec.rb
index 7bc5108719c..e7782c2fba1 100644
--- a/spec/workers/environments/canary_ingress/update_worker_spec.rb
+++ b/spec/workers/environments/canary_ingress/update_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Environments::CanaryIngress::UpdateWorker do
let_it_be(:environment) { create(:environment) }
+
let(:worker) { described_class.new }
describe '#perform' do
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 5a22529b6d6..de848e59d57 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -104,4 +104,374 @@ RSpec.describe 'Every Sidekiq worker' do
end
end
end
+
+ context 'retries' do
+ let(:cronjobs) do
+ workers_without_defaults.select { |worker| worker.klass < CronjobQueue }
+ end
+
+ let(:retry_exception_workers) do
+ workers_without_defaults.select { |worker| retry_exceptions.has_key?(worker.klass.to_s) }
+ end
+
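+ # Expected Sidekiq retry counts keyed by worker class name; `false` means the worker disables retries entirely.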
+ let(:retry_exceptions) do
+ {
+ 'AdjournedProjectDeletionWorker' => 3,
+ 'AdminEmailsWorker' => 3,
+ 'Analytics::CodeReviewMetricsWorker' => 3,
+ 'Analytics::DevopsAdoption::CreateSnapshotWorker' => 3,
+ 'Analytics::InstanceStatistics::CounterJobWorker' => 3,
+ 'Analytics::UsageTrends::CounterJobWorker' => 3,
+ 'ApprovalRules::ExternalApprovalRulePayloadWorker' => 3,
+ 'ApproveBlockedPendingApprovalUsersWorker' => 3,
+ 'ArchiveTraceWorker' => 3,
+ 'AuthorizedKeysWorker' => 3,
+ 'AuthorizedProjectUpdate::ProjectCreateWorker' => 3,
+ 'AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker' => 3,
+ 'AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker' => 3,
+ 'AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker' => 3,
+ 'AuthorizedProjectsWorker' => 3,
+ 'AutoDevops::DisableWorker' => 3,
+ 'AutoMergeProcessWorker' => 3,
+ 'BackgroundMigrationWorker' => 3,
+ 'BuildFinishedWorker' => 3,
+ 'BuildHooksWorker' => 3,
+ 'BuildQueueWorker' => 3,
+ 'BuildSuccessWorker' => 3,
+ 'BulkImportWorker' => false,
+ 'BulkImports::EntityWorker' => false,
+ 'BulkImports::PipelineWorker' => false,
+ 'Chaos::CpuSpinWorker' => 3,
+ 'Chaos::DbSpinWorker' => 3,
+ 'Chaos::KillWorker' => false,
+ 'Chaos::LeakMemWorker' => 3,
+ 'Chaos::SleepWorker' => 3,
+ 'ChatNotificationWorker' => false,
+ 'Ci::BatchResetMinutesWorker' => 10,
+ 'Ci::BuildPrepareWorker' => 3,
+ 'Ci::BuildScheduleWorker' => 3,
+ 'Ci::BuildTraceChunkFlushWorker' => 3,
+ 'Ci::CreateCrossProjectPipelineWorker' => 3,
+ 'Ci::DailyBuildGroupReportResultsWorker' => 3,
+ 'Ci::DeleteObjectsWorker' => 0,
+ 'Ci::DropPipelineWorker' => 3,
+ 'Ci::InitialPipelineProcessWorker' => 3,
+ 'Ci::MergeRequests::AddTodoWhenBuildFailsWorker' => 3,
+ 'Ci::PipelineArtifacts::CoverageReportWorker' => 3,
+ 'Ci::PipelineArtifacts::CreateQualityReportWorker' => 3,
+ 'Ci::PipelineBridgeStatusWorker' => 3,
+ 'Ci::PipelineSuccessUnlockArtifactsWorker' => 3,
+ 'Ci::RefDeleteUnlockArtifactsWorker' => 3,
+ 'Ci::ResourceGroups::AssignResourceFromResourceGroupWorker' => 3,
+ 'Ci::TestFailureHistoryWorker' => 3,
+ 'Ci::TriggerDownstreamSubscriptionsWorker' => 3,
+ 'CleanupContainerRepositoryWorker' => 3,
+ 'ClusterConfigureIstioWorker' => 3,
+ 'ClusterInstallAppWorker' => 3,
+ 'ClusterPatchAppWorker' => 3,
+ 'ClusterProvisionWorker' => 3,
+ 'ClusterUpdateAppWorker' => 3,
+ 'ClusterUpgradeAppWorker' => 3,
+ 'ClusterWaitForAppInstallationWorker' => 3,
+ 'ClusterWaitForAppUpdateWorker' => 3,
+ 'ClusterWaitForIngressIpAddressWorker' => 3,
+ 'Clusters::Applications::ActivateServiceWorker' => 3,
+ 'Clusters::Applications::DeactivateServiceWorker' => 3,
+ 'Clusters::Applications::UninstallWorker' => 3,
+ 'Clusters::Applications::WaitForUninstallAppWorker' => 3,
+ 'Clusters::Cleanup::AppWorker' => 3,
+ 'Clusters::Cleanup::ProjectNamespaceWorker' => 3,
+ 'Clusters::Cleanup::ServiceAccountWorker' => 3,
+ 'ContainerExpirationPolicies::CleanupContainerRepositoryWorker' => 0,
+ 'CreateCommitSignatureWorker' => 3,
+ 'CreateGithubWebhookWorker' => 3,
+ 'CreateNoteDiffFileWorker' => 3,
+ 'CreatePipelineWorker' => 3,
+ 'DastSiteValidationWorker' => 3,
+ 'DeleteContainerRepositoryWorker' => 3,
+ 'DeleteDiffFilesWorker' => 3,
+ 'DeleteMergedBranchesWorker' => 3,
+ 'DeleteStoredFilesWorker' => 3,
+ 'DeleteUserWorker' => 3,
+ 'Deployments::AutoRollbackWorker' => 3,
+ 'Deployments::DropOlderDeploymentsWorker' => 3,
+ 'Deployments::ExecuteHooksWorker' => 3,
+ 'Deployments::FinishedWorker' => 3,
+ 'Deployments::ForwardDeploymentWorker' => 3,
+ 'Deployments::LinkMergeRequestWorker' => 3,
+ 'Deployments::SuccessWorker' => 3,
+ 'Deployments::UpdateEnvironmentWorker' => 3,
+ 'DesignManagement::CopyDesignCollectionWorker' => 3,
+ 'DesignManagement::NewVersionWorker' => 3,
+ 'DestroyPagesDeploymentsWorker' => 3,
+ 'DetectRepositoryLanguagesWorker' => 1,
+ 'DisallowTwoFactorForGroupWorker' => 3,
+ 'DisallowTwoFactorForSubgroupsWorker' => 3,
+ 'Dora::DailyMetrics::RefreshWorker' => 3,
+ 'ElasticAssociationIndexerWorker' => 3,
+ 'ElasticCommitIndexerWorker' => 2,
+ 'ElasticDeleteProjectWorker' => 2,
+ 'ElasticFullIndexWorker' => 2,
+ 'ElasticIndexerWorker' => 2,
+ 'ElasticIndexingControlWorker' => 3,
+ 'ElasticNamespaceIndexerWorker' => 2,
+ 'ElasticNamespaceRolloutWorker' => 2,
+ 'EmailReceiverWorker' => 3,
+ 'EmailsOnPushWorker' => 3,
+ 'Environments::CanaryIngress::UpdateWorker' => false,
+ 'Epics::UpdateEpicsDatesWorker' => 3,
+ 'ErrorTrackingIssueLinkWorker' => 3,
+ 'Experiments::RecordConversionEventWorker' => 3,
+ 'ExpireBuildInstanceArtifactsWorker' => 3,
+ 'ExpireJobCacheWorker' => 3,
+ 'ExpirePipelineCacheWorker' => 3,
+ 'ExportCsvWorker' => 3,
+ 'ExternalServiceReactiveCachingWorker' => 3,
+ 'FileHookWorker' => false,
+ 'FlushCounterIncrementsWorker' => 3,
+ 'Geo::Batch::ProjectRegistrySchedulerWorker' => 3,
+ 'Geo::Batch::ProjectRegistryWorker' => 3,
+ 'Geo::ContainerRepositorySyncWorker' => 3,
+ 'Geo::DesignRepositoryShardSyncWorker' => false,
+ 'Geo::DesignRepositorySyncWorker' => 3,
+ 'Geo::DestroyWorker' => 3,
+ 'Geo::EventWorker' => 3,
+ 'Geo::FileDownloadWorker' => 3,
+ 'Geo::FileRegistryRemovalWorker' => 3,
+ 'Geo::FileRemovalWorker' => 3,
+ 'Geo::HashedStorageAttachmentsMigrationWorker' => 3,
+ 'Geo::HashedStorageMigrationWorker' => 3,
+ 'Geo::ProjectSyncWorker' => 3,
+ 'Geo::RenameRepositoryWorker' => 3,
+ 'Geo::RepositoriesCleanUpWorker' => 3,
+ 'Geo::RepositoryCleanupWorker' => 3,
+ 'Geo::RepositoryShardSyncWorker' => false,
+ 'Geo::RepositoryVerification::Primary::ShardWorker' => false,
+ 'Geo::RepositoryVerification::Primary::SingleWorker' => false,
+ 'Geo::RepositoryVerification::Secondary::SingleWorker' => false,
+ 'Geo::ReverificationBatchWorker' => 0,
+ 'Geo::Scheduler::Primary::SchedulerWorker' => 3,
+ 'Geo::Scheduler::SchedulerWorker' => 3,
+ 'Geo::Scheduler::Secondary::SchedulerWorker' => 3,
+ 'Geo::VerificationBatchWorker' => 0,
+ 'Geo::VerificationTimeoutWorker' => false,
+ 'Geo::VerificationWorker' => 3,
+ 'GeoRepositoryDestroyWorker' => 3,
+ 'GitGarbageCollectWorker' => false,
+ 'Gitlab::GithubImport::AdvanceStageWorker' => 3,
+ 'Gitlab::GithubImport::ImportDiffNoteWorker' => 5,
+ 'Gitlab::GithubImport::ImportIssueWorker' => 5,
+ 'Gitlab::GithubImport::ImportLfsObjectWorker' => 5,
+ 'Gitlab::GithubImport::ImportNoteWorker' => 5,
+ 'Gitlab::GithubImport::ImportPullRequestMergedByWorker' => 5,
+ 'Gitlab::GithubImport::ImportPullRequestReviewWorker' => 5,
+ 'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
+ 'Gitlab::GithubImport::RefreshImportJidWorker' => 5,
+ 'Gitlab::GithubImport::Stage::FinishImportWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportLfsObjectsWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportNotesWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportRepositoryWorker' => 5,
+ 'Gitlab::JiraImport::AdvanceStageWorker' => 5,
+ 'Gitlab::JiraImport::ImportIssueWorker' => 5,
+ 'Gitlab::JiraImport::Stage::FinishImportWorker' => 5,
+ 'Gitlab::JiraImport::Stage::ImportAttachmentsWorker' => 5,
+ 'Gitlab::JiraImport::Stage::ImportIssuesWorker' => 5,
+ 'Gitlab::JiraImport::Stage::ImportLabelsWorker' => 5,
+ 'Gitlab::JiraImport::Stage::ImportNotesWorker' => 5,
+ 'Gitlab::JiraImport::Stage::StartImportWorker' => 5,
+ 'Gitlab::PhabricatorImport::ImportTasksWorker' => 5,
+ 'GitlabPerformanceBarStatsWorker' => 3,
+ 'GitlabShellWorker' => 3,
+ 'GitlabUsagePingWorker' => 3,
+ 'GroupDestroyWorker' => 3,
+ 'GroupExportWorker' => false,
+ 'GroupImportWorker' => false,
+ 'GroupSamlGroupSyncWorker' => 3,
+ 'GroupWikis::GitGarbageCollectWorker' => false,
+ 'Groups::ScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'Groups::UpdateRepositoryStorageWorker' => 3,
+ 'Groups::UpdateStatisticsWorker' => 3,
+ 'HashedStorage::MigratorWorker' => 3,
+ 'HashedStorage::ProjectMigrateWorker' => 3,
+ 'HashedStorage::ProjectRollbackWorker' => 3,
+ 'HashedStorage::RollbackerWorker' => 3,
+ 'ImportIssuesCsvWorker' => 3,
+ 'ImportSoftwareLicensesWorker' => 3,
+ 'IncidentManagement::AddSeveritySystemNoteWorker' => 3,
+ 'IncidentManagement::ApplyIncidentSlaExceededLabelWorker' => 3,
+ 'IncidentManagement::OncallRotations::PersistAllRotationsShiftsJob' => 3,
+ 'IncidentManagement::OncallRotations::PersistShiftsJob' => 3,
+ 'IncidentManagement::PagerDuty::ProcessIncidentWorker' => 3,
+ 'IncidentManagement::ProcessAlertWorker' => 3,
+ 'IncidentManagement::ProcessPrometheusAlertWorker' => 3,
+ 'InvalidGpgSignatureUpdateWorker' => 3,
+ 'IrkerWorker' => 3,
+ 'IssuableExportCsvWorker' => 3,
+ 'IssuePlacementWorker' => 3,
+ 'IssueRebalancingWorker' => 3,
+ 'IterationsUpdateStatusWorker' => 3,
+ 'JiraConnect::SyncBranchWorker' => 3,
+ 'JiraConnect::SyncBuildsWorker' => 3,
+ 'JiraConnect::SyncDeploymentsWorker' => 3,
+ 'JiraConnect::SyncFeatureFlagsWorker' => 3,
+ 'JiraConnect::SyncMergeRequestWorker' => 3,
+ 'JiraConnect::SyncProjectWorker' => 3,
+ 'LdapGroupSyncWorker' => 3,
+ 'MailScheduler::IssueDueWorker' => 3,
+ 'MailScheduler::NotificationServiceWorker' => 3,
+ 'MembersDestroyer::UnassignIssuablesWorker' => 3,
+ 'MergeRequestCleanupRefsWorker' => 3,
+ 'MergeRequestMergeabilityCheckWorker' => 3,
+ 'MergeRequestResetApprovalsWorker' => 3,
+ 'MergeRequests::AssigneesChangeWorker' => 3,
+ 'MergeRequests::CreatePipelineWorker' => 3,
+ 'MergeRequests::DeleteSourceBranchWorker' => 3,
+ 'MergeRequests::HandleAssigneesChangeWorker' => 3,
+ 'MergeRequests::ResolveTodosWorker' => 3,
+ 'MergeRequests::SyncCodeOwnerApprovalRulesWorker' => 3,
+ 'MergeTrains::RefreshWorker' => 3,
+ 'MergeWorker' => 3,
+ 'Metrics::Dashboard::PruneOldAnnotationsWorker' => 3,
+ 'Metrics::Dashboard::SyncDashboardsWorker' => 3,
+ 'MigrateExternalDiffsWorker' => 3,
+ 'NamespacelessProjectDestroyWorker' => 3,
+ 'Namespaces::OnboardingIssueCreatedWorker' => 3,
+ 'Namespaces::OnboardingPipelineCreatedWorker' => 3,
+ 'Namespaces::OnboardingProgressWorker' => 3,
+ 'Namespaces::OnboardingUserAddedWorker' => 3,
+ 'Namespaces::RootStatisticsWorker' => 3,
+ 'Namespaces::ScheduleAggregationWorker' => 3,
+ 'NetworkPolicyMetricsWorker' => 3,
+ 'NewEpicWorker' => 3,
+ 'NewIssueWorker' => 3,
+ 'NewMergeRequestWorker' => 3,
+ 'NewNoteWorker' => 3,
+ 'ObjectPool::CreateWorker' => 3,
+ 'ObjectPool::DestroyWorker' => 3,
+ 'ObjectPool::JoinWorker' => 3,
+ 'ObjectPool::ScheduleJoinWorker' => 3,
+ 'ObjectStorage::BackgroundMoveWorker' => 5,
+ 'ObjectStorage::MigrateUploadsWorker' => 3,
+ 'Packages::Composer::CacheUpdateWorker' => 3,
+ 'Packages::Go::SyncPackagesWorker' => 3,
+ 'Packages::Maven::Metadata::SyncWorker' => 3,
+ 'Packages::Nuget::ExtractionWorker' => 3,
+ 'Packages::Rubygems::ExtractionWorker' => 3,
+ 'PagesDomainSslRenewalWorker' => 3,
+ 'PagesDomainVerificationWorker' => 3,
+ 'PagesRemoveWorker' => 3,
+ 'PagesTransferWorker' => 3,
+ 'PagesUpdateConfigurationWorker' => 3,
+ 'PagesWorker' => 3,
+ 'PersonalAccessTokens::Groups::PolicyWorker' => 3,
+ 'PersonalAccessTokens::Instance::PolicyWorker' => 3,
+ 'PipelineHooksWorker' => 3,
+ 'PipelineMetricsWorker' => 3,
+ 'PipelineNotificationWorker' => 3,
+ 'PipelineProcessWorker' => 3,
+ 'PipelineUpdateWorker' => 3,
+ 'PostReceive' => 3,
+ 'ProcessCommitWorker' => 3,
+ 'ProjectCacheWorker' => 3,
+ 'ProjectDailyStatisticsWorker' => 3,
+ 'ProjectDestroyWorker' => 3,
+ 'ProjectExportWorker' => false,
+ 'ProjectImportScheduleWorker' => false,
+ 'ProjectScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'ProjectServiceWorker' => 3,
+ 'ProjectTemplateExportWorker' => false,
+ 'ProjectUpdateRepositoryStorageWorker' => 3,
+ 'Projects::GitGarbageCollectWorker' => false,
+ 'Projects::PostCreationWorker' => 3,
+ 'Projects::ScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'Projects::UpdateRepositoryStorageWorker' => 3,
+ 'Prometheus::CreateDefaultAlertsWorker' => 3,
+ 'PropagateIntegrationGroupWorker' => 3,
+ 'PropagateIntegrationInheritDescendantWorker' => 3,
+ 'PropagateIntegrationInheritWorker' => 3,
+ 'PropagateIntegrationProjectWorker' => 3,
+ 'PropagateIntegrationWorker' => 3,
+ 'PropagateServiceTemplateWorker' => 3,
+ 'PurgeDependencyProxyCacheWorker' => 3,
+ 'ReactiveCachingWorker' => 3,
+ 'RebaseWorker' => 3,
+ 'RefreshLicenseComplianceChecksWorker' => 3,
+ 'Releases::CreateEvidenceWorker' => 3,
+ 'RemoteMirrorNotificationWorker' => 3,
+ 'RepositoryCheck::BatchWorker' => false,
+ 'RepositoryCheck::ClearWorker' => false,
+ 'RepositoryCheck::SingleRepositoryWorker' => false,
+ 'RepositoryCleanupWorker' => 3,
+ 'RepositoryForkWorker' => 5,
+ 'RepositoryImportWorker' => false,
+ 'RepositoryPushAuditEventWorker' => 3,
+ 'RepositoryRemoveRemoteWorker' => 3,
+ 'RepositoryUpdateMirrorWorker' => false,
+ 'RepositoryUpdateRemoteMirrorWorker' => 3,
+ 'RequirementsManagement::ImportRequirementsCsvWorker' => 3,
+ 'RequirementsManagement::ProcessRequirementsReportsWorker' => 3,
+ 'RunPipelineScheduleWorker' => 3,
+ 'ScanSecurityReportSecretsWorker' => 17,
+ 'Security::AutoFixWorker' => 3,
+ 'Security::StoreScansWorker' => 3,
+ 'SelfMonitoringProjectCreateWorker' => 3,
+ 'SelfMonitoringProjectDeleteWorker' => 3,
+ 'ServiceDeskEmailReceiverWorker' => 3,
+ 'SetUserStatusBasedOnUserCapSettingWorker' => 3,
+ 'SnippetScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'SnippetUpdateRepositoryStorageWorker' => 3,
+ 'Snippets::ScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'Snippets::UpdateRepositoryStorageWorker' => 3,
+ 'StageUpdateWorker' => 3,
+ 'StatusPage::PublishWorker' => 5,
+ 'StoreSecurityReportsWorker' => 3,
+ 'StoreSecurityScansWorker' => 3,
+ 'SyncSeatLinkRequestWorker' => 20,
+ 'SyncSeatLinkWorker' => 12,
+ 'SyncSecurityReportsToReportApprovalRulesWorker' => 3,
+ 'SystemHookPushWorker' => 3,
+ 'TodosDestroyer::ConfidentialEpicWorker' => 3,
+ 'TodosDestroyer::ConfidentialIssueWorker' => 3,
+ 'TodosDestroyer::DestroyedIssuableWorker' => 3,
+ 'TodosDestroyer::EntityLeaveWorker' => 3,
+ 'TodosDestroyer::GroupPrivateWorker' => 3,
+ 'TodosDestroyer::PrivateFeaturesWorker' => 3,
+ 'TodosDestroyer::ProjectPrivateWorker' => 3,
+ 'UpdateExternalPullRequestsWorker' => 3,
+ 'UpdateHeadPipelineForMergeRequestWorker' => 3,
+ 'UpdateHighestRoleWorker' => 3,
+ 'UpdateMergeRequestsWorker' => 3,
+ 'UpdateProjectStatisticsWorker' => 3,
+ 'UploadChecksumWorker' => 3,
+ 'Vulnerabilities::Statistics::AdjustmentWorker' => 3,
+ 'VulnerabilityExports::ExportDeletionWorker' => 3,
+ 'VulnerabilityExports::ExportWorker' => 3,
+ 'WaitForClusterCreationWorker' => 3,
+ 'WebHookWorker' => 4,
+ 'WebHooks::DestroyWorker' => 3,
+ 'Wikis::GitGarbageCollectWorker' => false,
+ 'X509CertificateRevokeWorker' => 3
+ }
+ end
+
+ it 'uses the default number of retries for new jobs' do
+ expect(workers_without_defaults - cronjobs - retry_exception_workers).to all(have_attributes(retries: true))
+ end
+
+ it 'uses zero retries for cronjobs' do
+ expect(cronjobs - retry_exception_workers).to all(have_attributes(retries: false))
+ end
+
+ it 'uses specified numbers of retries for workers with exceptions encoded here', :aggregate_failures do
+ retry_exception_workers.each do |worker|
+ expect(worker.retries).to eq(retry_exceptions[worker.klass.to_s]),
+ "#{worker.klass} has #{worker.retries} retries, expected #{retry_exceptions[worker.klass]}"
+ end
+ end
+ end
end
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
index 8efead31a42..cbd9dd39336 100644
--- a/spec/workers/expire_job_cache_worker_spec.rb
+++ b/spec/workers/expire_job_cache_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ExpireJobCacheWorker do
let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+
let(:project) { pipeline.project }
describe '#perform' do
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
deleted file mode 100644
index 3df64c35166..00000000000
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'fileutils'
-
-require 'spec_helper'
-
-RSpec.describe GitGarbageCollectWorker do
- let_it_be(:project) { create(:project, :repository) }
-
- let(:lease_uuid) { SecureRandom.uuid }
- let(:lease_key) { "project_housekeeping:#{project.id}" }
- let(:task) { :full_repack }
- let(:params) { [project.id, task, lease_key, lease_uuid] }
-
- subject { described_class.new }
-
- describe "#perform" do
- it 'calls the Projects::GitGarbageCollectWorker with the same params' do
- expect_next_instance_of(Projects::GitGarbageCollectWorker) do |instance|
- expect(instance).to receive(:perform).with(*params)
- end
-
- subject.perform(*params)
- end
- end
-end
diff --git a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
index 4039cdac721..6476d82eb85 100644
--- a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::ImportDiffNoteWorker do
importer = double(:importer)
hash = {
'noteable_id' => 42,
+ 'github_id' => 42,
'path' => 'README.md',
'commit_id' => '123abc',
'diff_hunk' => "@@ -1 +1 @@\n-Hello\n+Hello world",
diff --git a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
index c25e89f6928..9f5bd1d9e5e 100644
--- a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::ImportIssueWorker do
importer = double(:importer)
hash = {
'iid' => 42,
+ 'github_id' => 42,
'title' => 'My Issue',
'description' => 'This is my issue',
'milestone_number' => 4,
diff --git a/spec/workers/gitlab/github_import/import_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
index bfb40d7c3d3..94bc8e26e4a 100644
--- a/spec/workers/gitlab/github_import/import_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::ImportNoteWorker do
importer = double(:importer)
hash = {
'noteable_id' => 42,
+ 'github_id' => 42,
'noteable_type' => 'issues',
'user' => { 'id' => 4, 'login' => 'alice' },
'note' => 'Hello world',
diff --git a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
index 12b21abf910..1238929fbcb 100644
--- a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::ImportPullRequestWorker do
importer = double(:importer)
hash = {
'iid' => 42,
+ 'github_id' => 42,
'title' => 'My Pull Request',
'description' => 'This is my pull request',
'source_branch' => 'my-feature',
diff --git a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
index 324e8010887..695e21f4733 100644
--- a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::JiraImport::ImportIssueWorker do
let_it_be(:project) { create(:project) }
let_it_be(:jira_issue_label_1) { create(:label, project: project) }
let_it_be(:jira_issue_label_2) { create(:label, project: project) }
+
let(:some_key) { 'some-key' }
describe 'modules' do
diff --git a/spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb
index 7066e6e912f..e440884553f 100644
--- a/spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::JiraImport::Stage::StartImportWorker do
let_it_be(:project) { create(:project, import_type: 'jira') }
let_it_be(:jid) { '12345678' }
+
let(:worker) { described_class.new }
describe 'modules' do
diff --git a/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb b/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
index 7f1cb8a2076..92754513988 100644
--- a/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ::Gitlab::JiraImport::StuckJiraImportJobsWorker do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:worker) { described_class.new }
describe 'with scheduled Jira import' do
diff --git a/spec/workers/import_issues_csv_worker_spec.rb b/spec/workers/import_issues_csv_worker_spec.rb
index 6a698af49c0..919ab2b1adf 100644
--- a/spec/workers/import_issues_csv_worker_spec.rb
+++ b/spec/workers/import_issues_csv_worker_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ImportIssuesCsvWorker do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:upload) { create(:upload, :with_file) }
let(:worker) { described_class.new }
diff --git a/spec/workers/incident_management/add_severity_system_note_worker_spec.rb b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
index 203c62ffe6f..bda6f729759 100644
--- a/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
+++ b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe IncidentManagement::AddSeveritySystemNoteWorker do
context 'when issue is not an incident' do
let_it_be(:issue) { create(:issue, project: project) }
+
let(:incident_id) { issue.id }
it_behaves_like 'does not add a system note'
diff --git a/spec/workers/incident_management/process_alert_worker_spec.rb b/spec/workers/incident_management/process_alert_worker_spec.rb
index 41d4f31da24..7db9b191677 100644
--- a/spec/workers/incident_management/process_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_alert_worker_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
let_it_be(:started_at) { Time.now.rfc3339 }
let_it_be(:payload) { { 'title' => 'title', 'start_time' => started_at } }
let_it_be(:alert) { create(:alert_management_alert, project: project, payload: payload, started_at: started_at) }
+
let(:created_issue) { Issue.last! }
subject { described_class.new.perform(nil, nil, alert.id) }
diff --git a/spec/workers/incident_management/process_alert_worker_v2_spec.rb b/spec/workers/incident_management/process_alert_worker_v2_spec.rb
new file mode 100644
index 00000000000..6cde8b758fa
--- /dev/null
+++ b/spec/workers/incident_management/process_alert_worker_v2_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::ProcessAlertWorkerV2 do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:settings) { create(:project_incident_management_setting, project: project, create_issue: true) }
+
+ describe '#perform' do
+ let_it_be(:started_at) { Time.now.rfc3339 }
+ let_it_be(:payload) { { 'title' => 'title', 'start_time' => started_at } }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, payload: payload, started_at: started_at) }
+
+ let(:created_issue) { Issue.last! }
+
+ subject(:perform_worker) { described_class.new.perform(alert.id) }
+
+ before do
+ allow(Gitlab::AppLogger).to receive(:warn).and_call_original
+
+ allow(AlertManagement::CreateAlertIssueService)
+ .to receive(:new).with(alert, User.alert_bot)
+ .and_call_original
+ end
+
+ shared_examples 'creates issue successfully' do
+ it 'creates an issue' do
+ expect(AlertManagement::CreateAlertIssueService)
+ .to receive(:new).with(alert, User.alert_bot)
+
+ expect { perform_worker }.to change { Issue.count }.by(1)
+ end
+
+ it 'updates AlertManagement::Alert#issue_id' do
+ perform_worker
+
+ expect(alert.reload.issue_id).to eq(created_issue.id)
+ end
+
+ it 'does not write a warning to log' do
+ perform_worker
+
+ expect(Gitlab::AppLogger).not_to have_received(:warn)
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [alert.id] }
+
+ it 'does not create a second issue' do
+ expect { perform_worker }.to change { Issue.count }.by(1)
+ end
+ end
+ end
+
+ context 'with valid alert' do
+ it_behaves_like 'creates issue successfully'
+
+ context 'when alert cannot be updated' do
+ let_it_be(:alert) { create(:alert_management_alert, :with_validation_errors, project: project, payload: payload) }
+
+ it 'does not update AlertManagement::Alert#issue_id' do
+ expect { perform_worker }.not_to change { alert.reload.issue_id }
+ end
+
+ it 'logs a warning' do
+ perform_worker
+
+ expect(Gitlab::AppLogger).to have_received(:warn).with(
+ message: 'Cannot process an Incident',
+ issue_id: created_issue.id,
+ alert_id: alert.id,
+ errors: 'Hosts hosts array is over 255 chars'
+ )
+ end
+ end
+
+ context 'prometheus alert' do
+ let_it_be(:alert) { create(:alert_management_alert, :prometheus, project: project, started_at: started_at) }
+
+ it_behaves_like 'creates issue successfully'
+ end
+ end
+
+ context 'with invalid alert' do
+ let(:invalid_alert_id) { non_existing_record_id }
+
+ subject(:perform_worker) { described_class.new.perform(invalid_alert_id) }
+
+ it 'does not create issues' do
+ expect(AlertManagement::CreateAlertIssueService).not_to receive(:new)
+
+ expect { perform_worker }.not_to change { Issue.count }
+ end
+ end
+ end
+end
diff --git a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
index 2ca4193aa72..56f07459a15 100644
--- a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe IncidentManagement::ProcessPrometheusAlertWorker do
describe '#perform' do
let_it_be(:project) { create(:project) }
let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
+
let(:payload_key) { Gitlab::AlertManagement::Payload::Prometheus.new(project: project, payload: alert_params).gitlab_fingerprint }
let!(:prometheus_alert_event) { create(:prometheus_alert_event, prometheus_alert: prometheus_alert, payload_key: payload_key) }
let!(:settings) { create(:project_incident_management_setting, project: project, create_issue: true) }
diff --git a/spec/workers/issuable/label_links_destroy_worker_spec.rb b/spec/workers/issuable/label_links_destroy_worker_spec.rb
new file mode 100644
index 00000000000..a838f1c8017
--- /dev/null
+++ b/spec/workers/issuable/label_links_destroy_worker_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issuable::LabelLinksDestroyWorker do
+ let(:job_args) { [1, 'MergeRequest'] }
+ let(:service) { double }
+
+ include_examples 'an idempotent worker' do
+ it 'calls the Issuable::DestroyLabelLinksService' do
+ expect(::Issuable::DestroyLabelLinksService).to receive(:new).twice.and_return(service)
+ expect(service).to receive(:execute).twice
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/issuables/clear_groups_issue_counter_worker_spec.rb b/spec/workers/issuables/clear_groups_issue_counter_worker_spec.rb
new file mode 100644
index 00000000000..ac430f42e7a
--- /dev/null
+++ b/spec/workers/issuables/clear_groups_issue_counter_worker_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issuables::ClearGroupsIssueCounterWorker do
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:root_group) { create(:group, parent: parent_group) }
+ let_it_be(:subgroup) { create(:group, parent: root_group) }
+
+ let(:count_service) { Groups::OpenIssuesCountService }
+ let(:instance1) { instance_double(count_service) }
+ let(:instance2) { instance_double(count_service) }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [[root_group.id]] }
+ let(:exec_times) { IdempotentWorkerHelper::WORKER_EXEC_TIMES }
+
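+ # The 'an idempotent worker' shared example performs the job WORKER_EXEC_TIMES times,
+ # so each service call below is expected exactly that many times.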
+ it 'clears the cached issue count in given groups and ancestors' do
+ expect(count_service).to receive(:new)
+ .exactly(exec_times).times.with(root_group).and_return(instance1)
+ expect(count_service).to receive(:new)
+ .exactly(exec_times).times.with(parent_group).and_return(instance2)
+ expect(count_service).not_to receive(:new).with(subgroup)
+
+ [instance1, instance2].each do |instance|
+ expect(instance).to receive(:clear_all_cache_keys).exactly(exec_times).times
+ end
+
+ subject
+ end
+ end
+
+ it 'does not call the count service or raise an error when group_ids is empty' do
+ expect(count_service).not_to receive(:new)
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ described_class.new.perform([])
+ end
+ end
+end
diff --git a/spec/workers/issue_placement_worker_spec.rb b/spec/workers/issue_placement_worker_spec.rb
index 5d4d41b90d0..e0c17bfadee 100644
--- a/spec/workers/issue_placement_worker_spec.rb
+++ b/spec/workers/issue_placement_worker_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe IssuePlacementWorker do
describe '#perform' do
let_it_be(:time) { Time.now.utc }
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
let_it_be(:author) { create(:user) }
let_it_be(:common_attrs) { { author: author, project: project } }
let_it_be(:unplaced) { common_attrs.merge(relative_position: nil) }
@@ -117,6 +118,19 @@ RSpec.describe IssuePlacementWorker do
let(:worker_arguments) { { issue_id: issue_id, project_id: nil } }
it_behaves_like 'running the issue placement worker'
+
+ context 'when block_issue_repositioning is enabled' do
+ let(:issue_id) { issue.id }
+ let(:project_id) { project.id }
+
+ before do
+ stub_feature_flags(block_issue_repositioning: group)
+ end
+
+ it 'does not run repositioning tasks' do
+ expect { run_worker }.not_to change { issue.reset.relative_position }
+ end
+ end
end
context 'passing a project ID' do
@@ -129,4 +143,9 @@ RSpec.describe IssuePlacementWorker do
it_behaves_like 'running the issue placement worker'
end
end
+
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ expect(described_class.get_deduplication_options).to include({ including_scheduled: true })
+ end
end
diff --git a/spec/workers/issue_rebalancing_worker_spec.rb b/spec/workers/issue_rebalancing_worker_spec.rb
index 8b0fcd4bc5a..e5c6ac3f854 100644
--- a/spec/workers/issue_rebalancing_worker_spec.rb
+++ b/spec/workers/issue_rebalancing_worker_spec.rb
@@ -4,7 +4,21 @@ require 'spec_helper'
RSpec.describe IssueRebalancingWorker do
describe '#perform' do
- let_it_be(:issue) { create(:issue) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ context 'when block_issue_repositioning is enabled' do
+ before do
+ stub_feature_flags(block_issue_repositioning: group)
+ end
+
+ it 'does not run an instance of IssueRebalancingService' do
+ expect(IssueRebalancingService).not_to receive(:new)
+
+ described_class.new.perform(nil, issue.project_id)
+ end
+ end
it 'runs an instance of IssueRebalancingService' do
service = double(execute: nil)
diff --git a/spec/workers/jira_connect/sync_project_worker_spec.rb b/spec/workers/jira_connect/sync_project_worker_spec.rb
index 04cc3bec3af..5c0e7e7609c 100644
--- a/spec/workers/jira_connect/sync_project_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_project_worker_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep do
describe '#perform' do
let_it_be(:project) { create_default(:project).freeze }
+
let!(:mr_with_jira_title) { create(:merge_request, :unique_branches, title: 'TEST-123') }
let!(:mr_with_jira_description) { create(:merge_request, :unique_branches, description: 'TEST-323') }
let!(:mr_with_other_title) { create(:merge_request, :unique_branches) }
diff --git a/spec/workers/merge_requests/create_pipeline_worker_spec.rb b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
index 8efce5220be..06d44c45706 100644
--- a/spec/workers/merge_requests/create_pipeline_worker_spec.rb
+++ b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe MergeRequests::CreatePipelineWorker do
context 'when the objects exist' do
it 'calls the merge request create pipeline service and calls update head pipeline' do
aggregate_failures do
- expect_next_instance_of(MergeRequests::CreatePipelineService, project, user) do |service|
+ expect_next_instance_of(MergeRequests::CreatePipelineService, project: project, current_user: user) do |service|
expect(service).to receive(:execute).with(merge_request)
end
diff --git a/spec/workers/merge_worker_spec.rb b/spec/workers/merge_worker_spec.rb
index 417e6edce96..0268bc2388f 100644
--- a/spec/workers/merge_worker_spec.rb
+++ b/spec/workers/merge_worker_spec.rb
@@ -29,5 +29,23 @@ RSpec.describe MergeWorker do
source_project.repository.expire_branches_cache
expect(source_project.repository.branch_names).not_to include('markdown')
end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) do
+ [
+ merge_request.id,
+ merge_request.author_id,
+ commit_message: 'wow such merge',
+ sha: merge_request.diff_head_sha
+ ]
+ end
+
+ it 'still shows the merge request as merged' do
+ subject
+
+ merge_request.reload
+ expect(merge_request).to be_merged
+ end
+ end
end
end
diff --git a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb b/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
index 459e4f953d0..32e7bdd563d 100644
--- a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
+++ b/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Namespaces::OnboardingIssueCreatedWorker, '#perform' do
let_it_be(:issue) { create(:issue) }
+
let(:namespace) { issue.namespace }
it_behaves_like 'records an onboarding progress action', :issue_created do
diff --git a/spec/workers/packages/composer/cache_update_worker_spec.rb b/spec/workers/packages/composer/cache_update_worker_spec.rb
index cc6b48c80eb..a0d8aa5d375 100644
--- a/spec/workers/packages/composer/cache_update_worker_spec.rb
+++ b/spec/workers/packages/composer/cache_update_worker_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Packages::Composer::CacheUpdateWorker, type: :worker do
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+
let(:last_sha) { nil }
let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
let(:job_args) { [project.id, package_name, last_sha] }
diff --git a/spec/workers/packages/debian/process_changes_worker_spec.rb b/spec/workers/packages/debian/process_changes_worker_spec.rb
new file mode 100644
index 00000000000..4a8eb855398
--- /dev/null
+++ b/spec/workers/packages/debian/process_changes_worker_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ProcessChangesWorker, type: :worker do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file, codename: 'unstable') }
+
+ let(:incoming) { create(:debian_incoming, project: distribution.project) }
+ let(:package_file) { incoming.package_files.last }
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:package_file_id) { package_file.id }
+ let(:user_id) { user.id }
+
+ subject { worker.perform(package_file_id, user_id) }
+
+ context 'with mocked service' do
+ it 'calls ProcessChangesService' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect_next_instance_of(::Packages::Debian::ProcessChangesService) do |service|
+ expect(service).to receive(:execute)
+ .with(no_args)
+ end
+
+ subject
+ end
+ end
+
+ context 'with non existing package file' do
+ let(:package_file_id) { non_existing_record_id }
+
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessChangesService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'with nil package file id' do
+ let(:package_file_id) { nil }
+
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessChangesService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'with non existing user' do
+ let(:user_id) { non_existing_record_id }
+
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessChangesService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'with nil user id' do
+ let(:user_id) { nil }
+
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessChangesService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'when the service raises an error' do
+ let(:package_file) { incoming.package_files.first }
+
+ it 'removes the package file', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(Packages::Debian::ExtractChangesMetadataService::ExtractionError),
+ package_file_id: package_file_id,
+ user_id: user_id
+ )
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and change { Packages::PackageFile.count }.by(-1)
+ .and change { incoming.package_files.count }.from(7).to(6)
+
+ expect { package_file.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [package_file.id, user.id] }
+
+ it 'sets the Debian file type as changes', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ # Using subject inside this block will process the job multiple times
+ expect { subject }
+ .to change { Packages::Package.count }.from(1).to(2)
+ .and not_change { Packages::PackageFile.count }
+ .and change { incoming.package_files.count }.from(7).to(0)
+ .and change { package_file&.debian_file_metadatum&.reload&.file_type }.from('unknown').to('changes')
+
+ created_package = Packages::Package.last
+ expect(created_package.name).to eq 'sample'
+ expect(created_package.version).to eq '1.2.3~alpha2'
+ expect(created_package.creator).to eq user
+ end
+ end
+ end
+end
diff --git a/spec/workers/packages/nuget/extraction_worker_spec.rb b/spec/workers/packages/nuget/extraction_worker_spec.rb
index 4703afc9413..5186c037dc5 100644
--- a/spec/workers/packages/nuget/extraction_worker_spec.rb
+++ b/spec/workers/packages/nuget/extraction_worker_spec.rb
@@ -14,14 +14,15 @@ RSpec.describe Packages::Nuget::ExtractionWorker, type: :worker do
subject { described_class.new.perform(package_file_id) }
shared_examples 'handling the metadata error' do |exception_class: ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError|
- it 'removes the package and the package file' do
+ it 'updates package status to error', :aggregate_failures do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
instance_of(exception_class),
project_id: package.project_id
)
- expect { subject }
- .to change { Packages::Package.count }.by(-1)
- .and change { Packages::PackageFile.count }.by(-1)
+
+ subject
+
+ expect(package.reload).to be_error
end
end
@@ -102,5 +103,14 @@ RSpec.describe Packages::Nuget::ExtractionWorker, type: :worker do
it_behaves_like 'handling the metadata error'
end
end
+
+ context 'when processing raises an unaccounted for error' do
+ before do
+ expect(::Packages::Nuget::UpdatePackageFromMetadataService).to receive(:new)
+ .and_raise(Zip::Error)
+ end
+
+ it_behaves_like 'handling the metadata error', exception_class: Zip::Error
+ end
end
end
diff --git a/spec/workers/packages/rubygems/extraction_worker_spec.rb b/spec/workers/packages/rubygems/extraction_worker_spec.rb
index 15c0a3be90c..0e67f3ac62e 100644
--- a/spec/workers/packages/rubygems/extraction_worker_spec.rb
+++ b/spec/workers/packages/rubygems/extraction_worker_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker do
describe '#perform' do
- let_it_be(:package) { create(:rubygems_package) }
+ let_it_be(:package) { create(:rubygems_package, :processing) }
let(:package_file) { package.package_files.first }
let(:package_file_id) { package_file.id }
@@ -14,15 +14,13 @@ RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker do
subject { described_class.new.perform(*job_args) }
- include_examples 'an idempotent worker' do
- it 'processes the gem', :aggregate_failures do
- expect { subject }
- .to change { Packages::Package.count }.by(0)
- .and change { Packages::PackageFile.count }.by(2)
+ it 'processes the gem', :aggregate_failures do
+ expect { subject }
+ .to change { Packages::Package.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(1)
- expect(Packages::Package.last.id).to be(package.id)
- expect(package.name).not_to be(package_name)
- end
+ expect(Packages::Package.last.id).to be(package.id)
+ expect(package.name).not_to be(package_name)
end
it 'handles a processing failure', :aggregate_failures do
@@ -34,9 +32,23 @@ RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker do
project_id: package.project_id
)
- expect { subject }
- .to change { Packages::Package.count }.by(-1)
- .and change { Packages::PackageFile.count }.by(-2)
+ subject
+
+ expect(package.reload).to be_error
+ end
+
+ it 'handles processing an unaccounted for error', :aggregate_failures do
+ expect(::Packages::Rubygems::ProcessGemService).to receive(:new)
+ .and_raise(Zip::Error)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(Zip::Error),
+ project_id: package.project_id
+ )
+
+ subject
+
+ expect(package.reload).to be_error
end
context 'returns when there is no package file' do
diff --git a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
index dac8c529984..563bbdef1be 100644
--- a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
+++ b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe PagesDomainSslRenewalCronWorker do
describe '#perform' do
let_it_be(:project) { create :project }
+
let!(:domain) { create(:pages_domain, project: project, auto_ssl_enabled: false) }
let!(:domain_with_enabled_auto_ssl) { create(:pages_domain, project: project, auto_ssl_enabled: true) }
let!(:domain_with_obtained_letsencrypt) do
diff --git a/spec/workers/pipeline_process_worker_spec.rb b/spec/workers/pipeline_process_worker_spec.rb
index 5d45a131095..0c1db3ccc5a 100644
--- a/spec/workers/pipeline_process_worker_spec.rb
+++ b/spec/workers/pipeline_process_worker_spec.rb
@@ -20,5 +20,10 @@ RSpec.describe PipelineProcessWorker do
.not_to raise_error
end
end
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ feature_flag: :load_balancing_for_pipeline_process_worker,
+ data_consistency: :delayed
end
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index f7fd1b1a0a7..a468c8c3482 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -94,30 +94,12 @@ RSpec.describe PostReceive do
perform
end
- it 'tracks an event for the empty_repo_upload experiment', :snowplow do
- allow_next_instance_of(ApplicationExperiment) do |e|
- allow(e).to receive(:should_track?).and_return(true)
- allow(e).to receive(:track_initial_writes)
+ it 'tracks an event for the empty_repo_upload experiment', :experiment do
+ expect_next_instance_of(EmptyRepoUploadExperiment) do |e|
+ expect(e).to receive(:track_initial_write)
end
perform
-
- expect_snowplow_event(category: 'empty_repo_upload', action: 'initial_write', context: [{
- schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
- data: anything
- }])
- end
-
- it 'does not track an event for the empty_repo_upload experiment when project is not empty', :snowplow do
- allow(empty_project).to receive(:empty_repo?).and_return(false)
- allow_next_instance_of(ApplicationExperiment) do |e|
- allow(e).to receive(:should_track?).and_return(true)
- allow(e).to receive(:track_initial_writes)
- end
-
- perform
-
- expect_no_snowplow_event
end
end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 7a168bf054e..294a05c652b 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe ProcessCommitWorker do
project.repository.after_create_branch
MergeRequests::MergeService
- .new(project, merge_request.author, { sha: merge_request.diff_head_sha })
+ .new(project: project, current_user: merge_request.author, params: { sha: merge_request.diff_head_sha })
.execute(merge_request)
merge_request.reload.merge_commit
diff --git a/spec/workers/project_service_worker_spec.rb b/spec/workers/project_service_worker_spec.rb
index c638b7472ff..237f501e0ec 100644
--- a/spec/workers/project_service_worker_spec.rb
+++ b/spec/workers/project_service_worker_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe ProjectServiceWorker, '#perform' do
let(:service) { JiraService.new }
before do
- allow(Service).to receive(:find).and_return(service)
+ allow(Integration).to receive(:find).and_return(service)
end
it 'executes service with given data' do
diff --git a/spec/workers/projects/git_garbage_collect_worker_spec.rb b/spec/workers/projects/git_garbage_collect_worker_spec.rb
index 8c44643ae51..7b54d7df4b2 100644
--- a/spec/workers/projects/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/projects/git_garbage_collect_worker_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Projects::GitGarbageCollectWorker do
context 'LFS object garbage collection' do
let_it_be(:lfs_reference) { create(:lfs_objects_project, project: project) }
+
let(:lfs_object) { lfs_reference.lfs_object }
before do
diff --git a/spec/workers/projects/post_creation_worker_spec.rb b/spec/workers/projects/post_creation_worker_spec.rb
index b15b7b76b56..c2f42f03299 100644
--- a/spec/workers/projects/post_creation_worker_spec.rb
+++ b/spec/workers/projects/post_creation_worker_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Projects::PostCreationWorker do
let(:job_args) { [nil] }
it 'does not create prometheus service' do
- expect { subject }.not_to change { Service.count }
+ expect { subject }.not_to change { Integration.count }
end
end
diff --git a/spec/workers/prometheus/create_default_alerts_worker_spec.rb b/spec/workers/prometheus/create_default_alerts_worker_spec.rb
index 105fa0415d9..887d677c95f 100644
--- a/spec/workers/prometheus/create_default_alerts_worker_spec.rb
+++ b/spec/workers/prometheus/create_default_alerts_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Prometheus::CreateDefaultAlertsWorker do
let_it_be(:project) { create(:project) }
+
let(:worker) { described_class.new }
let(:logger) { worker.send(:logger) }
let(:service) { instance_double(Prometheus::CreateDefaultAlertsService) }
diff --git a/spec/workers/propagate_integration_group_worker_spec.rb b/spec/workers/propagate_integration_group_worker_spec.rb
index fbf1fbf1fea..1c72bed323a 100644
--- a/spec/workers/propagate_integration_group_worker_spec.rb
+++ b/spec/workers/propagate_integration_group_worker_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe PropagateIntegrationGroupWorker do
let_it_be(:subgroup1) { create(:group, parent: group) }
let_it_be(:subgroup2) { create(:group, parent: group) }
let_it_be(:integration) { create(:redmine_service, :instance) }
+
let(:job_args) { [integration.id, group.id, subgroup2.id] }
it_behaves_like 'an idempotent worker' do
diff --git a/spec/workers/propagate_integration_project_worker_spec.rb b/spec/workers/propagate_integration_project_worker_spec.rb
index 0302af2acc9..c8293744bec 100644
--- a/spec/workers/propagate_integration_project_worker_spec.rb
+++ b/spec/workers/propagate_integration_project_worker_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe PropagateIntegrationProjectWorker do
let_it_be(:project2) { create(:project, group: group) }
let_it_be(:project3) { create(:project, group: group) }
let_it_be(:integration) { create(:redmine_service, :instance) }
+
let(:job_args) { [integration.id, project1.id, project3.id] }
 
it_behaves_like 'an idempotent worker' do
diff --git a/spec/workers/rebase_worker_spec.rb b/spec/workers/rebase_worker_spec.rb
index 9246b283be5..4bdfd7219f2 100644
--- a/spec/workers/rebase_worker_spec.rb
+++ b/spec/workers/rebase_worker_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe RebaseWorker, '#perform' do
 
it 'sets the correct project for running hooks' do
expect(MergeRequests::RebaseService)
- .to receive(:new).with(forked_project, merge_request.author).and_call_original
+ .to receive(:new).with(project: forked_project, current_user: merge_request.author).and_call_original
 
subject.perform(merge_request.id, merge_request.author.id)
end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 0b9f95e09fe..fc572c0d9c3 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe RunPipelineScheduleWorker do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project ) }
+
let(:worker) { described_class.new }
 
context 'when a project not found' do
diff --git a/spec/workers/service_desk_email_receiver_worker_spec.rb b/spec/workers/service_desk_email_receiver_worker_spec.rb
index d3bfa51348e..60fc951f627 100644
--- a/spec/workers/service_desk_email_receiver_worker_spec.rb
+++ b/spec/workers/service_desk_email_receiver_worker_spec.rb
@@ -9,11 +9,12 @@ RSpec.describe ServiceDeskEmailReceiverWorker, :mailer do
 
context 'when service_desk_email config is enabled' do
before do
- stub_service_desk_email_setting(enabled: true, address: 'foo')
+ stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com')
end
 
it 'does not ignore the email' do
- expect(Gitlab::Email::ServiceDeskReceiver).to receive(:new)
+ expect(Gitlab::Email::ServiceDeskReceiver).to receive(:new).and_call_original
+ expect(Sidekiq.logger).to receive(:error).with(hash_including('exception.class' => Gitlab::Email::ProjectNotFound.to_s)).and_call_original
 
worker.perform(email)
end
@@ -23,6 +24,7 @@ RSpec.describe ServiceDeskEmailReceiverWorker, :mailer do
allow_next_instance_of(Gitlab::Email::ServiceDeskReceiver) do |receiver|
allow(receiver).to receive(:find_handler).and_return(nil)
end
+ expect(Sidekiq.logger).to receive(:error).with(hash_including('exception.class' => Gitlab::Email::UnknownIncomingEmail.to_s)).and_call_original
end
 
it 'sends a rejection email' do
diff --git a/spec/workers/update_external_pull_requests_worker_spec.rb b/spec/workers/update_external_pull_requests_worker_spec.rb
index 80f22470977..cb6a4e2ebf8 100644
--- a/spec/workers/update_external_pull_requests_worker_spec.rb
+++ b/spec/workers/update_external_pull_requests_worker_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe UpdateExternalPullRequestsWorker do
describe '#perform' do
let_it_be(:project) { create(:project, import_source: 'tanuki/repository') }
let_it_be(:user) { create(:user) }
+
let(:worker) { described_class.new }
 
before do
diff --git a/spec/workers/update_merge_requests_worker_spec.rb b/spec/workers/update_merge_requests_worker_spec.rb
index fb12086c2f4..bd0dc2f9ef4 100644
--- a/spec/workers/update_merge_requests_worker_spec.rb
+++ b/spec/workers/update_merge_requests_worker_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe UpdateMergeRequestsWorker do
end
 
it 'executes MergeRequests::RefreshService with expected values' do
- expect_next_instance_of(MergeRequests::RefreshService, project, user) do |refresh_service|
+ expect_next_instance_of(MergeRequests::RefreshService, project: project, current_user: user) do |refresh_service|
expect(refresh_service).to receive(:execute).with(oldrev, newrev, ref)
end
 
diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
new file mode 100644
index 00000000000..32291a143ee
--- /dev/null
+++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::DeactivateDormantUsersWorker do
+ describe '#perform' do
+ subject(:worker) { described_class.new }
+
+ it 'does not run for GitLab.com' do
+ create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
+ create(:user, last_activity_on: nil)
+
+ expect(Gitlab).to receive(:com?).and_return(true)
+ expect(Gitlab::CurrentSettings).not_to receive(:current_application_settings)
+
+ worker.perform
+
+ expect(User.dormant.count).to eq(1)
+ expect(User.with_no_activity.count).to eq(1)
+ end
+
+ context 'when automatic deactivation of dormant users is enabled' do
+ before do
+ stub_application_setting(deactivate_dormant_users: true)
+ end
+
+ it 'deactivates dormant users' do
+ freeze_time do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ stub_const("#{described_class.name}::PAUSE_SECONDS", 0)
+
+ create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
+ create(:user, last_activity_on: nil)
+
+ expect(worker).to receive(:sleep).twice
+
+ worker.perform
+
+ expect(User.dormant.count).to eq(0)
+ expect(User.with_no_activity.count).to eq(0)
+ end
+ end
+ end
+
+ context 'when automatic deactivation of dormant users is disabled' do
+ before do
+ stub_application_setting(deactivate_dormant_users: false)
+ end
+
+ it 'does nothing' do
+ create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
+ create(:user, last_activity_on: nil)
+
+ worker.perform
+
+ expect(User.dormant.count).to eq(1)
+ expect(User.with_no_activity.count).to eq(1)
+ end
+ end
+ end
+end
diff --git a/spec/workers/users/update_open_issue_count_worker_spec.rb b/spec/workers/users/update_open_issue_count_worker_spec.rb
new file mode 100644
index 00000000000..700055980d8
--- /dev/null
+++ b/spec/workers/users/update_open_issue_count_worker_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UpdateOpenIssueCountWorker do
+ let_it_be(:first_user) { create(:user) }
+ let_it_be(:second_user) { create(:user) }
+
+ describe '#perform' do
+ let(:target_user_ids) { [first_user.id, second_user.id] }
+
+ subject { described_class.new.perform(target_user_ids) }
+
+ context 'when arguments are missing' do
+ context 'when target_user_ids are missing' do
+ context 'when nil' do
+ let(:target_user_ids) { nil }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /No target user ID provided/)
+ end
+ end
+
+ context 'when empty array' do
+ let(:target_user_ids) { [] }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /No target user ID provided/)
+ end
+ end
+
+ context 'when not an ID' do
+ let(:target_user_ids) { "nonsense" }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /No valid target user ID provided/)
+ end
+ end
+ end
+ end
+
+ context 'when successful' do
+ let(:job_args) { [target_user_ids] }
+ let(:fake_service1) { double }
+ let(:fake_service2) { double }
+
+ it 'calls the user update service' do
+ expect(Users::UpdateAssignedOpenIssueCountService).to receive(:new).with(target_user: first_user).and_return(fake_service1)
+ expect(Users::UpdateAssignedOpenIssueCountService).to receive(:new).with(target_user: second_user).and_return(fake_service2)
+ expect(fake_service1).to receive(:execute)
+ expect(fake_service2).to receive(:execute)
+
+ subject
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ it 'recalculates' do
+ subject
+
+ expect(first_user.assigned_open_issues_count).to eq(0)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb
new file mode 100644
index 00000000000..becc7461f2a
--- /dev/null
+++ b/spec/workers/web_hook_worker_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe WebHookWorker do
+ include AfterNextHelpers
+
+ let_it_be(:project_hook) { create(:project_hook) }
+ let_it_be(:data) { { foo: 'bar' } }
+ let_it_be(:hook_name) { 'push_hooks' }
+
+ describe '#perform' do
+ it 'delegates to WebHookService' do
+ expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name).to receive(:execute)
+
+ subject.perform(project_hook.id, data, hook_name)
+ end
+ end
+end