gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/workers')
-rw-r--r--  spec/workers/abuse/trust_score_worker_spec.rb | 46
-rw-r--r--  spec/workers/background_migration/ci_database_worker_spec.rb | 2
-rw-r--r--  spec/workers/background_migration_worker_spec.rb | 2
-rw-r--r--  spec/workers/bulk_imports/entity_worker_spec.rb | 26
-rw-r--r--  spec/workers/bulk_imports/export_request_worker_spec.rb | 13
-rw-r--r--  spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb | 46
-rw-r--r--  spec/workers/bulk_imports/pipeline_worker_spec.rb | 251
-rw-r--r--  spec/workers/bulk_imports/stuck_import_worker_spec.rb | 59
-rw-r--r--  spec/workers/bulk_imports/transform_references_worker_spec.rb | 257
-rw-r--r--  spec/workers/ci/catalog/resources/process_sync_events_worker_spec.rb | 52
-rw-r--r--  spec/workers/ci/low_urgency_cancel_redundant_pipelines_worker_spec.rb | 9
-rw-r--r--  spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb | 4
-rw-r--r--  spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb | 4
-rw-r--r--  spec/workers/ci/runners/process_runner_version_update_worker_spec.rb | 2
-rw-r--r--  spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb | 2
-rw-r--r--  spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb | 2
-rw-r--r--  spec/workers/click_house/events_sync_worker_spec.rb | 34
-rw-r--r--  spec/workers/concerns/click_house_worker_spec.rb | 88
-rw-r--r--  spec/workers/concerns/gitlab/github_import/object_importer_spec.rb | 19
-rw-r--r--  spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb | 26
-rw-r--r--  spec/workers/concerns/worker_attributes_spec.rb | 2
-rw-r--r--  spec/workers/delete_user_worker_spec.rb | 77
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb | 58
-rw-r--r--  spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb | 2
-rw-r--r--  spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb | 15
-rw-r--r--  spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb | 77
-rw-r--r--  spec/workers/gitlab/github_import/advance_stage_worker_spec.rb | 4
-rw-r--r--  spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb | 20
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb | 10
-rw-r--r--  spec/workers/gitlab/jira_import/import_issue_worker_spec.rb | 8
-rw-r--r--  spec/workers/integrations/slack_event_worker_spec.rb | 2
-rw-r--r--  spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb | 4
-rw-r--r--  spec/workers/packages/cleanup_package_registry_worker_spec.rb | 22
-rw-r--r--  spec/workers/packages/npm/create_metadata_cache_worker_spec.rb | 8
-rw-r--r--  spec/workers/packages/nuget/cleanup_stale_symbols_worker_spec.rb | 77
-rw-r--r--  spec/workers/pages/deactivate_mr_deployments_worker_spec.rb | 44
-rw-r--r--  spec/workers/pipeline_schedule_worker_spec.rb | 52
-rw-r--r--  spec/workers/process_commit_worker_spec.rb | 254
-rw-r--r--  spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb | 2
-rw-r--r--  spec/workers/projects/update_repository_storage_worker_spec.rb | 2
40 files changed, 1345 insertions(+), 339 deletions(-)
diff --git a/spec/workers/abuse/trust_score_worker_spec.rb b/spec/workers/abuse/trust_score_worker_spec.rb
new file mode 100644
index 00000000000..adc582ada94
--- /dev/null
+++ b/spec/workers/abuse/trust_score_worker_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Abuse::TrustScoreWorker, :clean_gitlab_redis_shared_state, feature_category: :instance_resiliency do
+ let(:worker) { described_class.new }
+ let_it_be(:user) { create(:user) }
+
+ subject(:perform) { worker.perform(user.id, :telesign, 0.85, 'foo') }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [user.id, :telesign, 0.5] }
+ end
+
+ context "when the user does not exist" do
+ let(:log_payload) { { 'message' => 'User not found.', 'user_id' => user.id } }
+
+ before do
+ allow(User).to receive(:find_by_id).with(user.id).and_return(nil)
+ end
+
+ it 'logs an error' do
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(log_payload))
+
+ expect { perform }.not_to raise_exception
+ end
+
+ it 'does not attempt to create the trust score' do
+ expect(Abuse::TrustScore).not_to receive(:create!)
+
+ perform
+ end
+ end
+
+ context "when the user exists" do
+ it 'creates an abuse trust score with the correct data' do
+ expect { perform }.to change { Abuse::TrustScore.count }.from(0).to(1)
+ expect(Abuse::TrustScore.last.attributes).to include({
+ user_id: user.id,
+ source: "telesign",
+ score: 0.85,
+ correlation_id_value: 'foo'
+ }.stringify_keys)
+ end
+ end
+end
diff --git a/spec/workers/background_migration/ci_database_worker_spec.rb b/spec/workers/background_migration/ci_database_worker_spec.rb
index 952c9ebfce8..7819bc695a4 100644
--- a/spec/workers/background_migration/ci_database_worker_spec.rb
+++ b/spec/workers/background_migration/ci_database_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_cluster_shared_state,
+RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state,
feature_category: :database do
before do
skip_if_shared_database(:ci)
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index 76509b4b227..be1f5027e44 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_cluster_shared_state,
+RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state,
feature_category: :database do
it_behaves_like 'it runs background migration jobs', 'main'
end
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index 690555aa08f..325b31c85db 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -49,10 +49,6 @@ RSpec.describe BulkImports::EntityWorker, feature_category: :importers do
end
end
- it 'has the option to reschedule once if deduplicated' do
- expect(described_class.get_deduplication_options).to include({ if_deduplicated: :reschedule_once })
- end
-
context 'when pipeline workers from a stage are running' do
before do
pipeline_tracker.enqueue!
@@ -77,6 +73,8 @@ RSpec.describe BulkImports::EntityWorker, feature_category: :importers do
it 'enqueues the pipeline workers from the next stage and re-enqueues itself' do
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger).to receive(:info).with(hash_including('message' => 'Stage starting', 'entity_stage' => 1))
end
@@ -92,6 +90,26 @@ RSpec.describe BulkImports::EntityWorker, feature_category: :importers do
worker.perform(entity.id)
end
+
+ context 'when exclusive lease cannot be obtained' do
+ it 'does not start next stage and re-enqueue worker' do
+ expect_next_instance_of(Gitlab::ExclusiveLease) do |lease|
+ expect(lease).to receive(:try_obtain).and_return(false)
+ end
+
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:info).with(
+ hash_including(
+ 'message' => 'Cannot obtain an exclusive lease. There must be another instance already in execution.'
+ )
+ )
+ end
+
+ expect(described_class).to receive(:perform_in)
+
+ worker.perform(entity.id)
+ end
+ end
end
context 'when there are no next stage to run' do
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index e9d0b6b24b2..2cc6348bb27 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -72,17 +72,14 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
entity.update!(source_xid: nil)
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger).to receive(:error).with(
a_hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
'exception.backtrace' => anything,
'exception.class' => 'NoMethodError',
'exception.message' => /^undefined method `model_id' for nil:NilClass/,
- 'message' => 'Failed to fetch source entity id',
- 'source_version' => entity.bulk_import.source_version_info.to_s
+ 'message' => 'Failed to fetch source entity id'
)
).twice
end
@@ -148,7 +145,9 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
entity = create(:bulk_import_entity, bulk_import: bulk_import)
error = 'Exhausted error!'
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger)
.to receive(:error)
.with(hash_including('message' => "Request to export #{entity.source_type} failed"))
diff --git a/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb b/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
index 59ae4205c0f..2dd5b23b3d2 100644
--- a/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
@@ -33,6 +33,8 @@ RSpec.describe BulkImports::FinishBatchedPipelineWorker, feature_category: :impo
)
end
+ let!(:batch_1) { create(:bulk_import_batch_tracker, :finished, tracker: pipeline_tracker) }
+
subject(:worker) { described_class.new }
describe '#perform' do
@@ -45,27 +47,30 @@ RSpec.describe BulkImports::FinishBatchedPipelineWorker, feature_category: :impo
end
end
- context 'when import is in progress' do
- it 'marks the tracker as finished' do
- expect_next_instance_of(BulkImports::Logger) do |logger|
- expect(logger).to receive(:info).with(
- a_hash_including('message' => 'Tracker finished')
- )
- end
+ it 'marks the tracker as finished' do
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_tracker).with(pipeline_tracker).and_call_original
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
- expect { subject.perform(pipeline_tracker.id) }
- .to change { pipeline_tracker.reload.finished? }
- .from(false).to(true)
+ expect(logger).to receive(:info).with(
+ a_hash_including('message' => 'Tracker finished')
+ )
end
- it "calls the pipeline's `#on_finish`" do
- expect_next_instance_of(pipeline_class) do |pipeline|
- expect(pipeline).to receive(:on_finish)
- end
+ expect { subject.perform(pipeline_tracker.id) }
+ .to change { pipeline_tracker.reload.finished? }
+ .from(false).to(true)
+ end
- subject.perform(pipeline_tracker.id)
+ it "calls the pipeline's `#on_finish`" do
+ expect_next_instance_of(pipeline_class) do |pipeline|
+ expect(pipeline).to receive(:on_finish)
end
+ subject.perform(pipeline_tracker.id)
+ end
+
+ context 'when import is in progress' do
it 're-enqueues for any started batches' do
create(:bulk_import_batch_tracker, :started, tracker: pipeline_tracker)
@@ -88,14 +93,17 @@ RSpec.describe BulkImports::FinishBatchedPipelineWorker, feature_category: :impo
end
context 'when pipeline tracker is stale' do
- let(:pipeline_tracker) { create(:bulk_import_tracker, :started, :batched, :stale, entity: entity) }
+ before do
+ batch_1.update!(updated_at: 5.hours.ago)
+ end
it 'fails pipeline tracker and its batches' do
- create(:bulk_import_batch_tracker, :finished, tracker: pipeline_tracker)
-
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_tracker).with(pipeline_tracker).and_call_original
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger).to receive(:error).with(
- a_hash_including('message' => 'Tracker stale. Failing batches and tracker')
+ a_hash_including('message' => 'Batch stale. Failing batches and tracker')
)
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index d99b3e9de73..368c7537641 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -16,12 +16,16 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
def self.file_extraction_pipeline?
false
end
+
+ def self.abort_on_failure?
+ false
+ end
end
end
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
- let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+ let_it_be_with_reload(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
let(:pipeline_tracker) do
create(
@@ -44,7 +48,7 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- include_examples 'an idempotent worker' do
+ it_behaves_like 'an idempotent worker' do
let(:job_args) { [pipeline_tracker.id, pipeline_tracker.stage, entity.id] }
it 'runs the pipeline and sets tracker to finished' do
@@ -61,17 +65,9 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
it 'runs the given pipeline successfully' do
expect_next_instance_of(BulkImports::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- hash_including(
- 'pipeline_class' => 'FakePipeline',
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path
- )
- )
+ expect(logger).to receive(:with_tracker).with(pipeline_tracker).and_call_original
+ expect(logger).to receive(:with_entity).with(pipeline_tracker.entity).and_call_original
+ expect(logger).to receive(:info)
end
allow(worker).to receive(:jid).and_return('jid')
@@ -98,22 +94,9 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
job = { 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id] }
expect_next_instance_of(BulkImports::Logger) do |logger|
- expect(logger)
- .to receive(:error)
- .with(
- hash_including(
- 'pipeline_class' => 'FakePipeline',
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'class' => 'BulkImports::PipelineWorker',
- 'exception.message' => 'Error!',
- 'message' => 'Pipeline failed',
- 'source_version' => entity.bulk_import.source_version_info.to_s,
- 'importer' => 'gitlab_migration'
- )
- )
+ expect(logger).to receive(:with_tracker).with(pipeline_tracker).and_call_original
+ expect(logger).to receive(:with_entity).with(pipeline_tracker.entity).and_call_original
+ expect(logger).to receive(:error)
end
expect(Gitlab::ErrorTracking)
@@ -121,13 +104,13 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
.with(
instance_of(StandardError),
hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'pipeline_class' => pipeline_tracker.pipeline_name,
- 'importer' => 'gitlab_migration',
- 'source_version' => entity.bulk_import.source_version_info.to_s
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import.id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ pipeline_class: pipeline_tracker.pipeline_name,
+ importer: 'gitlab_migration',
+ source_version: entity.bulk_import.source_version_info.to_s
)
)
@@ -156,6 +139,21 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.status_name).to eq(:failed)
expect(pipeline_tracker.jid).to eq('jid')
+ expect(entity.reload.status_name).to eq(:created)
+ end
+
+ context 'when pipeline has abort_on_failure' do
+ before do
+ allow(pipeline_class).to receive(:abort_on_failure?).and_return(true)
+ end
+
+ it 'marks entity as failed' do
+ job = { 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id] }
+
+ described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new('Error!'))
+
+ expect(entity.reload.status_name).to eq(:failed)
+ end
end
end
@@ -266,6 +264,10 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
describe '#perform' do
context 'when entity is failed' do
+ before do
+ entity.update!(status: -1)
+ end
+
it 'marks tracker as skipped and logs the skip' do
pipeline_tracker = create(
:bulk_import_tracker,
@@ -274,23 +276,12 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
status_event: 'enqueue'
)
- entity.update!(status: -1)
-
expect_next_instance_of(BulkImports::Logger) do |logger|
allow(logger).to receive(:info)
expect(logger)
.to receive(:info)
- .with(
- hash_including(
- 'pipeline_class' => 'FakePipeline',
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'message' => 'Skipping pipeline due to failed entity'
- )
- )
+ .with(hash_including(message: 'Skipping pipeline due to failed entity'))
end
worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
@@ -323,23 +314,15 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- it 'reenqueues the worker' do
+ it 're_enqueues the worker' do
expect_any_instance_of(BulkImports::Tracker) do |tracker|
expect(tracker).to receive(:retry).and_call_original
end
expect_next_instance_of(BulkImports::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- hash_including(
- 'pipeline_class' => 'FakePipeline',
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path
- )
- )
+ expect(logger).to receive(:with_tracker).and_call_original
+ expect(logger).to receive(:with_entity).and_call_original
+ expect(logger).to receive(:info)
end
expect(described_class)
@@ -495,8 +478,8 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- context 'when export is batched' do
- let(:batches_count) { 2 }
+ context 'when export is batched', :aggregate_failures do
+ let(:batches_count) { 3 }
before do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
@@ -506,10 +489,30 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
allow(status).to receive(:empty?).and_return(false)
allow(status).to receive(:failed?).and_return(false)
end
+ allow(worker).to receive(:log_extra_metadata_on_done).and_call_original
end
it 'enqueues pipeline batches' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).exactly(3).times
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:batched, true)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+
+ it 'enqueues only missing pipelines batches' do
+ create(:bulk_import_batch_tracker, tracker: pipeline_tracker, batch_number: 2)
expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
@@ -517,7 +520,8 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.status_name).to eq(:started)
expect(pipeline_tracker.batched).to eq(true)
- expect(pipeline_tracker.batches.count).to eq(batches_count)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
end
context 'when batches count is less than 1' do
@@ -531,6 +535,127 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.reload.status_name).to eq(:finished)
end
end
+
+ context 'when pipeline batch enqueuing should be limited' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ allow(::Gitlab::CurrentSettings).to receive(:bulk_import_concurrent_pipeline_batch_limit).and_return(2)
+ end
+
+ it 'only enqueues limited batches and reenqueues itself' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, false)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2)
+ expect(described_class.jobs).to contain_exactly(
+ hash_including(
+ 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id],
+ 'scheduled_at' => be_within(1).of(10.seconds.from_now.to_i)
+ )
+ )
+ end
+
+ context 'when there is a batch in progress' do
+ where(:status) { BulkImports::BatchTracker::IN_PROGRESS_STATES }
+
+ with_them do
+ before do
+ create(:bulk_import_batch_tracker, status, batch_number: 1, tracker: pipeline_tracker)
+ end
+
+ it 'counts the in progress batch against the limit' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).once
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [2])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, false)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2)
+ expect(described_class.jobs).to contain_exactly(
+ hash_including(
+ 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id],
+ 'scheduled_at' => be_within(1).of(10.seconds.from_now.to_i)
+ )
+ )
+ end
+ end
+ end
+
+ context 'when there is a batch that has finished' do
+ where(:status) do
+ all_statuses = BulkImports::BatchTracker.state_machines[:status].states.map(&:name)
+ all_statuses - BulkImports::BatchTracker::IN_PROGRESS_STATES
+ end
+
+ with_them do
+ before do
+ create(:bulk_import_batch_tracker, status, batch_number: 1, tracker: pipeline_tracker)
+ end
+
+ it 'does not count the finished batch against the limit' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(bulk_import_limit_concurrent_batches: false)
+ end
+
+ it 'does not limit batches' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).exactly(3).times
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+
+ it 'still enqueues only missing pipelines batches' do
+ create(:bulk_import_batch_tracker, tracker: pipeline_tracker, batch_number: 2)
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+ end
+ end
end
end
end
diff --git a/spec/workers/bulk_imports/stuck_import_worker_spec.rb b/spec/workers/bulk_imports/stuck_import_worker_spec.rb
index eadf3864190..09fd1e1b524 100644
--- a/spec/workers/bulk_imports/stuck_import_worker_spec.rb
+++ b/spec/workers/bulk_imports/stuck_import_worker_spec.rb
@@ -5,10 +5,21 @@ require 'spec_helper'
RSpec.describe BulkImports::StuckImportWorker, feature_category: :importers do
let_it_be(:created_bulk_import) { create(:bulk_import, :created) }
let_it_be(:started_bulk_import) { create(:bulk_import, :started) }
- let_it_be(:stale_created_bulk_import) { create(:bulk_import, :created, created_at: 3.days.ago) }
- let_it_be(:stale_started_bulk_import) { create(:bulk_import, :started, created_at: 3.days.ago) }
- let_it_be(:stale_created_bulk_import_entity) { create(:bulk_import_entity, :created, created_at: 3.days.ago) }
- let_it_be(:stale_started_bulk_import_entity) { create(:bulk_import_entity, :started, created_at: 3.days.ago) }
+ let_it_be(:stale_created_bulk_import) do
+ create(:bulk_import, :created, updated_at: 3.days.ago)
+ end
+
+ let_it_be(:stale_started_bulk_import) do
+ create(:bulk_import, :started, updated_at: 3.days.ago)
+ end
+
+ let_it_be(:stale_created_bulk_import_entity) do
+ create(:bulk_import_entity, :created, updated_at: 3.days.ago)
+ end
+
+ let_it_be(:stale_started_bulk_import_entity) do
+ create(:bulk_import_entity, :started, updated_at: 3.days.ago)
+ end
let_it_be(:started_bulk_import_tracker) do
create(:bulk_import_tracker, :started, entity: stale_started_bulk_import_entity)
@@ -37,16 +48,12 @@ RSpec.describe BulkImports::StuckImportWorker, feature_category: :importers do
it 'updates the status of bulk import entities to timeout' do
expect_next_instance_of(BulkImports::Logger) do |logger|
allow(logger).to receive(:error)
- expect(logger).to receive(:error).with(
- message: 'BulkImports::Entity stale',
- bulk_import_entity_id: stale_created_bulk_import_entity.id,
- bulk_import_id: stale_created_bulk_import_entity.bulk_import_id
- )
- expect(logger).to receive(:error).with(
- message: 'BulkImports::Entity stale',
- bulk_import_entity_id: stale_started_bulk_import_entity.id,
- bulk_import_id: stale_started_bulk_import_entity.bulk_import_id
- )
+
+ expect(logger).to receive(:with_entity).with(stale_created_bulk_import_entity).and_call_original
+ expect(logger).to receive(:error).with(message: 'BulkImports::Entity stale')
+
+ expect(logger).to receive(:with_entity).with(stale_started_bulk_import_entity).and_call_original
+ expect(logger).to receive(:error).with(message: 'BulkImports::Entity stale')
end
expect { subject }.to change { stale_created_bulk_import_entity.reload.status_name }.from(:created).to(:timeout)
@@ -61,5 +68,29 @@ RSpec.describe BulkImports::StuckImportWorker, feature_category: :importers do
expect { subject }.to not_change { created_bulk_import.reload.status }
.and not_change { started_bulk_import.reload.status }
end
+
+ context 'when bulk import has been updated recently', :clean_gitlab_redis_shared_state do
+ before do
+ stale_created_bulk_import.update!(updated_at: 2.minutes.ago)
+ stale_started_bulk_import.update!(updated_at: 2.minutes.ago)
+ end
+
+ it 'does not update the status of the import' do
+ expect { subject }.to not_change { stale_created_bulk_import.reload.status_name }
+ .and not_change { stale_started_bulk_import.reload.status_name }
+ end
+ end
+
+ context 'when bulk import entity has been updated recently', :clean_gitlab_redis_shared_state do
+ before do
+ stale_created_bulk_import_entity.update!(updated_at: 2.minutes.ago)
+ stale_started_bulk_import_entity.update!(updated_at: 2.minutes.ago)
+ end
+
+ it 'does not update the status of the entity' do
+ expect { subject }.to not_change { stale_created_bulk_import_entity.reload.status_name }
+ .and not_change { stale_started_bulk_import_entity.reload.status_name }
+ end
+ end
end
end
diff --git a/spec/workers/bulk_imports/transform_references_worker_spec.rb b/spec/workers/bulk_imports/transform_references_worker_spec.rb
new file mode 100644
index 00000000000..6295ecb47d6
--- /dev/null
+++ b/spec/workers/bulk_imports/transform_references_worker_spec.rb
@@ -0,0 +1,257 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::TransformReferencesWorker, feature_category: :importers do
+ let_it_be(:project) do
+ project = create(:project)
+ project.add_owner(user)
+ project
+ end
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:bulk_import) { create(:bulk_import) }
+
+ let_it_be(:entity) do
+ create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import,
+ source_full_path: 'source/full/path')
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
+
+ let_it_be_with_refind(:issue) do
+ create(:issue,
+ project: project,
+ description: 'https://my.gitlab.com/source/full/path/-/issues/1')
+ end
+
+ let_it_be(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!')
+ end
+
+ let_it_be(:issue_note) do
+ create(:note,
+ noteable: issue,
+ project: project,
+ note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.')
+ end
+
+ let_it_be(:merge_request_note) do
+ create(:note,
+ noteable: merge_request,
+ project: project,
+ note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username')
+ end
+
+ let_it_be(:system_note) do
+ create(:note,
+ project: project,
+ system: true,
+ noteable: issue,
+ note: "mentioned in merge request !#{merge_request.iid} created by @old_username",
+ note_html: 'note html'
+ )
+ end
+
+ let(:expected_url) do
+ expected_url = URI('')
+ expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
+ expected_url.host = ::Gitlab.config.gitlab.host
+ expected_url.port = ::Gitlab.config.gitlab.port
+ expected_url.path = "/#{project.full_path}"
+ expected_url
+ end
+
+ subject { described_class.new.perform([object.id], object.class.to_s, tracker.id) }
+
+ before do
+ allow(Gitlab::Cache::Import::Caching)
+ .to receive(:values_from_hash)
+ .and_return({
+ 'old_username' => 'new_username',
+ 'older_username' => 'newer_username',
+ 'source_username' => 'destination_username',
+ 'bob' => 'alice-gdk',
+ 'alice' => 'bob-gdk',
+ 'manuelgrabowski' => 'manuelgrabowski-admin',
+ 'manuelgrabowski-admin' => 'manuelgrabowski',
+ 'boaty-mc-boatface' => 'boatymcboatface',
+ 'boatymcboatface' => 'boaty-mc-boatface'
+ })
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [[issue.id], 'Issue', tracker.id] }
+ end
+
+ it 'transforms and saves multiple objects' do
+ old_note = merge_request_note.note
+ merge_request_note_2 = create(:note, noteable: merge_request, project: project, note: old_note)
+
+ described_class.new.perform([merge_request_note.id, merge_request_note_2.id], 'Note', tracker.id)
+
+ expect(merge_request_note.reload.note).not_to eq(old_note)
+ expect(merge_request_note_2.reload.note).not_to eq(old_note)
+ end
+
+ shared_examples 'transforms and saves references' do
+ it 'transforms references and saves the object' do
+ expect_any_instance_of(object.class) do |object|
+ expect(object).to receive(:save!)
+ end
+
+ expect { subject }.not_to change { object.updated_at }
+
+ expect(body).to eq(expected_body)
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(BulkImports::UsersMapper).to receive(:new).and_raise(StandardError)
+ end
+
+ it 'tracks the error and creates an import failure' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(anything, hash_including(bulk_import_id: bulk_import.id))
+
+ expect(BulkImports::Failure).to receive(:create)
+ .with(hash_including(bulk_import_entity_id: entity.id, pipeline_class: 'ReferencesPipeline'))
+
+ subject
+ end
+ end
+ end
+
+ context 'for issue description' do
+ let(:object) { issue }
+ let(:body) { object.reload.description }
+ let(:expected_body) { "http://localhost:80/#{object.namespace.full_path}/-/issues/1" }
+
+ include_examples 'transforms and saves references'
+
+ shared_examples 'returns object unchanged' do
+ it 'returns object unchanged' do
+ issue.update!(description: description)
+
+ subject
+
+ expect(issue.reload.description).to eq(description)
+ end
+
+ it 'does not save the object' do
+ expect_any_instance_of(object.class) do |object|
+ expect(object).to receive(:save!)
+ end
+
+ subject
+ end
+ end
+
+ context 'when object does not have reference or username' do
+ let(:description) { 'foo' }
+
+ include_examples 'returns object unchanged'
+ end
+
+ context 'when there are no matched urls or usernames' do
+ let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
+
+ include_examples 'returns object unchanged'
+ end
+
+ context 'when url path does not start with source full path' do
+ let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
+
+ include_examples 'returns object unchanged'
+ end
+
+ context 'when host does not match and url path starts with source full path' do
+ let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
+
+ include_examples 'returns object unchanged'
+ end
+
+ context 'when url does not match at all' do
+ let(:description) { 'https://website.example/foo/bar' }
+
+ include_examples 'returns object unchanged'
+ end
+ end
+
+ context 'for merge request description' do
+ let(:object) { merge_request }
+ let(:body) { object.reload.description }
+ let(:expected_body) do
+ "#{expected_url}/-/merge_requests/#{merge_request.iid} @destination_username? @alice-gdk, @bob-gdk!"
+ end
+
+ include_examples 'transforms and saves references'
+ end
+
+ context 'for issue notes' do
+ let(:object) { issue_note }
+ let(:body) { object.reload.note }
+ let(:expected_body) { "#{expected_url}/-/issues/#{issue.iid} @newer_username, not_a@username, and @new_username." }
+
+ include_examples 'transforms and saves references'
+ end
+
+ context 'for merge request notes' do
+ let(:object) { merge_request_note }
+ let(:body) { object.reload.note }
+ let(:expected_body) { "#{expected_url}/-/merge_requests/#{merge_request.iid} @same_username" }
+
+ include_examples 'transforms and saves references'
+ end
+
+ context 'for system notes' do
+ let(:object) { system_note }
+ let(:body) { object.reload.note }
+ let(:expected_body) { "mentioned in merge request !#{merge_request.iid} created by @new_username" }
+
+ include_examples 'transforms and saves references'
+
+ context 'when the note includes a username' do
+ let_it_be(:object) do
+ create(:note,
+ project: project,
+ system: true,
+ noteable: issue,
+ note: 'mentioned in merge request created by @source_username.',
+ note_html: 'empty'
+ )
+ end
+
+ let(:body) { object.reload.note }
+ let(:expected_body) { 'mentioned in merge request created by @destination_username.' }
+
+ include_examples 'transforms and saves references'
+ end
+ end
+
+ context 'when old and new usernames are interchanged' do
+ # e.g
+ # |------------------------|-------------------------|
+ # | old_username | new_username |
+ # |------------------------|-------------------------|
+ # | @manuelgrabowski-admin | @manuelgrabowski |
+ # | @manuelgrabowski | @manuelgrabowski-admin |
+ # |------------------------|-------------------------|
+
+ let_it_be(:object) do
+ create(:note,
+ project: project,
+ noteable: merge_request,
+ note: '@manuelgrabowski-admin, @boaty-mc-boatface'
+ )
+ end
+
+ let(:body) { object.reload.note }
+ let(:expected_body) { '@manuelgrabowski, @boatymcboatface' }
+
+ include_examples 'transforms and saves references'
+ end
+end
diff --git a/spec/workers/ci/catalog/resources/process_sync_events_worker_spec.rb b/spec/workers/ci/catalog/resources/process_sync_events_worker_spec.rb
new file mode 100644
index 00000000000..036cc54e9ba
--- /dev/null
+++ b/spec/workers/ci/catalog/resources/process_sync_events_worker_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Catalog::Resources::ProcessSyncEventsWorker, feature_category: :pipeline_composition do
+ subject(:worker) { described_class.new }
+
+ include_examples 'an idempotent worker'
+
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ end
+
+ it 'has the option to reschedule once if deduplicated and a TTL of 1 minute' do
+ expect(described_class.get_deduplication_options).to include({ if_deduplicated: :reschedule_once, ttl: 1.minute })
+ end
+
+ describe '#perform' do
+ let_it_be(:project) { create(:project, name: 'Old Name') }
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+
+ before_all do
+ create(:ci_catalog_resource_sync_event, catalog_resource: resource, status: :processed)
+ create_list(:ci_catalog_resource_sync_event, 2, catalog_resource: resource)
+ # PG trigger adds an event for this update
+ project.update!(name: 'New Name', description: 'Test', visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ end
+
+ subject(:perform) { worker.perform }
+
+ it 'consumes all sync events' do
+ expect { perform }.to change { Ci::Catalog::Resources::SyncEvent.status_pending.count }
+ .from(3).to(0)
+ end
+
+ it 'syncs the denormalized columns of catalog resource with the project' do
+ perform
+
+ expect(resource.reload.name).to eq(project.name)
+ expect(resource.reload.description).to eq(project.description)
+ expect(resource.reload.visibility_level).to eq(project.visibility_level)
+ end
+
+ it 'logs the service result', :aggregate_failures do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:estimated_total_events, 3)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:consumable_events, 3)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:processed_events, 3)
+
+ perform
+ end
+ end
+end
diff --git a/spec/workers/ci/low_urgency_cancel_redundant_pipelines_worker_spec.rb b/spec/workers/ci/low_urgency_cancel_redundant_pipelines_worker_spec.rb
new file mode 100644
index 00000000000..da09a28b384
--- /dev/null
+++ b/spec/workers/ci/low_urgency_cancel_redundant_pipelines_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::LowUrgencyCancelRedundantPipelinesWorker, feature_category: :continuous_integration do
+ it 'is labeled as low urgency' do
+ expect(described_class.get_urgency).to eq(:low)
+ end
+end
diff --git a/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
index b594f661a9a..a7624fdfe01 100644
--- a/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe Ci::PipelineArtifacts::CoverageReportWorker, feature_category: :code_testing do
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ end
+
describe '#perform' do
let(:pipeline_id) { pipeline.id }
diff --git a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
index e3e7047db56..e61d2e5450a 100644
--- a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
+++ b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
@@ -19,13 +19,13 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupWorker, featur
let(:resource_group) { create(:ci_resource_group) }
let(:resource_group_id) { resource_group.id }
- include_examples 'an idempotent worker' do
+ it_behaves_like 'an idempotent worker' do
let(:job_args) { [resource_group_id] }
end
context 'when resource group exists' do
it 'executes AssignResourceFromResourceGroupService' do
- expect_next_instances_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, 2, false, resource_group.project, nil) do |service|
+ expect_next_instance_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, resource_group.project, nil) do |service|
expect(service).to receive(:execute).with(resource_group)
end
diff --git a/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
index 30b451f2112..64e2e8cd037 100644
--- a/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
+++ b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateWorker, feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateWorker, feature_category: :fleet_visibility do
subject(:worker) { described_class.new }
describe '#perform' do
diff --git a/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
index 34b1cb33e6b..7157a3e7beb 100644
--- a/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
+++ b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::ReconcileExistingRunnerVersionsCronWorker, feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::ReconcileExistingRunnerVersionsCronWorker, feature_category: :fleet_visibility do
subject(:worker) { described_class.new }
describe '#perform' do
diff --git a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
index 79d1fadfd2b..4c5ea621191 100644
--- a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
+++ b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::StaleMachinesCleanupCronWorker, feature_category: :runner_fleet do
+RSpec.describe Ci::Runners::StaleMachinesCleanupCronWorker, feature_category: :fleet_visibility do
let(:worker) { described_class.new }
describe '#perform', :freeze_time do
diff --git a/spec/workers/click_house/events_sync_worker_spec.rb b/spec/workers/click_house/events_sync_worker_spec.rb
index 01267db36a7..9662f26115a 100644
--- a/spec/workers/click_house/events_sync_worker_spec.rb
+++ b/spec/workers/click_house/events_sync_worker_spec.rb
@@ -5,6 +5,12 @@ require 'spec_helper'
RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_management do
let(:worker) { described_class.new }
+ specify do
+ expect(worker.class.click_house_worker_attrs).to match(
+ a_hash_including(migration_lock_ttl: ClickHouse::MigrationSupport::ExclusiveLock::DEFAULT_CLICKHOUSE_WORKER_TTL)
+ )
+ end
+
it_behaves_like 'an idempotent worker' do
context 'when the event_sync_worker_for_click_house feature flag is on', :click_house do
before do
@@ -63,11 +69,32 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
end
it 'inserts all records' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 4, reached_end_of_table: true })
+
worker.perform
events = ClickHouse::Client.select('SELECT * FROM events', :main)
expect(events.size).to eq(4)
end
+
+ context 'when new records are inserted while processing' do
+ it 'does not process new records created during the iteration' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 4,
+ reached_end_of_table: true })
+
+ # Simulating the case when there is an insert during the iteration
+ call_count = 0
+ allow(worker).to receive(:next_batch).and_wrap_original do |method|
+ call_count += 1
+ create(:event) if call_count == 3
+ method.call
+ end
+
+ worker.perform
+ end
+ end
end
context 'when time limit is reached' do
@@ -96,6 +123,9 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
end
it 'syncs records after the cursor' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 3, reached_end_of_table: true })
+
worker.perform
events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
@@ -121,7 +151,7 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
context 'when clickhouse is not configured' do
before do
- allow(ClickHouse::Client.configuration).to receive(:databases).and_return({})
+ allow(ClickHouse::Client).to receive(:database_configured?).and_return(false)
end
it 'skips execution' do
@@ -135,7 +165,7 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
context 'when exclusive lease error happens' do
it 'skips execution' do
stub_feature_flags(event_sync_worker_for_click_house: true)
- allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+ allow(ClickHouse::Client).to receive(:database_configured?).with(:main).and_return(true)
expect(worker).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :skipped })
diff --git a/spec/workers/concerns/click_house_worker_spec.rb b/spec/workers/concerns/click_house_worker_spec.rb
new file mode 100644
index 00000000000..cb8bf9c7578
--- /dev/null
+++ b/spec/workers/concerns/click_house_worker_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouseWorker, feature_category: :database do
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
+ include ClickHouseWorker
+
+ def perform
+ AnotherWorker.perform_async('identifier')
+ end
+ end
+ end
+
+ let(:another_worker) do
+ Class.new do
+ def self.name
+ 'AnotherWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ before do
+ stub_const('DummyWorker', worker)
+ stub_const('AnotherWorker', another_worker)
+ end
+
+ describe '.register_click_house_worker?' do
+ subject(:register_click_house_worker?) { worker.register_click_house_worker? }
+
+ context 'when click_house_migration_lock is set' do
+ before do
+ worker.click_house_migration_lock(1.minute)
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when click_house_migration_lock is not set' do
+ it { is_expected.to be(true) }
+ end
+
+ context 'when worker does not include module' do
+ it { expect(another_worker).not_to respond_to(:register_click_house_worker?) }
+ end
+ end
+
+ describe '.click_house_worker_attrs' do
+ subject(:click_house_worker_attrs) { worker.click_house_migration_lock(ttl) }
+
+ let(:ttl) { 1.minute }
+
+ it { expect { click_house_worker_attrs }.not_to raise_error }
+ it { is_expected.to match(a_hash_including(migration_lock_ttl: 60.seconds)) }
+
+ context 'with invalid ttl' do
+ let(:ttl) { {} }
+
+ it 'raises exception' do
+ expect { click_house_worker_attrs }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ it 'registers ClickHouse worker' do
+ expect(worker.register_click_house_worker?).to be_truthy
+ expect(another_worker).not_to respond_to(:register_click_house_worker?)
+ end
+
+ it 'sets default TTL for worker registration' do
+ expect(worker.click_house_worker_attrs).to match(
+ a_hash_including(migration_lock_ttl: ClickHouse::MigrationSupport::ExclusiveLock::DEFAULT_CLICKHOUSE_WORKER_TTL)
+ )
+ end
+
+ it 'registers worker to pause on ClickHouse migrations' do
+ expect(worker.get_pause_control).to eq(:click_house_migration)
+ expect(another_worker.get_pause_control).to be_nil
+ end
+end
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index b2bc502d156..bba855f5095 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -67,10 +67,8 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
describe '#import', :clean_gitlab_redis_cache do
before do
- expect(worker)
- .to receive(:importer_class)
- .at_least(:once)
- .and_return(importer_class)
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ expect(worker).to receive(:importer_class).at_least(:once).and_return(importer_class)
end
it 'imports the object' do
@@ -203,25 +201,22 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
expect(project.import_failures.last.exception_message).to eq('some error')
end
- context 'without github_identifiers defined' do
+ context 'when a NoMethod error is raised' do
let(:stubbed_representation) { representation_class.instance_eval { undef_method :github_identifiers } }
- it 'logs error when representation does not have a github_id' do
- expect(importer_class).not_to receive(:new)
-
+ it 'logs the error but does not re-raise it, so the worker does not retry' do
expect(Gitlab::Import::ImportFailureService)
.to receive(:track)
.with(
project_id: project.id,
exception: a_kind_of(NoMethodError),
error_source: 'klass_name',
- fail_import: true,
+ fail_import: false,
external_identifiers: { object_type: 'dummy' }
)
.and_call_original
- expect { worker.import(project, client, { 'number' => 10 }) }
- .to raise_error(NoMethodError, /^undefined method `github_identifiers/)
+ worker.import(project, client, { 'number' => 10 })
end
end
@@ -239,7 +234,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.and_raise(exception)
end
- it 'logs an error' do
+ it 'logs the error but does not re-raise it, so the worker does not retry' do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
index fa782967441..37e686f9f92 100644
--- a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -15,6 +15,10 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
end.new
end
+ it 'has a Sidekiq retry of 6' do
+ expect(worker.class.sidekiq_options['retry']).to eq(6)
+ end
+
describe '#perform' do
it 'returns if no project could be found' do
expect(worker).not_to receive(:try_import)
@@ -138,6 +142,10 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
end
describe '#try_import' do
+ before do
+ allow(worker).to receive(:jid).and_return('jid')
+ end
+
it 'imports the project' do
client = double(:client)
@@ -145,7 +153,7 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
.to receive(:import)
.with(client, project)
- expect(project.import_state).to receive(:refresh_jid_expiration)
+ expect(Gitlab::GithubImport::RefreshImportJidWorker).to receive(:perform_in_the_future).with(project.id, 'jid')
worker.try_import(client, project)
end
@@ -153,7 +161,7 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
it 'reschedules the worker if RateLimitError was raised' do
client = double(:client, rate_limit_resets_in: 10)
- expect(project.import_state).to receive(:refresh_jid_expiration)
+ expect(Gitlab::GithubImport::RefreshImportJidWorker).to receive(:perform_in_the_future).with(project.id, 'jid')
expect(worker)
.to receive(:import)
@@ -186,7 +194,7 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
end
end
- describe '.resumes_work_when_interrupted!' do
+ describe '.sidekiq_options!' do
subject(:sidekiq_options) { worker.class.sidekiq_options }
it 'does not set the `max_retries_after_interruption` if not called' do
@@ -199,16 +207,8 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
is_expected.to include('max_retries_after_interruption' => 20)
end
- context 'when the flag is disabled' do
- before do
- stub_feature_flags(github_importer_raise_max_interruptions: false)
- end
-
- it 'does not set `max_retries_after_interruption`' do
- worker.class.resumes_work_when_interrupted!
-
- is_expected.not_to have_key('max_retries_after_interruption')
- end
+ it 'sets the status_expiration' do
+ is_expected.to include('status_expiration' => Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
end
end
end
diff --git a/spec/workers/concerns/worker_attributes_spec.rb b/spec/workers/concerns/worker_attributes_spec.rb
index 767a55162fb..1c9d9a5a1ad 100644
--- a/spec/workers/concerns/worker_attributes_spec.rb
+++ b/spec/workers/concerns/worker_attributes_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe WorkerAttributes, feature_category: :shared do
describe '.data_consistency' do
context 'with invalid data_consistency' do
- it 'raise exception' do
+ it 'raises exception' do
expect { worker.data_consistency(:invalid) }
.to raise_error('Invalid data consistency: invalid')
end
diff --git a/spec/workers/delete_user_worker_spec.rb b/spec/workers/delete_user_worker_spec.rb
index 8a99f69c079..13a7390f592 100644
--- a/spec/workers/delete_user_worker_spec.rb
+++ b/spec/workers/delete_user_worker_spec.rb
@@ -30,28 +30,91 @@ RSpec.describe DeleteUserWorker, feature_category: :user_management do
end
end
- context 'when user is banned' do
+ context 'when user deleted their own account' do
subject(:perform) { described_class.new.perform(current_user.id, user.id) }
before do
- user.ban
+ # user is blocked as part of User#delete_async
+ user.block
+ # custom attribute is created as part of User#delete_async
+ UserCustomAttribute.set_deleted_own_account_at(user)
end
- it_behaves_like 'does nothing'
+ shared_examples 'proceeds with deletion' do
+ it "proceeds with deletion" do
+ expect_next_instance_of(Users::DestroyService) do |service|
+ expect(service).to receive(:execute).with(user, {})
+ end
+
+ perform
+ end
+ end
+
+ it_behaves_like 'proceeds with deletion'
context 'when delay_delete_own_user feature flag is disabled' do
before do
stub_feature_flags(delay_delete_own_user: false)
end
- it "proceeds with deletion" do
- expect_next_instance_of(Users::DestroyService) do |service|
- expect(service).to receive(:execute).with(user, {})
- end
+ it_behaves_like 'proceeds with deletion'
+ end
+
+ shared_examples 'logs' do |reason|
+ it 'logs' do
+ expect(Gitlab::AppLogger).to receive(:info).with({
+ message: 'Skipped own account deletion.',
+ reason: reason,
+ user_id: user.id,
+ username: user.username
+ })
perform
end
end
+
+ shared_examples 'updates the user\'s custom attributes' do
+ it 'destroys the user\'s DELETED_OWN_ACCOUNT_AT custom attribute' do
+ key = UserCustomAttribute::DELETED_OWN_ACCOUNT_AT
+ expect { perform }.to change { user.custom_attributes.by_key(key).count }.from(1).to(0)
+ end
+
+ context 'when custom attribute is not present' do
+ before do
+ UserCustomAttribute.delete_all
+ end
+
+ it 'does nothing' do
+ expect { perform }.not_to raise_error
+ end
+ end
+
+ it 'creates a SKIPPED_ACCOUNT_DELETION_AT custom attribute for the user' do
+ key = UserCustomAttribute::SKIPPED_ACCOUNT_DELETION_AT
+ expect { perform }.to change { user.custom_attributes.by_key(key).count }.from(0).to(1)
+ end
+ end
+
+ context 'when user is banned' do
+ before do
+ user.activate
+ user.ban
+ end
+
+ it_behaves_like 'does nothing'
+ it_behaves_like 'logs', 'User has been banned.'
+ it_behaves_like 'updates the user\'s custom attributes'
+ end
+
+ context 'when user is not blocked (e.g. as a result of a user reinstatement request)' do
+ before do
+ user.activate
+ end
+
+ it_behaves_like 'does nothing'
+ it_behaves_like 'logs', 'User has been unblocked.'
+ it_behaves_like 'updates the user\'s custom attributes'
+ end
end
context 'when user to delete does not exist' do
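Note: the block-argument form of `shared_examples` used above ('logs', 'updates the user\'s custom attributes') is standard RSpec; each `it_behaves_like` call site supplies the parameter. A self-contained illustration (names are ours, not GitLab's):

require 'rspec/autorun'

RSpec.describe 'parameterized shared examples' do
  shared_examples 'logs a skip reason' do |reason|
    it "logs #{reason.inspect}" do
      # The block argument is fixed per call site, so each inclusion
      # generates a distinct example with its own reason.
      expect(reason).to end_with('.')
    end
  end

  it_behaves_like 'logs a skip reason', 'User has been banned.'
  it_behaves_like 'logs a skip reason', 'User has been unblocked.'
end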
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 4c2cff434a7..c60e8d37c2e 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
file_worker_queues = Gitlab::SidekiqConfig.worker_queues.to_set
worker_queues = Gitlab::SidekiqConfig.workers.map(&:generated_queue_name).to_set
- worker_queues << ActionMailer::MailDeliveryJob.new.queue_name
+ worker_queues << ActionMailer::MailDeliveryJob.new('Notify').queue_name
worker_queues << 'default'
missing_from_file = worker_queues - file_worker_queues
@@ -120,6 +120,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
{
'AdjournedProjectDeletionWorker' => 3,
'AdminEmailsWorker' => 3,
+ 'Ai::SyncServiceTokenWorker' => 3,
'Analytics::CodeReviewMetricsWorker' => 3,
'Analytics::DevopsAdoption::CreateSnapshotWorker' => 3,
'Analytics::UsageTrends::CounterJobWorker' => 3,
@@ -140,9 +141,10 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'BulkImportWorker' => 3,
'BulkImports::ExportRequestWorker' => 5,
'BulkImports::EntityWorker' => 3,
- 'BulkImports::PipelineWorker' => 3,
- 'BulkImports::PipelineBatchWorker' => 3,
- 'BulkImports::FinishProjectImportWorker' => 5,
+ 'BulkImports::PipelineWorker' => 6,
+ 'BulkImports::PipelineBatchWorker' => 6,
+ 'BulkImports::FinishProjectImportWorker' => 3,
+ 'BulkImports::TransformReferencesWorker' => 3,
'Chaos::CpuSpinWorker' => 3,
'Chaos::DbSpinWorker' => 3,
'Chaos::KillWorker' => false,
@@ -233,30 +235,19 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'ExternalServiceReactiveCachingWorker' => 3,
'FileHookWorker' => false,
'FlushCounterIncrementsWorker' => 3,
- 'Geo::Batch::ProjectRegistrySchedulerWorker' => 3,
- 'Geo::Batch::ProjectRegistryWorker' => 3,
'Geo::ContainerRepositorySyncWorker' => 1,
'Geo::DestroyWorker' => 3,
'Geo::EventWorker' => 3,
'Geo::FileRemovalWorker' => 3,
- 'Geo::ProjectSyncWorker' => 1,
- 'Geo::RenameRepositoryWorker' => 3,
- 'Geo::RepositoryCleanupWorker' => 3,
- 'Geo::RepositoryShardSyncWorker' => false,
- 'Geo::RepositoryVerification::Primary::ShardWorker' => false,
- 'Geo::RepositoryVerification::Primary::SingleWorker' => false,
- 'Geo::RepositoryVerification::Secondary::SingleWorker' => false,
'Geo::ReverificationBatchWorker' => 0,
'Geo::BulkMarkPendingBatchWorker' => 0,
'Geo::BulkMarkVerificationPendingBatchWorker' => 0,
- 'Geo::Scheduler::Primary::SchedulerWorker' => false,
'Geo::Scheduler::SchedulerWorker' => false,
'Geo::Scheduler::Secondary::SchedulerWorker' => false,
'Geo::VerificationBatchWorker' => 0,
'Geo::VerificationStateBackfillWorker' => false,
'Geo::VerificationTimeoutWorker' => false,
'Geo::VerificationWorker' => 3,
- 'GeoRepositoryDestroyWorker' => 3,
'Gitlab::BitbucketImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketImport::Stage::FinishImportWorker' => 3,
'Gitlab::BitbucketImport::Stage::ImportIssuesWorker' => 3,
@@ -271,7 +262,8 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::BitbucketServerImport::Stage::ImportNotesWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::ImportRepositoryWorker' => 3,
- 'Gitlab::GithubImport::AdvanceStageWorker' => 3,
+ 'Gitlab::BitbucketServerImport::Stage::ImportUsersWorker' => 3,
+ 'Gitlab::GithubImport::AdvanceStageWorker' => 6,
'Gitlab::GithubImport::Attachments::ImportReleaseWorker' => 5,
'Gitlab::GithubImport::Attachments::ImportNoteWorker' => 5,
'Gitlab::GithubImport::Attachments::ImportIssueWorker' => 5,
@@ -288,20 +280,20 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::GithubImport::PullRequests::ImportMergedByWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
'Gitlab::GithubImport::RefreshImportJidWorker' => 5,
- 'Gitlab::GithubImport::Stage::FinishImportWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportIssueEventsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportLfsObjectsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportAttachmentsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportNotesWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportCollaboratorsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewRequestsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportPullRequestsWorker' => 5,
- 'Gitlab::GithubImport::Stage::ImportRepositoryWorker' => 5,
+ 'Gitlab::GithubImport::Stage::FinishImportWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportIssueEventsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportLfsObjectsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportAttachmentsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportNotesWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportCollaboratorsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewRequestsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsWorker' => 6,
+ 'Gitlab::GithubImport::Stage::ImportRepositoryWorker' => 6,
'Gitlab::GithubGistsImport::ImportGistWorker' => 5,
'Gitlab::GithubGistsImport::StartImportWorker' => 5,
'Gitlab::GithubGistsImport::FinishImportWorker' => 5,
@@ -391,6 +383,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Packages::MarkPackageFilesForDestructionWorker' => 3,
'Packages::Maven::Metadata::SyncWorker' => 3,
'Packages::Npm::CleanupStaleMetadataCacheWorker' => 0,
+ 'Packages::Nuget::CleanupStaleSymbolsWorker' => 0,
'Packages::Nuget::ExtractionWorker' => 3,
'Packages::Rubygems::ExtractionWorker' => 3,
'PagesDomainSslRenewalWorker' => 3,
@@ -482,8 +475,9 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'ComplianceManagement::MergeRequests::ComplianceViolationsWorker' => 3,
'Zoekt::IndexerWorker' => 2,
'Issuable::RelatedLinksCreateWorker' => 3,
- 'BulkImports::RelationBatchExportWorker' => 3,
- 'BulkImports::RelationExportWorker' => 3
+ 'BulkImports::RelationBatchExportWorker' => 6,
+ 'BulkImports::RelationExportWorker' => 6,
+ 'Ci::Runners::ExportUsageCsvWorker' => 3
}.merge(extra_retry_exceptions)
end
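Note: the retry map above is compared against each worker's configured Sidekiq options. Roughly, the check amounts to the following sketch (the real spec's matcher logic is more involved):

retry_map = { 'Gitlab::GithubImport::AdvanceStageWorker' => 6 }

retry_map.each do |name, expected|
  klass = Object.const_get(name)
  # Sidekiq stores class-level options as a string-keyed hash.
  actual = klass.sidekiq_options['retry']
  raise "#{name}: expected retry=#{expected}, got #{actual}" unless actual == expected
end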
diff --git a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
index 673988a3275..6fcf1ac8822 100644
--- a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, :clean_gitlab_redis_
it 'schedules the next stage' do
expect(import_state)
- .to receive(:refresh_jid_expiration)
+ .to receive(:refresh_jid_expiration).twice
expect(Gitlab::BitbucketImport::Stage::FinishImportWorker)
.to receive(:perform_async)
diff --git a/spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb
index 7ea23041e79..1531b30089c 100644
--- a/spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_server_import/stage/import_repository_worker_spec.rb
@@ -18,12 +18,25 @@ RSpec.describe Gitlab::BitbucketServerImport::Stage::ImportRepositoryWorker, fea
end
it 'schedules the next stage' do
- expect(Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker).to receive(:perform_async)
+ expect(Gitlab::BitbucketServerImport::Stage::ImportUsersWorker).to receive(:perform_async)
.with(project.id)
worker.perform(project.id)
end
+ context 'when the bitbucket_server_convert_mentions_to_users flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
+ end
+
+ it 'skips the user import and schedules the next stage' do
+ expect(Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker).to receive(:perform_async)
+ .with(project.id)
+
+ worker.perform(project.id)
+ end
+ end
+
it 'logs stage start and finish' do
expect(Gitlab::BitbucketServerImport::Logger)
.to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
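Note: the new expectation above implies the repository stage chooses its successor based on a feature flag. A sketch of that branch, assuming GitLab's `Feature.enabled?` API; the real worker's implementation may differ:

def next_stage_worker(project)
  if Feature.enabled?(:bitbucket_server_convert_mentions_to_users, project)
    Gitlab::BitbucketServerImport::Stage::ImportUsersWorker
  else
    # Flag disabled: skip the user import entirely.
    Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker
  end
end

# Either way, the next stage receives only the project id:
next_stage_worker(project).perform_async(project.id)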
diff --git a/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb b/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb
new file mode 100644
index 00000000000..d4cd1b82349
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::Stage::ImportUsersWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ let(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketServerImport::StageMethods
+
+ describe '#perform' do
+ context 'when the import succeeds' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketServerImport::Importers::UsersImporter) do |importer|
+ allow(importer).to receive(:execute)
+ end
+ end
+
+ it 'schedules the next stage' do
+ expect(Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker).to receive(:perform_async)
+ .with(project.id)
+
+ worker.perform(project.id)
+ end
+
+ it 'logs stage start and finish' do
+ expect(Gitlab::BitbucketServerImport::Logger)
+ .to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
+ expect(Gitlab::BitbucketServerImport::Logger)
+ .to receive(:info).with(hash_including(message: 'stage finished', project_id: project.id))
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when project does not exist' do
+ it 'does not call importer' do
+ expect(Gitlab::BitbucketServerImport::Importers::UsersImporter).not_to receive(:new)
+
+ worker.perform(-1)
+ end
+ end
+
+ context 'when project import state is not `started`' do
+ it 'does not call importer' do
+ project = create(:project, :import_canceled)
+
+ expect(Gitlab::BitbucketServerImport::Importers::UsersImporter).not_to receive(:new)
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when the importer fails' do
+ it 'does not schedule the next stage and raises an error' do
+ exception = StandardError.new('Error')
+
+ allow_next_instance_of(Gitlab::BitbucketServerImport::Importers::UsersImporter) do |importer|
+ allow(importer).to receive(:execute).and_raise(exception)
+ end
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track).with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: false
+ ).and_call_original
+
+ expect { worker.perform(project.id) }
+ .to change { Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker.jobs.size }.by(0)
+ .and raise_error(exception)
+ end
+ end
+ end
+end
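Note: the failure examples in this new spec pin down a common stage-worker shape: run the importer, enqueue the next stage on success, and on failure record the error and re-raise without enqueuing. A sketch under those assumptions (not the actual implementation):

def perform_stage(project)
  Gitlab::BitbucketServerImport::Importers::UsersImporter.new(project).execute
  Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker.perform_async(project.id)
rescue StandardError => e
  Gitlab::Import::ImportFailureService.track(
    project_id: project.id,
    exception: e,
    error_source: self.class.name,
    fail_import: false # the import survives; only this stage's job fails and retries
  )
  raise
end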
diff --git a/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
index 60c117a2a90..dcf016c550b 100644
--- a/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
@@ -4,4 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::AdvanceStageWorker, feature_category: :importers do
it_behaves_like Gitlab::Import::AdvanceStage, factory: :import_state
+
+ it 'has a Sidekiq retry of 6' do
+ expect(described_class.sidekiq_options['retry']).to eq(6)
+ end
end
diff --git a/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb b/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb
index abba6cd7734..5d0cb05c8d5 100644
--- a/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::GithubImport::RefreshImportJidWorker, feature_category: :
it 'schedules a job in the future' do
expect(described_class)
.to receive(:perform_in)
- .with(1.minute.to_i, 10, '123')
+ .with(5.minutes.to_i, 10, '123')
described_class.perform_in_the_future(10, '123')
end
@@ -33,15 +33,20 @@ RSpec.describe Gitlab::GithubImport::RefreshImportJidWorker, feature_category: :
allow(worker)
.to receive(:find_import_state)
.with(project.id)
- .and_return(project)
+ .and_return(import_state)
expect(Gitlab::SidekiqStatus)
.to receive(:running?)
.with('123')
.and_return(true)
- expect(project)
- .to receive(:refresh_jid_expiration)
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:expire)
+ .with('123', Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
+
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:set)
+ .with(import_state.jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
expect(worker.class)
.to receive(:perform_in_the_future)
@@ -63,8 +68,11 @@ RSpec.describe Gitlab::GithubImport::RefreshImportJidWorker, feature_category: :
.with('123')
.and_return(false)
- expect(project)
- .not_to receive(:refresh_jid_expiration)
+ expect(Gitlab::SidekiqStatus)
+ .not_to receive(:expire)
+
+ expect(Gitlab::SidekiqStatus)
+ .not_to receive(:set)
worker.perform(project.id, '123')
end
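Note: the rewritten expectations show the refresh moving from `import_state.refresh_jid_expiration` to direct `Gitlab::SidekiqStatus` calls. The implied flow, sketched (`find_import_state` appears in the spec; the rest follows the assertions):

def refresh(project_id, job_id)
  import_state = find_import_state(project_id)
  return unless import_state
  return unless Gitlab::SidekiqStatus.running?(job_id)

  expiration = Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION
  Gitlab::SidekiqStatus.expire(job_id, expiration)        # re-arm the running job's status key
  Gitlab::SidekiqStatus.set(import_state.jid, expiration) # and the import state's own jid
  self.class.perform_in_the_future(project_id, job_id)    # now scheduled 5 minutes out
end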
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index f4a306eeb0c..020f7539bf4 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -10,16 +10,6 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker, feature_cate
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
- before do
- expect(Gitlab::GithubImport::RefreshImportJidWorker)
- .to receive(:perform_in_the_future)
- .with(project.id, '123')
-
- expect(worker)
- .to receive(:jid)
- .and_return('123')
- end
-
context 'when the import succeeds' do
context 'with issues' do
it 'schedules the importing of the base data' do
diff --git a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
index 6dfab44b228..3dc3971385e 100644
--- a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
@@ -22,15 +22,15 @@ RSpec.describe Gitlab::JiraImport::ImportIssueWorker, feature_category: :importe
describe '#perform', :clean_gitlab_redis_cache do
let(:assignee_ids) { [user.id] }
let(:issue_attrs) do
- build(:issue, project_id: project.id, namespace_id: project.project_namespace_id, title: 'jira issue')
- .as_json.merge(
- 'label_ids' => [jira_issue_label_1.id, jira_issue_label_2.id], 'assignee_ids' => assignee_ids
- ).except('issue_type')
+ build(:issue, project_id: project.id, namespace_id: project.project_namespace_id, title: 'jira issue').as_json
+ .merge('label_ids' => [jira_issue_label_1.id, jira_issue_label_2.id], 'assignee_ids' => assignee_ids)
+ .except('issue_type')
.compact
end
context 'when any exception raised while inserting to DB' do
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(subject).to receive(:insert_and_return_id).and_raise(StandardError)
expect(Gitlab::JobWaiter).to receive(:notify)
diff --git a/spec/workers/integrations/slack_event_worker_spec.rb b/spec/workers/integrations/slack_event_worker_spec.rb
index 6e8c73f1506..7a0a17569b2 100644
--- a/spec/workers/integrations/slack_event_worker_spec.rb
+++ b/spec/workers/integrations/slack_event_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::SlackEventWorker, :clean_gitlab_redis_cluster_shared_state,
+RSpec.describe Integrations::SlackEventWorker, :clean_gitlab_redis_shared_state,
feature_category: :integrations do
describe '.event?' do
subject { described_class.event?(event) }
diff --git a/spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb b/spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb
index 7341a0dcc5b..c49b4339f7b 100644
--- a/spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb
+++ b/spec/workers/merge_requests/set_reviewer_reviewed_worker_spec.rb
@@ -12,6 +12,10 @@ RSpec.describe MergeRequests::SetReviewerReviewedWorker, feature_category: :sour
it_behaves_like 'subscribes to event' do
let(:event) { approved_event }
+
+ before do
+ stub_feature_flags(mr_request_changes: false)
+ end
end
it 'calls MergeRequests::UpdateReviewerStateService' do
diff --git a/spec/workers/packages/cleanup_package_registry_worker_spec.rb b/spec/workers/packages/cleanup_package_registry_worker_spec.rb
index f2787a92fbf..0d2f9629327 100644
--- a/spec/workers/packages/cleanup_package_registry_worker_spec.rb
+++ b/spec/workers/packages/cleanup_package_registry_worker_spec.rb
@@ -80,6 +80,28 @@ RSpec.describe Packages::CleanupPackageRegistryWorker, feature_category: :packag
end
end
+ context 'with nuget symbols pending destruction' do
+ let_it_be(:nuget_symbol) { create(:nuget_symbol, :stale) }
+
+ include_examples 'an idempotent worker' do
+ it 'queues the cleanup job' do
+ expect(Packages::Nuget::CleanupStaleSymbolsWorker).to receive(:perform_with_capacity)
+
+ perform
+ end
+ end
+ end
+
+ context 'with no nuget symbols pending destruction' do
+ include_examples 'an idempotent worker' do
+ it 'does not queue the cleanup job' do
+ expect(Packages::Nuget::CleanupStaleSymbolsWorker).not_to receive(:perform_with_capacity)
+
+ perform
+ end
+ end
+ end
+
describe 'counts logging' do
let_it_be(:processing_package_file) { create(:package_file, status: :processing) }
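Note: the two new contexts imply the cron worker gates the capacity-limited cleanup on the existence of pending rows. A sketch of that dispatch, assuming a `pending_destruction` scope (the `:stale` factory trait suggests one exists):

if Packages::Nuget::Symbol.pending_destruction.exists?
  # perform_with_capacity fans out up to max_running_jobs workers.
  Packages::Nuget::CleanupStaleSymbolsWorker.perform_with_capacity
end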
diff --git a/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb b/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb
index 360cc4223b4..a061d97ddf5 100644
--- a/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb
+++ b/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb
@@ -58,13 +58,5 @@ RSpec.describe Packages::Npm::CreateMetadataCacheWorker, type: :worker, feature_
it_behaves_like 'does not trigger service to create npm metadata cache'
end
-
- context 'when npm_metadata_cache flag is disabled' do
- before do
- stub_feature_flags(npm_metadata_cache: false)
- end
-
- it_behaves_like 'does not trigger service to create npm metadata cache'
- end
end
end
diff --git a/spec/workers/packages/nuget/cleanup_stale_symbols_worker_spec.rb b/spec/workers/packages/nuget/cleanup_stale_symbols_worker_spec.rb
new file mode 100644
index 00000000000..41afe64a808
--- /dev/null
+++ b/spec/workers/packages/nuget/cleanup_stale_symbols_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::CleanupStaleSymbolsWorker, type: :worker, feature_category: :package_registry do
+ let(:worker) { described_class.new }
+
+ describe '#perform_work' do
+ subject(:perform_work) { worker.perform_work }
+
+ context 'with no work to do' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with work to do' do
+ let_it_be(:symbol_1) { create(:nuget_symbol) }
+ let_it_be(:symbol_2) { create(:nuget_symbol, :stale) }
+
+ it 'deletes the stale symbol', :aggregate_failures do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:nuget_symbol_id, symbol_2.id)
+ expect(Packages::Nuget::Symbol).to receive(:next_pending_destruction).with(order_by: nil).and_call_original
+ expect { perform_work }.to change { Packages::Nuget::Symbol.count }.by(-1)
+ expect { symbol_2.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with a stale symbol' do
+ let_it_be(:symbol) { create(:nuget_symbol, :stale) }
+
+ context 'with an error during deletion' do
+ before do
+ allow_next_found_instance_of(Packages::Nuget::Symbol) do |instance|
+ allow(instance).to receive(:destroy!).and_raise(StandardError)
+ end
+ end
+
+ it 'handles the error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(StandardError), class: described_class.name
+ )
+
+ expect { perform_work }.to change { Packages::Nuget::Symbol.error.count }.by(1)
+ expect(symbol.reload).to be_error
+ end
+ end
+
+ context 'when trying to destroy a destroyed record' do
+ before do
+ allow_next_found_instance_of(Packages::Nuget::Symbol) do |instance|
+ destroy_method = instance.method(:destroy!)
+
+ allow(instance).to receive(:destroy!) do
+ destroy_method.call
+
+ raise StandardError
+ end
+ end
+ end
+
+ it 'handles the error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception)
+ .with(instance_of(StandardError), class: described_class.name)
+ expect { perform_work }.not_to change { Packages::Nuget::Symbol.count }
+ expect(symbol.reload).to be_error
+ end
+ end
+ end
+ end
+
+ describe '#max_running_jobs' do
+ let(:capacity) { described_class::MAX_CAPACITY }
+
+ subject { worker.max_running_jobs }
+
+ it { is_expected.to eq(capacity) }
+ end
+end
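Note: taken together, the examples above outline a limited-capacity cleanup worker: claim one pending record, destroy it, and mark it errored if destruction fails. A sketch consistent with the spec, not the actual implementation:

def perform_work
  symbol = Packages::Nuget::Symbol.next_pending_destruction(order_by: nil)
  return unless symbol # 'with no work to do' => returns nil

  log_extra_metadata_on_done(:nuget_symbol_id, symbol.id)
  symbol.destroy!
rescue StandardError => e
  # Surfaces via the Packages::Nuget::Symbol.error count in the spec.
  symbol&.update_column(:status, :error)
  Gitlab::ErrorTracking.log_exception(e, class: self.class.name)
end

def max_running_jobs
  MAX_CAPACITY
end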
diff --git a/spec/workers/pages/deactivate_mr_deployments_worker_spec.rb b/spec/workers/pages/deactivate_mr_deployments_worker_spec.rb
new file mode 100644
index 00000000000..c060118a062
--- /dev/null
+++ b/spec/workers/pages/deactivate_mr_deployments_worker_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::DeactivateMrDeploymentsWorker, feature_category: :pages do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:merge_request) { create(:merge_request) }
+ let(:pipeline_1) { create(:ci_pipeline, merge_request: merge_request) }
+ let(:pipeline_2) { create(:ci_pipeline, merge_request: merge_request) }
+
+ context 'when MR does not have a Pages Build' do
+ it 'does not raise an error' do
+ expect { worker.perform(merge_request.id) }.not_to raise_error
+ end
+ end
+
+ context 'when MR does have a Pages Build' do
+ let(:build_1) { create(:ci_build, pipeline: pipeline_1) }
+ let(:build_2) { create(:ci_build, pipeline: pipeline_2) }
+
+ context 'with a path_prefix' do
+ it 'deactivates the deployment', :freeze_time do
+ pages_deployment_1 = create(:pages_deployment, path_prefix: '/foo', ci_build: build_1)
+ pages_deployment_2 = create(:pages_deployment, path_prefix: '/bar', ci_build: build_1)
+
+ expect { worker.perform(merge_request.id) }
+ .to change { pages_deployment_1.reload.deleted_at }.from(nil).to(Time.now.utc)
+ .and change { pages_deployment_2.reload.deleted_at }.from(nil).to(Time.now.utc)
+ end
+ end
+
+ context 'without a path_prefix' do
+ it 'does not deactivate the deployment' do
+ pages_deployment_1 = create(:pages_deployment, path_prefix: '', ci_build: build_1)
+
+ expect { worker.perform(merge_request.id) }
+ .to not_change { pages_deployment_1.reload.deleted_at }
+ end
+ end
+ end
+ end
+end
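Note: "deactivate" in this new spec appears to mean soft-deletion, i.e. stamping `deleted_at` on prefixed deployments tied to the MR's pipelines. A sketch under assumed associations (`pipelines`, `builds`, `pages_deployments`):

merge_request.pipelines.each do |pipeline|
  pipeline.builds.each do |build|
    # Only deployments with a path_prefix are deactivated; the
    # prefix-less "main" deployment is left alone.
    build.pages_deployments
         .where.not(path_prefix: [nil, ''])
         .update_all(deleted_at: Time.now.utc)
  end
end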
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index 48138034c33..5648c5bc4c5 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -14,13 +14,15 @@ RSpec.describe PipelineScheduleWorker, :sidekiq_inline, feature_category: :conti
create(:ci_pipeline_schedule, :nightly, project: project, owner: user)
end
+ let(:next_run_at) { pipeline_schedule.next_run_at }
+
before do
stub_application_setting(auto_devops_enabled: false)
stub_ci_pipeline_to_return_yaml_file
end
around do |example|
- travel_to(pipeline_schedule.next_run_at + 1.hour) do
+ travel_to(next_run_at + 1.hour) do
example.run
end
end
@@ -142,4 +144,52 @@ RSpec.describe PipelineScheduleWorker, :sidekiq_inline, feature_category: :conti
expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
end
end
+
+ context 'with scheduling delay' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ let!(:other_pipeline_schedule) do
+ create(:ci_pipeline_schedule, :every_minute, project: project, owner: user)
+ end
+
+ let(:next_run_at) do
+ [pipeline_schedule, other_pipeline_schedule].maximum(:next_run_at)
+ end
+
+ it 'calls bulk_perform_in with the arguments and delay' do
+ expect(RunPipelineScheduleWorker)
+ .to receive(:bulk_perform_in)
+ .with(1.second, [[pipeline_schedule.id, user.id, { scheduling: true }]])
+ .and_call_original
+
+ expect(RunPipelineScheduleWorker)
+ .to receive(:bulk_perform_in)
+ .with(7.seconds, [[other_pipeline_schedule.id, user.id, { scheduling: true }]])
+ .and_call_original
+
+ subject
+ end
+
+ context 'with run_pipeline_schedule_worker_with_delay disabled' do
+ before do
+ stub_feature_flags(run_pipeline_schedule_worker_with_delay: false)
+ end
+
+ it 'calls bulk_perform_async with the arguments and delay' do
+ expect(RunPipelineScheduleWorker)
+ .to receive(:bulk_perform_async)
+ .with([[pipeline_schedule.id, user.id, { scheduling: true }]])
+ .and_call_original
+
+ expect(RunPipelineScheduleWorker)
+ .to receive(:bulk_perform_async)
+ .with([[other_pipeline_schedule.id, user.id, { scheduling: true }]])
+ .and_call_original
+
+ subject
+ end
+ end
+ end
end
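Note: the two expectations above (1 second, then 7 seconds) imply batches of BATCH_SIZE schedules enqueued with a growing delay. Sketched, with the 6-second step inferred from those numbers:

delay = 1.second
schedules.find_in_batches(batch_size: self.class::BATCH_SIZE) do |batch|
  args = batch.map { |schedule| [schedule.id, schedule.owner_id, { scheduling: true }] }
  RunPipelineScheduleWorker.bulk_perform_in(delay, args)
  delay += 6.seconds # assumed step; consistent with 1s then 7s in the spec
end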
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 02221285ad3..956e29ec7f4 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -3,180 +3,186 @@
require 'spec_helper'
RSpec.describe ProcessCommitWorker, feature_category: :source_code_management do
- let(:worker) { described_class.new }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
let(:project) { create(:project, :public, :repository) }
let(:issue) { create(:issue, project: project, author: user) }
let(:commit) { project.commit }
+ let(:worker) { described_class.new }
+
it "is deduplicated" do
expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
expect(described_class.get_deduplication_options).to include(feature_flag: :deduplicate_process_commit_worker)
end
describe '#perform' do
- it 'does not process the commit when the project does not exist' do
- expect(worker).not_to receive(:close_issues)
+ subject(:perform) { worker.perform(project_id, user_id, commit.to_hash, default) }
- worker.perform(-1, user.id, commit.to_hash)
- end
-
- it 'does not process the commit when the user does not exist' do
- expect(worker).not_to receive(:close_issues)
+ let(:project_id) { project.id }
+ let(:user_id) { user.id }
- worker.perform(project.id, -1, commit.to_hash)
+ before do
+ allow(Commit).to receive(:build_from_sidekiq_hash).and_return(commit)
end
- include_examples 'an idempotent worker' do
- subject do
- perform_multiple([project.id, user.id, commit.to_hash], worker: worker)
- end
-
- it 'processes the commit message' do
- expect(worker).to receive(:process_commit_message)
- .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES)
- .and_call_original
+ context 'when pushing to the default branch' do
+ let(:default) { true }
- subject
- end
+ context 'when project does not exist' do
+ let(:project_id) { -1 }
- it 'updates the issue metrics' do
- expect(worker).to receive(:update_issue_metrics)
- .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES)
- .and_call_original
+ it 'does not close related issues' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(0)
- subject
+ perform
+ end
end
- end
- end
- describe '#process_commit_message' do
- context 'when pushing to the default branch' do
- before do
- allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
- end
+ context 'when user does not exist' do
+ let(:user_id) { -1 }
- it 'closes issues that should be closed per the commit message' do
- expect(worker).to receive(:close_issues).with(project, user, user, commit, [issue])
+ it 'does not close related issues' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- worker.process_commit_message(project, commit, user, user, true)
+ perform
+ end
end
- it 'creates cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [issue])
-
- worker.process_commit_message(project, commit, user, user, true)
- end
- end
+ include_examples 'an idempotent worker' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ issue.metrics.update!(first_mentioned_in_commit_at: nil)
+ end
- context 'when pushing to a non-default branch' do
- it 'does not close any issues' do
- allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ subject do
+ perform_multiple([project.id, user.id, commit.to_hash], worker: worker)
+ end
- expect(worker).not_to receive(:close_issues)
+ it 'closes related issues' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(1)
- worker.process_commit_message(project, commit, user, user, false)
+ subject
+ end
end
- it 'does not create cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [])
+ context 'when commit is not a merge request merge commit' do
+ context 'when commit has issue reference' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ end
+
+ it 'closes issues that should be closed per the commit message' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(1)
+ end
+
+ it 'creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(user, [issue])
+
+ perform
+ end
+
+ describe 'issue metrics', :clean_gitlab_redis_cache do
+ context 'when issue has no first_mentioned_in_commit_at set' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: nil)
+ end
+
+ it 'updates issue metrics' do
+ expect { perform }.to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ .to(commit.committed_date)
+ end
+ end
+
+ context 'when issue has first_mentioned_in_commit_at earlier than given committed_date' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date - 1.day)
+ end
+
+ it "doesn't update issue metrics" do
+ expect { perform }.not_to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ end
+ end
+
+ context 'when issue has first_mentioned_in_commit_at later than given committed_date' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date + 1.day)
+ end
+
+ it 'updates issue metrics' do
+ expect { perform }.to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ .to(commit.committed_date)
+ end
+ end
+ end
+ end
- worker.process_commit_message(project, commit, user, user, false)
- end
- end
+ context 'when commit has no issue references' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Lorem Ipsum")
+ end
- context 'when commit is a merge request merge commit to the default branch' do
- let(:merge_request) do
- create(
- :merge_request,
- description: "Closes #{issue.to_reference}",
- source_branch: 'feature-merged',
- target_branch: 'master',
- source_project: project
- )
+ describe 'issue metrics' do
+ it "doesn't execute any queries with false conditions" do
+ expect { perform }.not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
+ end
+ end
+ end
end
- let(:commit) do
- project.repository.create_branch('feature-merged', 'feature')
- project.repository.after_create_branch
+ context 'when commit is a merge request merge commit' do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ description: "Closes #{issue.to_reference}",
+ source_branch: 'feature-merged',
+ target_branch: 'master',
+ source_project: project
+ )
+ end
- MergeRequests::MergeService
- .new(project: project, current_user: merge_request.author, params: { sha: merge_request.diff_head_sha })
- .execute(merge_request)
+ let(:commit) do
+ project.repository.create_branch('feature-merged', 'feature')
+ project.repository.after_create_branch
- merge_request.reload.merge_commit
- end
+ MergeRequests::MergeService
+ .new(project: project, current_user: merge_request.author, params: { sha: merge_request.diff_head_sha })
+ .execute(merge_request)
- it 'does not close any issues from the commit message' do
- expect(worker).not_to receive(:close_issues)
+ merge_request.reload.merge_commit
+ end
- worker.process_commit_message(project, commit, user, user, true)
- end
+ it 'does not close any issues from the commit message' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- it 'still creates cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [])
+ perform
+ end
- worker.process_commit_message(project, commit, user, user, true)
- end
- end
- end
+ it 'still creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(commit.author, [])
- describe '#close_issues' do
- it 'creates Issue::CloseWorker jobs' do
- expect do
- worker.close_issues(project, user, user, commit, [issue])
- end.to change { Issues::CloseWorker.jobs.size }.by(1)
+ perform
+ end
+ end
end
- end
- describe '#update_issue_metrics', :clean_gitlab_redis_cache do
- context 'when commit has issue reference' do
- subject(:update_metrics_and_reload) do
- -> {
- worker.update_issue_metrics(commit, user)
- issue.metrics.reload
- }
- end
+ context 'when pushing to a non-default branch' do
+ let(:default) { false }
before do
allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
end
- context 'when issue has no first_mentioned_in_commit_at set' do
- it 'updates issue metrics' do
- expect { update_metrics_and_reload.call }
- .to change { issue.metrics.first_mentioned_in_commit_at }.to(commit.committed_date)
- end
- end
-
- context 'when issue has first_mentioned_in_commit_at earlier than given committed_date' do
- before do
- issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date - 1.day)
- end
-
- it "doesn't update issue metrics" do
- expect { update_metrics_and_reload.call }.not_to change { issue.metrics.first_mentioned_in_commit_at }
- end
- end
-
- context 'when issue has first_mentioned_in_commit_at later than given committed_date' do
- before do
- issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date + 1.day)
- end
+ it 'does not close any issues from the commit message' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- it "doesn't update issue metrics" do
- expect { update_metrics_and_reload.call }
- .to change { issue.metrics.first_mentioned_in_commit_at }.to(commit.committed_date)
- end
+ perform
end
- end
- context 'when commit has no issue references' do
- it "doesn't execute any queries with false conditions" do
- allow(commit).to receive(:safe_message).and_return("Lorem Ipsum")
+ it 'still creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(user, [])
- expect { worker.update_issue_metrics(commit, user) }
- .not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
+ perform
end
end
end
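Note: the refactor above swaps stubbing of private helpers (`close_issues`, `update_issue_metrics`) for black-box assertions on enqueued jobs. Under Sidekiq's fake testing mode, enqueued jobs accumulate in the class-level `Worker.jobs` array, which is what the `change { Issues::CloseWorker.jobs.size }` matchers count:

require 'sidekiq/testing'

Sidekiq::Testing.fake! do
  # Inside fake mode, perform_async pushes a job hash onto
  # Issues::CloseWorker.jobs instead of hitting Redis.
  expect { worker.perform(project.id, user.id, commit.to_hash, true) }
    .to change { Issues::CloseWorker.jobs.size }.by(1)
end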
diff --git a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
index 68af5e61e3b..226ecaa89c5 100644
--- a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
+++ b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker, feature_category: :
expect(redis).to receive(:hset).with(
'inactive_projects_deletion_warning_email_notified',
"project:#{inactive_large_project.id}",
- Date.current
+ Date.current.to_s
)
end
expect(::Projects::InactiveProjectsDeletionNotificationWorker).to receive(:perform_async).with(
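Note: the `.to_s` change reflects how Redis stores hash fields: always as strings, so a Date written via `hset` reads back as its string form. For example:

key = 'inactive_projects_deletion_warning_email_notified'
redis.hset(key, "project:#{inactive_large_project.id}", Date.current)
redis.hget(key, "project:#{inactive_large_project.id}")
# => e.g. "2024-01-01" (a String), hence Date.current.to_s in the expectation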
diff --git a/spec/workers/projects/update_repository_storage_worker_spec.rb b/spec/workers/projects/update_repository_storage_worker_spec.rb
index 91445c2bbf6..44c2dc41b2b 100644
--- a/spec/workers/projects/update_repository_storage_worker_spec.rb
+++ b/spec/workers/projects/update_repository_storage_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Projects::UpdateRepositoryStorageWorker, feature_category: :sourc
it_behaves_like 'an update storage move worker' do
let_it_be_with_refind(:container) { create(:project, :repository) }
- let_it_be(:repository_storage_move) { create(:project_repository_storage_move) }
+ let_it_be_with_reload(:repository_storage_move) { create(:project_repository_storage_move) }
let(:service_klass) { Projects::UpdateRepositoryStorageService }
let(:repository_storage_move_klass) { Projects::RepositoryStorageMove }