Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options: context, space, mode
Diffstat (limited to 'spec/workers')
-rw-r--r--spec/workers/bulk_import_worker_spec.rb2
-rw-r--r--spec/workers/bulk_imports/entity_worker_spec.rb2
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb90
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb236
-rw-r--r--spec/workers/ci/create_downstream_pipeline_worker_spec.rb43
-rw-r--r--spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb27
-rw-r--r--spec/workers/ci/runners/process_runner_version_update_worker_spec.rb2
-rw-r--r--spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb2
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb37
-rw-r--r--spec/workers/concerns/waitable_worker_spec.rb43
-rw-r--r--spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb3
-rw-r--r--spec/workers/container_registry/cleanup_worker_spec.rb14
-rw-r--r--spec/workers/container_registry/migration/guard_worker_spec.rb16
-rw-r--r--spec/workers/database/batched_background_migration/ci_execution_worker_spec.rb9
-rw-r--r--spec/workers/database/batched_background_migration/execution_worker_spec.rb141
-rw-r--r--spec/workers/database/batched_background_migration/main_execution_worker_spec.rb9
-rw-r--r--spec/workers/delete_container_repository_worker_spec.rb108
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb9
-rw-r--r--spec/workers/flush_counter_increments_worker_spec.rb21
-rw-r--r--spec/workers/gitlab/export/prune_project_export_jobs_worker_spec.rb52
-rw-r--r--spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb51
-rw-r--r--spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb94
-rw-r--r--spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb110
-rw-r--r--spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/jira_import/import_issue_worker_spec.rb3
-rw-r--r--spec/workers/gitlab_shell_worker_spec.rb26
-rw-r--r--spec/workers/incident_management/close_incident_worker_spec.rb6
-rw-r--r--spec/workers/incident_management/pager_duty/process_incident_worker_spec.rb8
-rw-r--r--spec/workers/issuable_export_csv_worker_spec.rb4
-rw-r--r--spec/workers/jira_connect/forward_event_worker_spec.rb6
-rw-r--r--spec/workers/jira_connect/send_uninstalled_hook_worker_spec.rb29
-rw-r--r--spec/workers/mail_scheduler/notification_service_worker_spec.rb30
-rw-r--r--spec/workers/merge_requests/delete_branch_worker_spec.rb65
-rw-r--r--spec/workers/merge_requests/delete_source_branch_worker_spec.rb23
-rw-r--r--spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb13
-rw-r--r--spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb4
-rw-r--r--spec/workers/namespaces/process_sync_events_worker_spec.rb6
-rw-r--r--spec/workers/namespaces/root_statistics_worker_spec.rb1
-rw-r--r--spec/workers/namespaces/schedule_aggregation_worker_spec.rb4
-rw-r--r--spec/workers/packages/debian/process_changes_worker_spec.rb20
-rw-r--r--spec/workers/packages/debian/process_package_file_worker_spec.rb138
-rw-r--r--spec/workers/pipeline_schedule_worker_spec.rb26
-rw-r--r--spec/workers/post_receive_spec.rb39
-rw-r--r--spec/workers/process_commit_worker_spec.rb2
-rw-r--r--spec/workers/projects/delete_branch_worker_spec.rb112
-rw-r--r--spec/workers/projects/import_export/parallel_project_export_worker_spec.rb60
-rw-r--r--spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb4
-rw-r--r--spec/workers/projects/process_sync_events_worker_spec.rb4
-rw-r--r--spec/workers/releases/create_evidence_worker_spec.rb4
-rw-r--r--spec/workers/releases/manage_evidence_worker_spec.rb4
-rw-r--r--spec/workers/repository_check/single_repository_worker_spec.rb10
-rw-r--r--spec/workers/run_pipeline_schedule_worker_spec.rb10
-rw-r--r--spec/workers/tasks_to_be_done/create_worker_spec.rb4
-rw-r--r--spec/workers/update_highest_role_worker_spec.rb4
54 files changed, 1111 insertions(+), 680 deletions(-)
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index 0d0b81d2ec0..61c33f123fa 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe BulkImportWorker do
entity_2 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
expect { subject.perform(bulk_import.id) }
- .to change(BulkImports::Tracker, :count)
+ .to change { BulkImports::Tracker.count }
.by(BulkImports::Groups::Stage.new(entity_1).pipelines.size * 2)
expect(entity_1.trackers).not_to be_empty
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index e3f0ee65205..4cd37c93d5f 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -114,6 +114,8 @@ RSpec.describe BulkImports::EntityWorker do
)
subject
+
+ expect(entity.reload.failed?).to eq(true)
end
context 'in first stage' do
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index 7eb8150fb2e..7260e0c0f67 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -2,9 +2,10 @@
require 'spec_helper'
-RSpec.describe BulkImports::ExportRequestWorker do
+RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+ let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
let_it_be(:version_url) { 'https://gitlab.example/api/v4/version' }
let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
@@ -31,73 +32,6 @@ RSpec.describe BulkImports::ExportRequestWorker do
perform_multiple(job_args)
end
- context 'when network error is raised' do
- let(:exception) { BulkImports::NetworkError.new('Export error') }
-
- before do
- allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:post).and_raise(exception).twice
- end
- end
-
- context 'when error is retriable' do
- it 'logs retry request and reenqueues' do
- allow(exception).to receive(:retriable?).twice.and_return(true)
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:error).with(
- a_hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'exception.backtrace' => anything,
- 'exception.class' => 'BulkImports::NetworkError',
- 'exception.message' => 'Export error',
- 'message' => 'Retrying export request',
- 'importer' => 'gitlab_migration',
- 'source_version' => entity.bulk_import.source_version_info.to_s
- )
- ).twice
- end
-
- expect(described_class).to receive(:perform_in).twice.with(2.seconds, entity.id)
-
- perform_multiple(job_args)
- end
- end
-
- context 'when error is not retriable' do
- it 'logs export failure and marks entity as failed' do
- allow(exception).to receive(:retriable?).twice.and_return(false)
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:error).with(
- a_hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'exception.backtrace' => anything,
- 'exception.class' => 'BulkImports::NetworkError',
- 'exception.message' => 'Export error',
- 'message' => "Request to export #{entity.source_type} failed",
- 'importer' => 'gitlab_migration',
- 'source_version' => entity.bulk_import.source_version_info.to_s
- )
- ).twice
- end
-
- perform_multiple(job_args)
-
- failure = entity.failures.last
-
- expect(failure.pipeline_class).to eq('ExportRequestWorker')
- expect(failure.exception_message).to eq('Export error')
- end
- end
- end
-
context 'when source id is nil' do
let(:entity_source_id) { 'gid://gitlab/Model/1234567' }
@@ -179,4 +113,24 @@ RSpec.describe BulkImports::ExportRequestWorker do
it_behaves_like 'requests relations export for api resource'
end
end
+
+ describe '#sidekiq_retries_exhausted' do
+ it 'logs export failure and marks entity as failed' do
+ entity = create(:bulk_import_entity, bulk_import: bulk_import)
+ error = 'Exhausted error!'
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(hash_including('message' => "Request to export #{entity.source_type} failed"))
+ end
+
+ described_class
+ .sidekiq_retries_exhausted_block
+ .call({ 'args' => [entity.id] }, StandardError.new(error))
+
+ expect(entity.reload.failed?).to eq(true)
+ expect(entity.failures.last.exception_message).to eq(error)
+ end
+ end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 23fbc5688ec..03ec6267ca8 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::PipelineWorker do
+RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
let(:pipeline_class) do
Class.new do
def initialize(_); end
@@ -19,6 +19,15 @@ RSpec.describe BulkImports::PipelineWorker do
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+ let(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'FakePipeline',
+ status_event: 'enqueue'
+ )
+ end
+
before do
stub_const('FakePipeline', pipeline_class)
@@ -60,45 +69,12 @@ RSpec.describe BulkImports::PipelineWorker do
end
end
- it_behaves_like 'successfully runs the pipeline' do
- let(:pipeline_tracker) do
- create(
- :bulk_import_tracker,
- entity: entity,
- pipeline_name: 'FakePipeline',
- status_event: 'enqueue'
- )
- end
- end
+ it_behaves_like 'successfully runs the pipeline'
- context 'when the pipeline cannot be found' do
- it 'logs the error' do
- pipeline_tracker = create(
- :bulk_import_tracker,
- :finished,
- entity: entity,
- pipeline_name: 'FakePipeline'
- )
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:error)
- .with(
- hash_including(
- 'pipeline_tracker_id' => pipeline_tracker.id,
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'source_version' => entity.bulk_import.source_version_info.to_s,
- 'message' => 'Unstarted pipeline not found'
- )
- )
- end
-
- expect(BulkImports::EntityWorker)
- .to receive(:perform_async)
- .with(entity.id, pipeline_tracker.stage)
+ context 'when exclusive lease cannot be obtained' do
+ it 'does not run the pipeline' do
+ expect(subject).to receive(:try_obtain_lease).and_return(false)
+ expect(subject).not_to receive(:run)
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
end
@@ -145,13 +121,15 @@ RSpec.describe BulkImports::PipelineWorker do
.to receive(:track_exception)
.with(
instance_of(StandardError),
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import.id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
- pipeline_name: pipeline_tracker.pipeline_name,
- importer: 'gitlab_migration',
- source_version: entity.bulk_import.source_version_info.to_s
+ hash_including(
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'pipeline_name' => pipeline_tracker.pipeline_name,
+ 'importer' => 'gitlab_migration',
+ 'source_version' => entity.bulk_import.source_version_info.to_s
+ )
)
expect(BulkImports::EntityWorker)
@@ -179,6 +157,111 @@ RSpec.describe BulkImports::PipelineWorker do
expect(pipeline_tracker.jid).to eq('jid')
end
+ shared_examples 'successfully runs the pipeline' do
+ it 'runs the given pipeline successfully' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ hash_including(
+ 'pipeline_name' => 'FakePipeline',
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path
+ )
+ )
+ end
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
+
+ allow(subject).to receive(:jid).and_return('jid')
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:finished)
+ expect(pipeline_tracker.jid).to eq('jid')
+ end
+ end
+
+ context 'when enqueued pipeline cannot be found' do
+ shared_examples 'logs the error' do
+ it 'logs the error' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ status = pipeline_tracker.human_status_name
+
+ expect(logger)
+ .to receive(:error)
+ .with(
+ hash_including(
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'pipeline_tracker_id' => pipeline_tracker.id,
+ 'pipeline_tracker_state' => status,
+ 'pipeline_name' => pipeline_tracker.pipeline_name,
+ 'source_full_path' => entity.source_full_path,
+ 'source_version' => entity.bulk_import.source_version_info.to_s,
+ 'importer' => 'gitlab_migration',
+ 'message' => "Pipeline in #{status} state instead of expected enqueued state"
+ )
+ )
+ end
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ end
+ end
+
+ context 'when pipeline is finished' do
+ let(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ :finished,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+ end
+
+ include_examples 'logs the error'
+ end
+
+ context 'when pipeline is skipped' do
+ let(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ :skipped,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+ end
+
+ include_examples 'logs the error'
+ end
+
+ context 'when tracker is started' do
+ it 'marks tracker as failed' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ :started,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ expect(pipeline_tracker.reload.failed?).to eq(true)
+ end
+ end
+ end
+
context 'when entity is failed' do
it 'marks tracker as skipped and logs the skip' do
pipeline_tracker = create(
@@ -343,23 +426,64 @@ RSpec.describe BulkImports::PipelineWorker do
end
context 'when export status is empty' do
- it 'reenqueues pipeline worker' do
+ before do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
allow(status).to receive(:started?).and_return(false)
allow(status).to receive(:empty?).and_return(true)
allow(status).to receive(:failed?).and_return(false)
end
- expect(described_class)
- .to receive(:perform_in)
- .with(
- described_class::FILE_EXTRACTION_PIPELINE_PERFORM_DELAY,
- pipeline_tracker.id,
- pipeline_tracker.stage,
- entity.id
- )
+ entity.update!(created_at: entity_created_at)
+ end
- subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ context 'when timeout is not reached' do
+ let(:entity_created_at) { 1.minute.ago }
+
+ it 'reenqueues pipeline worker' do
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(
+ described_class::FILE_EXTRACTION_PIPELINE_PERFORM_DELAY,
+ pipeline_tracker.id,
+ pipeline_tracker.stage,
+ entity.id
+ )
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ expect(pipeline_tracker.reload.status_name).to eq(:enqueued)
+ end
+ end
+
+ context 'when timeout is reached' do
+ let(:entity_created_at) { 10.minutes.ago }
+
+ it 'marks as failed and logs the error' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ hash_including(
+ 'pipeline_name' => 'NdjsonPipeline',
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'class' => 'BulkImports::PipelineWorker',
+ 'exception.backtrace' => anything,
+ 'exception.class' => 'BulkImports::Pipeline::ExpiredError',
+ 'exception.message' => 'Empty export status on source instance',
+ 'importer' => 'gitlab_migration',
+ 'message' => 'Pipeline failed',
+ 'source_version' => entity.bulk_import.source_version_info.to_s
+ )
+ )
+ end
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ expect(pipeline_tracker.reload.status_name).to eq(:failed)
+ end
end
end
diff --git a/spec/workers/ci/create_downstream_pipeline_worker_spec.rb b/spec/workers/ci/create_downstream_pipeline_worker_spec.rb
index 7a75da850d9..b4add681e67 100644
--- a/spec/workers/ci/create_downstream_pipeline_worker_spec.rb
+++ b/spec/workers/ci/create_downstream_pipeline_worker_spec.rb
@@ -9,19 +9,52 @@ RSpec.describe Ci::CreateDownstreamPipelineWorker do
let(:bridge) { create(:ci_bridge, user: user, pipeline: pipeline) }
- let(:service) { double('pipeline creation service') }
-
describe '#perform' do
context 'when bridge exists' do
- it 'calls cross project pipeline creation service' do
+ let(:service) { double('pipeline creation service') }
+
+ let(:service_result) { ServiceResponse.success(payload: instance_double(Ci::Pipeline, id: 100)) }
+
+ it 'calls cross project pipeline creation service and logs the new pipeline id' do
expect(Ci::CreateDownstreamPipelineService)
.to receive(:new)
.with(project, user)
.and_return(service)
- expect(service).to receive(:execute).with(bridge)
+ expect(service)
+ .to receive(:execute)
+ .with(bridge)
+ .and_return(service_result)
+
+ worker = described_class.new
+ worker.perform(bridge.id)
+
+ expect(worker.logging_extras).to eq({ "extra.ci_create_downstream_pipeline_worker.new_pipeline_id" => 100 })
+ end
+
+ context 'when downstream pipeline creation errors' do
+ let(:service_result) { ServiceResponse.error(message: 'Already has a downstream pipeline') }
+
+ it 'calls cross project pipeline creation service and logs the error' do
+ expect(Ci::CreateDownstreamPipelineService)
+ .to receive(:new)
+ .with(project, user)
+ .and_return(service)
+
+ expect(service)
+ .to receive(:execute)
+ .with(bridge)
+ .and_return(service_result)
+
+ worker = described_class.new
+ worker.perform(bridge.id)
- described_class.new.perform(bridge.id)
+ expect(worker.logging_extras).to eq(
+ {
+ "extra.ci_create_downstream_pipeline_worker.create_error_message" => "Already has a downstream pipeline"
+ }
+ )
+ end
end
end
diff --git a/spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb b/spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb
index fe4bc2421a4..f14b7f9d1d0 100644
--- a/spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb
+++ b/spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb
@@ -46,30 +46,11 @@ RSpec.describe Ci::RefDeleteUnlockArtifactsWorker do
context 'when a locked pipeline with persisted artifacts exists' do
let!(:pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: 'master', project: project, locked: :artifacts_locked) }
- context 'with ci_update_unlocked_job_artifacts disabled' do
- before do
- stub_feature_flags(ci_update_unlocked_job_artifacts: false)
- end
+ it 'logs the correct extra metadata' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_pipelines, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_job_artifacts, 2)
- it 'logs the correct extra metadata' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_pipelines, 1)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_job_artifacts, 0)
-
- perform
- end
- end
-
- context 'with ci_update_unlocked_job_artifacts enabled' do
- before do
- stub_feature_flags(ci_update_unlocked_job_artifacts: true)
- end
-
- it 'logs the correct extra metadata' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_pipelines, 1)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_job_artifacts, 2)
-
- perform
- end
+ perform
end
end
end
diff --git a/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
index ff67266c3e8..30b451f2112 100644
--- a/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
+++ b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateWorker do
+RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateWorker, feature_category: :runner_fleet do
subject(:worker) { described_class.new }
describe '#perform' do
diff --git a/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
index 1292df62ce5..34b1cb33e6b 100644
--- a/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
+++ b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::ReconcileExistingRunnerVersionsCronWorker do
+RSpec.describe Ci::Runners::ReconcileExistingRunnerVersionsCronWorker, feature_category: :runner_fleet do
subject(:worker) { described_class.new }
describe '#perform' do
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index ece0c5053cb..02190201986 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -194,6 +194,43 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures do
.to raise_error(NoMethodError, /^undefined method `github_identifiers/)
end
end
+
+ context 'when the record is invalid' do
+ it 'logs an error' do
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ github_identifiers: github_identifiers,
+ message: 'starting importer',
+ project_id: project.id,
+ importer: 'klass_name'
+ }
+ )
+
+ expect(importer_class)
+ .to receive(:new)
+ .with(instance_of(MockRepresantation), project, client)
+ .and_return(importer_instance)
+
+ exception = ActiveRecord::RecordInvalid.new
+ expect(importer_instance)
+ .to receive(:execute)
+ .and_raise(exception)
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: 'klass_name',
+ fail_import: false
+ )
+ .and_call_original
+
+ worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
+ end
+ end
end
describe '#increment_object_counter?' do
diff --git a/spec/workers/concerns/waitable_worker_spec.rb b/spec/workers/concerns/waitable_worker_spec.rb
index bf156c3b8cb..1449c327052 100644
--- a/spec/workers/concerns/waitable_worker_spec.rb
+++ b/spec/workers/concerns/waitable_worker_spec.rb
@@ -14,12 +14,6 @@ RSpec.describe WaitableWorker do
include ApplicationWorker
prepend WaitableWorker
- # This is a workaround for a Ruby 2.3.7 bug. rspec-mocks cannot restore
- # the visibility of prepended modules. See
- # https://github.com/rspec/rspec-mocks/issues/1231 for more details.
- def self.bulk_perform_inline(args_list)
- end
-
def perform(count = 0)
self.class.counter += count
end
@@ -37,27 +31,6 @@ RSpec.describe WaitableWorker do
worker.bulk_perform_and_wait(arguments)
end
-
- context 'when the feature flag `always_async_project_authorizations_refresh` is turned off' do
- before do
- stub_feature_flags(always_async_project_authorizations_refresh: false)
- end
-
- it 'inlines the job' do
- args_list = [[1]]
- expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original
- expect(Gitlab::AppJsonLogger).to(
- receive(:info).with(a_hash_including('message' => 'running inline',
- 'class' => 'Gitlab::Foo::Bar::DummyWorker',
- 'job_status' => 'running',
- 'queue' => 'foo_bar_dummy'))
- .once)
-
- worker.bulk_perform_and_wait(args_list)
-
- expect(worker.counter).to eq(1)
- end
- end
end
context 'between 2 and 3 jobs' do
@@ -81,22 +54,6 @@ RSpec.describe WaitableWorker do
end
end
- describe '.bulk_perform_inline' do
- it 'runs the jobs inline' do
- expect(worker).not_to receive(:bulk_perform_async)
-
- worker.bulk_perform_inline([[1], [2]])
-
- expect(worker.counter).to eq(3)
- end
-
- it 'enqueues jobs if an error is raised' do
- expect(worker).to receive(:bulk_perform_async).with([['foo']])
-
- worker.bulk_perform_inline([[1], ['foo']])
- end
- end
-
describe '#perform' do
shared_examples 'perform' do
it 'notifies the JobWaiter when done if the key is provided' do
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index 3cb83a7a5d7..8eda943f36e 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -398,8 +398,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
def cleanup_service_response(
- status: :finished,
- repository:,
+ repository:, status: :finished,
cleanup_tags_service_original_size: 100,
cleanup_tags_service_before_truncate_size: 80,
cleanup_tags_service_after_truncate_size: 80,
diff --git a/spec/workers/container_registry/cleanup_worker_spec.rb b/spec/workers/container_registry/cleanup_worker_spec.rb
index ffcb421ce1e..a510b660412 100644
--- a/spec/workers/container_registry/cleanup_worker_spec.rb
+++ b/spec/workers/container_registry/cleanup_worker_spec.rb
@@ -63,19 +63,5 @@ RSpec.describe ContainerRegistry::CleanupWorker, :aggregate_failures do
perform
end
end
-
- context 'with container_registry_delete_repository_with_cron_worker disabled' do
- before do
- stub_feature_flags(container_registry_delete_repository_with_cron_worker: false)
- end
-
- it 'does not run' do
- expect(worker).not_to receive(:reset_stale_deletes)
- expect(worker).not_to receive(:enqueue_delete_container_repository_jobs)
- expect(worker).not_to receive(:log_counts)
-
- subject
- end
- end
end
end
diff --git a/spec/workers/container_registry/migration/guard_worker_spec.rb b/spec/workers/container_registry/migration/guard_worker_spec.rb
index d2bcfef2f5b..4ad2d5c300c 100644
--- a/spec/workers/container_registry/migration/guard_worker_spec.rb
+++ b/spec/workers/container_registry/migration/guard_worker_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
expect(ContainerRegistry::Migration).to receive(timeout).and_call_original
expect { subject }
- .to change(import_aborted_migrations, :count).by(1)
+ .to change { import_aborted_migrations.count }.by(1)
.and change { stale_migration.reload.migration_state }.to('import_aborted')
.and not_change { ongoing_migration.migration_state }
end
@@ -67,7 +67,7 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
expect(ContainerRegistry::Migration).to receive(timeout).and_call_original
expect { subject }
- .to change(import_skipped_migrations, :count)
+ .to change { import_skipped_migrations.count }
expect(stale_migration.reload.migration_state).to eq('import_skipped')
expect(stale_migration.reload.migration_skipped_reason).to eq('migration_canceled')
@@ -124,11 +124,11 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_stale_migrations_count, 1)
expect { subject }
- .to change(pre_importing_migrations, :count).by(-1)
+ .to change { pre_importing_migrations.count }.by(-1)
.and not_change(pre_import_done_migrations, :count)
.and not_change(importing_migrations, :count)
.and not_change(import_done_migrations, :count)
- .and change(import_aborted_migrations, :count).by(1)
+ .and change { import_aborted_migrations.count }.by(1)
.and change { stale_migration.reload.migration_state }.from('pre_importing').to('import_aborted')
.and not_change { ongoing_migration.migration_state }
end
@@ -223,10 +223,10 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
expect { subject }
.to not_change(pre_importing_migrations, :count)
- .and change(pre_import_done_migrations, :count).by(-1)
+ .and change { pre_import_done_migrations.count }.by(-1)
.and not_change(importing_migrations, :count)
.and not_change(import_done_migrations, :count)
- .and change(import_aborted_migrations, :count).by(1)
+ .and change { import_aborted_migrations.count }.by(1)
.and change { stale_migration.reload.migration_state }.from('pre_import_done').to('import_aborted')
.and not_change { ongoing_migration.migration_state }
end
@@ -252,9 +252,9 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
expect { subject }
.to not_change(pre_importing_migrations, :count)
.and not_change(pre_import_done_migrations, :count)
- .and change(importing_migrations, :count).by(-1)
+ .and change { importing_migrations.count }.by(-1)
.and not_change(import_done_migrations, :count)
- .and change(import_aborted_migrations, :count).by(1)
+ .and change { import_aborted_migrations.count }.by(1)
.and change { stale_migration.reload.migration_state }.from('importing').to('import_aborted')
.and not_change { ongoing_migration.migration_state }
end
diff --git a/spec/workers/database/batched_background_migration/ci_execution_worker_spec.rb b/spec/workers/database/batched_background_migration/ci_execution_worker_spec.rb
new file mode 100644
index 00000000000..ec77a15c7ef
--- /dev/null
+++ b/spec/workers/database/batched_background_migration/ci_execution_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::BatchedBackgroundMigration::CiExecutionWorker,
+ :clean_gitlab_redis_shared_state,
+ feature_category: :database do
+ it_behaves_like 'batched background migrations execution worker'
+ end
diff --git a/spec/workers/database/batched_background_migration/execution_worker_spec.rb b/spec/workers/database/batched_background_migration/execution_worker_spec.rb
deleted file mode 100644
index 9a850a98f2f..00000000000
--- a/spec/workers/database/batched_background_migration/execution_worker_spec.rb
+++ /dev/null
@@ -1,141 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Database::BatchedBackgroundMigration::ExecutionWorker, :clean_gitlab_redis_shared_state do
- include ExclusiveLeaseHelpers
-
- describe '#perform' do
- let(:database_name) { Gitlab::Database::MAIN_DATABASE_NAME.to_sym }
- let(:base_model) { Gitlab::Database.database_base_models[database_name] }
- let(:table_name) { :events }
- let(:job_interval) { 5.minutes }
- let(:lease_timeout) { job_interval * described_class::LEASE_TIMEOUT_MULTIPLIER }
- let(:interval_variance) { described_class::INTERVAL_VARIANCE }
-
- subject(:worker) { described_class.new }
-
- context 'when the feature flag is disabled' do
- let(:migration) do
- create(:batched_background_migration, :active, interval: job_interval, table_name: table_name)
- end
-
- before do
- stub_feature_flags(execute_batched_migrations_on_schedule: false)
- end
-
- it 'does nothing' do
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration).not_to receive(:find_executable)
- expect(worker).not_to receive(:run_migration_job)
-
- worker.perform(database_name, migration.id)
- end
- end
-
- context 'when the feature flag is enabled' do
- before do
- stub_feature_flags(execute_batched_migrations_on_schedule: true)
- end
-
- context 'when the provided database is sharing config' do
- before do
- skip_if_multiple_databases_not_setup
- end
-
- it 'does nothing' do
- ci_model = Gitlab::Database.database_base_models['ci']
- expect(Gitlab::Database).to receive(:db_config_share_with)
- .with(ci_model.connection_db_config).and_return('main')
-
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration).not_to receive(:find_executable)
- expect(worker).not_to receive(:run_migration_job)
-
- worker.perform(:ci, 123)
- end
- end
-
- context 'when migration does not exist' do
- it 'does nothing' do
- expect(worker).not_to receive(:run_migration_job)
-
- worker.perform(database_name, non_existing_record_id)
- end
- end
-
- context 'when migration exist' do
- let(:migration) do
- create(:batched_background_migration, :active, interval: job_interval, table_name: table_name)
- end
-
- before do
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_executable)
- .with(migration.id, connection: base_model.connection)
- .and_return(migration)
- end
-
- context 'when the migration is no longer active' do
- it 'does not run the migration' do
- expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(base_model.connection).and_yield
-
- expect(migration).to receive(:active?).and_return(false)
-
- expect(worker).not_to receive(:run_migration_job)
-
- worker.perform(database_name, migration.id)
- end
- end
-
- context 'when the interval has not elapsed' do
- it 'does not run the migration' do
- expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(base_model.connection).and_yield
- expect(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(false)
- expect(worker).not_to receive(:run_migration_job)
-
- worker.perform(database_name, migration.id)
- end
- end
-
- context 'when the migration is still active and the interval has elapsed' do
- let(:table_name_lease_key) do
- "#{described_class.name.underscore}:database_name:#{database_name}:" \
- "table_name:#{table_name}"
- end
-
- context 'when can not obtain lease on the table name' do
- it 'does nothing' do
- stub_exclusive_lease_taken(table_name_lease_key, timeout: lease_timeout)
-
- expect(worker).not_to receive(:run_migration_job)
-
- worker.perform(database_name, migration.id)
- end
- end
-
- it 'always cleans up the exclusive lease' do
- expect_to_obtain_exclusive_lease(table_name_lease_key, 'uuid-table-name', timeout: lease_timeout)
- expect_to_cancel_exclusive_lease(table_name_lease_key, 'uuid-table-name')
-
- expect(worker).to receive(:run_migration_job).and_raise(RuntimeError, 'I broke')
-
- expect { worker.perform(database_name, migration.id) }.to raise_error(RuntimeError, 'I broke')
- end
-
- it 'runs the migration' do
- expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(base_model.connection).and_yield
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
- expect(instance).to receive(:run_migration_job).with(migration)
- end
-
- expect_to_obtain_exclusive_lease(table_name_lease_key, 'uuid-table-name', timeout: lease_timeout)
- expect_to_cancel_exclusive_lease(table_name_lease_key, 'uuid-table-name')
-
- expect(worker).to receive(:run_migration_job).and_call_original
-
- worker.perform(database_name, migration.id)
- end
- end
- end
- end
- end
-end
diff --git a/spec/workers/database/batched_background_migration/main_execution_worker_spec.rb b/spec/workers/database/batched_background_migration/main_execution_worker_spec.rb
new file mode 100644
index 00000000000..42a3675f735
--- /dev/null
+++ b/spec/workers/database/batched_background_migration/main_execution_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::BatchedBackgroundMigration::MainExecutionWorker,
+ :clean_gitlab_redis_shared_state,
+ feature_category: :database do
+ it_behaves_like 'batched background migrations execution worker'
+end
diff --git a/spec/workers/delete_container_repository_worker_spec.rb b/spec/workers/delete_container_repository_worker_spec.rb
index a011457444a..6ad131b4c14 100644
--- a/spec/workers/delete_container_repository_worker_spec.rb
+++ b/spec/workers/delete_container_repository_worker_spec.rb
@@ -10,112 +10,10 @@ RSpec.describe DeleteContainerRepositoryWorker do
let(:worker) { described_class.new }
describe '#perform' do
- let(:user_id) { user.id }
- let(:repository_id) { repository.id }
+ subject(:perform) { worker.perform(user.id, repository.id) }
- subject(:perform) { worker.perform(user_id, repository_id) }
-
- it 'executes the destroy service' do
- expect_destroy_service_execution
-
- perform
- end
-
- context 'with an invalid user id' do
- let(:user_id) { -1 }
-
- it { expect { perform }.not_to raise_error }
- end
-
- context 'with an invalid repository id' do
- let(:repository_id) { -1 }
-
- it { expect { perform }.not_to raise_error }
- end
-
- context 'with a repository being migrated', :freeze_time do
- before do
- stub_application_setting(
- container_registry_pre_import_tags_rate: 0.5,
- container_registry_import_timeout: 10.minutes.to_i
- )
- end
-
- shared_examples 'destroying the repository' do
- it 'does destroy the repository' do
- expect_next_found_instance_of(ContainerRepository) do |container_repository|
- expect(container_repository).not_to receive(:tags_count)
- end
- expect(described_class).not_to receive(:perform_in)
- expect_destroy_service_execution
-
- perform
- end
- end
-
- shared_examples 'not re enqueuing job if feature flag is disabled' do
- before do
- stub_feature_flags(container_registry_migration_phase2_delete_container_repository_worker_support: false)
- end
-
- it_behaves_like 'destroying the repository'
- end
-
- context 'with migration state set to pre importing' do
- let_it_be(:repository) { create(:container_repository, :pre_importing) }
-
- let(:tags_count) { 60 }
- let(:delay) { (tags_count * 0.5).seconds + 10.minutes + described_class::FIXED_DELAY }
-
- it 'does not destroy the repository and re enqueue the job' do
- expect_next_found_instance_of(ContainerRepository) do |container_repository|
- expect(container_repository).to receive(:tags_count).and_return(tags_count)
- end
- expect(described_class).to receive(:perform_in).with(delay.from_now)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:delete_postponed, delay)
- expect(::Projects::ContainerRepository::DestroyService).not_to receive(:new)
-
- perform
- end
-
- it_behaves_like 'not re enqueuing job if feature flag is disabled'
- end
-
- %i[pre_import_done importing import_aborted].each do |migration_state|
- context "with migration state set to #{migration_state}" do
- let_it_be(:repository) { create(:container_repository, migration_state) }
-
- let(:delay) { 10.minutes + described_class::FIXED_DELAY }
-
- it 'does not destroy the repository and re enqueue the job' do
- expect_next_found_instance_of(ContainerRepository) do |container_repository|
- expect(container_repository).not_to receive(:tags_count)
- end
- expect(described_class).to receive(:perform_in).with(delay.from_now)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:delete_postponed, delay)
- expect(::Projects::ContainerRepository::DestroyService).not_to receive(:new)
-
- perform
- end
-
- it_behaves_like 'not re enqueuing job if feature flag is disabled'
- end
- end
-
- %i[default import_done import_skipped].each do |migration_state|
- context "with migration state set to #{migration_state}" do
- let_it_be(:repository) { create(:container_repository, migration_state) }
-
- it_behaves_like 'destroying the repository'
- it_behaves_like 'not re enqueuing job if feature flag is disabled'
- end
- end
- end
-
- def expect_destroy_service_execution
- service = instance_double(Projects::ContainerRepository::DestroyService)
- expect(service).to receive(:execute)
- expect(Projects::ContainerRepository::DestroyService).to receive(:new).with(project, user).and_return(service)
+ it 'is a no op' do
+ expect { subject }.to not_change { ContainerRepository.count }
end
end
end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index e705ca28e54..788f5d8222c 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -139,6 +139,7 @@ RSpec.describe 'Every Sidekiq worker' do
'BuildQueueWorker' => 3,
'BuildSuccessWorker' => 3,
'BulkImportWorker' => false,
+ 'BulkImports::ExportRequestWorker' => 5,
'BulkImports::EntityWorker' => false,
'BulkImports::PipelineWorker' => false,
'Chaos::CpuSpinWorker' => 3,
@@ -193,6 +194,8 @@ RSpec.describe 'Every Sidekiq worker' do
'CreateGithubWebhookWorker' => 3,
'CreateNoteDiffFileWorker' => 3,
'CreatePipelineWorker' => 3,
+ 'Database::BatchedBackgroundMigration::CiExecutionWorker' => 0,
+ 'Database::BatchedBackgroundMigration::MainExecutionWorker' => 0,
'DeleteContainerRepositoryWorker' => 3,
'DeleteDiffFilesWorker' => 3,
'DeleteMergedBranchesWorker' => 3,
@@ -286,6 +289,9 @@ RSpec.describe 'Every Sidekiq worker' do
'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportPullRequestsWorker' => 5,
'Gitlab::GithubImport::Stage::ImportRepositoryWorker' => 5,
+ 'Gitlab::GithubGistsImport::ImportGistWorker' => 5,
+ 'Gitlab::GithubGistsImport::StartImportWorker' => 5,
+ 'Gitlab::GithubGistsImport::FinishImportWorker' => 5,
'Gitlab::JiraImport::AdvanceStageWorker' => 5,
'Gitlab::JiraImport::ImportIssueWorker' => 5,
'Gitlab::JiraImport::Stage::FinishImportWorker' => 5,
@@ -340,6 +346,7 @@ RSpec.describe 'Every Sidekiq worker' do
'MergeRequestMergeabilityCheckWorker' => 3,
'MergeRequestResetApprovalsWorker' => 3,
'MergeRequests::AssigneesChangeWorker' => 3,
+ 'MergeRequests::CaptureSuggestedReviewersAcceptedWorker' => 3,
'MergeRequests::CreatePipelineWorker' => 3,
'MergeRequests::DeleteSourceBranchWorker' => 3,
'MergeRequests::FetchSuggestedReviewersWorker' => 3,
@@ -366,7 +373,6 @@ RSpec.describe 'Every Sidekiq worker' do
'ObjectPool::DestroyWorker' => 3,
'ObjectPool::JoinWorker' => 3,
'ObjectPool::ScheduleJoinWorker' => 3,
- 'ObjectStorage::BackgroundMoveWorker' => 5,
'ObjectStorage::MigrateUploadsWorker' => 3,
'Onboarding::CreateLearnGitlabWorker' => 3,
'Packages::CleanupPackageFileWorker' => 0,
@@ -388,6 +394,7 @@ RSpec.describe 'Every Sidekiq worker' do
'PipelineProcessWorker' => 3,
'PostReceive' => 3,
'ProcessCommitWorker' => 3,
+ 'ProductAnalytics::InitializeAnalyticsWorker' => 3,
'ProjectCacheWorker' => 3,
'ProjectDestroyWorker' => 3,
'ProjectExportWorker' => false,
diff --git a/spec/workers/flush_counter_increments_worker_spec.rb b/spec/workers/flush_counter_increments_worker_spec.rb
index 14b49b97ac3..83670acf4b6 100644
--- a/spec/workers/flush_counter_increments_worker_spec.rb
+++ b/spec/workers/flush_counter_increments_worker_spec.rb
@@ -12,29 +12,32 @@ RSpec.describe FlushCounterIncrementsWorker, :counter_attribute do
subject { worker.perform(model.class.name, model.id, attribute) }
- it 'flushes increments to database' do
+ it 'commits increments to database' do
expect(model.class).to receive(:find_by_id).and_return(model)
- expect(model)
- .to receive(:flush_increments_to_database!)
- .with(attribute)
- .and_call_original
+ expect_next_instance_of(Gitlab::Counters::BufferedCounter, model, attribute) do |service|
+ expect(service).to receive(:commit_increment!)
+ end
subject
end
context 'when model class does not exist' do
- subject { worker.perform('non-existend-model') }
+ subject { worker.perform('NonExistentModel', 1, attribute) }
it 'does nothing' do
- expect(worker).not_to receive(:in_lock)
+ expect(Gitlab::Counters::BufferedCounter).not_to receive(:new)
+
+ subject
end
end
context 'when record does not exist' do
- subject { worker.perform(model.class.name, model.id + 100, attribute) }
+ subject { worker.perform(model.class.name, non_existing_record_id, attribute) }
it 'does nothing' do
- expect(worker).not_to receive(:in_lock)
+ expect(Gitlab::Counters::BufferedCounter).not_to receive(:new)
+
+ subject
end
end
end
diff --git a/spec/workers/gitlab/export/prune_project_export_jobs_worker_spec.rb b/spec/workers/gitlab/export/prune_project_export_jobs_worker_spec.rb
new file mode 100644
index 00000000000..eded07c7a2f
--- /dev/null
+++ b/spec/workers/gitlab/export/prune_project_export_jobs_worker_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Export::PruneProjectExportJobsWorker, feature_category: :importers do
+ let_it_be(:old_job_1) { create(:project_export_job, updated_at: 37.months.ago) }
+ let_it_be(:old_job_2) { create(:project_export_job, updated_at: 12.months.ago) }
+ let_it_be(:old_job_3) { create(:project_export_job, updated_at: 8.days.ago) }
+ let_it_be(:fresh_job_1) { create(:project_export_job, updated_at: 1.day.ago) }
+ let_it_be(:fresh_job_2) { create(:project_export_job, updated_at: 2.days.ago) }
+ let_it_be(:fresh_job_3) { create(:project_export_job, updated_at: 6.days.ago) }
+
+ let_it_be(:old_relation_export_1) { create(:project_relation_export, project_export_job_id: old_job_1.id) }
+ let_it_be(:old_relation_export_2) { create(:project_relation_export, project_export_job_id: old_job_2.id) }
+ let_it_be(:old_relation_export_3) { create(:project_relation_export, project_export_job_id: old_job_3.id) }
+ let_it_be(:fresh_relation_export_1) { create(:project_relation_export, project_export_job_id: fresh_job_1.id) }
+
+ let_it_be(:old_upload_1) { create(:relation_export_upload, project_relation_export_id: old_relation_export_1.id) }
+ let_it_be(:old_upload_2) { create(:relation_export_upload, project_relation_export_id: old_relation_export_2.id) }
+ let_it_be(:old_upload_3) { create(:relation_export_upload, project_relation_export_id: old_relation_export_3.id) }
+ let_it_be(:fresh_upload_1) { create(:relation_export_upload, project_relation_export_id: fresh_relation_export_1.id) }
+
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ include_examples 'an idempotent worker' do
+ it 'prunes jobs and associations older than 7 days' do
+ expect { perform_multiple }.to change { ProjectExportJob.count }.by(-3)
+ expect(ProjectExportJob.find_by(id: old_job_1.id)).to be_nil
+ expect(ProjectExportJob.find_by(id: old_job_2.id)).to be_nil
+ expect(ProjectExportJob.find_by(id: old_job_3.id)).to be_nil
+
+ expect(Projects::ImportExport::RelationExport.find_by(id: old_relation_export_1.id)).to be_nil
+ expect(Projects::ImportExport::RelationExport.find_by(id: old_relation_export_2.id)).to be_nil
+ expect(Projects::ImportExport::RelationExport.find_by(id: old_relation_export_3.id)).to be_nil
+
+ expect(Projects::ImportExport::RelationExportUpload.find_by(id: old_upload_1.id)).to be_nil
+ expect(Projects::ImportExport::RelationExportUpload.find_by(id: old_upload_2.id)).to be_nil
+ expect(Projects::ImportExport::RelationExportUpload.find_by(id: old_upload_3.id)).to be_nil
+ end
+
+ it 'leaves fresh jobs and associations' do
+ perform_multiple
+ expect(fresh_job_1.reload).to be_present
+ expect(fresh_job_2.reload).to be_present
+ expect(fresh_job_3.reload).to be_present
+ expect(fresh_relation_export_1.reload).to be_present
+ expect(fresh_upload_1.reload).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb b/spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb
new file mode 100644
index 00000000000..c4c19f2f9c5
--- /dev/null
+++ b/spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubGistsImport::FinishImportWorker, feature_category: :importers do
+ subject(:worker) { described_class.new }
+
+ let_it_be(:user) { create(:user) }
+
+ describe '#perform', :aggregate_failures do
+ context 'when there are no remaining jobs' do
+ it 'marks import status as finished' do
+ waiter = instance_double(Gitlab::JobWaiter, key: :key, jobs_remaining: 0)
+ expect(Gitlab::JobWaiter).to receive(:new).and_return(waiter)
+ expect(waiter).to receive(:wait).with(described_class::BLOCKING_WAIT_TIME)
+ expect_next_instance_of(Gitlab::GithubGistsImport::Status) do |status|
+ expect(status).to receive(:finish!)
+ end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(user_id: user.id, message: 'GitHub Gists import finished')
+
+ worker.perform(user.id, waiter.key, waiter.jobs_remaining)
+ end
+ end
+
+ context 'when there are remaining jobs' do
+ it 'reschedules the worker' do
+ waiter = instance_double(Gitlab::JobWaiter, key: :key, jobs_remaining: 2)
+ expect(Gitlab::JobWaiter).to receive(:new).and_return(waiter)
+ expect(waiter).to receive(:wait).with(described_class::BLOCKING_WAIT_TIME)
+ expect(described_class).to receive(:perform_in)
+ .with(described_class::INTERVAL, user.id, waiter.key, waiter.jobs_remaining)
+
+ worker.perform(user.id, waiter.key, waiter.jobs_remaining)
+ end
+ end
+ end
+
+ describe '.sidekiq_retries_exhausted' do
+ it 'sets status to failed' do
+ job = { 'args' => [user.id, 'some_key', '1'], 'jid' => '123' }
+
+ expect_next_instance_of(Gitlab::GithubGistsImport::Status) do |status|
+ expect(status).to receive(:fail!)
+ end
+
+ described_class.sidekiq_retries_exhausted_block.call(job)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
new file mode 100644
index 00000000000..dfc5084bb10
--- /dev/null
+++ b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :importers do
+ subject { described_class.new }
+
+ let_it_be(:user) { create(:user) }
+ let(:token) { 'token' }
+ let(:gist_hash) do
+ {
+ id: '055b70',
+ git_pull_url: 'https://gist.github.com/foo/bar.git',
+ files: {
+ 'random.txt': {
+ filename: 'random.txt',
+ type: 'text/plain',
+ language: 'Text',
+ raw_url: 'https://gist.githubusercontent.com/user_name/055b70/raw/66a7be0d/random.txt',
+ size: 166903
+ }
+ },
+ is_public: false,
+ created_at: '2022-09-06T11:38:18Z',
+ updated_at: '2022-09-06T11:38:18Z',
+ description: 'random text'
+ }
+ end
+
+ let(:importer) { instance_double('Gitlab::GithubGistsImport::Importer::GistImporter') }
+ let(:importer_result) { instance_double('ServiceResponse', success?: true) }
+ let(:gist_object) do
+ instance_double('Gitlab::GithubGistsImport::Representation::Gist',
+ gist_hash.merge(github_identifiers: { id: '055b70' }, truncated_title: 'random text', visibility_level: 0))
+ end
+
+ let(:log_attributes) do
+ {
+ 'user_id' => user.id,
+ 'github_identifiers' => { 'id': gist_object.id },
+ 'class' => 'Gitlab::GithubGistsImport::ImportGistWorker',
+ 'correlation_id' => 'new-correlation-id',
+ 'jid' => nil,
+ 'job_status' => 'running',
+ 'queue' => 'github_gists_importer:github_gists_import_import_gist'
+ }
+ end
+
+ describe '#perform' do
+ before do
+ allow(Gitlab::GithubGistsImport::Representation::Gist)
+ .to receive(:from_json_hash)
+ .with(gist_hash)
+ .and_return(gist_object)
+
+ allow(Gitlab::GithubGistsImport::Importer::GistImporter)
+ .to receive(:new)
+ .with(gist_object, user.id)
+ .and_return(importer)
+
+ allow(Gitlab::ApplicationContext).to receive(:current).and_return('correlation_id' => 'new-correlation-id')
+ allow(described_class).to receive(:queue).and_return('github_gists_importer:github_gists_import_import_gist')
+ end
+
+ context 'when success' do
+ it 'imports gist' do
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(log_attributes.merge('message' => 'start importer'))
+ expect(importer).to receive(:execute).and_return(importer_result)
+ expect(Gitlab::JobWaiter).to receive(:notify).with('some_key', subject.jid)
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(log_attributes.merge('message' => 'importer finished'))
+
+ subject.perform(user.id, gist_hash, 'some_key')
+ end
+ end
+
+ context 'when importer raised an error' do
+ it 'raises an error' do
+ exception = StandardError.new('_some_error_')
+
+ expect(importer).to receive(:execute).and_raise(exception)
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:error)
+ .with(log_attributes.merge('message' => 'importer failed', 'error.message' => '_some_error_'))
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ expect { subject.perform(user.id, gist_hash, 'some_key') }.to raise_error(StandardError)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb b/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb
new file mode 100644
index 00000000000..523b7463a9d
--- /dev/null
+++ b/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubGistsImport::StartImportWorker, feature_category: :importers do
+ subject(:worker) { described_class.new }
+
+ let_it_be(:user) { create(:user) }
+ let(:token) { Gitlab::CryptoHelper.aes256_gcm_encrypt('token') }
+ let(:importer) { instance_double(Gitlab::GithubGistsImport::Importer::GistsImporter) }
+ let(:waiter) { instance_double(Gitlab::JobWaiter, key: :key, jobs_remaining: 1) }
+ let(:importer_context) { Struct.new(:success?, :error, :waiter, :next_attempt_in, keyword_init: true) }
+ let(:log_attributes) do
+ {
+ 'user_id' => user.id,
+ 'class' => described_class.name,
+ 'correlation_id' => 'new-correlation-id',
+ 'jid' => nil,
+ 'job_status' => 'running',
+ 'queue' => 'github_gists_importer:github_gists_import_start_import'
+ }
+ end
+
+ describe '#perform', :aggregate_failures do
+ before do
+ allow(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(log_attributes.merge('message' => 'starting importer'))
+
+ allow(Gitlab::ApplicationContext).to receive(:current).and_return('correlation_id' => 'new-correlation-id')
+ allow(described_class).to receive(:queue).and_return('github_gists_importer:github_gists_import_start_import')
+ end
+
+    context 'when import was successful' do
+ it 'imports all the gists' do
+ expect(Gitlab::CryptoHelper)
+ .to receive(:aes256_gcm_decrypt)
+ .with(token)
+ .and_call_original
+
+ expect(Gitlab::GithubGistsImport::Importer::GistsImporter)
+ .to receive(:new)
+ .with(user, 'token')
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+ .and_return(importer_context.new(success?: true, waiter: waiter))
+
+ expect(Gitlab::GithubGistsImport::FinishImportWorker)
+ .to receive(:perform_async)
+ .with(user.id, waiter.key, waiter.jobs_remaining)
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(log_attributes.merge('message' => 'importer finished'))
+
+ worker.perform(user.id, token)
+ end
+ end
+
+ context 'when importer returns an error' do
+ it 'raises an error' do
+ exception = StandardError.new('_some_error_')
+ importer_result = importer_context.new(success?: false, error: exception)
+
+ expect_next_instance_of(Gitlab::GithubGistsImport::Importer::GistsImporter) do |importer|
+ expect(importer).to receive(:execute).and_return(importer_result)
+ end
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:error)
+ .with(log_attributes.merge('message' => 'import failed', 'error.message' => exception.message))
+
+ expect { worker.perform(user.id, token) }.to raise_error(StandardError)
+ end
+ end
+
+ context 'when rate limit is reached' do
+ it 'reschedules worker' do
+ exception = Gitlab::GithubImport::RateLimitError.new
+ importer_result = importer_context.new(success?: false, error: exception, next_attempt_in: 5)
+
+ expect_next_instance_of(Gitlab::GithubGistsImport::Importer::GistsImporter) do |importer|
+ expect(importer).to receive(:execute).and_return(importer_result)
+ end
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(log_attributes.merge('message' => 'rate limit reached'))
+
+ expect(described_class).to receive(:perform_in).with(5, user.id, token)
+
+ worker.perform(user.id, token)
+ end
+ end
+ end
+
+ describe '.sidekiq_retries_exhausted' do
+ it 'sets status to failed' do
+ job = { 'args' => [user.id, token], 'jid' => '123' }
+
+ expect_next_instance_of(Gitlab::GithubGistsImport::Status) do |status|
+ expect(status).to receive(:fail!)
+ end
+
+ described_class.sidekiq_retries_exhausted_block.call(job)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
index 15bc55c1526..c92741e8f10 100644
--- a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Gitlab::GithubImport::ImportDiffNoteWorker do
hash = {
'noteable_id' => 42,
'github_id' => 42,
+ 'html_url' => 'https://github.com/foo/bar/pull/42',
'path' => 'README.md',
'commit_id' => '123abc',
'diff_hunk' => "@@ -1 +1 @@\n-Hello\n+Hello world",
diff --git a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
index 695e21f4733..0244e69b7b6 100644
--- a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::JiraImport::ImportIssueWorker do
describe '#perform', :clean_gitlab_redis_cache do
let(:assignee_ids) { [user.id] }
let(:issue_attrs) do
- build(:issue, project_id: project.id, title: 'jira issue')
+ build(:issue, project_id: project.id, namespace_id: project.project_namespace_id, title: 'jira issue')
.as_json.merge(
'label_ids' => [jira_issue_label_1.id, jira_issue_label_2.id], 'assignee_ids' => assignee_ids
).except('issue_type')
@@ -71,6 +71,7 @@ RSpec.describe Gitlab::JiraImport::ImportIssueWorker do
expect(issue.title).to eq('jira issue')
expect(issue.project).to eq(project)
+ expect(issue.namespace).to eq(project.project_namespace)
expect(issue.labels).to match_array([label, jira_issue_label_1, jira_issue_label_2])
expect(issue.assignees).to eq([user])
end
diff --git a/spec/workers/gitlab_shell_worker_spec.rb b/spec/workers/gitlab_shell_worker_spec.rb
index a5419291d35..838f2ef4ba4 100644
--- a/spec/workers/gitlab_shell_worker_spec.rb
+++ b/spec/workers/gitlab_shell_worker_spec.rb
@@ -18,29 +18,13 @@ RSpec.describe GitlabShellWorker, :sidekiq_inline do
end
describe 'all other commands' do
- context 'when verify_gitlab_shell_worker_method_names is enabled' do
- it 'raises ArgumentError' do
- allow_next_instance_of(described_class) do |job_instance|
- expect(job_instance).not_to receive(:gitlab_shell)
- end
-
- expect { described_class.perform_async('foo', 'bar', 'baz') }
- .to raise_error(ArgumentError, 'foo not allowed for GitlabShellWorker')
- end
- end
-
- context 'when verify_gitlab_shell_worker_method_names is disabled' do
- before do
- stub_feature_flags(verify_gitlab_shell_worker_method_names: false)
+ it 'raises ArgumentError' do
+ allow_next_instance_of(described_class) do |job_instance|
+ expect(job_instance).not_to receive(:gitlab_shell)
end
- it 'forwards the message to Gitlab::Shell' do
- expect_next_instance_of(Gitlab::Shell) do |instance|
- expect(instance).to receive('foo').with('bar', 'baz')
- end
-
- described_class.perform_async('foo', 'bar', 'baz')
- end
+ expect { described_class.perform_async('foo', 'bar', 'baz') }
+ .to raise_error(ArgumentError, 'foo not allowed for GitlabShellWorker')
end
end
end
diff --git a/spec/workers/incident_management/close_incident_worker_spec.rb b/spec/workers/incident_management/close_incident_worker_spec.rb
index b0d284ba5db..c96bb4a3d1e 100644
--- a/spec/workers/incident_management/close_incident_worker_spec.rb
+++ b/spec/workers/incident_management/close_incident_worker_spec.rb
@@ -17,14 +17,14 @@ RSpec.describe IncidentManagement::CloseIncidentWorker do
expect(service).to receive(:execute).with(issue, system_note: false).and_call_original
end
- expect { worker.perform(issue_id) }.to change(ResourceStateEvent, :count).by(1)
+ expect { worker.perform(issue_id) }.to change { ResourceStateEvent.count }.by(1)
end
shared_examples 'does not call the close issue service' do
specify do
expect(Issues::CloseService).not_to receive(:new)
- expect { worker.perform(issue_id) }.not_to change(ResourceStateEvent, :count)
+ expect { worker.perform(issue_id) }.not_to change { ResourceStateEvent.count }
end
end
@@ -58,7 +58,7 @@ RSpec.describe IncidentManagement::CloseIncidentWorker do
end
specify do
- expect { worker.perform(issue_id) }.not_to change(ResourceStateEvent, :count)
+ expect { worker.perform(issue_id) }.not_to change { ResourceStateEvent.count }
end
end
end
diff --git a/spec/workers/incident_management/pager_duty/process_incident_worker_spec.rb b/spec/workers/incident_management/pager_duty/process_incident_worker_spec.rb
index e2be91516b9..b81f1a575b5 100644
--- a/spec/workers/incident_management/pager_duty/process_incident_worker_spec.rb
+++ b/spec/workers/incident_management/pager_duty/process_incident_worker_spec.rb
@@ -22,14 +22,14 @@ RSpec.describe IncidentManagement::PagerDuty::ProcessIncidentWorker do
'assignees' => [{
'summary' => 'Laura Haley', 'url' => 'https://webdemo.pagerduty.com/users/P553OPV'
}],
- 'impacted_services' => [{
+ 'impacted_service' => {
'summary' => 'Production XDB Cluster', 'url' => 'https://webdemo.pagerduty.com/services/PN49J75'
- }]
+ }
}
end
it 'creates a GitLab issue' do
- expect { perform }.to change(Issue, :count).by(1)
+ expect { perform }.to change { Issue.count }.by(1)
end
end
@@ -41,7 +41,7 @@ RSpec.describe IncidentManagement::PagerDuty::ProcessIncidentWorker do
end
it 'does not create a GitLab issue' do
- expect { perform }.not_to change(Issue, :count)
+ expect { perform }.not_to change { Issue.count }
end
it 'logs a warning' do
diff --git a/spec/workers/issuable_export_csv_worker_spec.rb b/spec/workers/issuable_export_csv_worker_spec.rb
index a18d10ad3df..a5172d916b6 100644
--- a/spec/workers/issuable_export_csv_worker_spec.rb
+++ b/spec/workers/issuable_export_csv_worker_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe IssuableExportCsvWorker do
let(:issuable_type) { :issue }
it 'emails a CSV' do
- expect { subject }.to change(ActionMailer::Base.deliveries, :size).by(1)
+ expect { subject }.to change { ActionMailer::Base.deliveries.size }.by(1)
end
it 'ensures that project_id is passed to issues_finder' do
@@ -54,7 +54,7 @@ RSpec.describe IssuableExportCsvWorker do
let(:issuable_type) { :merge_request }
it 'emails a CSV' do
- expect { subject }.to change(ActionMailer::Base.deliveries, :size).by(1)
+ expect { subject }.to change { ActionMailer::Base.deliveries.size }.by(1)
end
it 'calls the MR export service' do
diff --git a/spec/workers/jira_connect/forward_event_worker_spec.rb b/spec/workers/jira_connect/forward_event_worker_spec.rb
index 7de9952a1da..d3db07b8cb4 100644
--- a/spec/workers/jira_connect/forward_event_worker_spec.rb
+++ b/spec/workers/jira_connect/forward_event_worker_spec.rb
@@ -24,14 +24,14 @@ RSpec.describe JiraConnect::ForwardEventWorker do
expect(Atlassian::Jwt).to receive(:encode).with({ iss: client_key, qsh: 'some_qsh' }, shared_secret).and_return('auth_token')
expect(JiraConnect::RetryRequestWorker).to receive(:perform_async).with(event_url, 'auth_token')
- expect { perform }.to change(JiraConnectInstallation, :count).by(-1)
+ expect { perform }.to change { JiraConnectInstallation.count }.by(-1)
end
context 'when installation does not exist' do
let(:jira_connect_installation) { instance_double(JiraConnectInstallation, id: -1) }
it 'does nothing' do
- expect { perform }.not_to change(JiraConnectInstallation, :count)
+ expect { perform }.not_to change { JiraConnectInstallation.count }
end
end
@@ -39,7 +39,7 @@ RSpec.describe JiraConnect::ForwardEventWorker do
let!(:jira_connect_installation) { create(:jira_connect_installation) }
it 'forwards the event including the auth header' do
- expect { perform }.to change(JiraConnectInstallation, :count).by(-1)
+ expect { perform }.to change { JiraConnectInstallation.count }.by(-1)
expect(JiraConnect::RetryRequestWorker).not_to receive(:perform_async)
end
diff --git a/spec/workers/jira_connect/send_uninstalled_hook_worker_spec.rb b/spec/workers/jira_connect/send_uninstalled_hook_worker_spec.rb
new file mode 100644
index 00000000000..d8ca8dee54d
--- /dev/null
+++ b/spec/workers/jira_connect/send_uninstalled_hook_worker_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::SendUninstalledHookWorker, feature_category: :integrations do
+ describe '#perform' do
+ let_it_be(:jira_connect_installation) { create(:jira_connect_installation) }
+ let(:instance_url) { 'http://example.com' }
+ let(:attempts) { 3 }
+ let(:service_response) { ServiceResponse.new(status: :success) }
+ let(:job_args) { [jira_connect_installation.id, instance_url] }
+
+ before do
+ allow(JiraConnectInstallations::ProxyLifecycleEventService).to receive(:execute).and_return(service_response)
+ end
+
+ include_examples 'an idempotent worker' do
+ it 'calls the ProxyLifecycleEventService service' do
+ expect(JiraConnectInstallations::ProxyLifecycleEventService).to receive(:execute).with(
+ jira_connect_installation,
+ :uninstalled,
+ instance_url
+ ).twice
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/mail_scheduler/notification_service_worker_spec.rb b/spec/workers/mail_scheduler/notification_service_worker_spec.rb
index 3c17025c152..482d99a43c2 100644
--- a/spec/workers/mail_scheduler/notification_service_worker_spec.rb
+++ b/spec/workers/mail_scheduler/notification_service_worker_spec.rb
@@ -53,31 +53,15 @@ RSpec.describe MailScheduler::NotificationServiceWorker do
end
context 'when the method is not allowed' do
- context 'when verify_mail_scheduler_notification_service_worker_method_names is enabled' do
- it 'raises ArgumentError' do
- expect(worker.notification_service).not_to receive(:async)
- expect(worker.notification_service).not_to receive(:foo)
+ it 'raises ArgumentError' do
+ expect(worker.notification_service).not_to receive(:async)
+ expect(worker.notification_service).not_to receive(:foo)
- expect { worker.perform('async', *serialize(key)) }
- .to raise_error(ArgumentError, 'async not allowed for MailScheduler::NotificationServiceWorker')
+ expect { worker.perform('async', *serialize(key)) }
+ .to raise_error(ArgumentError, 'async not allowed for MailScheduler::NotificationServiceWorker')
- expect { worker.perform('foo', *serialize(key)) }
- .to raise_error(ArgumentError, 'foo not allowed for MailScheduler::NotificationServiceWorker')
- end
- end
-
- context 'when verify_mail_scheduler_notification_service_worker_method_names is disabled' do
- before do
- stub_feature_flags(verify_mail_scheduler_notification_service_worker_method_names: false)
- end
-
- it 'forwards the argument to the service' do
- expect(worker.notification_service).to receive(:async)
- expect(worker.notification_service).to receive(:foo)
-
- worker.perform('async', *serialize(key))
- worker.perform('foo', *serialize(key))
- end
+ expect { worker.perform('foo', *serialize(key)) }
+ .to raise_error(ArgumentError, 'foo not allowed for MailScheduler::NotificationServiceWorker')
end
end
end
diff --git a/spec/workers/merge_requests/delete_branch_worker_spec.rb b/spec/workers/merge_requests/delete_branch_worker_spec.rb
deleted file mode 100644
index 80ca8c061f5..00000000000
--- a/spec/workers/merge_requests/delete_branch_worker_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MergeRequests::DeleteBranchWorker do
- let_it_be(:merge_request) { create(:merge_request) }
- let_it_be(:user) { create(:user) }
-
- let(:branch) { merge_request.source_branch }
- let(:sha) { merge_request.source_branch_sha }
- let(:retarget_branch) { true }
- let(:worker) { described_class.new }
-
- describe '#perform' do
- context 'with a non-existing merge request' do
- it 'does nothing' do
- expect(::Branches::DeleteService).not_to receive(:new)
- worker.perform(non_existing_record_id, user.id, branch, retarget_branch)
- end
- end
-
- context 'with a non-existing user' do
- it 'does nothing' do
- expect(::Branches::DeleteService).not_to receive(:new)
-
- worker.perform(merge_request.id, non_existing_record_id, branch, retarget_branch)
- end
- end
-
- context 'with existing user and merge request' do
- it 'calls service to delete source branch' do
- expect_next_instance_of(::Branches::DeleteService) do |instance|
- expect(instance).to receive(:execute).with(branch)
- end
-
- worker.perform(merge_request.id, user.id, branch, retarget_branch)
- end
-
- context 'when retarget branch param is true' do
- it 'calls the retarget chain service' do
- expect_next_instance_of(::MergeRequests::RetargetChainService) do |instance|
- expect(instance).to receive(:execute).with(merge_request)
- end
-
- worker.perform(merge_request.id, user.id, branch, retarget_branch)
- end
- end
-
- context 'when retarget branch param is false' do
- let(:retarget_branch) { false }
-
- it 'does not call the retarget chain service' do
- expect(::MergeRequests::RetargetChainService).not_to receive(:new)
-
- worker.perform(merge_request.id, user.id, branch, retarget_branch)
- end
- end
- end
-
- it_behaves_like 'an idempotent worker' do
- let(:merge_request) { create(:merge_request) }
- let(:job_args) { [merge_request.id, sha, user.id, true] }
- end
- end
-end
diff --git a/spec/workers/merge_requests/delete_source_branch_worker_spec.rb b/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
index 2935d3ef5dc..a7e4ffad259 100644
--- a/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
+++ b/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
@@ -3,17 +3,24 @@
require 'spec_helper'
RSpec.describe MergeRequests::DeleteSourceBranchWorker do
- let_it_be(:merge_request) { create(:merge_request) }
let_it_be(:user) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request, author: user) }
let(:sha) { merge_request.source_branch_sha }
let(:worker) { described_class.new }
describe '#perform' do
+ before do
+ allow_next_instance_of(::Projects::DeleteBranchWorker) do |instance|
+ allow(instance).to receive(:perform).with(merge_request.source_project.id, user.id,
+ merge_request.source_branch)
+ end
+ end
+
context 'when the add_delete_branch_worker feature flag is enabled' do
context 'with a non-existing merge request' do
it 'does nothing' do
- expect(::MergeRequests::DeleteBranchWorker).not_to receive(:perform_async)
+ expect(::Projects::DeleteBranchWorker).not_to receive(:new)
worker.perform(non_existing_record_id, sha, user.id)
end
@@ -21,7 +28,7 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker do
context 'with a non-existing user' do
it 'does nothing' do
- expect(::MergeRequests::DeleteBranchWorker).not_to receive(:perform_async)
+ expect(::Projects::DeleteBranchWorker).not_to receive(:new)
worker.perform(merge_request.id, sha, non_existing_record_id)
end
@@ -29,15 +36,17 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker do
context 'with existing user and merge request' do
it 'creates a new delete branch worker async' do
- expect(::MergeRequests::DeleteBranchWorker).to receive(:perform_async).with(merge_request.id, user.id,
- merge_request.source_branch, true)
+ expect_next_instance_of(::Projects::DeleteBranchWorker) do |instance|
+ expect(instance).to receive(:perform).with(merge_request.source_project.id, user.id,
+ merge_request.source_branch)
+ end
worker.perform(merge_request.id, sha, user.id)
end
context 'source branch sha does not match' do
it 'does nothing' do
- expect(::MergeRequests::DeleteBranchWorker).not_to receive(:perform_async)
+ expect(::Projects::DeleteBranchWorker).not_to receive(:new)
worker.perform(merge_request.id, 'new-source-branch-sha', user.id)
end
@@ -45,7 +54,6 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker do
end
it_behaves_like 'an idempotent worker' do
- let(:merge_request) { create(:merge_request) }
let(:job_args) { [merge_request.id, sha, user.id] }
end
end
@@ -117,7 +125,6 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker do
end
it_behaves_like 'an idempotent worker' do
- let(:merge_request) { create(:merge_request) }
let(:job_args) { [merge_request.id, sha, user.id] }
end
end
diff --git a/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb b/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
index 11343f69d6f..491ea64cff1 100644
--- a/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
+++ b/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
@@ -10,23 +10,24 @@ RSpec.describe Metrics::Dashboard::PruneOldAnnotationsWorker do
describe '#perform' do
it 'removes all annotations older than cut off', :aggregate_failures do
- Timecop.freeze(now) do
+ travel_to(now) do
described_class.new.perform
expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation, two_weeks_old_annotation])
# is idempotent in the scope of 24h
expect { described_class.new.perform }.not_to change { Metrics::Dashboard::Annotation.all.to_a }
- travel_to(24.hours.from_now) do
- described_class.new.perform
- expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation])
- end
+ end
+
+ travel_to(now + 24.hours) do
+ described_class.new.perform
+ expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation])
end
end
context 'batch to be deleted is bigger than upper limit' do
it 'schedules second job to clear remaining records' do
- Timecop.freeze(now) do
+ travel_to(now) do
create(:metrics_dashboard_annotation, starting_at: 1.month.ago)
stub_const("#{described_class}::DELETE_LIMIT", 1)
diff --git a/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb b/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
index f151780ffd7..4b670a753e7 100644
--- a/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
+++ b/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Metrics::Dashboard::SyncDashboardsWorker do
describe ".perform" do
context 'with valid dashboard hash' do
it 'imports metrics' do
- expect { worker.perform(project.id) }.to change(PrometheusMetric, :count).by(3)
+ expect { worker.perform(project.id) }.to change { PrometheusMetric.count }.by(3)
end
it 'is idempotent' do
@@ -32,7 +32,7 @@ RSpec.describe Metrics::Dashboard::SyncDashboardsWorker do
end
it 'does not import metrics' do
- expect { worker.perform(project.id) }.not_to change(PrometheusMetric, :count)
+ expect { worker.perform(project.id) }.not_to change { PrometheusMetric.count }
end
it 'does not raise an error' do
diff --git a/spec/workers/namespaces/process_sync_events_worker_spec.rb b/spec/workers/namespaces/process_sync_events_worker_spec.rb
index 5e5179eab62..9f389089609 100644
--- a/spec/workers/namespaces/process_sync_events_worker_spec.rb
+++ b/spec/workers/namespaces/process_sync_events_worker_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Namespaces::ProcessSyncEventsWorker do
expect(described_class).to receive(:perform_async).at_least(:twice).and_call_original
expect do
described_class.perform_async
- end.to change(Namespaces::SyncEvent, :count).from(3).to(0)
+ end.to change { Namespaces::SyncEvent.count }.from(3).to(0)
end
end
@@ -44,11 +44,11 @@ RSpec.describe Namespaces::ProcessSyncEventsWorker do
end
it 'consumes all sync events' do
- expect { perform }.to change(Namespaces::SyncEvent, :count).from(5).to(0)
+ expect { perform }.to change { Namespaces::SyncEvent.count }.from(5).to(0)
end
it 'syncs namespace hierarchy traversal ids' do
- expect { perform }.to change(Ci::NamespaceMirror, :all).to contain_exactly(
+ expect { perform }.to change { Ci::NamespaceMirror.all }.to contain_exactly(
an_object_having_attributes(namespace_id: group1.id, traversal_ids: [group1.id]),
an_object_having_attributes(namespace_id: group2.id, traversal_ids: [group1.id, group2.id]),
an_object_having_attributes(namespace_id: group3.id, traversal_ids: [group1.id, group2.id, group3.id])
diff --git a/spec/workers/namespaces/root_statistics_worker_spec.rb b/spec/workers/namespaces/root_statistics_worker_spec.rb
index 30854415405..e047c94816f 100644
--- a/spec/workers/namespaces/root_statistics_worker_spec.rb
+++ b/spec/workers/namespaces/root_statistics_worker_spec.rb
@@ -92,7 +92,6 @@ RSpec.describe Namespaces::RootStatisticsWorker, '#perform' do
it_behaves_like 'worker with data consistency',
described_class,
- feature_flag: :root_statistics_worker_read_replica,
data_consistency: :sticky
it 'has the `until_executed` deduplicate strategy' do
diff --git a/spec/workers/namespaces/schedule_aggregation_worker_spec.rb b/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
index f2fe53d6112..62f9be501cc 100644
--- a/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
+++ b/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Namespaces::ScheduleAggregationWorker, '#perform', :clean_gitlab_
expect do
worker.perform(group.id)
- end.not_to change(Namespace::AggregationSchedule, :count)
+ end.not_to change { Namespace::AggregationSchedule.count }
end
end
@@ -26,7 +26,7 @@ RSpec.describe Namespaces::ScheduleAggregationWorker, '#perform', :clean_gitlab_
expect do
worker.perform(group.id)
- end.to change(Namespace::AggregationSchedule, :count).by(1)
+ end.to change { Namespace::AggregationSchedule.count }.by(1)
expect(group.aggregation_schedule).to be_present
end
diff --git a/spec/workers/packages/debian/process_changes_worker_spec.rb b/spec/workers/packages/debian/process_changes_worker_spec.rb
index 93eba4bfa9a..fc482245ebe 100644
--- a/spec/workers/packages/debian/process_changes_worker_spec.rb
+++ b/spec/workers/packages/debian/process_changes_worker_spec.rb
@@ -78,10 +78,28 @@ RSpec.describe Packages::Debian::ProcessChangesWorker, type: :worker do
end
end
+ context 'without a distribution' do
+ before do
+ distribution.destroy!
+ end
+
+ it 'removes package file and log exception', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(ActiveRecord::RecordNotFound),
+ package_file_id: package_file_id,
+ user_id: user_id
+ )
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and change { Packages::PackageFile.count }.by(-1)
+ .and change { incoming.package_files.count }.from(7).to(6)
+ end
+ end
+
context 'when the service raises an error' do
let(:package_file) { incoming.package_files.first }
- it 'removes package file', :aggregate_failures do
+ it 'removes package file and log exception', :aggregate_failures do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
instance_of(Packages::Debian::ExtractChangesMetadataService::ExtractionError),
package_file_id: package_file_id,
diff --git a/spec/workers/packages/debian/process_package_file_worker_spec.rb b/spec/workers/packages/debian/process_package_file_worker_spec.rb
new file mode 100644
index 00000000000..532bfb096a3
--- /dev/null
+++ b/spec/workers/packages/debian/process_package_file_worker_spec.rb
@@ -0,0 +1,138 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, feature_category: :package_registry do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file, codename: 'unstable') }
+
+ let(:incoming) { create(:debian_incoming, project: distribution.project) }
+ let(:distribution_name) { distribution.codename }
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:package_file_id) { package_file.id }
+ let(:user_id) { user.id }
+
+ subject { worker.perform(package_file_id, user_id, distribution_name, component_name) }
+
+ shared_examples 'returns early without error' do
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessPackageFileService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:case_name, :expected_file_type, :file_name, :component_name) do
+ 'with a deb' | 'deb' | 'libsample0_1.2.3~alpha2_amd64.deb' | 'main'
+ 'with an udeb' | 'udeb' | 'sample-udeb_1.2.3~alpha2_amd64.udeb' | 'contrib'
+ end
+
+ with_them do
+ context 'with Debian package file' do
+ let(:package_file) { incoming.package_files.with_file_name(file_name).first }
+
+ context 'with mocked service' do
+ it 'calls ProcessPackageFileService' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect_next_instance_of(::Packages::Debian::ProcessPackageFileService) do |service|
+ expect(service).to receive(:execute)
+ .with(no_args)
+ end
+
+ subject
+ end
+ end
+
+ context 'with non existing user' do
+ let(:user_id) { non_existing_record_id }
+
+ it_behaves_like 'returns early without error'
+ end
+
+ context 'with nil user id' do
+ let(:user_id) { nil }
+
+ it_behaves_like 'returns early without error'
+ end
+
+ context 'when the service raises an error' do
+ let(:package_file) { incoming.package_files.with_file_name('sample_1.2.3~alpha2.tar.xz').first }
+
+ it 'removes package file', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(ArgumentError),
+ package_file_id: package_file_id,
+ user_id: user_id,
+ distribution_name: distribution_name,
+ component_name: component_name
+ )
+ expect { subject }
+ .to not_change(Packages::Package, :count)
+ .and change { Packages::PackageFile.count }.by(-1)
+ .and change { incoming.package_files.count }.from(7).to(6)
+
+ expect { package_file.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [package_file.id, user.id, distribution_name, component_name] }
+
+ it 'sets the Debian file type as deb', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ # Using subject inside this block will process the job multiple times
+ expect { subject }
+ .to change { Packages::Package.count }.from(1).to(2)
+ .and not_change(Packages::PackageFile, :count)
+ .and change { incoming.package_files.count }.from(7).to(6)
+ .and change {
+ package_file&.debian_file_metadatum&.reload&.file_type
+ }.from('unknown').to(expected_file_type)
+
+ created_package = Packages::Package.last
+ expect(created_package.name).to eq 'sample'
+ expect(created_package.version).to eq '1.2.3~alpha2'
+ expect(created_package.creator).to eq user
+ end
+ end
+ end
+ end
+
+ context 'with already processed package file' do
+ let_it_be(:package_file) { create(:debian_package_file) }
+
+ let(:component_name) { 'main' }
+
+ it_behaves_like 'returns early without error'
+ end
+
+ context 'with a deb' do
+ let(:package_file) { incoming.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
+ let(:component_name) { 'main' }
+
+ context 'with FIPS mode enabled', :fips_mode do
+ it 'raises an error' do
+ expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
+ end
+ end
+
+ context 'with non existing package file' do
+ let(:package_file_id) { non_existing_record_id }
+
+ it_behaves_like 'returns early without error'
+ end
+
+ context 'with nil package file id' do
+ let(:package_file_id) { nil }
+
+ it_behaves_like 'returns early without error'
+ end
+ end
+ end
+end
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index 4a7db0eca56..d23907a8def 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -17,8 +17,12 @@ RSpec.describe PipelineScheduleWorker do
before do
stub_application_setting(auto_devops_enabled: false)
stub_ci_pipeline_to_return_yaml_file
+ end
- pipeline_schedule.update_column(:next_run_at, 1.day.ago)
+ around do |example|
+ travel_to(pipeline_schedule.next_run_at + 1.hour) do
+ example.run
+ end
end
context 'when the schedule is runnable by the user' do
@@ -26,16 +30,22 @@ RSpec.describe PipelineScheduleWorker do
project.add_maintainer(user)
end
- context 'when there is a scheduled pipeline within next_run_at' do
+ context 'when there is a scheduled pipeline within next_run_at', :sidekiq_inline do
shared_examples 'successful scheduling' do
- it 'creates a new pipeline', :sidekiq_might_not_need_inline do
+ it 'creates a new pipeline' do
expect { subject }.to change { project.ci_pipelines.count }.by(1)
- expect(Ci::Pipeline.last).to be_schedule
+ last_pipeline = project.ci_pipelines.last
+
+ expect(last_pipeline).to be_schedule
+ expect(last_pipeline.pipeline_schedule).to eq(pipeline_schedule)
+ end
+
+ it 'updates next_run_at' do
+ expect { subject }.to change { pipeline_schedule.reload.next_run_at }.by(1.day)
+ end
- pipeline_schedule.reload
- expect(pipeline_schedule.next_run_at).to be > Time.current
- expect(pipeline_schedule).to eq(project.ci_pipelines.last.pipeline_schedule)
- expect(pipeline_schedule).to be_active
+ it 'does not change active status' do
+ expect { subject }.not_to change { pipeline_schedule.reload.active? }.from(true)
end
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index d632ca39e44..210987555c9 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -275,30 +275,17 @@ RSpec.describe PostReceive do
expect { perform }.to change { counter.read(:pushes) }.by(1)
end
- it 'records correct payload with Snowplow event', :snowplow do
- stub_feature_flags(route_hll_to_snowplow_phase2: true)
-
- perform
-
- expect_snowplow_event(
- category: 'PostReceive',
- action: 'source_code_pushes',
- namespace: project.namespace,
- user: project.first_owner,
- project: project
- )
- end
-
- context 'when FF is disabled' do
- before do
- stub_feature_flags(route_hll_to_snowplow_phase2: false)
- end
-
- it 'doesnt emit snowplow events', :snowplow do
- perform
-
- expect_no_snowplow_event
- end
+ it_behaves_like 'Snowplow event tracking' do
+ let(:action) { :push }
+ let(:category) { described_class.name }
+ let(:namespace) { project.namespace }
+ let(:user) { project.creator }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:label) { 'counts.source_code_pushes' }
+ let(:property) { 'source_code_pushes' }
+ let(:context) { [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: label).to_h] }
+
+ subject(:post_receive) { perform }
end
end
end
@@ -324,8 +311,8 @@ RSpec.describe PostReceive do
expect do
perform
project.reload
- end.to change(project, :last_activity_at)
- .and change(project, :last_repository_updated_at)
+ end.to change { project.last_activity_at }
+ .and change { project.last_repository_updated_at }
end
end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 01c44399b0c..072c660bc2b 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe ProcessCommitWorker do
it 'creates Issue::CloseWorker jobs' do
expect do
worker.close_issues(project, user, user, commit, [issue])
- end.to change(Issues::CloseWorker.jobs, :size).by(1)
+ end.to change { Issues::CloseWorker.jobs.size }.by(1)
end
end
diff --git a/spec/workers/projects/delete_branch_worker_spec.rb b/spec/workers/projects/delete_branch_worker_spec.rb
new file mode 100644
index 00000000000..c1289f56929
--- /dev/null
+++ b/spec/workers/projects/delete_branch_worker_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+# rubocop: disable Gitlab/ServiceResponse
+
+require 'spec_helper'
+
+RSpec.describe Projects::DeleteBranchWorker, feature_category: :source_code_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:branch) { 'master' }
+ let(:worker) { described_class.new }
+ let(:service_result) { ServiceResponse.success(message: 'placeholder', http_status: 200) }
+
+ before do
+ allow_next_instance_of(::Branches::DeleteService) do |instance|
+ allow(instance).to receive(:execute).with(branch).and_return(service_result)
+ end
+ end
+
+ describe '#perform' do
+ context 'when the branch does not exist' do
+ let(:branch) { 'non_existent_branch_name' }
+
+ it 'does nothing' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+
+ worker.perform(project.id, user.id, branch)
+ end
+ end
+
+ context 'with a non-existing project' do
+ it 'does nothing' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id, user.id, branch)
+ end
+ end
+
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+
+ worker.perform(project.id, non_existing_record_id, branch)
+ end
+ end
+
+ context 'with existing user and project' do
+ it 'calls service to delete source branch' do
+ expect_next_instance_of(::Branches::DeleteService) do |instance|
+ expect(instance).to receive(:execute).with(branch).and_return(service_result)
+ end
+
+ worker.perform(project.id, user.id, branch)
+ end
+
+ context 'when delete service returns an error' do
+ let(:service_result) { ServiceResponse.error(message: 'placeholder', http_status: status_code) }
+
+ context 'when the status code is 400' do
+ let(:status_code) { 400 }
+
+ it 'tracks and raises the exception' do
+ expect_next_instance_of(::Branches::DeleteService) do |instance|
+ expect(instance).to receive(:execute).with(branch).and_return(service_result)
+ end
+
+ expect(service_result).to receive(:track_and_raise_exception).and_call_original
+
+ expect { worker.perform(project.id, user.id, branch) }.to raise_error(StandardError)
+ end
+ end
+
+ context 'when the status code is not 400' do
+ let(:status_code) { 405 }
+
+ it 'does not track the exception' do
+ expect_next_instance_of(::Branches::DeleteService) do |instance|
+ expect(instance).to receive(:execute).with(branch).and_return(service_result)
+ end
+
+ expect(service_result).not_to receive(:track_and_raise_exception)
+
+ expect { worker.perform(project.id, user.id, branch) }.not_to raise_error
+ end
+ end
+
+ context 'when track_and_raise_delete_source_errors is disabled' do
+ let(:status_code) { 400 }
+
+ before do
+ stub_feature_flags(track_and_raise_delete_source_errors: false)
+ end
+
+ it 'does not track the exception' do
+ expect_next_instance_of(::Branches::DeleteService) do |instance|
+ expect(instance).to receive(:execute).with(branch).and_return(service_result)
+ end
+
+ expect(service_result).not_to receive(:track_and_raise_exception)
+
+ expect { worker.perform(project.id, user.id, branch) }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project.id, user.id, branch] }
+ end
+ end
+ # rubocop: enable Gitlab/ServiceResponse
+end
diff --git a/spec/workers/projects/import_export/parallel_project_export_worker_spec.rb b/spec/workers/projects/import_export/parallel_project_export_worker_spec.rb
new file mode 100644
index 00000000000..d3ac0a34295
--- /dev/null
+++ b/spec/workers/projects/import_export/parallel_project_export_worker_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ImportExport::ParallelProjectExportWorker, feature_category: :importers do
+ let_it_be(:user) { create(:user) }
+
+ let(:export_job) { create(:project_export_job, :started) }
+ let(:after_export_strategy) { {} }
+ let(:job_args) { [export_job.id, user.id, after_export_strategy] }
+
+ before do
+ allow_next_instance_of(described_class) do |job|
+ allow(job).to receive(:jid) { SecureRandom.hex(8) }
+ end
+ end
+
+ describe '#perform' do
+ it_behaves_like 'an idempotent worker' do
+ it 'sets the export job status to finished' do
+ subject
+
+ expect(export_job.reload.finished?).to eq(true)
+ end
+ end
+
+ context 'when after export strategy does not exist' do
+ let(:after_export_strategy) { { 'klass' => 'InvalidStrategy' } }
+
+ it 'sets the export job status to failed' do
+ described_class.new.perform(*job_args)
+
+ expect(export_job.reload.failed?).to eq(true)
+ end
+ end
+ end
+
+ describe '.sidekiq_retries_exhausted' do
+ let(:job) { { 'args' => job_args, 'error_message' => 'Error message' } }
+
+ it 'sets export_job status to failed' do
+ described_class.sidekiq_retries_exhausted_block.call(job)
+
+ expect(export_job.reload.failed?).to eq(true)
+ end
+
+ it 'logs an error message' do
+ expect_next_instance_of(Gitlab::Export::Logger) do |logger|
+ expect(logger).to receive(:error).with(
+ hash_including(
+ message: 'Parallel project export error',
+ export_error: 'Error message'
+ )
+ )
+ end
+
+ described_class.sidekiq_retries_exhausted_block.call(job)
+ end
+ end
+end
diff --git a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
index 50b5b0a6e7b..f3c6434dc85 100644
--- a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
+++ b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
describe "#perform" do
subject(:worker) { described_class.new }
- let_it_be(:admin_user) { create(:user, :admin) }
+ let_it_be(:admin_bot) { create(:user, :admin_bot) }
let_it_be(:non_admin_user) { create(:user) }
let_it_be(:new_blank_project) do
create_project_with_statistics.tap do |project|
@@ -121,7 +121,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
end
expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
- expect(::Projects::DestroyService).to receive(:new).with(inactive_large_project, admin_user, {})
+ expect(::Projects::DestroyService).to receive(:new).with(inactive_large_project, admin_bot, {})
.at_least(:once).and_call_original
worker.perform
diff --git a/spec/workers/projects/process_sync_events_worker_spec.rb b/spec/workers/projects/process_sync_events_worker_spec.rb
index 202942ce905..a10a4797b2c 100644
--- a/spec/workers/projects/process_sync_events_worker_spec.rb
+++ b/spec/workers/projects/process_sync_events_worker_spec.rb
@@ -26,11 +26,11 @@ RSpec.describe Projects::ProcessSyncEventsWorker do
end
it 'consumes all sync events' do
- expect { perform }.to change(Projects::SyncEvent, :count).from(2).to(0)
+ expect { perform }.to change { Projects::SyncEvent.count }.from(2).to(0)
end
it 'syncs project namespace id' do
- expect { perform }.to change(Ci::ProjectMirror, :all).to contain_exactly(
+ expect { perform }.to change { Ci::ProjectMirror.all }.to contain_exactly(
an_object_having_attributes(namespace_id: group.id)
)
end
diff --git a/spec/workers/releases/create_evidence_worker_spec.rb b/spec/workers/releases/create_evidence_worker_spec.rb
index 743f2abc8a7..7e3edcfe44a 100644
--- a/spec/workers/releases/create_evidence_worker_spec.rb
+++ b/spec/workers/releases/create_evidence_worker_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Releases::CreateEvidenceWorker do
expect(service).to receive(:execute).and_call_original
end
- expect { described_class.new.perform(release.id) }.to change(Releases::Evidence, :count).by(1)
+ expect { described_class.new.perform(release.id) }.to change { Releases::Evidence.count }.by(1)
end
it 'creates a new Evidence record with pipeline' do
@@ -21,6 +21,6 @@ RSpec.describe Releases::CreateEvidenceWorker do
expect(service).to receive(:execute).and_call_original
end
- expect { described_class.new.perform(release.id, pipeline.id) }.to change(Releases::Evidence, :count).by(1)
+ expect { described_class.new.perform(release.id, pipeline.id) }.to change { Releases::Evidence.count }.by(1)
end
end
diff --git a/spec/workers/releases/manage_evidence_worker_spec.rb b/spec/workers/releases/manage_evidence_worker_spec.rb
index 886fcd346eb..0004a4f4bfb 100644
--- a/spec/workers/releases/manage_evidence_worker_spec.rb
+++ b/spec/workers/releases/manage_evidence_worker_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Releases::ManageEvidenceWorker do
specify :sidekiq_inline do
aggregate_failures do
expect(::Releases::CreateEvidenceService).not_to receive(:execute)
- expect { described_class.new.perform }.to change(Releases::Evidence, :count).by(0)
+ expect { described_class.new.perform }.to change { Releases::Evidence.count }.by(0)
end
end
end
@@ -23,7 +23,7 @@ RSpec.describe Releases::ManageEvidenceWorker do
expect(service).to receive(:execute).and_call_original
end
- expect { described_class.new.perform }.to change(Releases::Evidence, :count).by(1)
+ expect { described_class.new.perform }.to change { Releases::Evidence.count }.by(1)
end
end
diff --git a/spec/workers/repository_check/single_repository_worker_spec.rb b/spec/workers/repository_check/single_repository_worker_spec.rb
index b8db262598b..0a37a296e7a 100644
--- a/spec/workers/repository_check/single_repository_worker_spec.rb
+++ b/spec/workers/repository_check/single_repository_worker_spec.rb
@@ -98,16 +98,6 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
expect(project.reload.last_repository_check_failed).to eq(false)
end
- it 'does not create a wiki if the main repo does not exist at all' do
- project = create(:project, :repository)
- project.repository.raw.remove
- project.wiki.repository.raw.remove
-
- subject.perform(project.id)
-
- expect(TestEnv.storage_dir_exists?(project.repository_storage, project.wiki.path)).to eq(false)
- end
-
def create_push_event(project)
project.events.create!(action: :pushed, author_id: create(:user).id)
end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 5fa7c5d64db..4fdf6149435 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe RunPipelineScheduleWorker do
+ it 'has an until_executed deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ end
+
describe '#perform' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
@@ -11,6 +15,12 @@ RSpec.describe RunPipelineScheduleWorker do
let(:worker) { described_class.new }
+ around do |example|
+ travel_to(pipeline_schedule.next_run_at + 1.hour) do
+ example.run
+ end
+ end
+
context 'when a schedule not found' do
it 'does not call the Service' do
expect(Ci::CreatePipelineService).not_to receive(:new)
diff --git a/spec/workers/tasks_to_be_done/create_worker_spec.rb b/spec/workers/tasks_to_be_done/create_worker_spec.rb
index a158872273f..e884a71933e 100644
--- a/spec/workers/tasks_to_be_done/create_worker_spec.rb
+++ b/spec/workers/tasks_to_be_done/create_worker_spec.rb
@@ -24,13 +24,13 @@ RSpec.describe TasksToBeDone::CreateWorker do
.and_call_original
end
- expect { described_class.new.perform(*job_args) }.to change(Issue, :count).by(3)
+ expect { described_class.new.perform(*job_args) }.to change { Issue.count }.by(3)
end
end
include_examples 'an idempotent worker' do
it 'creates 3 task issues' do
- expect { subject }.to change(Issue, :count).by(3)
+ expect { subject }.to change { Issue.count }.by(3)
end
end
end
diff --git a/spec/workers/update_highest_role_worker_spec.rb b/spec/workers/update_highest_role_worker_spec.rb
index 0c8ee53da9a..cd127f26e95 100644
--- a/spec/workers/update_highest_role_worker_spec.rb
+++ b/spec/workers/update_highest_role_worker_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe UpdateHighestRoleWorker, :clean_gitlab_redis_shared_state do
describe '#perform' do
context 'when user is not found' do
it 'does not update or deletes any highest role', :aggregate_failures do
- expect { worker.perform(-1) }.not_to change(UserHighestRole, :count)
+ expect { worker.perform(-1) }.not_to change { UserHighestRole.count }
end
end
@@ -71,7 +71,7 @@ RSpec.describe UpdateHighestRoleWorker, :clean_gitlab_redis_shared_state do
it 'does not delete a highest role' do
user = create(:user, state: 'blocked')
- expect { worker.perform(user.id) }.not_to change(UserHighestRole, :count)
+ expect { worker.perform(user.id) }.not_to change { UserHighestRole.count }
end
end
end