gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2021-10-20 11:43:02 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-10-20 11:43:02 +0300
commit    d9ab72d6080f594d0b3cae15f14b3ef2c6c638cb (patch)
tree      2341ef426af70ad1e289c38036737e04b0aa5007 /spec/workers
parent    d6e514dd13db8947884cd58fe2a9c2a063400a9b (diff)
Add latest changes from gitlab-org/gitlab@14-4-stable-ee (tag: v14.4.0-rc42)
Diffstat (limited to 'spec/workers')
-rw-r--r--  spec/workers/authorized_project_update/project_recalculate_per_user_worker_spec.rb | 70
-rw-r--r--  spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb | 2
-rw-r--r--  spec/workers/build_finished_worker_spec.rb | 28
-rw-r--r--  spec/workers/bulk_import_worker_spec.rb | 6
-rw-r--r--  spec/workers/bulk_imports/export_request_worker_spec.rb | 29
-rw-r--r--  spec/workers/bulk_imports/pipeline_worker_spec.rb | 112
-rw-r--r--  spec/workers/ci/create_downstream_pipeline_worker_spec.rb | 37
-rw-r--r--  spec/workers/ci/stuck_builds/drop_running_worker_spec.rb | 28
-rw-r--r--  spec/workers/ci/stuck_builds/drop_scheduled_worker_spec.rb | 28
-rw-r--r--  spec/workers/cleanup_container_repository_worker_spec.rb | 6
-rw-r--r--  spec/workers/concerns/application_worker_spec.rb | 12
-rw-r--r--  spec/workers/concerns/gitlab/github_import/object_importer_spec.rb | 63
-rw-r--r--  spec/workers/concerns/worker_context_spec.rb | 4
-rw-r--r--  spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb | 55
-rw-r--r--  spec/workers/container_expiration_policy_worker_spec.rb | 2
-rw-r--r--  spec/workers/create_note_diff_file_worker_spec.rb | 18
-rw-r--r--  spec/workers/database/drop_detached_partitions_worker_spec.rb | 7
-rw-r--r--  spec/workers/dependency_proxy/cleanup_blob_worker_spec.rb | 9
-rw-r--r--  spec/workers/dependency_proxy/cleanup_manifest_worker_spec.rb | 9
-rw-r--r--  spec/workers/dependency_proxy/image_ttl_group_policy_worker_spec.rb | 74
-rw-r--r--  spec/workers/email_receiver_worker_spec.rb | 15
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb | 25
-rw-r--r--  spec/workers/expire_job_cache_worker_spec.rb | 2
-rw-r--r--  spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb | 44
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb | 28
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb | 27
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb | 11
-rw-r--r--  spec/workers/issue_placement_worker_spec.rb | 2
-rw-r--r--  spec/workers/packages/composer/cache_cleanup_worker_spec.rb | 8
-rw-r--r--  spec/workers/packages/composer/cache_update_worker_spec.rb | 8
-rw-r--r--  spec/workers/pages_remove_worker_spec.rb | 11
-rw-r--r--  spec/workers/pipeline_hooks_worker_spec.rb | 8
-rw-r--r--  spec/workers/pipeline_process_worker_spec.rb | 10
-rw-r--r--  spec/workers/post_receive_spec.rb | 2
-rw-r--r--  spec/workers/run_pipeline_schedule_worker_spec.rb | 15
-rw-r--r--  spec/workers/stuck_ci_jobs_worker_spec.rb | 50
36 files changed, 672 insertions(+), 193 deletions(-)
diff --git a/spec/workers/authorized_project_update/project_recalculate_per_user_worker_spec.rb b/spec/workers/authorized_project_update/project_recalculate_per_user_worker_spec.rb
new file mode 100644
index 00000000000..57a0726000f
--- /dev/null
+++ b/spec/workers/authorized_project_update/project_recalculate_per_user_worker_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker do
+ include ExclusiveLeaseHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ subject(:worker) { described_class.new }
+
+ include_examples 'an idempotent worker' do
+ let(:job_args) { [project.id, user.id] }
+
+ it 'does not change authorizations when run twice' do
+ project.add_developer(user)
+
+ user.project_authorizations.delete_all
+
+ expect { worker.perform(project.id, user.id) }.to change { project.project_authorizations.reload.size }.by(1)
+ expect { worker.perform(project.id, user.id) }.not_to change { project.project_authorizations.reload.size }
+ end
+ end
+
+ describe '#perform' do
+ it 'does not fail if the project does not exist' do
+ expect do
+ worker.perform(non_existing_record_id, user.id)
+ end.not_to raise_error
+ end
+
+ it 'does not fail if the user does not exist' do
+ expect do
+ worker.perform(project.id, non_existing_record_id)
+ end.not_to raise_error
+ end
+
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService' do
+ expect_next_instance_of(AuthorizedProjectUpdate::ProjectRecalculatePerUserService, project, user) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ worker.perform(project.id, user.id)
+ end
+
+ context 'exclusive lease' do
+ let(:lock_key) { "#{described_class.superclass.name.underscore}/projects/#{project.id}" }
+ let(:timeout) { 10.seconds }
+
+ context 'when exclusive lease has not been taken' do
+ it 'obtains a new exclusive lease' do
+ expect_to_obtain_exclusive_lease(lock_key, timeout: timeout)
+
+ worker.perform(project.id, user.id)
+ end
+ end
+
+ context 'when exclusive lease has already been taken' do
+ before do
+ stub_exclusive_lease_taken(lock_key, timeout: timeout)
+ end
+
+ it 'raises an error' do
+ expect { worker.perform(project.id, user.id) }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
index 0da58343773..da4b726c0b5 100644
--- a/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_from_replica_worker_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshFromReplicaWorker do
end
end
- context 'with load balancing enabled', :db_load_balancing do
+ context 'with load balancing enabled' do
it 'reads from the replica database' do
expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 4e34d2348d6..2ca7837066b 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -3,7 +3,9 @@
require 'spec_helper'
RSpec.describe BuildFinishedWorker do
- subject { described_class.new.perform(build.id) }
+ let(:worker) { described_class.new }
+
+ subject { worker.perform(build.id) }
describe '#perform' do
context 'when build exists' do
@@ -63,6 +65,30 @@ RSpec.describe BuildFinishedWorker do
subject
end
end
+
+ context 'when project is deleted' do
+ before do
+ allow(build).to receive(:project).and_return(nil)
+ end
+
+ it 'does no processing' do
+ expect(worker).not_to receive(:process_build)
+
+ subject
+ end
+ end
+
+ context 'when project is pending_delete' do
+ before do
+ build.project.update_attribute(:pending_delete, true)
+ end
+
+ it 'does no processing' do
+ expect(worker).not_to receive(:process_build)
+
+ subject
+ end
+ end
end
context 'when build does not exist' do
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index b67c5c62f76..12e29573156 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe BulkImportWorker do
expect { subject.perform(bulk_import.id) }
.to change(BulkImports::Tracker, :count)
- .by(BulkImports::Groups::Stage.pipelines.size * 2)
+ .by(BulkImports::Groups::Stage.new(bulk_import).pipelines.size * 2)
expect(entity_1.trackers).not_to be_empty
expect(entity_2.trackers).not_to be_empty
@@ -111,10 +111,10 @@ RSpec.describe BulkImportWorker do
end
context 'when there are project entities to process' do
- it 'does not enqueue ExportRequestWorker' do
+ it 'enqueues ExportRequestWorker' do
create(:bulk_import_entity, :created, :project_entity, bulk_import: bulk_import)
- expect(BulkImports::ExportRequestWorker).not_to receive(:perform_async)
+ expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
subject.perform(bulk_import.id)
end
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index cb280c6d263..f838bff528c 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe BulkImports::ExportRequestWorker do
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
- let_it_be(:entity) { create(:bulk_import_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
let_it_be(:version_url) { 'https://gitlab.example/api/v4/version' }
let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
@@ -20,16 +19,30 @@ RSpec.describe BulkImports::ExportRequestWorker do
allow(Gitlab::HTTP).to receive(:post).and_return(response_double)
end
- include_examples 'an idempotent worker' do
- it 'requests relations export' do
- expected = "/groups/foo%2Fbar/export_relations"
+ shared_examples 'requests relations export for api resource' do
+ include_examples 'an idempotent worker' do
+ it 'requests relations export' do
+ expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ expect(client).to receive(:post).with(expected).twice
+ end
- expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
- expect(client).to receive(:post).with(expected).twice
+ perform_multiple(job_args)
end
-
- perform_multiple(job_args)
end
end
+
+ context 'when entity is group' do
+ let(:entity) { create(:bulk_import_entity, :group_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
+ let(:expected) { '/groups/foo%2Fbar/export_relations'}
+
+ include_examples 'requests relations export for api resource'
+ end
+
+ context 'when entity is project' do
+ let(:entity) { create(:bulk_import_entity, :project_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
+ let(:expected) { '/projects/foo%2Fbar/export_relations' }
+
+ include_examples 'requests relations export for api resource'
+ end
end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 56f28654ac5..c902d1f2034 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -22,47 +22,65 @@ RSpec.describe BulkImports::PipelineWorker do
before do
stub_const('FakePipeline', pipeline_class)
- allow(BulkImports::Groups::Stage)
- .to receive(:pipelines)
- .and_return([[0, pipeline_class]])
+ allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
+ allow(instance).to receive(:pipelines)
+ .and_return([[0, pipeline_class]])
+ end
end
- it 'runs the given pipeline successfully' do
- pipeline_tracker = create(
- :bulk_import_tracker,
- entity: entity,
- pipeline_name: 'FakePipeline'
- )
+ shared_examples 'successfully runs the pipeline' do
+ it 'runs the given pipeline successfully' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ pipeline_name: 'FakePipeline',
+ entity_id: entity.id
+ )
+ end
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- worker: described_class.name,
- pipeline_name: 'FakePipeline',
- entity_id: entity.id
- )
- end
+ expect(subject).to receive(:jid).and_return('jid')
- expect(BulkImports::EntityWorker)
- .to receive(:perform_async)
- .with(entity.id, pipeline_tracker.stage)
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
- expect(subject).to receive(:jid).and_return('jid')
+ pipeline_tracker.reload
- subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ expect(pipeline_tracker.status_name).to eq(:finished)
+ expect(pipeline_tracker.jid).to eq('jid')
+ end
+ end
- pipeline_tracker.reload
+ it_behaves_like 'successfully runs the pipeline' do
+ let(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+ end
+ end
- expect(pipeline_tracker.status_name).to eq(:finished)
- expect(pipeline_tracker.jid).to eq('jid')
+ it_behaves_like 'successfully runs the pipeline' do
+ let(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ :started,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+ end
end
context 'when the pipeline cannot be found' do
it 'logs the error' do
pipeline_tracker = create(
:bulk_import_tracker,
- :started,
+ :finished,
entity: entity,
pipeline_name: 'FakePipeline'
)
@@ -126,6 +144,39 @@ RSpec.describe BulkImports::PipelineWorker do
expect(pipeline_tracker.status_name).to eq(:failed)
expect(pipeline_tracker.jid).to eq('jid')
end
+
+ context 'when it is a network error' do
+ it 'reenqueue on retriable network errors' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+
+ exception = BulkImports::NetworkError.new(
+ response: double(code: 429, headers: {})
+ )
+
+ expect_next_instance_of(pipeline_class) do |pipeline|
+ expect(pipeline)
+ .to receive(:run)
+ .and_raise(exception)
+ end
+
+ expect(subject).to receive(:jid).and_return('jid')
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(
+ 60.seconds,
+ pipeline_tracker.id,
+ pipeline_tracker.stage,
+ pipeline_tracker.entity.id
+ )
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ end
+ end
end
context 'when ndjson pipeline' do
@@ -156,9 +207,10 @@ RSpec.describe BulkImports::PipelineWorker do
before do
stub_const('NdjsonPipeline', ndjson_pipeline)
- allow(BulkImports::Groups::Stage)
- .to receive(:pipelines)
- .and_return([[0, ndjson_pipeline]])
+ allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
+ allow(instance).to receive(:pipelines)
+ .and_return([[0, ndjson_pipeline]])
+ end
end
it 'runs the pipeline successfully' do
diff --git a/spec/workers/ci/create_downstream_pipeline_worker_spec.rb b/spec/workers/ci/create_downstream_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..7a75da850d9
--- /dev/null
+++ b/spec/workers/ci/create_downstream_pipeline_worker_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreateDownstreamPipelineWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:bridge) { create(:ci_bridge, user: user, pipeline: pipeline) }
+
+ let(:service) { double('pipeline creation service') }
+
+ describe '#perform' do
+ context 'when bridge exists' do
+ it 'calls cross project pipeline creation service' do
+ expect(Ci::CreateDownstreamPipelineService)
+ .to receive(:new)
+ .with(project, user)
+ .and_return(service)
+
+ expect(service).to receive(:execute).with(bridge)
+
+ described_class.new.perform(bridge.id)
+ end
+ end
+
+ context 'when bridge does not exist' do
+ it 'does nothing' do
+ expect(Ci::CreateDownstreamPipelineService)
+ .not_to receive(:new)
+
+ described_class.new.perform(non_existing_record_id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/stuck_builds/drop_running_worker_spec.rb b/spec/workers/ci/stuck_builds/drop_running_worker_spec.rb
new file mode 100644
index 00000000000..6d3aa71fe81
--- /dev/null
+++ b/spec/workers/ci/stuck_builds/drop_running_worker_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropRunningWorker do
+ include ExclusiveLeaseHelpers
+
+ let(:worker) { described_class.new }
+ let(:lease_uuid) { SecureRandom.uuid }
+
+ describe '#perform' do
+ subject { worker.perform }
+
+ it_behaves_like 'an idempotent worker'
+
+ it 'executes an instance of Ci::StuckBuilds::DropRunningService' do
+ expect_to_obtain_exclusive_lease(worker.lease_key, lease_uuid)
+
+ expect_next_instance_of(Ci::StuckBuilds::DropRunningService) do |service|
+ expect(service).to receive(:execute).exactly(:once)
+ end
+
+ expect_to_cancel_exclusive_lease(worker.lease_key, lease_uuid)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/ci/stuck_builds/drop_scheduled_worker_spec.rb b/spec/workers/ci/stuck_builds/drop_scheduled_worker_spec.rb
new file mode 100644
index 00000000000..57be799d890
--- /dev/null
+++ b/spec/workers/ci/stuck_builds/drop_scheduled_worker_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropScheduledWorker do
+ include ExclusiveLeaseHelpers
+
+ let(:worker) { described_class.new }
+ let(:lease_uuid) { SecureRandom.uuid }
+
+ describe '#perform' do
+ subject { worker.perform }
+
+ it_behaves_like 'an idempotent worker'
+
+ it 'executes an instance of Ci::StuckBuilds::DropScheduledService with an exclusive lease' do
+ expect_to_obtain_exclusive_lease(worker.lease_key, lease_uuid)
+
+ expect_next_instance_of(Ci::StuckBuilds::DropScheduledService) do |service|
+ expect(service).to receive(:execute).exactly(:once)
+ end
+
+ expect_to_cancel_exclusive_lease(worker.lease_key, lease_uuid)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/cleanup_container_repository_worker_spec.rb b/spec/workers/cleanup_container_repository_worker_spec.rb
index 9cf8974a2a1..6ae4308bd46 100644
--- a/spec/workers/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/cleanup_container_repository_worker_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
it 'executes the destroy service' do
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
- .with(project, user, params.merge('container_expiration_policy' => false))
+ .with(repository, user, params.merge('container_expiration_policy' => false))
.and_return(service)
expect(service).to receive(:execute)
@@ -49,7 +49,7 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
expect(repository).to receive(:start_expiration_policy!).and_call_original
expect(repository).to receive(:reset_expiration_policy_started_at!).and_call_original
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
- .with(project, nil, params.merge('container_expiration_policy' => true))
+ .with(repository, nil, params.merge('container_expiration_policy' => true))
.and_return(service)
expect(service).to receive(:execute).and_return(status: :success)
@@ -62,7 +62,7 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
expect(repository).to receive(:start_expiration_policy!).and_call_original
expect(repository).not_to receive(:reset_expiration_policy_started_at!)
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
- .with(project, nil, params.merge('container_expiration_policy' => true))
+ .with(repository, nil, params.merge('container_expiration_policy' => true))
.and_return(service)
expect(service).to receive(:execute).and_return(status: :error, message: 'timeout while deleting tags')
diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb
index ac4e4a682c8..af038c81b9e 100644
--- a/spec/workers/concerns/application_worker_spec.rb
+++ b/spec/workers/concerns/application_worker_spec.rb
@@ -248,6 +248,10 @@ RSpec.describe ApplicationWorker do
end
describe '.perform_async' do
+ before do
+ stub_const(worker.name, worker)
+ end
+
shared_examples_for 'worker utilizes load balancing capabilities' do |data_consistency|
before do
worker.data_consistency(data_consistency)
@@ -282,6 +286,10 @@ RSpec.describe ApplicationWorker do
end
describe '.bulk_perform_async' do
+ before do
+ stub_const(worker.name, worker)
+ end
+
it 'enqueues jobs in bulk' do
Sidekiq::Testing.fake! do
worker.bulk_perform_async([['Foo', [1]], ['Foo', [2]]])
@@ -293,6 +301,10 @@ RSpec.describe ApplicationWorker do
end
describe '.bulk_perform_in' do
+ before do
+ stub_const(worker.name, worker)
+ end
+
context 'when delay is valid' do
it 'correctly schedules jobs' do
Sidekiq::Testing.fake! do
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index c1ac5ffebe8..b5252294b27 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::ObjectImporter do
+RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures do
let(:worker) do
Class.new do
def self.name
@@ -26,9 +26,15 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
let(:importer_class) { double(:importer_class, name: 'klass_name') }
let(:importer_instance) { double(:importer_instance) }
let(:client) { double(:client) }
+ let(:github_identifiers) do
+ {
+ some_id: 1,
+ some_type: '_some_type_'
+ }
+ end
- before do
- stub_const('MockRepresantation', Class.new do
+ let(:representation_class) do
+ Class.new do
include Gitlab::GithubImport::Representation::ToHash
include Gitlab::GithubImport::Representation::ExposeAttribute
@@ -41,7 +47,20 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
def initialize(attributes)
@attributes = attributes
end
- end)
+
+ def github_identifiers
+ {
+ some_id: 1,
+ some_type: '_some_type_'
+ }
+ end
+ end
+ end
+
+ let(:stubbed_representation) { representation_class }
+
+ before do
+ stub_const('MockRepresantation', stubbed_representation)
end
describe '#import', :clean_gitlab_redis_cache do
@@ -64,7 +83,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
- github_id: 1,
+ github_identifiers: github_identifiers,
message: 'starting importer',
project_id: project.id,
importer: 'klass_name'
@@ -73,7 +92,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
- github_id: 1,
+ github_identifiers: github_identifiers,
message: 'importer finished',
project_id: project.id,
importer: 'klass_name'
@@ -101,7 +120,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
- github_id: 1,
+ github_identifiers: github_identifiers,
message: 'starting importer',
project_id: project.id,
importer: 'klass_name'
@@ -125,21 +144,25 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(project.import_failures.last.exception_message).to eq('some error')
end
- it 'logs error when representation does not have a github_id' do
- expect(importer_class).not_to receive(:new)
+ context 'without github_identifiers defined' do
+ let(:stubbed_representation) { representation_class.instance_eval { undef_method :github_identifiers } }
- expect(Gitlab::Import::ImportFailureService)
- .to receive(:track)
- .with(
- project_id: project.id,
- exception: a_kind_of(KeyError),
- error_source: 'klass_name',
- fail_import: true
- )
- .and_call_original
+ it 'logs error when representation does not have a github_id' do
+ expect(importer_class).not_to receive(:new)
- expect { worker.import(project, client, { 'number' => 10 }) }
- .to raise_error(KeyError, 'key not found: :github_id')
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: a_kind_of(NoMethodError),
+ error_source: 'klass_name',
+ fail_import: true
+ )
+ .and_call_original
+
+ expect { worker.import(project, client, { 'number' => 10 }) }
+ .to raise_error(NoMethodError, /^undefined method `github_identifiers/)
+ end
end
end
end
diff --git a/spec/workers/concerns/worker_context_spec.rb b/spec/workers/concerns/worker_context_spec.rb
index ebdb752d900..80b427b2b42 100644
--- a/spec/workers/concerns/worker_context_spec.rb
+++ b/spec/workers/concerns/worker_context_spec.rb
@@ -13,6 +13,10 @@ RSpec.describe WorkerContext do
end
end
+ before do
+ stub_const(worker.name, worker)
+ end
+
describe '.worker_context' do
it 'allows modifying the context for the entire worker' do
worker.worker_context(user: build_stubbed(:user))
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index fdba67638c1..d4126fe688a 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -74,6 +74,30 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
end
end
+
+ context 'the cache hit ratio field' do
+ where(:after_truncate_size, :cached_tags_count, :ratio) do
+ nil | nil | nil
+ 10 | nil | nil
+ nil | 10 | nil
+ 0 | 5 | nil
+ 10 | 0 | 0
+ 10 | 5 | 0.5
+ 3 | 10 | (10 / 3.to_f)
+ end
+
+ with_them do
+ it 'is logged properly' do
+ service_response = cleanup_service_response(status: :unfinished, repository: repository, cleanup_tags_service_before_truncate_size: after_truncate_size, cleanup_tags_service_after_truncate_size: after_truncate_size, cleanup_tags_service_cached_tags_count: cached_tags_count)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished, truncated: false, cache_hit_ratio: ratio)
+ expect_log_info(project_id: project.id, container_repository_id: repository.id)
+
+ subject
+ end
+ end
+ end
end
context 'with an erroneous cleanup' do
@@ -372,7 +396,16 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
end
- def cleanup_service_response(status: :finished, repository:, cleanup_tags_service_original_size: 100, cleanup_tags_service_before_truncate_size: 80, cleanup_tags_service_after_truncate_size: 80, cleanup_tags_service_before_delete_size: 50, cleanup_tags_service_deleted_size: 50)
+ def cleanup_service_response(
+ status: :finished,
+ repository:,
+ cleanup_tags_service_original_size: 100,
+ cleanup_tags_service_before_truncate_size: 80,
+ cleanup_tags_service_after_truncate_size: 80,
+ cleanup_tags_service_before_delete_size: 50,
+ cleanup_tags_service_deleted_size: 50,
+ cleanup_tags_service_cached_tags_count: 0
+ )
ServiceResponse.success(
message: "cleanup #{status}",
payload: {
@@ -381,21 +414,35 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
cleanup_tags_service_original_size: cleanup_tags_service_original_size,
cleanup_tags_service_before_truncate_size: cleanup_tags_service_before_truncate_size,
cleanup_tags_service_after_truncate_size: cleanup_tags_service_after_truncate_size,
- cleanup_tags_service_before_delete_size: cleanup_tags_service_before_delete_size
+ cleanup_tags_service_before_delete_size: cleanup_tags_service_before_delete_size,
+ cleanup_tags_service_cached_tags_count: cleanup_tags_service_cached_tags_count
}.compact
)
end
- def expect_log_extra_metadata(service_response:, cleanup_status: :finished, truncated: false)
+ def expect_log_extra_metadata(service_response:, cleanup_status: :finished, truncated: false, cache_hit_ratio: 0)
expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:project_id, repository.project.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, cleanup_status)
- %i[cleanup_tags_service_original_size cleanup_tags_service_before_truncate_size cleanup_tags_service_after_truncate_size cleanup_tags_service_before_delete_size cleanup_tags_service_deleted_size].each do |field|
+ %i[
+ cleanup_tags_service_original_size
+ cleanup_tags_service_before_truncate_size
+ cleanup_tags_service_after_truncate_size
+ cleanup_tags_service_before_delete_size
+ cleanup_tags_service_deleted_size
+ cleanup_tags_service_cached_tags_count
+ ].each do |field|
value = service_response.payload[field]
expect(worker).to receive(:log_extra_metadata_on_done).with(field, value) unless value.nil?
end
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_tags_service_truncated, truncated)
+
+ after_truncate_size = service_response.payload[:cleanup_tags_service_after_truncate_size]
+ if cache_hit_ratio && after_truncate_size && after_truncate_size != 0
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_tags_service_cache_hit_ratio, cache_hit_ratio)
+ end
+
expect(worker).to receive(:log_extra_metadata_on_done).with(:running_jobs_count, 0)
if service_response.error?
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index 9f370b10f6a..ebf80041151 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe ContainerExpirationPolicyWorker do
subject
end
- context 'with load balancing enabled', :db_load_balancing do
+ context 'with load balancing enabled' do
it 'reads the counts from the replica' do
expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
diff --git a/spec/workers/create_note_diff_file_worker_spec.rb b/spec/workers/create_note_diff_file_worker_spec.rb
index 4c1df8ade06..6d1d6d93e44 100644
--- a/spec/workers/create_note_diff_file_worker_spec.rb
+++ b/spec/workers/create_note_diff_file_worker_spec.rb
@@ -14,5 +14,23 @@ RSpec.describe CreateNoteDiffFileWorker do
described_class.new.perform(diff_note.id)
end
+
+ context "when the supplied diff_note_id doesn't belong to an existing DiffNote" do
+ it "returns nil without raising an error" do
+ expect_any_instance_of(DiffNote).not_to receive(:create_diff_file)
+ .and_call_original
+
+ described_class.new.perform(non_existing_record_id)
+ end
+ end
+
+ context "when called with a missing diff_note id" do
+ it "returns nil without creating diff file" do
+ expect_any_instance_of(DiffNote).not_to receive(:create_diff_file)
+ .and_call_original
+
+ described_class.new.perform(nil)
+ end
+ end
end
end
diff --git a/spec/workers/database/drop_detached_partitions_worker_spec.rb b/spec/workers/database/drop_detached_partitions_worker_spec.rb
index 42c3fa3c188..8693878ddd5 100644
--- a/spec/workers/database/drop_detached_partitions_worker_spec.rb
+++ b/spec/workers/database/drop_detached_partitions_worker_spec.rb
@@ -6,16 +6,15 @@ RSpec.describe Database::DropDetachedPartitionsWorker do
describe '#perform' do
subject { described_class.new.perform }
- let(:dropper) { instance_double('DropDetachedPartitions', perform: nil) }
let(:monitoring) { instance_double('PartitionMonitoring', report_metrics: nil) }
before do
- allow(Gitlab::Database::Partitioning::DetachedPartitionDropper).to receive(:new).and_return(dropper)
+ allow(Gitlab::Database::Partitioning).to receive(:drop_detached_partitions)
allow(Gitlab::Database::Partitioning::PartitionMonitoring).to receive(:new).and_return(monitoring)
end
- it 'delegates to DropPartitionsPendingDrop' do
- expect(dropper).to receive(:perform)
+ it 'delegates to Partitioning.drop_detached_partitions' do
+ expect(Gitlab::Database::Partitioning).to receive(:drop_detached_partitions)
subject
end
diff --git a/spec/workers/dependency_proxy/cleanup_blob_worker_spec.rb b/spec/workers/dependency_proxy/cleanup_blob_worker_spec.rb
new file mode 100644
index 00000000000..b67a56cca7b
--- /dev/null
+++ b/spec/workers/dependency_proxy/cleanup_blob_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DependencyProxy::CleanupBlobWorker do
+ let_it_be(:factory_type) { :dependency_proxy_blob }
+
+ it_behaves_like 'dependency_proxy_cleanup_worker'
+end
diff --git a/spec/workers/dependency_proxy/cleanup_manifest_worker_spec.rb b/spec/workers/dependency_proxy/cleanup_manifest_worker_spec.rb
new file mode 100644
index 00000000000..d53b3e6a1fd
--- /dev/null
+++ b/spec/workers/dependency_proxy/cleanup_manifest_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DependencyProxy::CleanupManifestWorker do
+ let_it_be(:factory_type) { :dependency_proxy_manifest }
+
+ it_behaves_like 'dependency_proxy_cleanup_worker'
+end
diff --git a/spec/workers/dependency_proxy/image_ttl_group_policy_worker_spec.rb b/spec/workers/dependency_proxy/image_ttl_group_policy_worker_spec.rb
new file mode 100644
index 00000000000..d3234f4c212
--- /dev/null
+++ b/spec/workers/dependency_proxy/image_ttl_group_policy_worker_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DependencyProxy::ImageTtlGroupPolicyWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ let_it_be(:policy) { create(:image_ttl_group_policy) }
+ let_it_be(:group) { policy.group }
+
+ subject { worker.perform }
+
+ context 'when there are images to expire' do
+ let_it_be_with_reload(:old_blob) { create(:dependency_proxy_blob, group: group, updated_at: 1.year.ago) }
+ let_it_be_with_reload(:old_manifest) { create(:dependency_proxy_manifest, group: group, updated_at: 1.year.ago) }
+ let_it_be_with_reload(:new_blob) { create(:dependency_proxy_blob, group: group) }
+ let_it_be_with_reload(:new_manifest) { create(:dependency_proxy_manifest, group: group) }
+
+ it 'calls the limited capacity workers', :aggregate_failures do
+ expect(DependencyProxy::CleanupBlobWorker).to receive(:perform_with_capacity)
+ expect(DependencyProxy::CleanupManifestWorker).to receive(:perform_with_capacity)
+
+ subject
+ end
+
+ it 'updates the old images to expired' do
+ expect { subject }
+ .to change { old_blob.reload.status }.from('default').to('expired')
+ .and change { old_manifest.reload.status }.from('default').to('expired')
+ .and not_change { new_blob.reload.status }
+ .and not_change { new_manifest.reload.status }
+ end
+ end
+
+ context 'when there are no images to expire' do
+ it 'does not do anything', :aggregate_failures do
+ expect(DependencyProxy::CleanupBlobWorker).not_to receive(:perform_with_capacity)
+ expect(DependencyProxy::CleanupManifestWorker).not_to receive(:perform_with_capacity)
+
+ subject
+ end
+ end
+
+ context 'counts logging' do
+ let_it_be(:expired_blob) { create(:dependency_proxy_blob, :expired, group: group) }
+ let_it_be(:expired_blob2) { create(:dependency_proxy_blob, :expired, group: group) }
+ let_it_be(:expired_manifest) { create(:dependency_proxy_manifest, :expired, group: group) }
+ let_it_be(:processing_blob) { create(:dependency_proxy_blob, status: :processing, group: group) }
+ let_it_be(:processing_manifest) { create(:dependency_proxy_manifest, status: :processing, group: group) }
+ let_it_be(:error_blob) { create(:dependency_proxy_blob, status: :error, group: group) }
+ let_it_be(:error_manifest) { create(:dependency_proxy_manifest, status: :error, group: group) }
+
+ it 'logs all the counts', :aggregate_failures do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:expired_dependency_proxy_blob_count, 2)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:expired_dependency_proxy_manifest_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:processing_dependency_proxy_blob_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:processing_dependency_proxy_manifest_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:error_dependency_proxy_blob_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:error_dependency_proxy_manifest_count, 1)
+
+ subject
+ end
+
+ context 'with load balancing enabled', :db_load_balancing do
+ it 'reads the counts from the replica' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/email_receiver_worker_spec.rb b/spec/workers/email_receiver_worker_spec.rb
index d26c08fb221..83e13ded7b3 100644
--- a/spec/workers/email_receiver_worker_spec.rb
+++ b/spec/workers/email_receiver_worker_spec.rb
@@ -80,6 +80,21 @@ RSpec.describe EmailReceiverWorker, :mailer do
expect(email).to be_nil
end
end
+
+ context 'when the error is RateLimitedService::RateLimitedError' do
+ let(:error) { RateLimitedService::RateLimitedError.new(key: :issues_create, rate_limiter: Gitlab::ApplicationRateLimiter) }
+
+ it 'does not report the error to the sender' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(error).and_call_original
+
+ perform_enqueued_jobs do
+ described_class.new.perform(raw_message)
+ end
+
+ email = ActionMailer::Base.deliveries.last
+ expect(email).to be_nil
+ end
+ end
end
end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 235a1f6e3dd..9a4b27997e9 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'Every Sidekiq worker' do
describe "feature category declarations" do
let(:feature_categories) do
- YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).map(&:to_sym).to_set
+ Gitlab::FeatureCategories.default.categories.map(&:to_sym).to_set
end
# All Sidekiq worker classes should declare a valid `feature_category`
@@ -155,11 +155,13 @@ RSpec.describe 'Every Sidekiq worker' do
'Ci::BuildScheduleWorker' => 3,
'Ci::BuildTraceChunkFlushWorker' => 3,
'Ci::CreateCrossProjectPipelineWorker' => 3,
+ 'Ci::CreateDownstreamPipelineWorker' => 3,
'Ci::DailyBuildGroupReportResultsWorker' => 3,
'Ci::DeleteObjectsWorker' => 0,
'Ci::DropPipelineWorker' => 3,
'Ci::InitialPipelineProcessWorker' => 3,
'Ci::MergeRequests::AddTodoWhenBuildFailsWorker' => 3,
+ 'Ci::Minutes::UpdateProjectAndNamespaceUsageWorker' => 3,
'Ci::PipelineArtifacts::CoverageReportWorker' => 3,
'Ci::PipelineArtifacts::CreateQualityReportWorker' => 3,
'Ci::PipelineBridgeStatusWorker' => 3,
@@ -197,6 +199,8 @@ RSpec.describe 'Every Sidekiq worker' do
'DeleteMergedBranchesWorker' => 3,
'DeleteStoredFilesWorker' => 3,
'DeleteUserWorker' => 3,
+ 'DependencyProxy::CleanupBlobWorker' => 3,
+ 'DependencyProxy::CleanupManifestWorker' => 3,
'Deployments::AutoRollbackWorker' => 3,
'Deployments::DropOlderDeploymentsWorker' => 3,
'Deployments::FinishedWorker' => 3,
@@ -233,29 +237,27 @@ RSpec.describe 'Every Sidekiq worker' do
'FlushCounterIncrementsWorker' => 3,
'Geo::Batch::ProjectRegistrySchedulerWorker' => 3,
'Geo::Batch::ProjectRegistryWorker' => 3,
- 'Geo::ContainerRepositorySyncWorker' => 3,
+ 'Geo::ContainerRepositorySyncWorker' => 1,
'Geo::DesignRepositoryShardSyncWorker' => false,
- 'Geo::DesignRepositorySyncWorker' => 3,
+ 'Geo::DesignRepositorySyncWorker' => 1,
'Geo::DestroyWorker' => 3,
'Geo::EventWorker' => 3,
'Geo::FileDownloadWorker' => 3,
'Geo::FileRegistryRemovalWorker' => 3,
'Geo::FileRemovalWorker' => 3,
- 'Geo::HashedStorageAttachmentsMigrationWorker' => 3,
- 'Geo::HashedStorageMigrationWorker' => 3,
- 'Geo::ProjectSyncWorker' => 3,
+ 'Geo::ProjectSyncWorker' => 1,
'Geo::RenameRepositoryWorker' => 3,
- 'Geo::RepositoriesCleanUpWorker' => 3,
'Geo::RepositoryCleanupWorker' => 3,
'Geo::RepositoryShardSyncWorker' => false,
'Geo::RepositoryVerification::Primary::ShardWorker' => false,
'Geo::RepositoryVerification::Primary::SingleWorker' => false,
'Geo::RepositoryVerification::Secondary::SingleWorker' => false,
'Geo::ReverificationBatchWorker' => 0,
- 'Geo::Scheduler::Primary::SchedulerWorker' => 3,
- 'Geo::Scheduler::SchedulerWorker' => 3,
- 'Geo::Scheduler::Secondary::SchedulerWorker' => 3,
+ 'Geo::Scheduler::Primary::SchedulerWorker' => false,
+ 'Geo::Scheduler::SchedulerWorker' => false,
+ 'Geo::Scheduler::Secondary::SchedulerWorker' => false,
'Geo::VerificationBatchWorker' => 0,
+ 'Geo::VerificationStateBackfillWorker' => false,
'Geo::VerificationTimeoutWorker' => false,
'Geo::VerificationWorker' => 3,
'GeoRepositoryDestroyWorker' => 3,
@@ -357,14 +359,13 @@ RSpec.describe 'Every Sidekiq worker' do
'ObjectPool::ScheduleJoinWorker' => 3,
'ObjectStorage::BackgroundMoveWorker' => 5,
'ObjectStorage::MigrateUploadsWorker' => 3,
- 'Packages::Composer::CacheUpdateWorker' => 3,
+ 'Packages::Composer::CacheUpdateWorker' => false,
'Packages::Go::SyncPackagesWorker' => 3,
'Packages::Maven::Metadata::SyncWorker' => 3,
'Packages::Nuget::ExtractionWorker' => 3,
'Packages::Rubygems::ExtractionWorker' => 3,
'PagesDomainSslRenewalWorker' => 3,
'PagesDomainVerificationWorker' => 3,
- 'PagesRemoveWorker' => 3,
'PagesTransferWorker' => 3,
'PagesUpdateConfigurationWorker' => 3,
'PagesWorker' => 3,
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
index 6b14ccea105..e9af39ed2df 100644
--- a/spec/workers/expire_job_cache_worker_spec.rb
+++ b/spec/workers/expire_job_cache_worker_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe ExpireJobCacheWorker do
let(:job_args) { job.id }
+ it_behaves_like 'an idempotent worker'
+
it_behaves_like 'worker with data consistency',
described_class,
data_consistency: :delayed
diff --git a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
index 132fe1dc618..dd976eef28b 100644
--- a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
@@ -7,39 +7,27 @@ RSpec.describe Gitlab::GithubImport::Stage::FinishImportWorker do
let(:worker) { described_class.new }
describe '#perform' do
- it 'marks the import as finished' do
+ it 'marks the import as finished and reports import statistics' do
expect(project).to receive(:after_import)
- expect(worker).to receive(:report_import_time).with(project)
-
- worker.import(double(:client), project)
- end
- end
-
- describe '#report_import_time' do
- it 'reports the total import time' do
- expect(worker.histogram)
- .to receive(:observe)
- .with({ project: project.path_with_namespace }, a_kind_of(Numeric))
- .and_call_original
-
- expect(worker.counter)
- .to receive(:increment)
- .and_call_original
+ expect_next_instance_of(Gitlab::Import::Metrics) do |instance|
+ expect(instance).to receive(:track_finished_import)
+ expect(instance).to receive(:duration).and_return(3.005)
+ end
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
- .with(
- message: 'GitHub project import finished',
- import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
- object_counts: {
- 'fetched' => {},
- 'imported' => {}
- },
- project_id: project.id,
- duration_s: a_kind_of(Numeric)
- )
+ .with(
+ message: 'GitHub project import finished',
+ import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
+ object_counts: {
+ 'fetched' => {},
+ 'imported' => {}
+ },
+ project_id: project.id,
+ duration_s: 3.01
+ )
- worker.report_import_time(project)
+ worker.import(double(:client), project)
end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
index f68d0838501..7b2218b1725 100644
--- a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
@@ -3,15 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker do
- let(:project) { create(:project) }
- let(:import_state) { create(:import_state, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:import_state) { create(:import_state, project: project) }
+
let(:worker) { described_class.new }
+ let(:importer) { double(:importer) }
+ let(:client) { double(:client) }
describe '#import' do
it 'imports the base data of a project' do
- importer = double(:importer)
- client = double(:client)
-
described_class::IMPORTERS.each do |klass|
expect(klass)
.to receive(:new)
@@ -29,5 +29,23 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker do
worker.import(client, project)
end
+
+ it 'raises an error' do
+ exception = StandardError.new('_some_error_')
+
+ expect_next_instance_of(Gitlab::GithubImport::Importer::LabelsImporter) do |importer|
+ expect(importer).to receive(:execute).and_raise(exception)
+ end
+ expect(Gitlab::Import::ImportFailureService).to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: true,
+ metrics: true
+ ).and_call_original
+
+ expect { worker.import(client, project) }.to raise_error(StandardError)
+ end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
index 29578f9bf37..b18b5ce64d1 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
@@ -3,14 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker do
- let(:project) { create(:project) }
- let(:import_state) { create(:import_state, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:import_state) { create(:import_state, project: project) }
+
let(:worker) { described_class.new }
+ let(:importer) { double(:importer) }
+ let(:client) { double(:client) }
describe '#import' do
it 'imports all the pull requests' do
- importer = double(:importer)
- client = double(:client)
waiter = Gitlab::JobWaiter.new(2, '123')
expect(Gitlab::GithubImport::Importer::PullRequestsImporter)
@@ -32,4 +33,22 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker do
worker.import(client, project)
end
end
+
+ it 'raises an error' do
+ exception = StandardError.new('_some_error_')
+
+ expect_next_instance_of(Gitlab::GithubImport::Importer::PullRequestsImporter) do |importer|
+ expect(importer).to receive(:execute).and_raise(exception)
+ end
+ expect(Gitlab::Import::ImportFailureService).to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: true,
+ metrics: true
+ ).and_call_original
+
+ expect { worker.import(client, project) }.to raise_error(StandardError)
+ end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index 875fc082975..582cb76a6cd 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
- let(:project) { double(:project, id: 4) }
+ let_it_be(:project) { create(:project, :import_started) }
let(:worker) { described_class.new }
@@ -43,6 +43,15 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
expect(instance).to receive(:execute).and_raise(exception_class)
end
+ expect(Gitlab::Import::ImportFailureService).to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception_class,
+ error_source: described_class.name,
+ fail_import: true,
+ metrics: true
+ ).and_call_original
+
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.not_to receive(:perform_async)
diff --git a/spec/workers/issue_placement_worker_spec.rb b/spec/workers/issue_placement_worker_spec.rb
index 780790dbb1b..50b9d58a5b0 100644
--- a/spec/workers/issue_placement_worker_spec.rb
+++ b/spec/workers/issue_placement_worker_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe IssuePlacementWorker do
it 'places all issues created at most 5 minutes before this one at the end, most recent last' do
expect { run_worker }.not_to change { irrelevant.reset.relative_position }
- expect(project.issues.order_relative_position_asc)
+ expect(project.issues.order_by_relative_position)
.to eq([issue_e, issue_b, issue_a, issue, issue_c, issue_f, issue_d])
expect(project.issues.where(relative_position: nil)).not_to exist
end
diff --git a/spec/workers/packages/composer/cache_cleanup_worker_spec.rb b/spec/workers/packages/composer/cache_cleanup_worker_spec.rb
index e69fe55acc2..39eac4e4ae1 100644
--- a/spec/workers/packages/composer/cache_cleanup_worker_spec.rb
+++ b/spec/workers/packages/composer/cache_cleanup_worker_spec.rb
@@ -18,12 +18,8 @@ RSpec.describe Packages::Composer::CacheCleanupWorker, type: :worker do
cache_file4.update_columns(namespace_id: nil)
end
- it 'deletes expired packages' do
- expect { subject }.to change { Packages::Composer::CacheFile.count }.by(-2)
- expect { cache_file1.reload }.not_to raise_error ActiveRecord::RecordNotFound
- expect { cache_file2.reload }.not_to raise_error ActiveRecord::RecordNotFound
- expect { cache_file3.reload }.to raise_error ActiveRecord::RecordNotFound
- expect { cache_file4.reload }.to raise_error ActiveRecord::RecordNotFound
+ it 'does nothing' do
+ expect { subject }.not_to change { Packages::Composer::CacheFile.count }
end
end
end
diff --git a/spec/workers/packages/composer/cache_update_worker_spec.rb b/spec/workers/packages/composer/cache_update_worker_spec.rb
index a0d8aa5d375..6c17d49e986 100644
--- a/spec/workers/packages/composer/cache_update_worker_spec.rb
+++ b/spec/workers/packages/composer/cache_update_worker_spec.rb
@@ -21,8 +21,8 @@ RSpec.describe Packages::Composer::CacheUpdateWorker, type: :worker do
include_examples 'an idempotent worker' do
context 'creating a package' do
- it 'updates the cache' do
- expect { subject }.to change { Packages::Composer::CacheFile.count }.by(1)
+ it 'does nothing' do
+ expect { subject }.to change { Packages::Composer::CacheFile.count }.by(0)
end
end
@@ -36,12 +36,12 @@ RSpec.describe Packages::Composer::CacheUpdateWorker, type: :worker do
package.destroy!
end
- it 'marks the file for deletion' do
+ it 'does nothing' do
expect { subject }.not_to change { Packages::Composer::CacheFile.count }
cache_file = Packages::Composer::CacheFile.last
- expect(cache_file.reload.delete_at).not_to be_nil
+ expect(cache_file.reload.delete_at).to be_nil
end
end
end
diff --git a/spec/workers/pages_remove_worker_spec.rb b/spec/workers/pages_remove_worker_spec.rb
deleted file mode 100644
index 9d49088b371..00000000000
--- a/spec/workers/pages_remove_worker_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PagesRemoveWorker do
- it 'does not raise error' do
- expect do
- described_class.new.perform(create(:project).id)
- end.not_to raise_error
- end
-end
diff --git a/spec/workers/pipeline_hooks_worker_spec.rb b/spec/workers/pipeline_hooks_worker_spec.rb
index 0ed00c0c66a..13a86c3d4fe 100644
--- a/spec/workers/pipeline_hooks_worker_spec.rb
+++ b/spec/workers/pipeline_hooks_worker_spec.rb
@@ -8,8 +8,10 @@ RSpec.describe PipelineHooksWorker do
let(:pipeline) { create(:ci_pipeline) }
it 'executes hooks for the pipeline' do
- expect_any_instance_of(Ci::Pipeline)
- .to receive(:execute_hooks)
+ hook_service = double
+
+ expect(Ci::Pipelines::HookService).to receive(:new).and_return(hook_service)
+ expect(hook_service).to receive(:execute)
described_class.new.perform(pipeline.id)
end
@@ -17,6 +19,8 @@ RSpec.describe PipelineHooksWorker do
context 'when pipeline does not exist' do
it 'does not raise exception' do
+ expect(Ci::Pipelines::HookService).not_to receive(:new)
+
expect { described_class.new.perform(123) }
.not_to raise_error
end
diff --git a/spec/workers/pipeline_process_worker_spec.rb b/spec/workers/pipeline_process_worker_spec.rb
index f8140d11f2e..6e95b7a4753 100644
--- a/spec/workers/pipeline_process_worker_spec.rb
+++ b/spec/workers/pipeline_process_worker_spec.rb
@@ -29,16 +29,6 @@ RSpec.describe PipelineProcessWorker do
end
end
- context 'when the FF ci_idempotent_pipeline_process_worker is disabled' do
- before do
- stub_feature_flags(ci_idempotent_pipeline_process_worker: false)
- end
-
- it 'is not deduplicated' do
- expect(described_class).not_to be_deduplication_enabled
- end
- end
-
describe '#perform' do
context 'when pipeline exists' do
it 'processes pipeline' do
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index ddd295215a1..039f86f1911 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -428,7 +428,7 @@ RSpec.describe PostReceive do
end
it 'expires the status cache' do
- expect(snippet.repository).to receive(:empty?).and_return(true)
+ expect(snippet.repository).to receive(:empty?).at_least(:once).and_return(true)
expect(snippet.repository).to receive(:expire_status_cache)
perform
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index fc572c0d9c3..bb11d1dbb58 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -68,5 +68,20 @@ RSpec.describe RunPipelineScheduleWorker do
worker.perform(pipeline_schedule.id, user.id)
end
end
+
+ context 'when pipeline cannot be created' do
+ before do
+ allow(Ci::CreatePipelineService).to receive(:new) { raise Ci::CreatePipelineService::CreateError }
+ end
+
+ it 'logging a pipeline error' do
+ expect(worker)
+ .to receive(:log_extra_metadata_on_done)
+ .with(:pipeline_creation_error, an_instance_of(Ci::CreatePipelineService::CreateError))
+ .and_call_original
+
+ worker.perform(pipeline_schedule.id, user.id)
+ end
+ end
end
end
diff --git a/spec/workers/stuck_ci_jobs_worker_spec.rb b/spec/workers/stuck_ci_jobs_worker_spec.rb
index e0a5d3c6c1c..19ff8ec55c2 100644
--- a/spec/workers/stuck_ci_jobs_worker_spec.rb
+++ b/spec/workers/stuck_ci_jobs_worker_spec.rb
@@ -5,50 +5,34 @@ require 'spec_helper'
RSpec.describe StuckCiJobsWorker do
include ExclusiveLeaseHelpers
- let(:worker_lease_key) { StuckCiJobsWorker::EXCLUSIVE_LEASE_KEY }
- let(:worker_lease_uuid) { SecureRandom.uuid }
- let(:worker2) { described_class.new }
-
- subject(:worker) { described_class.new }
-
- before do
- stub_exclusive_lease(worker_lease_key, worker_lease_uuid)
- end
+ let(:worker) { described_class.new }
+ let(:lease_uuid) { SecureRandom.uuid }
describe '#perform' do
- it 'executes an instance of Ci::StuckBuildsDropService' do
- expect_next_instance_of(Ci::StuckBuilds::DropService) do |service|
- expect(service).to receive(:execute).exactly(:once)
- end
-
- worker.perform
- end
+ subject { worker.perform }
- context 'with an exclusive lease' do
- it 'does not execute concurrently' do
- expect(worker).to receive(:remove_lease).exactly(:once)
- expect(worker2).not_to receive(:remove_lease)
+ it 'enqueues a Ci::StuckBuilds::DropRunningWorker job' do
+ expect(Ci::StuckBuilds::DropRunningWorker).to receive(:perform_in).with(20.minutes).exactly(:once)
- worker.perform
+ subject
+ end
- stub_exclusive_lease_taken(worker_lease_key)
+ it 'enqueues a Ci::StuckBuilds::DropScheduledWorker job' do
+ expect(Ci::StuckBuilds::DropScheduledWorker).to receive(:perform_in).with(40.minutes).exactly(:once)
- worker2.perform
- end
+ subject
+ end
- it 'can execute in sequence' do
- expect(worker).to receive(:remove_lease).at_least(:once)
- expect(worker2).to receive(:remove_lease).at_least(:once)
+ it 'executes an instance of Ci::StuckBuilds::DropPendingService' do
+ expect_to_obtain_exclusive_lease(worker.lease_key, lease_uuid)
- worker.perform
- worker2.perform
+ expect_next_instance_of(Ci::StuckBuilds::DropPendingService) do |service|
+ expect(service).to receive(:execute).exactly(:once)
end
- it 'cancels exclusive leases after worker perform' do
- expect_to_cancel_exclusive_lease(worker_lease_key, worker_lease_uuid)
+ expect_to_cancel_exclusive_lease(worker.lease_key, lease_uuid)
- worker.perform
- end
+ subject
end
end
end