Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'spec/services')
-rw-r--r--spec/services/achievements/update_user_achievement_priorities_service_spec.rb73
-rw-r--r--spec/services/admin/abuse_reports/moderate_user_service_spec.rb37
-rw-r--r--spec/services/audit_events/build_service_spec.rb8
-rw-r--r--spec/services/auto_merge/base_service_spec.rb41
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb2
-rw-r--r--spec/services/bulk_imports/process_service_spec.rb325
-rw-r--r--spec/services/bulk_imports/relation_batch_export_service_spec.rb14
-rw-r--r--spec/services/bulk_imports/relation_export_service_spec.rb14
-rw-r--r--spec/services/chat_names/find_user_service_spec.rb26
-rw-r--r--spec/services/ci/catalog/resources/validate_service_spec.rb (renamed from spec/services/ci/catalog/validate_resource_service_spec.rb)2
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb26
-rw-r--r--spec/services/ci/delete_objects_service_spec.rb4
-rw-r--r--spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb2
-rw-r--r--spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb224
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb118
-rw-r--r--spec/services/ci/process_sync_events_service_spec.rb8
-rw-r--r--spec/services/ci/refs/enqueue_pipelines_to_unlock_service_spec.rb109
-rw-r--r--spec/services/ci/retry_job_service_spec.rb43
-rw-r--r--spec/services/ci/runners/register_runner_service_spec.rb4
-rw-r--r--spec/services/ci/unlock_pipeline_service_spec.rb160
-rw-r--r--spec/services/deployments/create_service_spec.rb1
-rw-r--r--spec/services/design_management/delete_designs_service_spec.rb2
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb4
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb2
-rw-r--r--spec/services/git/branch_push_service_spec.rb43
-rw-r--r--spec/services/groups/update_service_spec.rb24
-rw-r--r--spec/services/import/github_service_spec.rb395
-rw-r--r--spec/services/import/validate_remote_git_endpoint_service_spec.rb43
-rw-r--r--spec/services/issues/close_service_spec.rb2
-rw-r--r--spec/services/issues/set_crm_contacts_service_spec.rb8
-rw-r--r--spec/services/issues/update_service_spec.rb11
-rw-r--r--spec/services/jira_connect/sync_service_spec.rb18
-rw-r--r--spec/services/members/create_service_spec.rb118
-rw-r--r--spec/services/members/invite_service_spec.rb28
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb33
-rw-r--r--spec/services/merge_requests/create_ref_service_spec.rb4
-rw-r--r--spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb3
-rw-r--r--spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb46
-rw-r--r--spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb23
-rw-r--r--spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb60
-rw-r--r--spec/services/merge_requests/mergeability/run_checks_service_spec.rb153
-rw-r--r--spec/services/merge_requests/update_service_spec.rb44
-rw-r--r--spec/services/ml/experiment_tracking/candidate_repository_spec.rb2
-rw-r--r--spec/services/ml/experiment_tracking/experiment_repository_spec.rb2
-rw-r--r--spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb2
-rw-r--r--spec/services/notes/create_service_spec.rb2
-rw-r--r--spec/services/notes/destroy_service_spec.rb4
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb79
-rw-r--r--spec/services/notes/update_service_spec.rb2
-rw-r--r--spec/services/packages/npm/create_package_service_spec.rb28
-rw-r--r--spec/services/packages/nuget/extract_metadata_file_service_spec.rb44
-rw-r--r--spec/services/packages/nuget/metadata_extraction_service_spec.rb24
-rw-r--r--spec/services/packages/nuget/odata_package_entry_service_spec.rb33
-rw-r--r--spec/services/packages/nuget/process_package_file_service_spec.rb83
-rw-r--r--spec/services/packages/nuget/symbols/create_symbol_files_service_spec.rb103
-rw-r--r--spec/services/packages/nuget/symbols/extract_symbol_signature_service_spec.rb23
-rw-r--r--spec/services/packages/nuget/update_package_from_metadata_service_spec.rb13
-rw-r--r--spec/services/packages/protection/create_rule_service_spec.rb139
-rw-r--r--spec/services/pages/migrate_from_legacy_storage_service_spec.rb137
-rw-r--r--spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb118
-rw-r--r--spec/services/pages/zip_directory_service_spec.rb280
-rw-r--r--spec/services/projects/after_rename_service_spec.rb207
-rw-r--r--spec/services/projects/hashed_storage/migrate_repository_service_spec.rb152
-rw-r--r--spec/services/projects/hashed_storage/migration_service_spec.rb44
-rw-r--r--spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb106
-rw-r--r--spec/services/projects/hashed_storage/rollback_repository_service_spec.rb152
-rw-r--r--spec/services/projects/hashed_storage/rollback_service_spec.rb78
-rw-r--r--spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb136
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_service_spec.rb2
-rw-r--r--spec/services/projects/participants_service_spec.rb26
-rw-r--r--spec/services/projects/record_target_platforms_service_spec.rb46
-rw-r--r--spec/services/projects/transfer_service_spec.rb22
-rw-r--r--spec/services/projects/update_pages_service_spec.rb58
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb12
-rw-r--r--spec/services/projects/update_service_spec.rb2
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb118
-rw-r--r--spec/services/releases/destroy_service_spec.rb6
-rw-r--r--spec/services/repositories/replicate_service_spec.rb45
-rw-r--r--spec/services/resource_events/change_labels_service_spec.rb2
-rw-r--r--spec/services/snippets/destroy_service_spec.rb2
-rw-r--r--spec/services/snippets/update_repository_storage_service_spec.rb4
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb12
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb31
-rw-r--r--spec/services/system_notes/time_tracking_service_spec.rb6
-rw-r--r--spec/services/tasks_to_be_done/base_service_spec.rb69
-rw-r--r--spec/services/todo_service_spec.rb80
-rw-r--r--spec/services/update_container_registry_info_service_spec.rb11
-rw-r--r--spec/services/users/auto_ban_service_spec.rb56
-rw-r--r--spec/services/users/in_product_marketing_email_records_spec.rb13
-rw-r--r--spec/services/users/signup_service_spec.rb75
-rw-r--r--spec/services/users/trust_service_spec.rb (renamed from spec/services/users/allow_possible_spam_service_spec.rb)4
-rw-r--r--spec/services/users/untrust_service_spec.rb (renamed from spec/services/users/disallow_possible_spam_service_spec.rb)8
-rw-r--r--spec/services/verify_pages_domain_service_spec.rb16
-rw-r--r--spec/services/vs_code/settings/create_or_update_service_spec.rb48
-rw-r--r--spec/services/web_hook_service_spec.rb17
-rw-r--r--spec/services/work_items/parent_links/create_service_spec.rb2
-rw-r--r--spec/services/work_items/related_work_item_links/create_service_spec.rb3
-rw-r--r--spec/services/work_items/update_service_spec.rb2
-rw-r--r--spec/services/work_items/widgets/labels_service/update_service_spec.rb10
-rw-r--r--spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb10
100 files changed, 2566 insertions, 2550 deletions
diff --git a/spec/services/achievements/update_user_achievement_priorities_service_spec.rb b/spec/services/achievements/update_user_achievement_priorities_service_spec.rb
new file mode 100644
index 00000000000..a020bf9770e
--- /dev/null
+++ b/spec/services/achievements/update_user_achievement_priorities_service_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Achievements::UpdateUserAchievementPrioritiesService, feature_category: :user_profile do
+ describe '#execute' do
+ let_it_be(:achievement_owner) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let_it_be(:achievement) { create(:achievement, namespace: group) }
+
+ let!(:user_achievement1) do
+ create(:user_achievement, achievement: achievement, user: achievement_owner, priority: 0)
+ end
+
+ let_it_be(:user_achievement2) { create(:user_achievement, achievement: achievement, user: achievement_owner) }
+ let_it_be(:user_achievement3) { create(:user_achievement, achievement: achievement, user: achievement_owner) }
+
+ subject(:response) { described_class.new(current_user, user_achievements).execute }
+
+ context 'when user does not have permission' do
+ let(:current_user) { create(:user) }
+ let(:user_achievements) { [user_achievement1] }
+
+ it 'returns an error', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to match_array(["You can't update at least one of the given user achievements."])
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be_with_reload(:current_user) { achievement_owner }
+
+ context 'with empty input' do
+ let(:user_achievements) { [] }
+
+ it 'removes all priorities', :aggregate_failures do
+ expect(response).to be_success
+
+ [user_achievement1, user_achievement2, user_achievement3].each do |ua|
+ expect(ua.reload.priority).to be_nil
+ end
+ end
+ end
+
+ context 'with prioritised achievements' do
+ let(:user_achievements) { [user_achievement3, user_achievement1] }
+
+ it 're-orders the achievements correctly', :aggregate_failures do
+ expect(response).to be_success
+
+ expect(user_achievement1.reload.priority).to eq(1)
+ expect(user_achievement2.reload.priority).to be_nil
+ expect(user_achievement3.reload.priority).to be_zero
+ end
+ end
+
+ context 'when no achievement is prioritized and no prioritizations are made' do
+ let!(:user_achievement1) { create(:user_achievement, achievement: achievement, user: achievement_owner) }
+
+ let(:user_achievements) { [] }
+
+ it 'works without errors', :aggregate_failures do
+ expect(response).to be_success
+
+ [user_achievement1, user_achievement2, user_achievement3].each do |ua|
+ expect(ua.reload.priority).to be_nil
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/admin/abuse_reports/moderate_user_service_spec.rb b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
index 7e08db2b612..3b80d3276a2 100644
--- a/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
+++ b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
@@ -210,6 +210,43 @@ RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :inst
end
end
+ describe 'when trusting the user' do
+ let(:action) { 'trust_user' }
+
+ it 'calls the Users::TrustService method' do
+ expect_next_instance_of(Users::TrustService, admin) do |service|
+ expect(service).to receive(:execute).with(abuse_report.user).and_return(status: :success)
+ end
+
+ subject
+ end
+
+ context 'when not closing the report' do
+ let(:close) { false }
+
+ it_behaves_like 'does not close the report'
+ it_behaves_like 'records an event', action: 'trust_user'
+ end
+
+ context 'when closing the report' do
+ it_behaves_like 'closes the report'
+ it_behaves_like 'records an event', action: 'trust_user_and_close_report'
+ end
+
+ context 'when trusting the user fails' do
+ before do
+ allow_next_instance_of(Users::TrustService) do |service|
+ allow(service).to receive(:execute).with(abuse_report.user)
+ .and_return(status: :error, message: 'Trusting the user failed')
+ end
+ end
+
+ it_behaves_like 'returns an error response', 'Trusting the user failed'
+ it_behaves_like 'does not close the report'
+ it_behaves_like 'does not record an event'
+ end
+ end
+
describe 'when only closing the report' do
let(:action) { '' }
diff --git a/spec/services/audit_events/build_service_spec.rb b/spec/services/audit_events/build_service_spec.rb
index 575ec9e58b8..d5a3d1bbaf7 100644
--- a/spec/services/audit_events/build_service_spec.rb
+++ b/spec/services/audit_events/build_service_spec.rb
@@ -129,25 +129,25 @@ RSpec.describe AuditEvents::BuildService, feature_category: :audit_events do
context 'when author is missing' do
let(:author) { nil }
- it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ it { expect { service }.to raise_error(described_class::MissingAttributeError, "author") }
end
context 'when scope is missing' do
let(:scope) { nil }
- it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ it { expect { service }.to raise_error(described_class::MissingAttributeError, "scope") }
end
context 'when target is missing' do
let(:target) { nil }
- it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ it { expect { service }.to raise_error(described_class::MissingAttributeError, "target") }
end
context 'when message is missing' do
let(:message) { nil }
- it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ it { expect { service }.to raise_error(described_class::MissingAttributeError, "message") }
end
end
end
diff --git a/spec/services/auto_merge/base_service_spec.rb b/spec/services/auto_merge/base_service_spec.rb
index be5b753f484..8cd33f8ff1e 100644
--- a/spec/services/auto_merge/base_service_spec.rb
+++ b/spec/services/auto_merge/base_service_spec.rb
@@ -301,4 +301,45 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
specify { expect(service).to respond_to :process }
specify { expect { service.process(nil) }.to raise_error NotImplementedError }
end
+
+ describe '#available_for?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:available_for) { service.available_for?(merge_request) { true } }
+
+ let(:merge_request) { create(:merge_request) }
+
+ where(:can_be_merged, :open, :broken, :discussions, :blocked, :draft, :skip_draft, :skip_blocked,
+ :skip_discussions, :result) do
+ true | true | false | true | false | false | false | false | false | true
+ true | true | false | true | false | false | true | true | false | true
+ true | true | false | true | false | true | true | false | false | true
+ true | true | false | true | true | false | false | true | false | true
+ true | true | false | false | false | false | false | false | true | true
+ true | true | false | true | false | true | false | false | false | false
+ false | true | false | true | false | false | false | false | false | false
+ true | false | false | true | false | false | false | false | false | false
+ true | true | true | true | false | false | false | false | false | false
+ true | true | false | false | false | false | false | false | false | false
+ true | true | false | true | true | false | false | false | false | false
+ end
+
+ with_them do
+ before do
+ allow(service).to receive(:skip_draft_check).and_return(skip_draft)
+ allow(service).to receive(:skip_blocked_check).and_return(skip_blocked)
+ allow(service).to receive(:skip_discussions_check).and_return(skip_discussions)
+ allow(merge_request).to receive(:can_be_merged_by?).and_return(can_be_merged)
+ allow(merge_request).to receive(:open?).and_return(open)
+ allow(merge_request).to receive(:broken?).and_return(broken)
+ allow(merge_request).to receive(:draft?).and_return(draft)
+ allow(merge_request).to receive(:mergeable_discussions_state?).and_return(discussions)
+ allow(merge_request).to receive(:merge_blocked_by_other_mrs?).and_return(blocked)
+ end
+
+ it 'returns the expected results' do
+ expect(available_for).to eq(result)
+ end
+ end
+ end
end
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index 2197b0b4fac..1734ea45507 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
allowed_content_types: allowed_content_types
)
- expect { service.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { service.execute }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
diff --git a/spec/services/bulk_imports/process_service_spec.rb b/spec/services/bulk_imports/process_service_spec.rb
new file mode 100644
index 00000000000..5398e76cb67
--- /dev/null
+++ b/spec/services/bulk_imports/process_service_spec.rb
@@ -0,0 +1,325 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::ProcessService, feature_category: :importers do
+ describe '#execute' do
+ let_it_be_with_reload(:bulk_import) { create(:bulk_import) }
+
+ subject { described_class.new(bulk_import) }
+
+ context 'when no bulk import is found' do
+ let(:bulk_import) { nil }
+
+ it 'does nothing' do
+ expect(described_class).not_to receive(:process_bulk_import)
+ subject.execute
+ end
+ end
+
+ context 'when bulk import is finished' do
+ it 'does nothing' do
+ bulk_import.update!(status: 2)
+
+ expect(described_class).not_to receive(:process_bulk_import)
+ subject.execute
+ end
+ end
+
+ context 'when bulk import is failed' do
+ it 'does nothing' do
+ bulk_import.update!(status: -1)
+
+ expect(described_class).not_to receive(:process_bulk_import)
+ subject.execute
+ end
+ end
+
+ context 'when bulk import has timed out' do
+ it 'does nothing' do
+ bulk_import.update!(status: 3)
+
+ expect(described_class).not_to receive(:process_bulk_import)
+ subject.execute
+ end
+ end
+
+ context 'when all entities are processed' do
+ it 'marks bulk import as finished' do
+ bulk_import.update!(status: 1)
+ create(:bulk_import_entity, :finished, bulk_import: bulk_import)
+ create(:bulk_import_entity, :failed, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(bulk_import.reload.finished?).to eq(true)
+ end
+ end
+
+ context 'when all entities are failed' do
+ it 'marks bulk import as failed' do
+ bulk_import.update!(status: 1)
+ create(:bulk_import_entity, :failed, bulk_import: bulk_import)
+ create(:bulk_import_entity, :failed, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(bulk_import.reload.failed?).to eq(true)
+ end
+ end
+
+ context 'when maximum allowed number of import entities in progress' do
+ it 're-enqueues itself' do
+ bulk_import.update!(status: 1)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ (described_class::DEFAULT_BATCH_SIZE + 1).times do
+ create(:bulk_import_entity, :started, bulk_import: bulk_import)
+ end
+
+ expect(BulkImportWorker).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
+ expect(BulkImports::ExportRequestWorker).not_to receive(:perform_async)
+
+ subject.execute
+ end
+ end
+
+ context 'when bulk import is created' do
+ it 'marks bulk import as started' do
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(bulk_import.reload.started?).to eq(true)
+ end
+
+ it 'creates all the required pipeline trackers' do
+ entity_1 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ entity_2 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ expect { subject.execute }
+ .to change { BulkImports::Tracker.count }
+ .by(BulkImports::Groups::Stage.new(entity_1).pipelines.size * 2)
+
+ expect(entity_1.trackers).not_to be_empty
+ expect(entity_2.trackers).not_to be_empty
+ end
+
+ context 'when there are created entities to process' do
+ before do
+ stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
+ end
+
+ it 'marks a batch of entities as started, enqueues EntityWorker, ExportRequestWorker and reenqueues' do
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ expect(BulkImportWorker).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
+ expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
+
+ subject.execute
+
+ bulk_import.reload
+
+ expect(bulk_import.entities.map(&:status_name)).to contain_exactly(:created, :started)
+ end
+
+ context 'when there are project entities to process' do
+ it 'enqueues ExportRequestWorker' do
+ create(:bulk_import_entity, :created, :project_entity, bulk_import: bulk_import)
+
+ expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
+
+ subject.execute
+ end
+ end
+ end
+
+ context 'when exception occurs' do
+ it 'tracks the exception & marks import as failed' do
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ allow(BulkImports::ExportRequestWorker).to receive(:perform_async).and_raise(StandardError)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ kind_of(StandardError),
+ bulk_import_id: bulk_import.id
+ )
+
+ subject.execute
+
+ expect(bulk_import.reload.failed?).to eq(true)
+ end
+ end
+ end
+
+ context 'when importing a group' do
+ it 'creates trackers for group entity' do
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(entity.trackers.to_a).to include(
+ have_attributes(
+ stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
+ ),
+ have_attributes(
+ stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
+ )
+ )
+ end
+ end
+
+ context 'when importing a project' do
+ it 'creates trackers for project entity' do
+ entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(entity.trackers.to_a).to include(
+ have_attributes(
+ stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
+ ),
+ have_attributes(
+ stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
+ )
+ )
+ end
+ end
+
+ context 'when tracker configuration has a minimum version defined' do
+ before do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
+ pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
+ pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
+ pipeline5: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
+ pipeline6: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
+ }
+ )
+ end
+ end
+
+ context 'when the source instance version is older than the tracker mininum version' do
+ let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }
+
+ before do
+ bulk_import.update!(source_version: '15.0.0')
+ end
+
+ it 'creates trackers as skipped if version requirement does not meet' do
+ subject.execute
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:created, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:skipped, 'PipelineClass4'],
+ [:skipped, 'PipelineClass5']
+ )
+ end
+
+ it 'logs an info message for the skipped pipelines' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:info).with(
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ importer: 'gitlab_migration',
+ pipeline_name: 'PipelineClass4',
+ minimum_source_version: '15.1.0',
+ maximum_source_version: nil,
+ source_version: '15.0.0'
+ )
+
+ expect(logger).to receive(:info).with(
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ importer: 'gitlab_migration',
+ pipeline_name: 'PipelineClass5',
+ minimum_source_version: '16.0.0',
+ maximum_source_version: nil,
+ source_version: '15.0.0'
+ )
+ end
+
+ subject.execute
+ end
+ end
+
+ context 'when the source instance version is undefined' do
+ it 'creates trackers as created' do
+ bulk_import.update!(source_version: nil)
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:created, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:created, 'PipelineClass4'],
+ [:created, 'PipelineClass5']
+ )
+ end
+ end
+ end
+
+ context 'when tracker configuration has a maximum version defined' do
+ before do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
+ pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
+ pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
+ pipeline5: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
+ pipeline6: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
+ }
+ )
+ end
+ end
+
+ context 'when the source instance version is older than the tracker maximum version' do
+ it 'creates trackers as skipped if version requirement does not meet' do
+ bulk_import.update!(source_version: '15.0.0')
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:skipped, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:created, 'PipelineClass4'],
+ [:created, 'PipelineClass5']
+ )
+ end
+ end
+
+ context 'when the source instance version is a patch version' do
+ it 'creates trackers with the same status as the non-patch source version' do
+ bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
+ entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)
+
+ bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
+ entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)
+
+ described_class.new(bulk_import_1).execute
+ described_class.new(bulk_import_2).execute
+
+ trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
+ trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
+
+ expect(trackers_1).to eq(trackers_2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/relation_batch_export_service_spec.rb b/spec/services/bulk_imports/relation_batch_export_service_spec.rb
index 09f55f14a96..8548e01a6aa 100644
--- a/spec/services/bulk_imports/relation_batch_export_service_spec.rb
+++ b/spec/services/bulk_imports/relation_batch_export_service_spec.rb
@@ -45,6 +45,20 @@ RSpec.describe BulkImports::RelationBatchExportService, feature_category: :impor
service.execute
end
+ context 'when relation is empty and there is nothing to export' do
+ let_it_be(:export) { create(:bulk_import_export, :batched, project: project, relation: 'milestones') }
+ let_it_be(:batch) { create(:bulk_import_export_batch, export: export) }
+
+ it 'creates empty file on disk' do
+ allow(subject).to receive(:export_path).and_return('foo')
+ allow(FileUtils).to receive(:remove_entry)
+
+ expect(FileUtils).to receive(:touch).with('foo/milestones.ndjson')
+
+ subject.execute
+ end
+ end
+
context 'when exception occurs' do
before do
allow(service).to receive(:gzip).and_raise(StandardError, 'Error!')
diff --git a/spec/services/bulk_imports/relation_export_service_spec.rb b/spec/services/bulk_imports/relation_export_service_spec.rb
index 1c050fe4143..bd8447e3d97 100644
--- a/spec/services/bulk_imports/relation_export_service_spec.rb
+++ b/spec/services/bulk_imports/relation_export_service_spec.rb
@@ -13,10 +13,12 @@ RSpec.describe BulkImports::RelationExportService, feature_category: :importers
let_it_be_with_reload(:export) { create(:bulk_import_export, group: group, relation: relation) }
before do
+ FileUtils.mkdir_p(export_path)
+
group.add_owner(user)
project.add_maintainer(user)
- allow(export).to receive(:export_path).and_return(export_path)
+ allow(subject).to receive(:export_path).and_return(export_path)
end
after :all do
@@ -53,6 +55,16 @@ RSpec.describe BulkImports::RelationExportService, feature_category: :importers
expect(export.upload.export_file).to be_present
end
+ context 'when relation is empty and there is nothing to export' do
+ let(:relation) { 'milestones' }
+
+ it 'creates empty file on disk' do
+ expect(FileUtils).to receive(:touch).with("#{export_path}/#{relation}.ndjson")
+
+ subject.execute
+ end
+ end
+
context 'when exporting a file relation' do
it 'uses file export service' do
service = described_class.new(user, project, 'uploads', jid)
diff --git a/spec/services/chat_names/find_user_service_spec.rb b/spec/services/chat_names/find_user_service_spec.rb
index 14bece4efb4..94a56553983 100644
--- a/spec/services/chat_names/find_user_service_spec.rb
+++ b/spec/services/chat_names/find_user_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state, fea
context 'find user mapping' do
let_it_be(:user) { create(:user) }
- let_it_be(:chat_name) { create(:chat_name, user: user) }
+ let(:chat_name) { create(:chat_name, user: user) }
let(:team_id) { chat_name.team_id }
let(:user_id) { chat_name.chat_id }
@@ -19,26 +19,20 @@ RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state, fea
end
it 'updates the last used timestamp if one is not already set' do
- expect(chat_name.last_used_at).to be_nil
-
- subject
-
- expect(chat_name.reload.last_used_at).to be_present
+ expect { subject }.to change { chat_name.reload.last_used_at }.from(nil)
end
it 'only updates an existing timestamp once within a certain time frame' do
- chat_name = create(:chat_name, user: user)
- service = described_class.new(team_id, user_id)
-
- expect(chat_name.last_used_at).to be_nil
-
- service.execute
-
- time = chat_name.reload.last_used_at
+ expect { described_class.new(team_id, user_id).execute }.to change { chat_name.reload.last_used_at }.from(nil)
+ expect { described_class.new(team_id, user_id).execute }.not_to change { chat_name.reload.last_used_at }
+ end
- service.execute
+ it 'records activity for the related user' do
+ expect_next_instance_of(Users::ActivityService, author: user) do |activity_service|
+ expect(activity_service).to receive(:execute)
+ end
- expect(chat_name.reload.last_used_at).to eq(time)
+ subject
end
end
diff --git a/spec/services/ci/catalog/validate_resource_service_spec.rb b/spec/services/ci/catalog/resources/validate_service_spec.rb
index 3bee37b7e55..b43d98788e2 100644
--- a/spec/services/ci/catalog/validate_resource_service_spec.rb
+++ b/spec/services/ci/catalog/resources/validate_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Catalog::ValidateResourceService, feature_category: :pipeline_composition do
+RSpec.describe Ci::Catalog::Resources::ValidateService, feature_category: :pipeline_composition do
describe '#execute' do
context 'with a project that has a README and a description' do
it 'is valid' do
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index a28dd9e7a55..11f9708f9f3 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -1953,6 +1953,32 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(pipeline.statuses.count).to eq 2
expect(pipeline.statuses.map(&:name)).to match_array %w[test-1 test-my-job]
end
+
+ context 'when inputs have a description' do
+ let(:template) do
+ <<~YAML
+ spec:
+ inputs:
+ stage:
+ suffix:
+ default: my-job
+ description: description
+ ---
+ test-$[[ inputs.suffix ]]:
+ stage: $[[ inputs.stage ]]
+ script: run tests
+ YAML
+ end
+
+ it 'creates a pipeline' do
+ response = execute_service(save_on_errors: true)
+
+ pipeline = response.payload
+
+ expect(pipeline).to be_persisted
+ expect(pipeline.yaml_errors).to be_blank
+ end
+ end
end
context 'when interpolation is invalid' do
diff --git a/spec/services/ci/delete_objects_service_spec.rb b/spec/services/ci/delete_objects_service_spec.rb
index 939b72cef3b..f9fc2316595 100644
--- a/spec/services/ci/delete_objects_service_spec.rb
+++ b/spec/services/ci/delete_objects_service_spec.rb
@@ -47,8 +47,8 @@ RSpec.describe Ci::DeleteObjectsService, :aggregate_failures, feature_category:
context 'with artifacts both ready and not ready for deletion' do
let(:data) { [] }
- let_it_be(:past_ready) { create(:ci_deleted_object, pick_up_at: 2.days.ago) }
- let_it_be(:ready) { create(:ci_deleted_object, pick_up_at: 1.day.ago) }
+ let!(:past_ready) { create(:ci_deleted_object, pick_up_at: 2.days.ago) }
+ let!(:ready) { create(:ci_deleted_object, pick_up_at: 1.day.ago) }
it 'skips records with pick_up_at in the future' do
not_ready = create(:ci_deleted_object, pick_up_at: 1.day.from_now)
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index cdbb0c0f8ce..c060c72ffb2 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
end
- context 'when the project in which the arfifact belongs to is undergoing stats refresh' do
+ context 'when the project in which the artifact belongs to is undergoing stats refresh' do
before do
create(:project_build_artifacts_size_refresh, :pending, project: artifact.project)
end
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index fffac0fd64b..a5dda1d13aa 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -267,230 +267,6 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
- context 'when the use_offset_pagination_for_canceling_redundant_pipelines FF is off' do
- # copy-paste from above
-
- before do
- stub_feature_flags(use_offset_pagination_for_canceling_redundant_pipelines: false)
- end
-
- describe '#execute!' do
- subject(:execute) { service.execute }
-
- context 'when build statuses are set up correctly' do
- it 'has builds of all statuses' do
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- end
- end
-
- context 'when auto-cancel is enabled' do
- before do
- project.update!(auto_cancel_pending_pipelines: 'enabled')
- end
-
- it 'cancels only previous interruptible builds' do
- execute
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- end
-
- it 'logs canceled pipelines' do
- allow(Gitlab::AppLogger).to receive(:info)
-
- execute
-
- expect(Gitlab::AppLogger).to have_received(:info).with(
- class: described_class.name,
- message: "Pipeline #{pipeline.id} auto-canceling pipeline #{prev_pipeline.id}",
- canceled_pipeline_id: prev_pipeline.id,
- canceled_by_pipeline_id: pipeline.id,
- canceled_by_pipeline_source: pipeline.source
- )
- end
-
- context 'when the previous pipeline has a child pipeline' do
- let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
-
- context 'with another nested child pipeline' do
- let(:another_child_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
-
- before do
- create(:ci_build, :interruptible, :running, pipeline: another_child_pipeline)
- create(:ci_build, :interruptible, :running, pipeline: another_child_pipeline)
- end
-
- it 'cancels all nested child pipeline builds' do
- expect(build_statuses(another_child_pipeline)).to contain_exactly('running', 'running')
-
- execute
-
- expect(build_statuses(another_child_pipeline)).to contain_exactly('canceled', 'canceled')
- end
- end
-
- context 'when started after pipeline was finished' do
- before do
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- prev_pipeline.update!(status: "success")
- end
-
- it 'cancels child pipeline builds' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled')
- end
- end
-
- context 'when the child pipeline has interruptible running jobs' do
- before do
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- end
-
- it 'cancels all child pipeline builds' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running')
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
- end
-
- context 'when the child pipeline includes completed interruptible jobs' do
- before do
- create(:ci_build, :interruptible, :failed, pipeline: child_pipeline)
- create(:ci_build, :interruptible, :success, pipeline: child_pipeline)
- end
-
- it 'cancels all child pipeline builds with a cancelable_status' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running', 'failed', 'success')
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled', 'failed', 'success')
- end
- end
- end
-
- context 'when the child pipeline has started non-interruptible job' do
- before do
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- # non-interruptible started
- create(:ci_build, :success, pipeline: child_pipeline)
- end
-
- it 'does not cancel any child pipeline builds' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
- end
- end
-
- context 'when the child pipeline has non-interruptible non-started job' do
- before do
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- end
-
- not_started_statuses = Ci::HasStatus::AVAILABLE_STATUSES - Ci::HasStatus::STARTED_STATUSES
- context 'when the jobs are cancelable' do
- cancelable_not_started_statuses =
- Set.new(not_started_statuses).intersection(Ci::HasStatus::CANCELABLE_STATUSES)
- cancelable_not_started_statuses.each do |status|
- it "cancels all child pipeline builds when build status #{status} included" do
- # non-interruptible but non-started
- create(:ci_build, status.to_sym, pipeline: child_pipeline)
-
- expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
- end
- end
- end
-
- context 'when the jobs are not cancelable' do
- not_cancelable_not_started_statuses = not_started_statuses - Ci::HasStatus::CANCELABLE_STATUSES
- not_cancelable_not_started_statuses.each do |status|
- it "does not cancel child pipeline builds when build status #{status} included" do
- # non-interruptible but non-started
- create(:ci_build, status.to_sym, pipeline: child_pipeline)
-
- expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled', status)
- end
- end
- end
- end
- end
-
- context 'when the pipeline is a child pipeline' do
- let!(:parent_pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
- let(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
-
- before do
- create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
- create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
- end
-
- it 'does not cancel any builds' do
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
-
- execute
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
- end
- end
-
- context 'when the previous pipeline source is webide' do
- let(:prev_pipeline) { create(:ci_pipeline, :webide, project: project) }
-
- it 'does not cancel builds of the previous pipeline' do
- execute
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('created', 'running', 'success')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- end
- end
-
- it 'does not cancel future pipelines' do
- expect(prev_pipeline.id).to be < pipeline.id
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
-
- described_class.new(prev_pipeline).execute
-
- expect(build_statuses(pipeline.reload)).to contain_exactly('pending')
- end
-
- it_behaves_like 'time limits pipeline cancellation'
- end
-
- context 'when auto-cancel is disabled' do
- before do
- project.update!(auto_cancel_pending_pipelines: 'disabled')
- end
-
- it 'does not cancel any build' do
- subject
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- end
- end
- end
- end
-
private
def build_statuses(pipeline)
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 93dc9481bf0..88ccda90df0 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -1247,6 +1247,124 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category
end
end
+ describe 'deployments creation' do
+ let(:config) do
+ <<-YAML
+ stages: [stage-0, stage-1, stage-2, stage-3, stage-4]
+
+ test:
+ stage: stage-0
+ script: exit 0
+
+ review:
+ stage: stage-1
+ environment:
+ name: review
+ action: start
+ script: exit 0
+
+ staging:
+ stage: stage-2
+ environment:
+ name: staging
+ action: start
+ script: exit 0
+ when: manual
+ allow_failure: false
+
+ canary:
+ stage: stage-3
+ environment:
+ name: canary
+ action: start
+ script: exit 0
+ when: manual
+
+ production-a:
+ stage: stage-4
+ environment:
+ name: production-a
+ action: start
+ script: exit 0
+ when: manual
+
+ production-b:
+ stage: stage-4
+ environment:
+ name: production-b
+ action: start
+ script: exit 0
+ when: manual
+ needs: [canary]
+ YAML
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ let(:test_job) { all_builds.find_by(name: 'test') }
+ let(:review_deploy_job) { all_builds.find_by(name: 'review') }
+ let(:staging_deploy_job) { all_builds.find_by(name: 'staging') }
+ let(:canary_deploy_job) { all_builds.find_by(name: 'canary') }
+ let(:production_a_deploy_job) { all_builds.find_by(name: 'production-a') }
+ let(:production_b_deploy_job) { all_builds.find_by(name: 'production-b') }
+
+ before do
+ create(:environment, name: 'review', project: project)
+ create(:environment, name: 'staging', project: project)
+ create(:environment, name: 'canary', project: project)
+ create(:environment, name: 'production-a', project: project)
+ create(:environment, name: 'production-b', project: project)
+
+ stub_ci_pipeline_yaml_file(config)
+ pipeline # create the pipeline
+ end
+
+ it 'creates deployment records for the deploy jobs', :aggregate_failures do
+ # processes the 'test' job, not creating a Deployment record
+ expect { process_pipeline }.not_to change { Deployment.count }
+ succeed_pending
+ expect(test_job.status).to eq 'success'
+
+ # processes automatic 'review' deploy job, creating a Deployment record
+ expect { process_pipeline }.to change { Deployment.count }.by(1)
+ succeed_pending
+ expect(review_deploy_job.status).to eq 'success'
+
+ # processes manual 'staging' deploy job, creating a Deployment record
+ # the subsequent manual deploy jobs ('canary', 'production-a', 'production-b')
+ # are not yet processed because 'staging' is set as `allow_failure: false`
+ expect { process_pipeline }.to change { Deployment.count }.by(1)
+ play_manual_action('staging')
+ succeed_pending
+ expect(staging_deploy_job.reload.status).to eq 'success'
+
+ # processes manual 'canary' deployment job
+ # the subsequent manual deploy jobs ('production-a' and 'production-b')
+ # are also processed because 'canary' is set by default as `allow_failure: true`
+ # the 'production-b' is set as `needs: [canary]`, but it is still processed
+ # overall, 3 Deployment records are created
+ expect { process_pipeline }.to change { Deployment.count }.by(3)
+ expect(canary_deploy_job.status).to eq 'manual'
+ expect(production_a_deploy_job.status).to eq 'manual'
+ expect(production_b_deploy_job.status).to eq 'skipped'
+
+ # play and succeed the manual 'canary' and 'production-a' jobs
+ play_manual_action('canary')
+ play_manual_action('production-a')
+ succeed_pending
+ expect(canary_deploy_job.reload.status).to eq 'success'
+ expect(production_a_deploy_job.reload.status).to eq 'success'
+ expect(production_b_deploy_job.reload.status).to eq 'created'
+
+ # process the manual 'production-b' job again, no Deployment record is created
+ # because it has already been created when 'production-b' was first processed
+ expect { process_pipeline }.not_to change { Deployment.count }
+ expect(production_b_deploy_job.reload.status).to eq 'manual'
+ end
+ end
+
private
def all_builds
diff --git a/spec/services/ci/process_sync_events_service_spec.rb b/spec/services/ci/process_sync_events_service_spec.rb
index ff9bcd0f8e9..c58d73815b0 100644
--- a/spec/services/ci/process_sync_events_service_spec.rb
+++ b/spec/services/ci/process_sync_events_service_spec.rb
@@ -145,14 +145,6 @@ RSpec.describe Ci::ProcessSyncEventsService, feature_category: :continuous_integ
end
end
- context 'when the use_traversal_ids FF is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like 'event consuming'
- end
-
it_behaves_like 'event consuming'
it 'enqueues Namespaces::ProcessSyncEventsWorker if any left' do
diff --git a/spec/services/ci/refs/enqueue_pipelines_to_unlock_service_spec.rb b/spec/services/ci/refs/enqueue_pipelines_to_unlock_service_spec.rb
new file mode 100644
index 00000000000..468302cb689
--- /dev/null
+++ b/spec/services/ci/refs/enqueue_pipelines_to_unlock_service_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Refs::EnqueuePipelinesToUnlockService, :unlock_pipelines, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ describe '#execute' do
+ let_it_be(:ref) { 'master' }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:tag_ref_path) { "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" }
+ let_it_be(:ci_ref_tag) { create(:ci_ref, ref_path: tag_ref_path, project: project) }
+ let_it_be(:branch_ref_path) { "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
+ let_it_be(:ci_ref_branch) { create(:ci_ref, ref_path: branch_ref_path, project: project) }
+ let_it_be(:other_ref) { 'other_ref' }
+ let_it_be(:other_ref_path) { "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{other_ref}" }
+ let_it_be(:other_ci_ref) { create(:ci_ref, ref_path: other_ref_path, project: project) }
+
+ let(:service) { described_class.new }
+
+ subject(:execute) { service.execute(target_ref, before_pipeline: before_pipeline) }
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ stub_const("#{described_class}::ENQUEUE_INTERVAL_SECONDS", 0)
+ end
+
+ shared_examples_for 'unlocking pipelines' do
+ let(:is_tag) { target_ref.ref_path.include?(::Gitlab::Git::TAG_REF_PREFIX) }
+
+ let!(:other_ref_pipeline) { create_pipeline(:locked, other_ref, tag: false) }
+ let!(:old_unlocked_pipeline) { create_pipeline(:unlocked, ref) }
+ let!(:older_locked_pipeline_1) { create_pipeline(:locked, ref) }
+ let!(:older_locked_pipeline_2) { create_pipeline(:locked, ref) }
+ let!(:older_locked_pipeline_3) { create_pipeline(:locked, ref) }
+ let!(:older_child_pipeline) { create_pipeline(:locked, ref, child_of: older_locked_pipeline_3) }
+ let!(:pipeline) { create_pipeline(:locked, ref) }
+ let!(:child_pipeline) { create_pipeline(:locked, ref, child_of: pipeline) }
+ let!(:newer_pipeline) { create_pipeline(:locked, ref) }
+
+ context 'when before_pipeline is given' do
+ let(:before_pipeline) { pipeline }
+
+ it 'only enqueues older locked pipelines within the ref' do
+ expect { execute }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([])
+ .to([
+ older_locked_pipeline_1.id,
+ older_locked_pipeline_2.id,
+ older_locked_pipeline_3.id,
+ older_child_pipeline.id
+ ])
+
+ expect(execute).to include(
+ status: :success,
+ total_pending_entries: 4,
+ total_new_entries: 4
+ )
+ end
+ end
+
+ context 'when before_pipeline is not given' do
+ let(:before_pipeline) { nil }
+
+ it 'enqueues all locked pipelines within the ref' do
+ expect { execute }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([])
+ .to([
+ older_locked_pipeline_1.id,
+ older_locked_pipeline_2.id,
+ older_locked_pipeline_3.id,
+ older_child_pipeline.id,
+ pipeline.id,
+ child_pipeline.id,
+ newer_pipeline.id
+ ])
+
+ expect(execute).to include(
+ status: :success,
+ total_pending_entries: 7,
+ total_new_entries: 7
+ )
+ end
+ end
+ end
+
+ context 'when ref is a tag' do
+ let(:target_ref) { ci_ref_tag }
+
+ it_behaves_like 'unlocking pipelines'
+ end
+
+ context 'when ref is a branch' do
+ let(:target_ref) { ci_ref_branch }
+
+ it_behaves_like 'unlocking pipelines'
+ end
+
+ def create_pipeline(type, ref, tag: is_tag, child_of: nil)
+ trait = type == :locked ? :artifacts_locked : :unlocked
+ create(:ci_pipeline, trait, ref: ref, tag: tag, project: project, child_of: child_of).tap do |p|
+ if child_of
+ build = create(:ci_build, pipeline: child_of)
+ create(:ci_sources_pipeline, source_job: build, source_project: project, pipeline: p, project: project)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index caed9815fb3..80fbfc04f9b 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -248,7 +248,8 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
end
describe '#clone!' do
- let(:new_job) { service.clone!(job) }
+ let(:start_pipeline_on_clone) { false }
+ let(:new_job) { service.clone!(job, start_pipeline: start_pipeline_on_clone) }
it 'raises an error when an unexpected class is passed' do
expect { service.clone!(create(:ci_build).present) }.to raise_error(TypeError)
@@ -258,7 +259,24 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
include_context 'retryable bridge'
it_behaves_like 'clones the job'
- it_behaves_like 'creates associations for a deployable job', :ci_bridge
+
+ it 'does not create a new deployment' do
+ expect { new_job }.not_to change { Deployment.count }
+ end
+
+ context 'when the pipeline is started automatically' do
+ let(:start_pipeline_on_clone) { true }
+
+ it_behaves_like 'creates associations for a deployable job', :ci_bridge
+ end
+
+ context 'when `create_deployment_only_for_processable_jobs` FF is disabled' do
+ before do
+ stub_feature_flags(create_deployment_only_for_processable_jobs: false)
+ end
+
+ it_behaves_like 'creates associations for a deployable job', :ci_bridge
+ end
context 'when given variables' do
let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
@@ -272,10 +290,25 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
context 'when the job to be cloned is a build' do
include_context 'retryable build'
- let(:job) { job_to_clone }
-
it_behaves_like 'clones the job'
- it_behaves_like 'creates associations for a deployable job', :ci_build
+
+ it 'does not create a new deployment' do
+ expect { new_job }.not_to change { Deployment.count }
+ end
+
+ context 'when the pipeline is started automatically' do
+ let(:start_pipeline_on_clone) { true }
+
+ it_behaves_like 'creates associations for a deployable job', :ci_build
+ end
+
+ context 'when `create_deployment_only_for_processable_jobs` FF is disabled' do
+ before do
+ stub_feature_flags(create_deployment_only_for_processable_jobs: false)
+ end
+
+ it_behaves_like 'creates associations for a deployable job', :ci_build
+ end
context 'when given variables' do
let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
diff --git a/spec/services/ci/runners/register_runner_service_spec.rb b/spec/services/ci/runners/register_runner_service_spec.rb
index 7252763c13e..b5921773364 100644
--- a/spec/services/ci/runners/register_runner_service_spec.rb
+++ b/spec/services/ci/runners/register_runner_service_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_categor
expect(runner).to be_an_instance_of(::Ci::Runner)
expect(runner.persisted?).to be_falsey
expect(runner.errors.messages).to eq(
- runner_projects: ['Maximum number of ci registered project runners (1) exceeded']
+ 'runner_projects.base': ['Maximum number of ci registered project runners (1) exceeded']
)
expect(project.runners.reload.size).to eq(1)
end
@@ -252,7 +252,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_categor
expect(runner).to be_an_instance_of(::Ci::Runner)
expect(runner.persisted?).to be_falsey
expect(runner.errors.messages).to eq(
- runner_namespaces: ['Maximum number of ci registered group runners (1) exceeded']
+ 'runner_namespaces.base': ['Maximum number of ci registered group runners (1) exceeded']
)
expect(group.runners.reload.size).to eq(1)
end
diff --git a/spec/services/ci/unlock_pipeline_service_spec.rb b/spec/services/ci/unlock_pipeline_service_spec.rb
new file mode 100644
index 00000000000..1a1150dca9e
--- /dev/null
+++ b/spec/services/ci/unlock_pipeline_service_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UnlockPipelineService, :unlock_pipelines, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ describe '#execute', :aggregate_failures do
+ let(:service) { described_class.new(pipeline) }
+
+ let!(:pipeline) do
+ create(
+ :ci_pipeline,
+ :with_coverage_report_artifact,
+ :with_codequality_mr_diff_report,
+ :with_persisted_artifacts,
+ locked: :artifacts_locked
+ )
+ end
+
+ subject(:execute) { service.execute }
+
+ context 'when pipeline is not yet exclusively leased' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'unlocks the pipeline and all its artifacts' do
+ expect { execute }
+ .to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ .and change { pipeline.reload.job_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }.to(true)
+
+ expect(execute).to eq(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: false,
+ exec_timeout: false,
+ unlocked_job_artifacts: pipeline.job_artifacts.count,
+ unlocked_pipeline_artifacts: pipeline.pipeline_artifacts.count
+ )
+ end
+
+ context 'and pipeline is already unlocked' do
+ before do
+ described_class.new(pipeline).execute
+ end
+
+ it 'skips the pipeline' do
+ expect(Ci::JobArtifact).not_to receive(:where)
+
+ expect(execute).to eq(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: true,
+ exec_timeout: false,
+ unlocked_job_artifacts: 0,
+ unlocked_pipeline_artifacts: 0
+ )
+ end
+ end
+
+ context 'and max execution duration was reached' do
+ let!(:first_artifact) { pipeline.job_artifacts.order(:id).first }
+ let!(:last_artifact) { pipeline.job_artifacts.order(:id).last }
+
+ before do
+ stub_const("#{described_class}::MAX_EXEC_DURATION", 0.seconds)
+ end
+
+ it 'keeps the unlocked state of job artifacts already processed and re-enqueues the pipeline' do
+ expect { execute }
+ .to change { first_artifact.reload.artifact_unlocked? }.to(true)
+ .and not_change { last_artifact.reload.artifact_unlocked? }
+ .and not_change { pipeline.reload.locked }
+ .and not_change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }
+ .and change { pipeline_ids_waiting_to_be_unlocked }.from([]).to([pipeline.id])
+
+ expect(execute).to eq(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: false,
+ exec_timeout: true,
+ unlocked_job_artifacts: 1,
+ unlocked_pipeline_artifacts: 0
+ )
+ end
+ end
+
+ context 'and an error happened' do
+ context 'and was raised in the middle batches of job artifacts being unlocked' do
+ let!(:first_artifact) { pipeline.job_artifacts.order(:id).first }
+ let!(:last_artifact) { pipeline.job_artifacts.order(:id).last }
+
+ before do
+ mock_relation = instance_double('Ci::JobArtifact::ActiveRecord_Relation')
+ allow(Ci::JobArtifact).to receive(:where).and_call_original
+ allow(Ci::JobArtifact).to receive(:where).with(id: [last_artifact.id]).and_return(mock_relation)
+ allow(mock_relation).to receive(:update_all).and_raise('An error')
+ end
+
+ it 'keeps the unlocked state of job artifacts already processed and re-enqueues the pipeline' do
+ expect { execute }
+ .to raise_error('An error')
+ .and change { first_artifact.reload.artifact_unlocked? }.to(true)
+ .and not_change { last_artifact.reload.artifact_unlocked? }
+ .and not_change { pipeline.reload.locked }
+ .and not_change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }
+ .and change { pipeline_ids_waiting_to_be_unlocked }.from([]).to([pipeline.id])
+ end
+ end
+
+ context 'and was raised while unlocking pipeline artifacts' do
+ before do
+ allow(pipeline).to receive_message_chain(:pipeline_artifacts, :update_all).and_raise('An error')
+ end
+
+ it 'keeps the unlocked state of job artifacts and re-enqueues the pipeline' do
+ expect { execute }
+ .to raise_error('An error')
+ .and change { pipeline.reload.job_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and not_change { Ci::PipelineArtifact.where(pipeline_id: pipeline.id).all?(&:artifact_unlocked?) }
+ .and not_change { pipeline.reload.locked }.from('artifacts_locked')
+ .and change { pipeline_ids_waiting_to_be_unlocked }.from([]).to([pipeline.id])
+ end
+ end
+
+ context 'and was raised while unlocking pipeline' do
+ before do
+ allow(pipeline).to receive(:update_column).and_raise('An error')
+ end
+
+ it 'keeps the unlocked state of job artifacts and pipeline artifacts and re-enqueues the pipeline' do
+ expect { execute }
+ .to raise_error('An error')
+ .and change { pipeline.reload.job_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and not_change { pipeline.reload.locked }.from('artifacts_locked')
+ .and change { pipeline_ids_waiting_to_be_unlocked }.from([]).to([pipeline.id])
+ end
+ end
+ end
+ end
+
+ context 'when pipeline is already exclusively leased' do
+ before do
+ allow(service).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+
+ it 'does nothing and returns success' do
+ expect { execute }.not_to change { pipeline.reload.locked }
+
+ expect(execute).to include(
+ status: :success,
+ skipped_already_leased: true,
+ unlocked_job_artifacts: 0,
+ unlocked_pipeline_artifacts: 0
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/deployments/create_service_spec.rb b/spec/services/deployments/create_service_spec.rb
index 2a70d450575..77dcad35f70 100644
--- a/spec/services/deployments/create_service_spec.rb
+++ b/spec/services/deployments/create_service_spec.rb
@@ -86,7 +86,6 @@ RSpec.describe Deployments::CreateService, feature_category: :continuous_deliver
)
expect(service.deployment_attributes).to eq(
- cluster_id: 1,
project_id: 2,
environment_id: 3,
ref: 'master',
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index b6a80cf26cc..5534dea85b2 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -174,7 +174,7 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
let(:namespace) { project.namespace }
subject(:service_action) { run_service }
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index 8e5065184ca..8a4dd8b5fc2 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -120,7 +120,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
let(:namespace) { project.namespace }
subject(:service_action) { run_service }
end
@@ -219,7 +219,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
let(:namespace) { project.namespace }
subject(:service_action) { run_service }
end
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index 48959baeaa5..e087f2ffc7e 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -181,7 +181,7 @@ RSpec.describe DraftNotes::PublishService, feature_category: :code_review_workfl
# NOTE: This should be reduced as we work on reducing Gitaly calls.
# Gitaly requests shouldn't go above this threshold as much as possible
# as it may add more to the Gitaly N+1 issue we are experiencing.
- expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(20)
+ expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(19)
end
end
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 74f1f4bc7ac..fe54663b983 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -685,44 +685,21 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
let(:commits_to_sync) { [] }
shared_examples 'enqueues Jira sync worker' do
- context "batch_delay_jira_branch_sync_worker feature flag is enabled" do
- before do
- stub_feature_flags(batch_delay_jira_branch_sync_worker: true)
- end
-
- specify :aggregate_failures do
- Sidekiq::Testing.fake! do
- if commits_to_sync.any?
- expect(JiraConnect::SyncBranchWorker)
- .to receive(:perform_in)
- .with(kind_of(Numeric), project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
- .and_call_original
- else
- expect(JiraConnect::SyncBranchWorker)
- .to receive(:perform_async)
- .with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
- .and_call_original
- end
-
- expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
- end
- end
- end
-
- context "batch_delay_jira_branch_sync_worker feature flag is disabled" do
- before do
- stub_feature_flags(batch_delay_jira_branch_sync_worker: false)
- end
-
- specify :aggregate_failures do
- Sidekiq::Testing.fake! do
+ specify :aggregate_failures do
+ Sidekiq::Testing.fake! do
+ if commits_to_sync.any?
+ expect(JiraConnect::SyncBranchWorker)
+ .to receive(:perform_in)
+ .with(kind_of(Numeric), project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
+ .and_call_original
+ else
expect(JiraConnect::SyncBranchWorker)
.to receive(:perform_async)
.with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
.and_call_original
-
- expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
end
+
+ expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
end
end
end
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 5e37f33e4f2..78deb3cf254 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -491,30 +491,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it 'returns true' do
expect(service.execute).to eq(true)
end
-
- context 'error moving group' do
- before do
- allow(internal_group).to receive(:move_dir).and_raise(Gitlab::UpdatePathError)
- end
-
- it 'does not raise an error' do
- expect { service.execute }.not_to raise_error
- end
-
- it 'returns false' do
- expect(service.execute).to eq(false)
- end
-
- it 'has the right error' do
- service.execute
-
- expect(internal_group.errors.full_messages.first).to eq('Gitlab::UpdatePathError')
- end
-
- it "hasn't changed the path" do
- expect { service.execute }.not_to change { internal_group.reload.path }
- end
- end
end
context 'for a subgroup' do
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 982b8b11383..39832ee4b13 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -15,115 +15,157 @@ RSpec.describe Import::GithubService, feature_category: :importers do
let(:settings) { instance_double(Gitlab::GithubImport::Settings) }
let(:user_namespace_path) { user.namespace_path }
let(:optional_stages) { nil }
+ let(:timeout_strategy) { "optimistic" }
let(:params) do
{
repo_id: 123,
new_name: 'new_repo',
target_namespace: user_namespace_path,
- optional_stages: optional_stages
+ optional_stages: optional_stages,
+ timeout_strategy: timeout_strategy
}
end
+ let(:client) { Gitlab::GithubImport::Client.new(token) }
+ let(:project_double) { instance_double(Project, persisted?: true) }
+
subject(:github_importer) { described_class.new(client, user, params) }
- shared_examples 'handles errors' do |klass|
- let(:client) { klass.new(token) }
- let(:project_double) { instance_double(Project, persisted?: true) }
+ before do
+ allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings)
+ allow(settings)
+ .to receive(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
+ end
+
+ context 'do not raise an exception on input error' do
+ let(:exception) { Octokit::ClientError.new(status: 404, body: 'Not Found') }
before do
- allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings)
- allow(settings)
- .to receive(:write)
- .with(
- optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens]
- )
+ expect(client).to receive(:repository).and_raise(exception)
end
- context 'do not raise an exception on input error' do
- let(:exception) { Octokit::ClientError.new(status: 404, body: 'Not Found') }
+ it 'logs the original error' do
+ expect(Gitlab::Import::Logger).to receive(:error).with({
+ message: 'Import failed due to a GitHub error',
+ status: 404,
+ error: 'Not Found'
+ }).and_call_original
- before do
- expect(client).to receive(:repository).and_raise(exception)
- end
+ subject.execute(access_params, :github)
+ end
- it 'logs the original error' do
- expect(Gitlab::Import::Logger).to receive(:error).with({
- message: 'Import failed due to a GitHub error',
- status: 404,
- error: 'Not Found'
- }).and_call_original
+ it 'returns an error with message and code' do
+ result = subject.execute(access_params, :github)
- subject.execute(access_params, :github)
- end
+ expect(result).to include(
+ message: 'Import failed due to a GitHub error: Not Found (HTTP 404)',
+ status: :error,
+ http_status: :unprocessable_entity
+ )
+ end
+ end
- it 'returns an error with message and code' do
- result = subject.execute(access_params, :github)
+ it 'raises an exception for unknown error causes' do
+ exception = StandardError.new('Not Implemented')
- expect(result).to include(
- message: 'Import failed due to a GitHub error: Not Found (HTTP 404)',
- status: :error,
- http_status: :unprocessable_entity
- )
- end
- end
+ expect(client).to receive(:repository).and_raise(exception)
- it 'raises an exception for unknown error causes' do
- exception = StandardError.new('Not Implemented')
+ expect(Gitlab::Import::Logger).not_to receive(:error)
- expect(client).to receive(:repository).and_raise(exception)
+ expect { subject.execute(access_params, :github) }.to raise_error(exception)
+ end
+
+ context 'repository size validation' do
+ let(:repository_double) { { name: 'repository', size: 99 } }
- expect(Gitlab::Import::Logger).not_to receive(:error)
+ before do
+ allow(subject).to receive(:authorized?).and_return(true)
+ expect(client).to receive(:repository).and_return(repository_double)
- expect { subject.execute(access_params, :github) }.to raise_error(exception)
+ allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator|
+ allow(creator).to receive(:execute).and_return(project_double)
+ end
end
- context 'repository size validation' do
- let(:repository_double) { { name: 'repository', size: 99 } }
+ context 'when there is no repository size limit defined' do
+ it 'skips the check, succeeds, and tracks an access level' do
+ expect(subject.execute(access_params, :github)).to include(status: :success)
+ expect(settings)
+ .to have_received(:write)
+ .with(optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
+ expect_snowplow_event(
+ category: 'Import::GithubService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { import_type: 'github', user_role: 'Owner' }
+ )
+ end
+ end
- before do
- allow(subject).to receive(:authorized?).and_return(true)
- expect(client).to receive(:repository).and_return(repository_double)
+ context 'when the target namespace repository size limit is defined' do
+ let_it_be(:group) { create(:group, repository_size_limit: 100) }
- allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator|
- allow(creator).to receive(:execute).and_return(project_double)
- end
+ before do
+ params[:target_namespace] = group.full_path
end
- context 'when there is no repository size limit defined' do
- it 'skips the check, succeeds, and tracks an access level' do
- expect(subject.execute(access_params, :github)).to include(status: :success)
- expect(settings)
- .to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
- expect_snowplow_event(
- category: 'Import::GithubService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { import_type: 'github', user_role: 'Owner' }
+ it 'succeeds when the repository is smaller than the limit' do
+ expect(subject.execute(access_params, :github)).to include(status: :success)
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
)
- end
+ expect_snowplow_event(
+ category: 'Import::GithubService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { import_type: 'github', user_role: 'Not a member' }
+ )
end
- context 'when the target namespace repository size limit is defined' do
- let_it_be(:group) { create(:group, repository_size_limit: 100) }
+ it 'returns error when the repository is larger than the limit' do
+ repository_double[:size] = 101
- before do
- params[:target_namespace] = group.full_path
- end
+ expect(subject.execute(access_params, :github)).to include(size_limit_error)
+ end
+ end
+
+ context 'when target namespace repository limit is not defined' do
+ let_it_be(:group) { create(:group) }
+ before do
+ stub_application_setting(repository_size_limit: 100)
+ end
+
+ context 'when application size limit is defined' do
it 'succeeds when the repository is smaller than the limit' do
expect(subject.execute(access_params, :github)).to include(status: :success)
expect(settings)
.to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
+ .with(
+ optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
expect_snowplow_event(
category: 'Import::GithubService',
action: 'create',
label: 'import_access_level',
user: user,
- extra: { import_type: 'github', user_role: 'Not a member' }
+ extra: { import_type: 'github', user_role: 'Owner' }
)
end
@@ -133,167 +175,142 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(subject.execute(access_params, :github)).to include(size_limit_error)
end
end
-
- context 'when target namespace repository limit is not defined' do
- let_it_be(:group) { create(:group) }
-
- before do
- stub_application_setting(repository_size_limit: 100)
- end
-
- context 'when application size limit is defined' do
- it 'succeeds when the repository is smaller than the limit' do
- expect(subject.execute(access_params, :github)).to include(status: :success)
- expect(settings)
- .to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
- expect_snowplow_event(
- category: 'Import::GithubService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { import_type: 'github', user_role: 'Owner' }
- )
- end
-
- it 'returns error when the repository is larger than the limit' do
- repository_double[:size] = 101
-
- expect(subject.execute(access_params, :github)).to include(size_limit_error)
- end
- end
- end
-
- context 'when optional stages params present' do
- let(:optional_stages) do
- {
- single_endpoint_issue_events_import: true,
- single_endpoint_notes_import: 'false',
- attachments_import: false,
- collaborators_import: true
- }
- end
-
- it 'saves optional stages choice to import_data' do
- subject.execute(access_params, :github)
-
- expect(settings)
- .to have_received(:write)
- .with(
- optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens]
- )
- end
- end
-
- context 'when additional access tokens are present' do
- it 'saves additional access tokens to import_data' do
- subject.execute(access_params, :github)
-
- expect(settings)
- .to have_received(:write)
- .with(optional_stages: optional_stages, additional_access_tokens: %w[foo bar])
- end
- end
end
- context 'when import source is disabled' do
- let(:repository_double) do
+ context 'when optional stages params present' do
+ let(:optional_stages) do
{
- name: 'vim',
- description: 'test',
- full_name: 'test/vim',
- clone_url: 'http://repo.com/repo/repo.git',
- private: false,
- has_wiki?: false
+ single_endpoint_issue_events_import: true,
+ single_endpoint_notes_import: 'false',
+ attachments_import: false,
+ collaborators_import: true
}
end
- before do
- stub_application_setting(import_sources: nil)
- allow(client).to receive(:repository).and_return(repository_double)
+ it 'saves optional stages choice to import_data' do
+ subject.execute(access_params, :github)
+
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
end
+ end
- it 'returns forbidden' do
- result = subject.execute(access_params, :github)
+ context 'when timeout strategy param is present' do
+ let(:timeout_strategy) { 'pessimistic' }
- expect(result).to include(
- status: :error,
- http_status: :forbidden
- )
+ it 'saves timeout strategy to import_data' do
+ subject.execute(access_params, :github)
+
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
end
end
- context 'when a blocked/local URL is used as github_hostname' do
- let(:message) { 'Error while attempting to import from GitHub' }
- let(:error) { "Invalid URL: #{url}" }
+ context 'when additional access tokens are present' do
+ it 'saves additional access tokens to import_data' do
+ subject.execute(access_params, :github)
- before do
- stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: %w[foo bar],
+ timeout_strategy: timeout_strategy
+ )
end
+ end
+ end
- where(url: %w[https://localhost https://10.0.0.1])
-
- with_them do
- it 'returns and logs an error' do
- allow(github_importer).to receive(:url).and_return(url)
+ context 'when import source is disabled' do
+ let(:repository_double) do
+ {
+ name: 'vim',
+ description: 'test',
+ full_name: 'test/vim',
+ clone_url: 'http://repo.com/repo/repo.git',
+ private: false,
+ has_wiki?: false
+ }
+ end
- expect(Gitlab::Import::Logger).to receive(:error).with({
- message: message,
- error: error
- }).and_call_original
- expect(github_importer.execute(access_params, :github)).to include(blocked_url_error(url))
- end
- end
+ before do
+ stub_application_setting(import_sources: nil)
+ allow(client).to receive(:repository).and_return(repository_double)
end
- context 'when target_namespace is blank' do
- before do
- params[:target_namespace] = ''
- end
+ it 'returns forbidden' do
+ result = subject.execute(access_params, :github)
- it 'raises an exception' do
- expect { subject.execute(access_params, :github) }.to raise_error(ArgumentError, 'Target namespace is required')
- end
+ expect(result).to include(
+ status: :error,
+ http_status: :forbidden
+ )
end
+ end
- context 'when namespace to import repository into does not exist' do
- before do
- params[:target_namespace] = 'unknown_path'
- end
+ context 'when a blocked/local URL is used as github_hostname' do
+ let(:message) { 'Error while attempting to import from GitHub' }
+ let(:error) { "Invalid URL: #{url}" }
- it 'returns an error' do
- expect(github_importer.execute(access_params, :github)).to include(not_existed_namespace_error)
- end
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
end
- context 'when user has no permissions to import repository into the specified namespace' do
- let_it_be(:group) { create(:group) }
+ where(url: %w[https://localhost https://10.0.0.1])
- before do
- params[:target_namespace] = group.full_path
- end
+ with_them do
+ it 'returns and logs an error' do
+ allow(github_importer).to receive(:url).and_return(url)
- it 'returns an error' do
- expect(github_importer.execute(access_params, :github)).to include(taken_namespace_error)
+ expect(Gitlab::Import::Logger).to receive(:error).with({
+ message: message,
+ error: error
+ }).and_call_original
+ expect(github_importer.execute(access_params, :github)).to include(blocked_url_error(url))
end
end
end
- context 'when remove_legacy_github_client feature flag is enabled' do
+ context 'when target_namespace is blank' do
+ before do
+ params[:target_namespace] = ''
+ end
+
+ it 'raises an exception' do
+ expect { subject.execute(access_params, :github) }.to raise_error(ArgumentError, 'Target namespace is required')
+ end
+ end
+
+ context 'when namespace to import repository into does not exist' do
before do
- stub_feature_flags(remove_legacy_github_client: true)
+ params[:target_namespace] = 'unknown_path'
end
- include_examples 'handles errors', Gitlab::GithubImport::Client
+ it 'returns an error' do
+ expect(github_importer.execute(access_params, :github)).to include(not_existed_namespace_error)
+ end
end
- context 'when remove_legacy_github_client feature flag is disabled' do
+ context 'when user has no permissions to import repository into the specified namespace' do
+ let_it_be(:group) { create(:group) }
+
before do
- stub_feature_flags(remove_legacy_github_client: false)
+ params[:target_namespace] = group.full_path
end
- include_examples 'handles errors', Gitlab::LegacyGithubImport::Client
+ it 'returns an error' do
+ expect(github_importer.execute(access_params, :github)).to include(taken_namespace_error)
+ end
end
def size_limit_error
diff --git a/spec/services/import/validate_remote_git_endpoint_service_spec.rb b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
index 1d2b3975832..15e80f2c85d 100644
--- a/spec/services/import/validate_remote_git_endpoint_service_spec.rb
+++ b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
@@ -7,7 +7,9 @@ RSpec.describe Import::ValidateRemoteGitEndpointService, feature_category: :impo
let_it_be(:base_url) { 'http://demo.host/path' }
let_it_be(:endpoint_url) { "#{base_url}/info/refs?service=git-upload-pack" }
- let_it_be(:error_message) { "#{base_url} is not a valid HTTP Git repository" }
+ let_it_be(:endpoint_error_message) { "#{base_url} endpoint error:" }
+ let_it_be(:body_error_message) { described_class::INVALID_BODY_MESSAGE }
+ let_it_be(:content_type_error_message) { described_class::INVALID_CONTENT_TYPE_MESSAGE }
describe '#execute' do
let(:valid_response) do
@@ -70,13 +72,14 @@ RSpec.describe Import::ValidateRemoteGitEndpointService, feature_category: :impo
end
it 'reports error when status code is not 200' do
- stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ status: 301 }))
+ error_response = { status: 401 }
+ stub_full_request(endpoint_url, method: :get).to_return(error_response)
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
- expect(result.message).to eq(error_message)
+ expect(result.message).to eq("#{endpoint_error_message} #{error_response[:status]}")
end
it 'reports error when invalid URL is provided' do
@@ -94,27 +97,49 @@ RSpec.describe Import::ValidateRemoteGitEndpointService, feature_category: :impo
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
- expect(result.message).to eq(error_message)
+ expect(result.message).to eq(content_type_error_message)
end
- it 'reports error when body is in invalid format' do
+ it 'reports error when body is too short' do
stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid content' }))
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
- expect(result.message).to eq(error_message)
+ expect(result.message).to eq(body_error_message)
+ end
+
+ it 'reports error when body is in invalid format' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid long content with no git respons whatshowever' }))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(body_error_message)
+ end
+
+ it 'reports error when http exceptions are raised' do
+ err = SocketError.new('dummy message')
+ stub_full_request(endpoint_url, method: :get).to_raise(err)
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq("HTTP #{err.class.name.underscore} error: #{err.message}")
end
- it 'reports error when exception is raised' do
- stub_full_request(endpoint_url, method: :get).to_raise(SocketError.new('dummy message'))
+ it 'reports error when other exceptions are raised' do
+ err = StandardError.new('internal dummy message')
+ stub_full_request(endpoint_url, method: :get).to_raise(err)
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
- expect(result.message).to eq(error_message)
+ expect(result.message).to eq("Internal #{err.class.name.underscore} error: #{err.message}")
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index dabbd4bfa84..009f68594d7 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -194,7 +194,7 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do
end
end
- context "closed by a merge request", :sidekiq_might_not_need_inline do
+ context "closed by a merge request" do
subject(:close_issue) do
perform_enqueued_jobs do
described_class.new(container: project, current_user: user).close_issue(issue, closed_via: closing_merge_request)
diff --git a/spec/services/issues/set_crm_contacts_service_spec.rb b/spec/services/issues/set_crm_contacts_service_spec.rb
index aa5dec20a13..7d709bbd9c8 100644
--- a/spec/services/issues/set_crm_contacts_service_spec.rb
+++ b/spec/services/issues/set_crm_contacts_service_spec.rb
@@ -106,6 +106,14 @@ RSpec.describe Issues::SetCrmContactsService, feature_category: :team_planning d
it_behaves_like 'setting contacts'
it_behaves_like 'adds system note', 1, 1
+
+ context 'with empty list' do
+ let(:params) { { replace_ids: [] } }
+ let(:expected_contacts) { [] }
+
+ it_behaves_like 'setting contacts'
+ it_behaves_like 'adds system note', 0, 2
+ end
end
context 'add' do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index eb9fe2b4ed7..c4012e2a98f 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
let_it_be(:label3) { create(:label, title: 'c', project: project) }
let_it_be(:milestone) { create(:milestone, project: project) }
+ let(:container) { project }
let(:issue) do
create(
:issue,
@@ -49,7 +50,7 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
end
def update_issue(opts)
- described_class.new(container: project, current_user: user, params: opts).execute(issue)
+ described_class.new(container: container, current_user: user, params: opts).execute(issue)
end
it_behaves_like 'issuable update service updating last_edited_at values' do
@@ -825,7 +826,7 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
end
it 'updates updated_at' do
- expect(issue.reload.updated_at).to be > Time.current
+ expect(issue.reload.updated_at).to be_future
end
end
end
@@ -1006,6 +1007,12 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
it_behaves_like 'keeps issuable labels sorted after update'
it_behaves_like 'broadcasting issuable labels updates'
+ context 'when the issue belongs directly to a group' do
+ let(:container) { group }
+
+ it_behaves_like 'updating issuable labels'
+ end
+
def update_issuable(update_params)
update_issue(update_params)
end
diff --git a/spec/services/jira_connect/sync_service_spec.rb b/spec/services/jira_connect/sync_service_spec.rb
index 7457cdca13c..019370ce87f 100644
--- a/spec/services/jira_connect/sync_service_spec.rb
+++ b/spec/services/jira_connect/sync_service_spec.rb
@@ -7,9 +7,11 @@ RSpec.describe JiraConnect::SyncService, feature_category: :integrations do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:merge_request_reviewer) { create(:merge_request_reviewer, merge_request: merge_request) }
let(:client) { Atlassian::JiraConnect::Client }
- let(:info) { { a: 'Some', b: 'Info' } }
+ let(:info) { { a: 'Some', b: 'Info', merge_requests: [merge_request] } }
subject do
described_class.new(project).execute(**info)
@@ -44,6 +46,20 @@ RSpec.describe JiraConnect::SyncService, feature_category: :integrations do
subject
end
+ it 'does not execute any queries for preloaded reviewers' do
+ expect_next(client).to store_info
+
+ expect_log(:info, { 'status': 'success' })
+
+ amount = ActiveRecord::QueryRecorder
+ .new { info[:merge_requests].flat_map(&:merge_request_reviewers).map(&:reviewer) }
+ .count
+
+ expect(amount).to be_zero
+
+ subject
+ end
+
context 'when a request returns errors' do
it 'logs each response as an error' do
expect_next(client).to store_info(
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 96fa8ab278d..b977292bcf4 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -167,12 +167,15 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
let(:user_id) { '' }
it 'does not add a member' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(an_instance_of(described_class::BlankInvitesError), class: described_class.to_s, user_id: user.id)
expect(Gitlab::EventStore)
.not_to receive(:publish)
.with(an_instance_of(Members::MembersAddedEvent))
expect(execute_service[:status]).to eq(:error)
- expect(execute_service[:message]).to be_present
+ expect(execute_service[:message]).to eq(s_('AddMember|No users specified.'))
expect(source.users).not_to include member
expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false)
end
@@ -182,6 +185,10 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
let(:user_id) { 1.upto(101).to_a.join(',') }
it 'limits the number of users to 100' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(an_instance_of(described_class::TooManyInvitesError), class: described_class.to_s, user_id: user.id)
+
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to be_present
expect(source.users).not_to include member
@@ -297,113 +304,4 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
end
end
end
-
- context 'when assigning tasks to be done' do
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_to_be_done: %w(ci code), tasks_project_id: source.id }
- end
-
- it 'creates 2 task issues', :aggregate_failures do
- expect(TasksToBeDone::CreateWorker)
- .to receive(:perform_async)
- .with(anything, user.id, [member.id])
- .once
- .and_call_original
- expect { execute_service }.to change { source.issues.count }.by(2)
-
- expect(source.issues).to all have_attributes(
- project: source,
- author: user
- )
- end
-
- context 'when it is an invite by email passed to user_id' do
- let(:user_id) { 'email@example.org' }
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when passing many user ids' do
- before do
- stub_licensed_features(multiple_issue_assignees: false)
- end
-
- let(:another_user) { create(:user) }
- let(:user_id) { [member.id, another_user.id].join(',') }
-
- it 'still creates 2 task issues', :aggregate_failures do
- expect(TasksToBeDone::CreateWorker)
- .to receive(:perform_async)
- .with(anything, user.id, array_including(member.id, another_user.id))
- .once
- .and_call_original
- expect { execute_service }.to change { source.issues.count }.by(2)
-
- expect(source.issues).to all have_attributes(
- project: source,
- author: user
- )
- end
- end
-
- context 'when a `tasks_project_id` is missing' do
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_to_be_done: %w(ci code) }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when `tasks_to_be_done` are missing' do
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_project_id: source.id }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when invalid `tasks_to_be_done` are passed' do
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_project_id: source.id, tasks_to_be_done: %w(invalid_task) }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when invalid `tasks_project_id` is passed' do
- let(:another_project) { create(:project) }
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_project_id: another_project.id, tasks_to_be_done: %w(ci code) }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when a member was already invited' do
- let(:user_id) { create(:project_member, :invited, project: source).invite_email }
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_project_id: source.id, tasks_to_be_done: %w(ci code) }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
- end
end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index 76cd5d6c89e..bf81388357f 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -24,11 +24,6 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
end
it_behaves_like 'records an onboarding progress action', :user_added
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- expect { result }.not_to change { project.issues.count }
- end
end
context 'when email belongs to an existing user as a confirmed secondary email' do
@@ -321,11 +316,11 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
let(:params) { { email: unconfirmed_user.email } }
- it 'adds an existing user to members' do
+ it 'adds a new member as an invite for unconfirmed primary email' do
expect_to_create_members(count: 1)
expect(result[:status]).to eq(:success)
- expect(project.users).to include unconfirmed_user
- expect(project.members.last).not_to be_invite
+ expect(project.users).not_to include unconfirmed_user
+ expect(project.members.last).to be_invite
end
end
@@ -339,23 +334,6 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
expect(result[:status]).to eq(:success)
expect(project.users).to include project_user
end
-
- context 'when assigning tasks to be done' do
- let(:params) do
- { user_id: project_user.id, tasks_to_be_done: %w(ci code), tasks_project_id: project.id }
- end
-
- it 'creates 2 task issues', :aggregate_failures do
- expect(TasksToBeDone::CreateWorker)
- .to receive(:perform_async)
- .with(anything, user.id, [project_user.id])
- .once
- .and_call_original
- expect { result }.to change { project.issues.count }.by(2)
-
- expect(project.issues).to all have_attributes(project: project, author: user)
- end
- end
end
end
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index 81fc5661032..e7fe5c19fa3 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -82,39 +82,12 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
it 'records a value' do
service.execute(merge_request)
- expect(merge_request.approvals.last.patch_id_sha).not_to be_nil
+ expect(merge_request.approvals.last.patch_id_sha).to eq(merge_request.current_patch_id_sha)
end
- context 'when base_sha is nil' do
+ context 'when MergeRequest#current_patch_id_sha is nil' do
it 'records patch_id_sha as nil' do
- expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
- expect(diff_ref).to receive(:base_sha).at_least(:once).and_return(nil)
- end
-
- service.execute(merge_request)
-
- expect(merge_request.approvals.last.patch_id_sha).to be_nil
- end
- end
-
- context 'when head_sha is nil' do
- it 'records patch_id_sha as nil' do
- expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
- expect(diff_ref).to receive(:head_sha).at_least(:once).and_return(nil)
- end
-
- service.execute(merge_request)
-
- expect(merge_request.approvals.last.patch_id_sha).to be_nil
- end
- end
-
- context 'when base_sha and head_sha match' do
- it 'records patch_id_sha as nil' do
- expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
- expect(diff_ref).to receive(:base_sha).at_least(:once).and_return("abc123")
- expect(diff_ref).to receive(:head_sha).at_least(:once).and_return("abc123")
- end
+ expect(merge_request).to receive(:current_patch_id_sha).and_return(nil)
service.execute(merge_request)
diff --git a/spec/services/merge_requests/create_ref_service_spec.rb b/spec/services/merge_requests/create_ref_service_spec.rb
index 5f7b9430416..b99187f9a56 100644
--- a/spec/services/merge_requests/create_ref_service_spec.rb
+++ b/spec/services/merge_requests/create_ref_service_spec.rb
@@ -246,13 +246,13 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
expect_next_instance_of(described_class) do |instance|
original = instance.method(:maybe_merge!)
- expect(instance).to receive(:maybe_merge!) do |*args|
+ expect(instance).to receive(:maybe_merge!) do |*args, **kwargs|
# Corrupt target_ref before the merge, simulating a race with
# another instance of the service for the same MR. source_sha is
# just an arbitrary valid commit that differs from what was just
# written.
project.repository.write_ref(target_ref, source_sha)
- original.call(*args)
+ original.call(*args, **kwargs)
end
end
diff --git a/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb b/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
index d9e60911ada..7ce2317918d 100644
--- a/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
+++ b/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
@@ -17,7 +17,8 @@ RSpec.describe MergeRequests::DeleteNonLatestDiffsService, :clean_gitlab_redis_s
merge_request.reset
end
- it 'schedules non-latest merge request diffs removal' do
+ it 'schedules non-latest merge request diffs removal',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/426807' do
diffs = merge_request.merge_request_diffs
expect(diffs.count).to eq(4)
diff --git a/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb
new file mode 100644
index 00000000000..14173c19bfb
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckConflictStatusService, feature_category: :code_review_workflow do
+ subject(:check_conflict_status) { described_class.new(merge_request: merge_request, params: {}) }
+
+ let(:merge_request) { build(:merge_request) }
+
+ describe '#execute' do
+ let(:result) { check_conflict_status.execute }
+
+ before do
+ allow(merge_request).to receive(:can_be_merged?).and_return(can_be_merged)
+ end
+
+ context 'when MergeRequest#can_be_merged is true' do
+ let(:can_be_merged) { true }
+
+ it 'returns a check result with status success' do
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+
+ context 'when MergeRequest#can_be_merged is false' do
+ let(:can_be_merged) { false }
+
+ it 'returns a check result with status failed' do
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq(:conflict)
+ end
+ end
+ end
+
+ describe '#skip?' do
+ it 'returns false' do
+ expect(check_conflict_status.skip?).to eq false
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_conflict_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
index cb624705a02..3837022232d 100644
--- a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
@@ -3,9 +3,12 @@
require 'spec_helper'
RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService, feature_category: :code_review_workflow do
- subject(:check_draft_status) { described_class.new(merge_request: merge_request, params: {}) }
+ subject(:check_draft_status) { described_class.new(merge_request: merge_request, params: params) }
- let(:merge_request) { build(:merge_request) }
+ let_it_be(:merge_request) { build(:merge_request) }
+
+ let(:params) { { skip_draft_check: skip_check } }
+ let(:skip_check) { false }
describe '#execute' do
let(:result) { check_draft_status.execute }
@@ -33,8 +36,20 @@ RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService, feature_cat
end
describe '#skip?' do
- it 'returns false' do
- expect(check_draft_status.skip?).to eq false
+ context 'when skip check param is true' do
+ let(:skip_check) { true }
+
+ it 'returns true' do
+ expect(check_draft_status.skip?).to eq true
+ end
+ end
+
+ context 'when skip check param is false' do
+ let(:skip_check) { false }
+
+ it 'returns false' do
+ expect(check_draft_status.skip?).to eq false
+ end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb
new file mode 100644
index 00000000000..31ec44856b1
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckRebaseStatusService, feature_category: :code_review_workflow do
+ subject(:check_rebase_status) { described_class.new(merge_request: merge_request, params: params) }
+
+ let(:merge_request) { build(:merge_request) }
+ let(:params) { { skip_rebase_check: skip_check } }
+ let(:skip_check) { false }
+
+ describe '#execute' do
+ let(:result) { check_rebase_status.execute }
+
+ before do
+ allow(merge_request).to receive(:should_be_rebased?).and_return(should_be_rebased)
+ end
+
+ context 'when the merge request should be rebased' do
+ let(:should_be_rebased) { true }
+
+ it 'returns a check result with status failed' do
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq :need_rebase
+ end
+ end
+
+ context 'when the merge request should not be rebased' do
+ let(:should_be_rebased) { false }
+
+ it 'returns a check result with status success' do
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+ end
+
+ describe '#skip?' do
+ context 'when skip check is true' do
+ let(:skip_check) { true }
+
+ it 'returns true' do
+ expect(check_rebase_status.skip?).to eq true
+ end
+ end
+
+ context 'when skip check is false' do
+ let(:skip_check) { false }
+
+ it 'returns false' do
+ expect(check_rebase_status.skip?).to eq false
+ end
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_rebase_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
index bfff582994b..546d583a2fb 100644
--- a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -3,16 +3,32 @@
require 'spec_helper'
RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redis_cache, feature_category: :code_review_workflow do
+ let(:checks) { MergeRequest.all_mergeability_checks }
+ let(:execute_all) { false }
+
subject(:run_checks) { described_class.new(merge_request: merge_request, params: {}) }
describe '#execute' do
- subject(:execute) { run_checks.execute }
+ subject(:execute) { run_checks.execute(checks, execute_all: execute_all) }
let_it_be(:merge_request) { create(:merge_request) }
let(:params) { {} }
let(:success_result) { Gitlab::MergeRequests::Mergeability::CheckResult.success }
+ shared_examples 'checks are all executed' do
+ context 'when all checks are set to be executed' do
+ let(:execute_all) { true }
+
+ specify do
+ result = execute
+
+ expect(result.success?).to eq(success?)
+ expect(result.payload[:results].count).to eq(expected_count)
+ end
+ end
+ end
+
context 'when every check is skipped', :eager_load do
before do
MergeRequests::Mergeability::CheckBaseService.subclasses.each do |subclass|
@@ -25,17 +41,28 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
it 'is still a success' do
expect(execute.success?).to eq(true)
end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { true }
+ let(:expected_count) { 0 }
+ end
end
context 'when a check is skipped' do
- it 'does not execute the check' do
- merge_request.mergeability_checks.each do |check|
+ before do
+ checks.each do |check|
allow_next_instance_of(check) do |service|
allow(service).to receive(:skip?).and_return(false)
allow(service).to receive(:execute).and_return(success_result)
end
end
+ allow_next_instance_of(MergeRequests::Mergeability::CheckCiStatusService) do |service|
+ allow(service).to receive(:skip?).and_return(true)
+ end
+ end
+
+ it 'does not execute the check' do
expect_next_instance_of(MergeRequests::Mergeability::CheckCiStatusService) do |service|
expect(service).to receive(:skip?).and_return(true)
expect(service).not_to receive(:execute)
@@ -43,6 +70,34 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
expect(execute.success?).to eq(true)
end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { true }
+ let(:expected_count) { checks.count - 1 }
+ end
+
+ context 'when one check fails' do
+ let(:failed_result) { Gitlab::MergeRequests::Mergeability::CheckResult.failed(payload: { reason: 'failed' }) }
+
+ before do
+ allow_next_instance_of(MergeRequests::Mergeability::CheckOpenStatusService) do |service|
+ allow(service).to receive(:skip?).and_return(false)
+ allow(service).to receive(:execute).and_return(failed_result)
+ end
+ end
+
+ it 'returns the failure reason' do
+ result = execute
+
+ expect(result.success?).to eq(false)
+ expect(execute.payload[:failure_reason]).to eq(:failed)
+ end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { false }
+ let(:expected_count) { checks.count - 1 }
+ end
+ end
end
context 'when a check is not skipped' do
@@ -50,7 +105,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
let(:merge_check) { instance_double(MergeRequests::Mergeability::CheckCiStatusService) }
before do
- merge_request.mergeability_checks.each do |check|
+ checks.each do |check|
allow_next_instance_of(check) do |service|
allow(service).to receive(:skip?).and_return(true)
end
@@ -64,11 +119,13 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
context 'when the check is cacheable' do
context 'when the check is cached' do
- it 'returns the cached result' do
+ before do
expect_next_instance_of(Gitlab::MergeRequests::Mergeability::ResultsStore) do |service|
expect(service).to receive(:read).with(merge_check: merge_check).and_return(success_result)
end
+ end
+ it 'returns the cached result' do
expect_next_instance_of(MergeRequests::Mergeability::Logger, merge_request: merge_request) do |logger|
expect(logger).to receive(:instrument).with(mergeability_name: 'check_ci_status_service').and_call_original
expect(logger).to receive(:commit)
@@ -76,15 +133,22 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
expect(execute.success?).to eq(true)
end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { true }
+ let(:expected_count) { 1 }
+ end
end
context 'when the check is not cached' do
- it 'writes and returns the result' do
+ before do
expect_next_instance_of(Gitlab::MergeRequests::Mergeability::ResultsStore) do |service|
expect(service).to receive(:read).with(merge_check: merge_check).and_return(nil)
expect(service).to receive(:write).with(merge_check: merge_check, result_hash: success_result.to_hash).and_return(true)
end
+ end
+ it 'writes and returns the result' do
expect_next_instance_of(MergeRequests::Mergeability::Logger, merge_request: merge_request) do |logger|
expect(logger).to receive(:instrument).with(mergeability_name: 'check_ci_status_service').and_call_original
expect(logger).to receive(:commit)
@@ -92,6 +156,11 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
expect(execute.success?).to eq(true)
end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { true }
+ let(:expected_count) { 1 }
+ end
end
end
@@ -106,76 +175,4 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
end
end
end
-
- describe '#success?' do
- subject(:success) { run_checks.success? }
-
- let_it_be(:merge_request) { create(:merge_request) }
-
- context 'when the execute method has been executed' do
- before do
- run_checks.execute
- end
-
- context 'when all the checks succeed' do
- it 'returns true' do
- expect(success).to eq(true)
- end
- end
-
- context 'when one check fails' do
- before do
- allow(merge_request).to receive(:open?).and_return(false)
- run_checks.execute
- end
-
- it 'returns false' do
- expect(success).to eq(false)
- end
- end
- end
-
- context 'when execute has not been exectued' do
- it 'raises an error' do
- expect { subject }
- .to raise_error(/Execute needs to be called before/)
- end
- end
- end
-
- describe '#failure_reason' do
- subject(:failure_reason) { run_checks.failure_reason }
-
- let_it_be(:merge_request) { create(:merge_request) }
-
- context 'when the execute method has been executed' do
- context 'when all the checks succeed' do
- before do
- run_checks.execute
- end
-
- it 'returns nil' do
- expect(failure_reason).to eq(nil)
- end
- end
-
- context 'when one check fails' do
- before do
- allow(merge_request).to receive(:open?).and_return(false)
- run_checks.execute
- end
-
- it 'returns the open reason' do
- expect(failure_reason).to eq(:not_open)
- end
- end
- end
-
- context 'when execute has not been exectued' do
- it 'raises an error' do
- expect { subject }
- .to raise_error(/Execute needs to be called before/)
- end
- end
- end
end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 72e41f7b814..f5494f429c3 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -788,7 +788,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
update_merge_request({ label_ids: [label.id] })
end
- expect(merge_request.reload.updated_at).to be > Time.current
+ expect(merge_request.reload.updated_at).to be_future
end
end
@@ -897,6 +897,27 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
update_merge_request(title: 'New title')
end
+ context 'when additional_merge_when_checks_ready is enabled' do
+ it 'publishes a DraftStateChangeEvent' do
+ expected_data = {
+ current_user_id: user.id,
+ merge_request_id: merge_request.id
+ }
+
+ expect { update_merge_request(title: 'New title') }.to publish_event(MergeRequests::DraftStateChangeEvent).with(expected_data)
+ end
+ end
+
+ context 'when additional_merge_when_checks_ready is disabled' do
+ before do
+ stub_feature_flags(additional_merge_when_checks_ready: false)
+ end
+
+ it 'does not publish a DraftStateChangeEvent' do
+ expect { update_merge_request(title: 'New title') }.not_to publish_event(MergeRequests::DraftStateChangeEvent)
+ end
+ end
+
context 'when removing through wip_event param' do
it 'removes Draft from the title' do
expect { update_merge_request({ wip_event: "ready" }) }
@@ -923,6 +944,27 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
should_not_email(non_subscriber)
end
+ context 'when additional_merge_when_checks_ready is enabled' do
+ it 'publishes a DraftStateChangeEvent' do
+ expected_data = {
+ current_user_id: user.id,
+ merge_request_id: merge_request.id
+ }
+
+ expect { update_merge_request(title: 'Draft: New title') }.to publish_event(MergeRequests::DraftStateChangeEvent).with(expected_data)
+ end
+ end
+
+ context 'when additional_merge_when_checks_ready is disabled' do
+ before do
+ stub_feature_flags(additional_merge_when_checks_ready: false)
+ end
+
+ it 'does not publish a DraftStateChangeEvent' do
+ expect { update_merge_request(title: 'Draft: New title') }.not_to publish_event(MergeRequests::DraftStateChangeEvent)
+ end
+ end
+
it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)
diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
index 9b46675a08e..c68a581c8ff 100644
--- a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ml::ExperimentTracking::CandidateRepository, feature_category: :experimentation_activation do
+RSpec.describe ::Ml::ExperimentTracking::CandidateRepository, feature_category: :activation do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) }
diff --git a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
index 3c645fa84b4..f1afc4d66c2 100644
--- a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository, feature_category: :experimentation_activation do
+RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository, feature_category: :activation do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) }
diff --git a/spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb b/spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb
index f0e7c241d5d..a3a7d538bcc 100644
--- a/spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb
+++ b/spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ml::ExperimentTracking::HandleCandidateGitlabMetadataService, feature_category: :experimentation_activation do
+RSpec.describe ::Ml::ExperimentTracking::HandleCandidateGitlabMetadataService, feature_category: :activation do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index b5eb5f8037a..0cc66696184 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -181,7 +181,7 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
let(:namespace) { project.namespace }
subject(:service_action) { execute_create_service }
end
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index 54782774b4e..33c973a2431 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Notes::DestroyService, feature_category: :team_planning do
end
describe 'comment removed event tracking', :snowplow do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
let(:note) { create(:note, project: project, noteable: issue) }
let(:service_action) { described_class.new(project, user).execute(note) }
@@ -39,7 +39,7 @@ RSpec.describe Notes::DestroyService, feature_category: :team_planning do
expect do
service_action
end.to change {
- counter.unique_events(event_names: action, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now)
+ counter.unique_events(event_names: event, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now)
}.by(1)
end
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index b6e29299fdd..0a16037c976 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -334,6 +334,85 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do
end
end
+ describe '/add_child' do
+ let_it_be_with_reload(:noteable) { create(:work_item, :objective, project: project) }
+ let_it_be_with_reload(:child) { create(:work_item, :objective, project: project) }
+ let_it_be_with_reload(:second_child) { create(:work_item, :objective, project: project) }
+ let_it_be(:note_text) { "/add_child #{child.to_reference}, #{second_child.to_reference}" }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project, note: note_text) }
+ let_it_be(:children) { [child, second_child] }
+
+ shared_examples 'adds child work items' do
+ it 'leaves the note empty' do
+ expect(execute(note)).to be_empty
+ end
+
+ it 'adds child work items' do
+ execute(note)
+
+ expect(noteable.valid?).to be_truthy
+ expect(noteable.work_item_children).to eq(children)
+ end
+ end
+
+ context 'when using work item reference' do
+ let_it_be(:note_text) { "/add_child #{child.to_reference(full: true)},#{second_child.to_reference(full: true)}" }
+
+ it_behaves_like 'adds child work items'
+ end
+
+ context 'when using work item iid' do
+ it_behaves_like 'adds child work items'
+ end
+
+ context 'when using work item URL' do
+ let_it_be(:project_path) { "#{Gitlab.config.gitlab.url}/#{project.full_path}" }
+ let_it_be(:url) { "#{project_path}/work_items/#{child.iid},#{project_path}/work_items/#{second_child.iid}" }
+ let_it_be(:note_text) { "/add_child #{url}" }
+
+ it_behaves_like 'adds child work items'
+ end
+ end
+
+ describe '/set_parent' do
+ let_it_be_with_reload(:noteable) { create(:work_item, :objective, project: project) }
+ let_it_be_with_reload(:parent) { create(:work_item, :objective, project: project) }
+ let_it_be(:note_text) { "/set_parent #{parent.to_reference}" }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project, note: note_text) }
+
+ shared_examples 'sets work item parent' do
+ it 'leaves the note empty' do
+ expect(execute(note)).to be_empty
+ end
+
+ it 'sets work item parent' do
+ execute(note)
+
+ expect(parent.valid?).to be_truthy
+ expect(noteable.work_item_parent).to eq(parent)
+ end
+ end
+
+ context 'when using work item reference' do
+ let_it_be(:note_text) { "/set_parent #{project.full_path}#{parent.to_reference}" }
+
+ it_behaves_like 'sets work item parent'
+ end
+
+ context 'when using work item iid' do
+ let_it_be(:note_text) { "/set_parent #{parent.to_reference}" }
+
+ it_behaves_like 'sets work item parent'
+ end
+
+ context 'when using work item URL' do
+ let_it_be(:url) { "#{Gitlab.config.gitlab.url}/#{project.full_path}/work_items/#{parent.iid}" }
+ let_it_be(:note_text) { "/set_parent #{url}" }
+
+ it_behaves_like 'sets work item parent'
+ end
+ end
+
describe '/promote_to' do
shared_examples 'promotes work item' do |from:, to:|
it 'leaves the note empty' do
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 8389db000b8..908f348c68b 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
let(:namespace) { project.namespace }
subject(:service_action) { update_note(note: 'new text') }
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index 8b94bce6650..1c935c27d7f 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -235,7 +235,7 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
# TODO (technical debt): Extract the package size calculation outside the service and add separate specs for it.
# Right now we have several contexts here to test the calculation's different scenarios.
- context "when encoded package data is not padded" do
+ context 'when encoded package data is not padded' do
# 'Hello!' (size = 6 bytes) => 'SGVsbG8h'
let(:encoded_package_data) { 'SGVsbG8h' }
@@ -260,18 +260,18 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
end
end
- [
- '@inv@lid_scope/package',
- '@scope/sub/group',
- '@scope/../../package',
- '@scope%2e%2e%2fpackage'
- ].each do |invalid_package_name|
- context "with invalid name #{invalid_package_name}" do
- let(:package_name) { invalid_package_name }
-
- it 'raises a RecordInvalid error' do
- expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
- end
+ context 'with invalid name' do
+ where(:package_name) do
+ [
+ '@inv@lid_scope/package',
+ '@scope/sub/group',
+ '@scope/../../package',
+ '@scope%2e%2e%2fpackage'
+ ]
+ end
+
+ with_them do
+ it { expect { subject }.to raise_error(ActiveRecord::RecordInvalid) }
end
end
@@ -283,8 +283,6 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
end
context 'with invalid versions' do
- using RSpec::Parameterized::TableSyntax
-
where(:version) do
[
'1',
diff --git a/spec/services/packages/nuget/extract_metadata_file_service_spec.rb b/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
index 57b08f8773c..4c761826b53 100644
--- a/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
+++ b/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :package_registry do
- let_it_be_with_reload(:package_file) { create(:nuget_package).package_files.first }
+ let_it_be(:package_file) { build(:package_file, :nuget) }
+ let_it_be(:package_zip_file) { Zip::File.new(package_file.file) }
- let(:service) { described_class.new(package_file) }
+ let(:service) { described_class.new(package_zip_file) }
describe '#execute' do
subject { service.execute }
@@ -39,35 +40,9 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
end
end
- context 'with invalid package file' do
- let(:package_file) { nil }
-
- it_behaves_like 'raises an error', 'invalid package file'
- end
-
- context 'when linked to a non nuget package' do
- before do
- package_file.package.maven!
- end
-
- it_behaves_like 'raises an error', 'invalid package file'
- end
-
- context 'with a 0 byte package file' do
- before do
- allow_next_instance_of(Packages::PackageFileUploader) do |instance|
- allow(instance).to receive(:size).and_return(0)
- end
- end
-
- it_behaves_like 'raises an error', 'invalid package file'
- end
-
context 'without the nuspec file' do
before do
- allow_next_instance_of(Zip::File) do |instance|
- allow(instance).to receive(:glob).and_return([])
- end
+ allow(package_zip_file).to receive(:glob).and_return([])
end
it_behaves_like 'raises an error', 'nuspec file not found'
@@ -75,9 +50,9 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
context 'with a too big nuspec file' do
before do
- allow_next_instance_of(Zip::File) do |instance|
- allow(instance).to receive(:glob).and_return([instance_double(File, size: 6.megabytes)])
- end
+ allow(package_zip_file).to receive(:glob).and_return(
+ [instance_double(File, size: described_class::MAX_FILE_SIZE + 1)]
+ )
end
it_behaves_like 'raises an error', 'nuspec file too big'
@@ -85,10 +60,7 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
context 'with a corrupted nupkg file with a wrong entry size' do
let(:nupkg_fixture_path) { expand_fixture_path('packages/nuget/corrupted_package.nupkg') }
-
- before do
- allow(Zip::File).to receive(:new).and_return(Zip::File.new(nupkg_fixture_path, false, false))
- end
+ let(:package_zip_file) { Zip::File.new(nupkg_fixture_path) }
it_behaves_like 'raises an error',
<<~ERROR.squish
diff --git a/spec/services/packages/nuget/metadata_extraction_service_spec.rb b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
index ea7557b6d64..81a4e4a430b 100644
--- a/spec/services/packages/nuget/metadata_extraction_service_spec.rb
+++ b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :package_registry do
- let_it_be(:package_file) { create(:nuget_package).package_files.first }
-
- subject { described_class.new(package_file) }
+ let_it_be(:package_file) { build(:package_file, :nuget) }
+ let(:service) { described_class.new(package_file) }
describe '#execute' do
+ subject { service.execute }
+
let(:nuspec_file_content) do
- <<~XML.squish
+ <<~XML
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
@@ -49,18 +50,15 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
end
it 'calls the necessary services and executes the metadata extraction' do
- expect(::Packages::Nuget::ExtractMetadataFileService).to receive(:new).with(package_file) do
- double.tap do |service|
- expect(service).to receive(:execute).and_return(double(payload: nuspec_file_content))
- end
+ expect_next_instance_of(Packages::Nuget::ProcessPackageFileService, package_file) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success(payload: { nuspec_file_content: nuspec_file_content }))
end
- expect(::Packages::Nuget::ExtractMetadataContentService).to receive_message_chain(:new, :execute)
- .with(nuspec_file_content).with(no_args).and_return(double(payload: expected_metadata))
-
- metadata = subject.execute.payload
+ expect_next_instance_of(Packages::Nuget::ExtractMetadataContentService, nuspec_file_content) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
- expect(metadata).to eq(expected_metadata)
+ expect(subject.payload).to eq(expected_metadata)
end
end
end
diff --git a/spec/services/packages/nuget/odata_package_entry_service_spec.rb b/spec/services/packages/nuget/odata_package_entry_service_spec.rb
index d4c47538ce2..b4a22fef32b 100644
--- a/spec/services/packages/nuget/odata_package_entry_service_spec.rb
+++ b/spec/services/packages/nuget/odata_package_entry_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :package_registry do
+ include GrapePathHelpers::NamedRouteMatcher
+
let_it_be(:project) { build_stubbed(:project) }
let_it_be(:params) { { package_name: 'dummy', package_version: '1.0.0' } }
let(:doc) { Nokogiri::XML(subject.payload) }
@@ -10,7 +12,7 @@ RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :pac
subject { described_class.new(project, params).execute }
describe '#execute' do
- shared_examples 'returning a package entry with the correct attributes' do |pkg_version, content_url_pkg_version|
+ shared_examples 'returning a package entry with the correct attributes' do |pkg_version = ''|
it 'returns a package entry with the correct attributes' do
expect(doc.root.name).to eq('entry')
expect(doc_node('id').text).to include(
@@ -18,7 +20,7 @@ RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :pac
)
expect(doc_node('title').text).to eq(params[:package_name])
expect(doc_node('content').attr('src')).to include(
- content_url(project.id, params[:package_name], content_url_pkg_version)
+ content_url(project.id, params[:package_name], pkg_version)
)
expect(doc_node('Version').text).to eq(pkg_version)
end
@@ -29,29 +31,17 @@ RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :pac
expect(subject).to be_success
end
- it_behaves_like 'returning a package entry with the correct attributes', '1.0.0', '1.0.0'
+ it_behaves_like 'returning a package entry with the correct attributes', '1.0.0'
end
- context 'when package_version is nil' do
+ context 'when package_version is not present' do
let(:params) { { package_name: 'dummy', package_version: nil } }
it 'returns a success ServiceResponse' do
expect(subject).to be_success
end
- it_behaves_like 'returning a package entry with the correct attributes',
- described_class::SEMVER_LATEST_VERSION_PLACEHOLDER, described_class::LATEST_VERSION_FOR_V2_DOWNLOAD_ENDPOINT
- end
-
- context 'when package_version is 0.0.0-latest-version' do
- let(:params) { { package_name: 'dummy', package_version: described_class::SEMVER_LATEST_VERSION_PLACEHOLDER } }
-
- it 'returns a success ServiceResponse' do
- expect(subject).to be_success
- end
-
- it_behaves_like 'returning a package entry with the correct attributes',
- described_class::SEMVER_LATEST_VERSION_PLACEHOLDER, described_class::LATEST_VERSION_FOR_V2_DOWNLOAD_ENDPOINT
+ it_behaves_like 'returning a package entry with the correct attributes'
end
end
@@ -64,6 +54,13 @@ RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :pac
end
def content_url(id, package_name, package_version)
- "api/v4/projects/#{id}/packages/nuget/v2/download/#{package_name}/#{package_version}"
+ if package_version.present?
+ filename = "#{package_name}.#{package_version}.nupkg"
+ api_v4_projects_packages_nuget_download_package_name_package_version_package_filename_path(
+ { id: id, package_name: package_name, package_version: package_version, package_filename: filename }, true
+ )
+ else
+ api_v4_projects_packages_nuget_v2_path(id: id)
+ end
end
end
diff --git a/spec/services/packages/nuget/process_package_file_service_spec.rb b/spec/services/packages/nuget/process_package_file_service_spec.rb
new file mode 100644
index 00000000000..cdeb5b32737
--- /dev/null
+++ b/spec/services/packages/nuget/process_package_file_service_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::ProcessPackageFileService, feature_category: :package_registry do
+ let_it_be(:package_file) { build(:package_file, :nuget) }
+
+ let(:service) { described_class.new(package_file) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ shared_examples 'raises an error' do |error_message|
+ it { expect { subject }.to raise_error(described_class::ExtractionError, error_message) }
+ end
+
+ shared_examples 'not creating a symbol file' do
+ it 'does not call the CreateSymbolFilesService' do
+ expect(Packages::Nuget::Symbols::CreateSymbolFilesService).not_to receive(:new)
+
+ expect(subject).to be_success
+ end
+ end
+
+ context 'with valid package file' do
+ it 'calls the ExtractMetadataFileService' do
+ expect_next_instance_of(Packages::Nuget::ExtractMetadataFileService, instance_of(Zip::File)) do |service|
+ expect(service).to receive(:execute) do
+ instance_double(ServiceResponse).tap do |response|
+ expect(response).to receive(:payload).and_return(instance_of(String))
+ end
+ end
+ end
+
+ expect(subject).to be_success
+ end
+ end
+
+ context 'with invalid package file' do
+ let(:package_file) { nil }
+
+ it_behaves_like 'raises an error', 'invalid package file'
+ end
+
+ context 'when linked to a non nuget package' do
+ before do
+ package_file.package.maven!
+ end
+
+ it_behaves_like 'raises an error', 'invalid package file'
+ end
+
+ context 'with a 0 byte package file' do
+ before do
+ allow_next_instance_of(Packages::PackageFileUploader) do |instance|
+ allow(instance).to receive(:size).and_return(0)
+ end
+ end
+
+ it_behaves_like 'raises an error', 'invalid package file'
+ end
+
+ context 'with a symbol package file' do
+ let(:package_file) { build(:package_file, :snupkg) }
+
+ it 'calls the CreateSymbolFilesService' do
+ expect_next_instance_of(
+ Packages::Nuget::Symbols::CreateSymbolFilesService, package_file.package, instance_of(Zip::File)
+ ) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ expect(subject).to be_success
+ end
+ end
+
+ context 'with a non symbol package file' do
+ let(:package_file) { build(:package_file, :nuget) }
+
+ it_behaves_like 'not creating a symbol file'
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/symbols/create_symbol_files_service_spec.rb b/spec/services/packages/nuget/symbols/create_symbol_files_service_spec.rb
new file mode 100644
index 00000000000..97bfc3e06a8
--- /dev/null
+++ b/spec/services/packages/nuget/symbols/create_symbol_files_service_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::Symbols::CreateSymbolFilesService, feature_category: :package_registry do
+ let_it_be(:package) { create(:nuget_package) }
+ let_it_be(:package_file) do
+ create(:package_file, :snupkg, package: package,
+ file_fixture: expand_fixture_path('packages/nuget/package_with_symbols.snupkg'))
+ end
+
+ let(:package_zip_file) { Zip::File.new(package_file.file) }
+ let(:service) { described_class.new(package, package_zip_file) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ shared_examples 'logs an error' do |error_class|
+ it 'logs an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(error_class),
+ class: described_class.name,
+ package_id: package.id
+ )
+
+ subject
+ end
+ end
+
+ context 'when symbol files are found' do
+ it 'creates a symbol record and extracts the signature' do
+ expect_next_instance_of(Packages::Nuget::Symbols::ExtractSymbolSignatureService,
+ instance_of(String)) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ expect { subject }.to change { package.nuget_symbols.count }.by(1)
+ end
+ end
+
+ context 'when symbol files hit the limit' do
+ before do
+ stub_const("#{described_class}::SYMBOL_ENTRIES_LIMIT", 0)
+ end
+
+ it 'does not create a symbol record' do
+ expect { subject }.not_to change { package.nuget_symbols.count }
+ end
+
+ it_behaves_like 'logs an error', described_class::ExtractionError
+ end
+
+ context 'when creating a symbol record without a signature' do
+ before do
+ allow_next_instance_of(Packages::Nuget::Symbols::ExtractSymbolSignatureService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: nil))
+ end
+ end
+
+ it 'does not call create! on the symbol record' do
+ expect(::Packages::Nuget::Symbol).not_to receive(:create!)
+
+ subject
+ end
+ end
+
+ context 'when creating duplicate symbol records' do
+ let_it_be(:symbol) { create(:nuget_symbol, package: package) }
+
+ before do
+ allow_next_instance_of(Packages::Nuget::Symbols::ExtractSymbolSignatureService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: symbol.signature))
+ end
+ end
+
+ it 'does not create a symbol record' do
+ expect { subject }.not_to change { package.nuget_symbols.count }
+ end
+
+ it_behaves_like 'logs an error', ActiveRecord::RecordInvalid
+ end
+
+ context 'when a symbol file has the wrong entry size' do
+ before do
+ allow_next_instance_of(Zip::Entry) do |instance|
+ allow(instance).to receive(:extract).and_raise(Zip::EntrySizeError)
+ end
+ end
+
+ it_behaves_like 'logs an error', described_class::ExtractionError
+ end
+
+ context 'when a symbol file has the wrong entry name' do
+ before do
+ allow_next_instance_of(Zip::Entry) do |instance|
+ allow(instance).to receive(:extract).and_raise(Zip::EntryNameError)
+ end
+ end
+
+ it_behaves_like 'logs an error', described_class::ExtractionError
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/symbols/extract_symbol_signature_service_spec.rb b/spec/services/packages/nuget/symbols/extract_symbol_signature_service_spec.rb
new file mode 100644
index 00000000000..87b0d00a0a7
--- /dev/null
+++ b/spec/services/packages/nuget/symbols/extract_symbol_signature_service_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::Symbols::ExtractSymbolSignatureService, feature_category: :package_registry do
+ let_it_be(:symbol_file) { fixture_file('packages/nuget/symbol/package.pdb') }
+
+ let(:service) { described_class.new(symbol_file) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'with a valid symbol file' do
+ it { expect(subject.payload).to eq('b91a152048fc4b3883bf3cf73fbc03f1FFFFFFFF') }
+ end
+
+ context 'with corrupted data' do
+ let(:symbol_file) { 'corrupted data' }
+
+ it { expect(subject).to be_error }
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index 0459588bf8d..cb70176ee61 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_redis_shared_state, feature_category: :package_registry do
include ExclusiveLeaseHelpers
- let!(:package) { create(:nuget_package, :processing, :with_symbol_package) }
+ let!(:package) { create(:nuget_package, :processing, :with_symbol_package, :with_build) }
let(:package_file) { package.package_files.first }
let(:service) { described_class.new(package_file) }
let(:package_name) { 'DummyProject.DummyPackage' }
@@ -101,6 +101,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
.and change { Packages::DependencyLink.count }.by(0)
.and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
.and change { ::Packages::Nuget::Metadatum.count }.by(1)
+ .and change { existing_package.build_infos.count }.by(1)
expect(package_file.reload.file_name).to eq(package_file_name)
expect(package_file.package).to eq(existing_package)
end
@@ -260,6 +261,16 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
expect(package_file.package).to eq(existing_package)
end
+ context 'with packages_nuget_symbols records' do
+ before do
+ create_list(:nuget_symbol, 2, package: package)
+ end
+
+ it 'links the symbol records to the existing package' do
+ expect { subject }.to change { existing_package.nuget_symbols.count }.by(2)
+ end
+ end
+
it_behaves_like 'taking the lease'
it_behaves_like 'not updating the package if the lease is taken'
diff --git a/spec/services/packages/protection/create_rule_service_spec.rb b/spec/services/packages/protection/create_rule_service_spec.rb
new file mode 100644
index 00000000000..67835479473
--- /dev/null
+++ b/spec/services/packages/protection/create_rule_service_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Protection::CreateRuleService, '#execute', feature_category: :environment_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
+
+ let(:service) { described_class.new(project: project, current_user: current_user, params: params) }
+ let(:current_user) { maintainer }
+ let(:params) { attributes_for(:package_protection_rule) }
+
+ subject { service.execute }
+
+ shared_examples 'a successful service response' do
+ let(:package_protection_rule_count_expected) { 1 }
+ it { is_expected.to be_success }
+
+ it do
+ is_expected.to have_attributes(
+ payload: include(
+ package_protection_rule: be_a(Packages::Protection::Rule)
+ )
+ )
+ end
+
+ it { expect(subject.payload).to include(package_protection_rule: be_a(Packages::Protection::Rule)) }
+
+ it do
+ expect { subject }.to change { Packages::Protection::Rule.count }.by(1)
+
+ expect(Packages::Protection::Rule.where(project: project).count).to eq package_protection_rule_count_expected
+ expect(Packages::Protection::Rule.where(project: project,
+ package_name_pattern: params[:package_name_pattern])).to exist
+ end
+ end
+
+ shared_examples 'an erroneous service response' do
+ let(:package_protection_rule_count_expected) { 0 }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes(payload: include(package_protection_rule: nil)) }
+
+ it do
+ expect { subject }.to change { Packages::Protection::Rule.count }.by(0)
+
+ expect(Packages::Protection::Rule.where(project: project).count).to eq package_protection_rule_count_expected
+ expect(Packages::Protection::Rule.where(project: project,
+ package_name_pattern: params[:package_name_pattern])).not_to exist
+ end
+ end
+
+ context 'without existing PackageProtectionRules' do
+ context 'when fields are valid' do
+ it_behaves_like 'a successful service response'
+ end
+
+ context 'when fields are invalid' do
+ let(:params) do
+ {
+ package_name_pattern: '',
+ package_type: 'unknown_package_type',
+ push_protected_up_to_access_level: 1000
+ }
+ end
+
+ it_behaves_like 'an erroneous service response'
+ end
+ end
+
+ context 'with existing PackageProtectionRule' do
+ let_it_be(:existing_package_protection_rule) { create(:package_protection_rule, project: project) }
+
+ context 'when package name pattern is slightly different' do
+ let(:params) do
+ attributes_for(
+ :package_protection_rule,
+ # The field `package_name_pattern` is unique; this is why we change the value in a minimum way
+ package_name_pattern: "#{existing_package_protection_rule.package_name_pattern}-unique",
+ package_type: existing_package_protection_rule.package_type,
+ push_protected_up_to_access_level: existing_package_protection_rule.push_protected_up_to_access_level
+ )
+ end
+
+ it_behaves_like 'a successful service response' do
+ let(:package_protection_rule_count_expected) { 2 }
+ end
+ end
+
+ context 'when field `package_name_pattern` is taken' do
+ let(:params) do
+ attributes_for(
+ :package_protection_rule,
+ package_name_pattern: existing_package_protection_rule.package_name_pattern,
+ package_type: existing_package_protection_rule.package_type,
+ push_protected_up_to_access_level: existing_package_protection_rule.push_protected_up_to_access_level
+ )
+ end
+
+ it { is_expected.to be_error }
+
+ it do
+ expect { subject }.to change { Packages::Protection::Rule.count }.by(0)
+
+ expect(Packages::Protection::Rule.where(project: project).count).to eq 1
+ expect(
+ Packages::Protection::Rule.where(
+ project: project,
+ package_name_pattern: params[:package_name_pattern]
+ )
+ ).to exist
+ end
+ end
+ end
+
+ context 'when disallowed params are passed' do
+ let(:params) do
+ attributes_for(:package_protection_rule)
+ .merge(
+ project_id: 1,
+ unsupported_param: 'unsupported_param_value'
+ )
+ end
+
+ it_behaves_like 'a successful service response'
+ end
+
+ context 'with forbidden user access level (project developer role)' do
+ # Because of the access level hierarchy, we can assume that
+ # other access levels below developer role will also not be able to
+ # create package protection rules.
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+
+ let(:current_user) { developer }
+
+ it_behaves_like 'an erroneous service response'
+
+ it { is_expected.to have_attributes(message: match(/Unauthorized/)) }
+ end
+end
diff --git a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
deleted file mode 100644
index 48690a035f5..00000000000
--- a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
+++ /dev/null
@@ -1,137 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::MigrateFromLegacyStorageService, feature_category: :pages do
- let(:batch_size) { 10 }
- let(:mark_projects_as_not_deployed) { false }
- let(:service) { described_class.new(Rails.logger, ignore_invalid_entries: false, mark_projects_as_not_deployed: mark_projects_as_not_deployed) }
-
- shared_examples "migrates projects properly" do
- it 'does not try to migrate pages if pages are not deployed' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
-
- is_expected.to eq(migrated: 0, errored: 0)
- end
-
- context 'when pages are marked as deployed' do
- let(:project) { create(:project) }
-
- before do
- project.mark_pages_as_deployed
- end
-
- context 'when pages directory does not exist' do
- context 'when mark_projects_as_not_deployed is set' do
- let(:mark_projects_as_not_deployed) { true }
-
- it 'counts project as migrated' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: true) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- is_expected.to eq(migrated: 1, errored: 0)
- end
- end
-
- it 'counts project as errored' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- is_expected.to eq(migrated: 0, errored: 1)
- end
- end
-
- context 'when pages directory exists on disk' do
- before do
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
- end
-
- it 'migrates pages projects without deployments' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- expect(project.pages_metadatum.reload.pages_deployment).to eq(nil)
- expect(subject).to eq(migrated: 1, errored: 0)
- expect(project.pages_metadatum.reload.pages_deployment).to be_present
- end
-
- context 'when deployed already exists for the project' do
- before do
- deployment = create(:pages_deployment, project: project)
- project.set_first_pages_deployment!(deployment)
- end
-
- it 'does not try to migrate project' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
-
- is_expected.to eq(migrated: 0, errored: 0)
- end
- end
- end
- end
- end
-
- describe '#execute_with_threads' do
- subject { service.execute_with_threads(threads: 3, batch_size: batch_size) }
-
- include_examples "migrates projects properly"
-
- context 'when there is work for multiple threads' do
- let(:batch_size) { 2 } # override to force usage of multiple threads
-
- it 'uses multiple threads' do
- projects = create_list(:project, 20)
- projects.each do |project|
- project.mark_pages_as_deployed
-
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
- end
-
- threads = Concurrent::Set.new
-
- expect(service).to receive(:migrate_project).exactly(20).times.and_wrap_original do |m, *args|
- threads.add(Thread.current)
-
- # sleep to be 100% certain that once thread can't consume all the queue
- # it works without it, but I want to avoid making this test flaky
- sleep(0.01)
-
- m.call(*args)
- end
-
- is_expected.to eq(migrated: 20, errored: 0)
- expect(threads.length).to eq(3)
- end
- end
- end
-
- describe "#execute_for_batch" do
- subject { service.execute_for_batch(Project.ids) }
-
- include_examples "migrates projects properly"
-
- it 'only tries to migrate projects with passed ids' do
- projects = create_list(:project, 5)
-
- projects.each(&:mark_pages_as_deployed)
- projects_to_migrate = projects.first(3)
-
- projects_to_migrate.each do |project|
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute).and_call_original
- end
- end
-
- expect(service.execute_for_batch(projects_to_migrate.pluck(:id))).to eq(migrated: 0, errored: 3)
- end
- end
-end
diff --git a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
deleted file mode 100644
index e1cce2c87eb..00000000000
--- a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category: :pages do
- let(:project) { create(:project, :repository) }
- let(:service) { described_class.new(project) }
-
- it 'calls ::Pages::ZipDirectoryService' do
- expect_next_instance_of(::Pages::ZipDirectoryService, project.pages_path, ignore_invalid_entries: true) do |zip_service|
- expect(zip_service).to receive(:execute).and_call_original
- end
-
- expect(described_class.new(project, ignore_invalid_entries: true).execute[:status]).to eq(:error)
- end
-
- context 'when mark_projects_as_not_deployed is passed' do
- let(:service) { described_class.new(project, mark_projects_as_not_deployed: true) }
-
- it 'marks pages as not deployed if public directory is absent and invalid entries are ignored' do
- project.mark_pages_as_deployed
- expect(project.pages_metadatum.reload.deployed).to eq(true)
-
- expect(service.execute).to eq(
- status: :success,
- message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed"
- )
-
- expect(project.pages_metadatum.reload.deployed).to eq(false)
- end
-
- it 'does not mark pages as not deployed if public directory is absent but pages_deployment exists' do
- deployment = create(:pages_deployment, project: project)
- project.update_pages_deployment!(deployment)
- project.mark_pages_as_deployed
- expect(project.pages_metadatum.reload.deployed).to eq(true)
-
- expect(service.execute).to eq(
- status: :success,
- message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed"
- )
-
- expect(project.pages_metadatum.reload.deployed).to eq(true)
- end
- end
-
- it 'does not mark pages as not deployed if public directory is absent but invalid entries are not ignored' do
- project.mark_pages_as_deployed
-
- expect(project.pages_metadatum.reload.deployed).to eq(true)
-
- expect(service.execute).to eq(
- status: :error,
- message: "Archive not created. Missing public directory in #{project.pages_path}"
- )
-
- expect(project.pages_metadatum.reload.deployed).to eq(true)
- end
-
- it 'removes pages archive when can not save deployment' do
- archive = fixture_file_upload("spec/fixtures/pages.zip")
- expect_next_instance_of(::Pages::ZipDirectoryService) do |zip_service|
- expect(zip_service).to receive(:execute).and_return(
- status: :success, archive_path: archive.path, entries_count: 3
- )
- end
-
- expect_next_instance_of(PagesDeployment) do |deployment|
- expect(deployment).to receive(:save!).and_raise("Something")
- end
-
- expect do
- service.execute
- end.to raise_error("Something")
-
- expect(File.exist?(archive.path)).to eq(false)
- end
-
- context 'when pages site is deployed to legacy storage' do
- before do
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
- end
-
- it 'creates pages deployment' do
- expect do
- expect(described_class.new(project).execute).to eq(status: :success)
- end.to change { project.reload.pages_deployments.count }.by(1)
-
- deployment = project.pages_metadatum.pages_deployment
-
- Zip::File.open(deployment.file.path) do |zip_file|
- expect(zip_file.glob("public").first.ftype).to eq(:directory)
- expect(zip_file.glob("public/index.html").first.get_input_stream.read).to eq("Hello!")
- end
-
- expect(deployment.file_count).to eq(2)
- expect(deployment.file_sha256).to eq(Digest::SHA256.file(deployment.file.path).hexdigest)
- end
-
- it 'removes tmp pages archive' do
- described_class.new(project).execute
-
- expect(File.exist?(File.join(project.pages_path, '@migrated.zip'))).to eq(false)
- end
-
- it 'does not change pages deployment if it is set' do
- old_deployment = create(:pages_deployment, project: project)
- project.update_pages_deployment!(old_deployment)
-
- expect do
- described_class.new(project).execute
- end.not_to change { project.pages_metadatum.reload.pages_deployment_id }.from(old_deployment.id)
- end
- end
-end
diff --git a/spec/services/pages/zip_directory_service_spec.rb b/spec/services/pages/zip_directory_service_spec.rb
deleted file mode 100644
index 4917bc65a02..00000000000
--- a/spec/services/pages/zip_directory_service_spec.rb
+++ /dev/null
@@ -1,280 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::ZipDirectoryService, feature_category: :pages do
- around do |example|
- Dir.mktmpdir do |dir|
- @work_dir = dir
- example.run
- end
- end
-
- let(:ignore_invalid_entries) { false }
-
- let(:service_directory) { @work_dir }
-
- let(:service) do
- described_class.new(service_directory, ignore_invalid_entries: ignore_invalid_entries)
- end
-
- let(:result) do
- service.execute
- end
-
- let(:status) { result[:status] }
- let(:message) { result[:message] }
- let(:archive) { result[:archive_path] }
- let(:entries_count) { result[:entries_count] }
-
- it 'returns true if ZIP64 is enabled' do
- expect(::Zip.write_zip64_support).to be true
- end
-
- shared_examples 'handles invalid public directory' do
- it 'returns success' do
- expect(status).to eq(:success)
- expect(archive).to be_nil
- expect(entries_count).to be_nil
- end
- end
-
- context "when work directory doesn't exist" do
- let(:service_directory) { "/tmp/not/existing/dir" }
-
- include_examples 'handles invalid public directory'
- end
-
- context 'when public directory is absent' do
- include_examples 'handles invalid public directory'
- end
-
- context 'when public directory is a symlink' do
- before do
- create_dir('target')
- create_file('./target/index.html', 'hello')
- create_link("public", "./target")
- end
-
- include_examples 'handles invalid public directory'
- end
-
- context 'when there is a public directory' do
- before do
- create_dir('public')
- end
-
- it 'creates the file next the public directory' do
- expect(archive).to eq(File.join(@work_dir, "@migrated.zip"))
- end
-
- it 'includes public directory' do
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/")
- expect(entry.ftype).to eq(:directory)
- end
- end
-
- it 'returns number of entries' do
- create_file("public/index.html", "hello")
- create_link("public/link.html", "./index.html")
- expect(entries_count).to eq(3) # + 'public' directory
- end
-
- it 'removes the old file if it exists' do
- # simulate the old run
- described_class.new(@work_dir).execute
-
- with_zip_file do |zip_file|
- expect(zip_file.entries.count).to eq(1)
- end
- end
-
- it 'ignores other top level files and directories' do
- create_file("top_level.html", "hello")
- create_dir("public2")
-
- with_zip_file do |zip_file|
- expect { zip_file.get_entry("top_level.html") }.to raise_error(Errno::ENOENT)
- expect { zip_file.get_entry("public2/") }.to raise_error(Errno::ENOENT)
- end
- end
-
- it 'includes index.html file' do
- create_file("public/index.html", "Hello!")
-
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/index.html")
- expect(zip_file.read(entry)).to eq("Hello!")
- end
- end
-
- it 'includes hidden file' do
- create_file("public/.hidden.html", "Hello!")
-
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/.hidden.html")
- expect(zip_file.read(entry)).to eq("Hello!")
- end
- end
-
- it 'includes nested directories and files' do
- create_dir("public/nested")
- create_dir("public/nested/nested2")
- create_file("public/nested/nested2/nested.html", "Hello nested")
-
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/nested")
- expect(entry.ftype).to eq(:directory)
-
- entry = zip_file.get_entry("public/nested/nested2")
- expect(entry.ftype).to eq(:directory)
-
- entry = zip_file.get_entry("public/nested/nested2/nested.html")
- expect(zip_file.read(entry)).to eq("Hello nested")
- end
- end
-
- it 'adds a valid symlink' do
- create_file("public/target.html", "hello")
- create_link("public/link.html", "./target.html")
-
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/link.html")
- expect(entry.ftype).to eq(:symlink)
- expect(zip_file.read(entry)).to eq("./target.html")
- end
- end
-
- shared_examples "raises or ignores file" do |raised_exception, file|
- it 'raises error' do
- expect do
- result
- end.to raise_error(raised_exception)
- end
-
- context 'when errors are ignored' do
- let(:ignore_invalid_entries) { true }
-
- it 'does not create entry' do
- with_zip_file do |zip_file|
- expect { zip_file.get_entry(file) }.to raise_error(Errno::ENOENT)
- end
- end
- end
- end
-
- context 'when symlink points outside of public directory' do
- before do
- create_file("target.html", "hello")
- create_link("public/link.html", "../target.html")
- end
-
- include_examples "raises or ignores file", described_class::InvalidEntryError, "public/link.html"
- end
-
- context 'when target of the symlink is absent' do
- before do
- create_link("public/link.html", "./target.html")
- end
-
- include_examples "raises or ignores file", Errno::ENOENT, "public/link.html"
- end
-
- context 'when targets itself' do
- before do
- create_link("public/link.html", "./link.html")
- end
-
- include_examples "raises or ignores file", Errno::ELOOP, "public/link.html"
- end
-
- context 'when symlink is absolute and points to outside of directory' do
- before do
- target = File.join(@work_dir, "target")
- FileUtils.touch(target)
-
- create_link("public/link.html", target)
- end
-
- include_examples "raises or ignores file", described_class::InvalidEntryError, "public/link.html"
- end
-
- context 'when entry has unknown ftype' do
- before do
- file = create_file("public/index.html", "hello")
-
- allow(File).to receive(:lstat).and_call_original
- expect(File).to receive(:lstat).with(file) { double("lstat", ftype: "unknown") }
- end
-
- include_examples "raises or ignores file", described_class::InvalidEntryError, "public/index.html"
- end
-
- it "includes raw symlink if it's target is a valid directory" do
- create_dir("public/target")
- create_file("public/target/index.html", "hello")
- create_link("public/link", "./target")
-
- with_zip_file do |zip_file|
- expect(zip_file.entries.count).to eq(4) # /public and 3 created above
-
- entry = zip_file.get_entry("public/link")
- expect(entry.ftype).to eq(:symlink)
- expect(zip_file.read(entry)).to eq("./target")
- end
- end
- end
-
- context "validating fixtures pages archives" do
- using RSpec::Parameterized::TableSyntax
-
- where(:fixture_path) do
- ["spec/fixtures/pages.zip", "spec/fixtures/pages_non_writeable.zip"]
- end
-
- with_them do
- let(:full_fixture_path) { Rails.root.join(fixture_path) }
-
- it 'a created archives contains exactly the same entries' do
- SafeZip::Extract.new(full_fixture_path).extract(directories: ['public'], to: @work_dir)
-
- with_zip_file do |created_archive|
- Zip::File.open(full_fixture_path) do |original_archive|
- original_archive.entries do |original_entry|
- created_entry = created_archive.get_entry(original_entry.name)
-
- expect(created_entry.name).to eq(original_entry.name)
- expect(created_entry.ftype).to eq(original_entry.ftype)
- expect(created_archive.read(created_entry)).to eq(original_archive.read(original_entry))
- end
- end
- end
- end
- end
- end
-
- def create_file(name, content)
- file_path = File.join(@work_dir, name)
-
- File.open(file_path, "w") do |f|
- f.write(content)
- end
-
- file_path
- end
-
- def create_dir(dir)
- Dir.mkdir(File.join(@work_dir, dir))
- end
-
- def create_link(new_name, target)
- File.symlink(target, File.join(@work_dir, new_name))
- end
-
- def with_zip_file
- Zip::File.open(archive) do |zip_file|
- yield zip_file
- end
- end
-end
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index 411ff5662d4..4b2569f6b2d 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -21,183 +21,88 @@ RSpec.describe Projects::AfterRenameService, feature_category: :groups_and_proje
end
describe '#execute' do
- context 'using legacy storage' do
- let(:project) { create(:project, :repository, :wiki_repo, :legacy_storage) }
- let(:project_storage) { project.send(:storage) }
- let(:gitlab_shell) { Gitlab::Shell.new }
-
- before do
- # Project#gitlab_shell returns a new instance of Gitlab::Shell on every
- # call. This makes testing a bit easier.
- allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
-
- stub_application_setting(hashed_storage_enabled: false)
- end
-
- it 'renames a repository' do
- stub_container_registry_config(enabled: false)
-
- expect_any_instance_of(SystemHooksService)
- .to receive(:execute_hooks_for)
- .with(project, :rename)
-
- expect_any_instance_of(Gitlab::UploadsTransfer)
- .to receive(:rename_project)
- .with(path_before_rename, path_after_rename, project.namespace.full_path)
-
- expect(repo_before_rename).to exist
- expect(wiki_repo_before_rename).to exist
-
- service_execute
-
- expect(repo_before_rename).not_to exist
- expect(wiki_repo_before_rename).not_to exist
- expect(repo_after_rename).to exist
- expect(wiki_repo_after_rename).to exist
- end
-
- context 'container registry with images' do
- let(:container_repository) { create(:container_repository) }
-
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: ['tag'])
- project.container_repositories << container_repository
- end
-
- it 'raises a RenameFailedError' do
- expect { service_execute }.to raise_error(described_class::RenameFailedError)
- end
- end
-
- context 'attachments' do
- before do
- expect(project_storage).to receive(:rename_repo) { true }
- end
-
- it 'moves uploads folder to new location' do
- expect_any_instance_of(Gitlab::UploadsTransfer).to receive(:rename_project)
-
- service_execute
- end
- end
-
- it 'updates project full path in gitaly' do
- service_execute
-
- expect(project.repository.full_path).to eq(project.full_path)
- end
-
- it 'updates storage location' do
- allow(project_storage).to receive(:rename_repo).and_return(true)
-
- service_execute
+ let(:project) { create(:project, :repository, skip_disk_validation: true) }
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
+ let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
+ let(:hashed_path) { File.join(hashed_prefix, hash) }
+
+ before do
+ # Project#gitlab_shell returns a new instance of Gitlab::Shell on every
+ # call. This makes testing a bit easier.
+ allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
+
+ stub_application_setting(hashed_storage_enabled: true)
+ end
- expect(project.project_repository).to have_attributes(
- disk_path: project.disk_path,
- shard_name: project.repository_storage
- )
- end
+ it 'renames a repository' do
+ stub_container_registry_config(enabled: false)
- context 'with hashed storage upgrade when renaming enabled' do
- it 'calls HashedStorage::MigrationService with correct options' do
- stub_application_setting(hashed_storage_enabled: true)
+ expect_any_instance_of(SystemHooksService)
+ .to receive(:execute_hooks_for)
+ .with(project, :rename)
- expect_next_instance_of(::Projects::HashedStorage::MigrationService) do |service|
- expect(service).to receive(:execute).and_return(true)
- end
+ expect(project).to receive(:expire_caches_before_rename)
- service_execute
- end
- end
+ service_execute
end
- context 'using hashed storage' do
- let(:project) { create(:project, :repository, skip_disk_validation: true) }
- let(:gitlab_shell) { Gitlab::Shell.new }
- let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
- let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
- let(:hashed_path) { File.join(hashed_prefix, hash) }
+ context 'container registry with images' do
+ let(:container_repository) { create(:container_repository) }
before do
- # Project#gitlab_shell returns a new instance of Gitlab::Shell on every
- # call. This makes testing a bit easier.
- allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
-
- stub_application_setting(hashed_storage_enabled: true)
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
+ project.container_repositories << container_repository
end
- it 'renames a repository' do
- stub_container_registry_config(enabled: false)
-
- expect(gitlab_shell).not_to receive(:mv_repository)
-
- expect_any_instance_of(SystemHooksService)
- .to receive(:execute_hooks_for)
- .with(project, :rename)
-
- expect(project).to receive(:expire_caches_before_rename)
-
- service_execute
+ it 'raises a RenameFailedError' do
+ expect { service_execute }
+ .to raise_error(described_class::RenameFailedError)
end
+ end
- context 'container registry with images' do
- let(:container_repository) { create(:container_repository) }
+ context 'attachments' do
+ let(:uploader) { create(:upload, :issuable_upload, :with_file, model: project) }
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let(:legacy_storage_path) { File.join(file_uploader.root, legacy_storage.disk_path) }
+ let(:hashed_storage_path) { File.join(file_uploader.root, hashed_storage.disk_path) }
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: ['tag'])
- project.container_repositories << container_repository
- end
+ it 'keeps uploads folder location unchanged' do
+ expect_any_instance_of(Gitlab::UploadsTransfer).not_to receive(:rename_project)
- it 'raises a RenameFailedError' do
- expect { service_execute }
- .to raise_error(described_class::RenameFailedError)
- end
+ service_execute
end
- context 'attachments' do
- let(:uploader) { create(:upload, :issuable_upload, :with_file, model: project) }
- let(:file_uploader) { build(:file_uploader, project: project) }
- let(:legacy_storage_path) { File.join(file_uploader.root, legacy_storage.disk_path) }
- let(:hashed_storage_path) { File.join(file_uploader.root, hashed_storage.disk_path) }
+ context 'when not rolled out' do
+ let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
- it 'keeps uploads folder location unchanged' do
- expect_any_instance_of(Gitlab::UploadsTransfer).not_to receive(:rename_project)
+ it 'moves attachments folder to hashed storage' do
+ expect(File.directory?(legacy_storage_path)).to be_truthy
+ expect(File.directory?(hashed_storage_path)).to be_falsey
service_execute
- end
-
- context 'when not rolled out' do
- let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
-
- it 'moves attachments folder to hashed storage' do
- expect(File.directory?(legacy_storage_path)).to be_truthy
- expect(File.directory?(hashed_storage_path)).to be_falsey
+ expect(project.reload.hashed_storage?(:attachments)).to be_truthy
- service_execute
- expect(project.reload.hashed_storage?(:attachments)).to be_truthy
-
- expect(File.directory?(legacy_storage_path)).to be_falsey
- expect(File.directory?(hashed_storage_path)).to be_truthy
- end
+ expect(File.directory?(legacy_storage_path)).to be_falsey
+ expect(File.directory?(hashed_storage_path)).to be_truthy
end
end
+ end
- it 'updates project full path in gitaly' do
- service_execute
+ it 'updates project full path in gitaly' do
+ service_execute
- expect(project.repository.full_path).to eq(project.full_path)
- end
+ expect(project.repository.full_path).to eq(project.full_path)
+ end
- it 'updates storage location' do
- service_execute
+ it 'updates storage location' do
+ service_execute
- expect(project.project_repository).to have_attributes(
- disk_path: project.disk_path,
- shard_name: project.repository_storage
- )
- end
+ expect(project.project_repository).to have_attributes(
+ disk_path: project.disk_path,
+ shard_name: project.repository_storage
+ )
end
context 'EventStore' do
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
deleted file mode 100644
index e21d8b6fa83..00000000000
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ /dev/null
@@ -1,152 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::HashedStorage::MigrateRepositoryService, feature_category: :groups_and_projects do
- let(:gitlab_shell) { Gitlab::Shell.new }
- let(:project) { create(:project, :legacy_storage, :repository, :wiki_repo, :design_repo) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::Hashed.new(project) }
-
- subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
-
- describe '#execute' do
- let(:old_disk_path) { legacy_storage.disk_path }
- let(:new_disk_path) { hashed_storage.disk_path }
-
- before do
- allow(service).to receive(:gitlab_shell) { gitlab_shell }
- end
-
- context 'repository lock' do
- it 'tries to lock the repository' do
- expect(service).to receive(:try_to_set_repository_read_only!)
-
- service.execute
- end
-
- it 'fails when a git operation is in progress' do
- allow(project).to receive(:git_transfer_in_progress?) { true }
-
- expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
- end
- end
-
- context 'when repository doesnt exist on disk' do
- let(:project) { create(:project, :legacy_storage) }
-
- it 'skips the disk change but increase the version' do
- service.execute
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- end
- end
-
- context 'when succeeds' do
- it 'renames project, wiki and design repositories' do
- service.execute
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_truthy
- end
-
- it 'updates project to be hashed and not read-only' do
- service.execute
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only).to be_falsey
- end
-
- it 'move operation is called for all repositories' do
- expect_move_repository(old_disk_path, new_disk_path)
- expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
- expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
-
- service.execute
- end
-
- it 'writes project full path to gitaly' do
- service.execute
-
- expect(project.repository.full_path).to eq project.full_path
- end
- end
-
- context 'when exception happens' do
- it 'handles OpenSSL::Cipher::CipherError' do
- expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
-
- expect { service.execute }.not_to raise_exception
- end
-
- it 'ensures rollback when OpenSSL::Cipher::CipherError' do
- expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
- project.reload
-
- expect(project.legacy_storage?).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
-
- it 'handles Gitlab::Git::CommandError' do
- expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
-
- expect { service.execute }.not_to raise_exception
- end
-
- it 'ensures rollback when Gitlab::Git::CommandError' do
- expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
- project.reload
-
- expect(project.legacy_storage?).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
- end
-
- context 'when one move fails' do
- it 'rollsback repositories to original name' do
- allow(service).to receive(:move_repository).and_call_original
- allow(service).to receive(:move_repository).with(old_disk_path, new_disk_path).once { false } # will disable first move only
-
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_falsey
- expect(project.repository_read_only?).to be_falsey
- end
-
- context 'when rollback fails' do
- before do
- gitlab_shell.mv_repository(project.repository_storage, old_disk_path, new_disk_path)
- end
-
- it 'does not try to move nil repository over existing' do
- expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
- expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
- expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
-
- service.execute
- end
- end
- end
-
- it 'works even when project validation fails' do
- allow(project).to receive(:valid?) { false }
-
- expect { service.execute }.to change { project.hashed_storage?(:repository) }.to(true)
- end
-
- def expect_move_repository(from_name, to_name)
- expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
- end
- end
-end
diff --git a/spec/services/projects/hashed_storage/migration_service_spec.rb b/spec/services/projects/hashed_storage/migration_service_spec.rb
index ffbd5c2500a..d5b04688322 100644
--- a/spec/services/projects/hashed_storage/migration_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migration_service_spec.rb
@@ -14,43 +14,6 @@ RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :gro
subject(:service) { described_class.new(project, project.full_path, logger: logger) }
describe '#execute' do
- context 'repository migration' do
- let(:repository_service) do
- Projects::HashedStorage::MigrateRepositoryService.new(
- project: project,
- old_disk_path: project.full_path,
- logger: logger
- )
- end
-
- it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
- expect(service).to receive(:migrate_repository_service).and_return(repository_service)
- expect(repository_service).to receive(:execute)
-
- service.execute
- end
-
- it 'does not delegate migration if repository is already migrated' do
- project.storage_version = ::Project::LATEST_STORAGE_VERSION
- expect(Projects::HashedStorage::MigrateRepositoryService).not_to receive(:new)
-
- service.execute
- end
-
- it 'migrates legacy repositories to hashed storage' do
- legacy_attachments_path = FileUploader.absolute_base_dir(project)
- hashed_project = project.dup.tap { |p| p.id = project.id }
- hashed_project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
- hashed_attachments_path = FileUploader.absolute_base_dir(hashed_project)
-
- expect(logger).to receive(:info).with(/Repository moved from '#{project_legacy_path}' to '#{project_hashed_path}'/)
- expect(logger).to receive(:info).with(/Repository moved from '#{wiki_legacy_path}' to '#{wiki_hashed_path}'/)
- expect(logger).to receive(:info).with(/Project attachments moved from '#{legacy_attachments_path}' to '#{hashed_attachments_path}'/)
-
- expect { service.execute }.to change { project.storage_version }.from(nil).to(2)
- end
- end
-
context 'attachments migration' do
let(:project) { create(:project, :empty_repo, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
@@ -62,13 +25,6 @@ RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :gro
)
end
- it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
- expect(service).to receive(:migrate_attachments_service).and_return(attachments_service)
- expect(attachments_service).to receive(:execute)
-
- service.execute
- end
-
it 'does not delegate migration if attachments are already migrated' do
project.storage_version = ::Project::LATEST_STORAGE_VERSION
expect(Projects::HashedStorage::MigrateAttachmentsService).not_to receive(:new)
diff --git a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
deleted file mode 100644
index d1a68503fa3..00000000000
--- a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::HashedStorage::RollbackAttachmentsService, feature_category: :groups_and_projects do
- subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path, logger: nil) }
-
- let(:project) { create(:project, :repository, skip_disk_validation: true) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::Hashed.new(project) }
-
- let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
- let(:file_uploader) { build(:file_uploader, project: project) }
- let(:old_disk_path) { File.join(base_path(hashed_storage), upload.path) }
- let(:new_disk_path) { File.join(base_path(legacy_storage), upload.path) }
-
- describe '#execute' do
- context 'when succeeds' do
- it 'moves attachments to legacy storage layout' do
- expect(File.file?(old_disk_path)).to be_truthy
- expect(File.file?(new_disk_path)).to be_falsey
- expect(File.exist?(base_path(hashed_storage))).to be_truthy
- expect(File.exist?(base_path(legacy_storage))).to be_falsey
- expect(FileUtils).to receive(:mv).with(base_path(hashed_storage), base_path(legacy_storage)).and_call_original
-
- service.execute
-
- expect(File.exist?(base_path(legacy_storage))).to be_truthy
- expect(File.exist?(base_path(hashed_storage))).to be_falsey
- expect(File.file?(old_disk_path)).to be_falsey
- expect(File.file?(new_disk_path)).to be_truthy
- end
-
- it 'returns true' do
- expect(service.execute).to be_truthy
- end
-
- it 'sets skipped to false' do
- service.execute
-
- expect(service.skipped?).to be_falsey
- end
- end
-
- context 'when original folder does not exist anymore' do
- before do
- FileUtils.rm_rf(base_path(hashed_storage))
- end
-
- it 'skips moving folders and go to next' do
- expect(FileUtils).not_to receive(:mv).with(base_path(hashed_storage), base_path(legacy_storage))
-
- service.execute
-
- expect(File.exist?(base_path(legacy_storage))).to be_falsey
- expect(File.file?(new_disk_path)).to be_falsey
- end
-
- it 'returns true' do
- expect(service.execute).to be_truthy
- end
-
- it 'sets skipped to true' do
- service.execute
-
- expect(service.skipped?).to be_truthy
- end
- end
-
- context 'when target folder already exists' do
- before do
- FileUtils.mkdir_p(base_path(legacy_storage))
- end
-
- it 'raises AttachmentCannotMoveError' do
- expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
-
- expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentCannotMoveError)
- end
- end
-
- it 'works even when project validation fails' do
- allow(project).to receive(:valid?) { false }
-
- expect { service.execute }.to change { project.hashed_storage?(:attachments) }.to(false)
- end
- end
-
- describe '#old_disk_path' do
- it 'returns old disk_path for project' do
- expect(service.old_disk_path).to eq(project.disk_path)
- end
- end
-
- describe '#new_disk_path' do
- it 'returns new disk_path for project' do
- service.execute
-
- expect(service.new_disk_path).to eq(project.full_path)
- end
- end
-
- def base_path(storage)
- File.join(FileUploader.root, storage.disk_path)
- end
-end
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
deleted file mode 100644
index 1e5d4ae4d20..00000000000
--- a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
+++ /dev/null
@@ -1,152 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis_shared_state, feature_category: :groups_and_projects do
- let(:gitlab_shell) { Gitlab::Shell.new }
- let(:project) { create(:project, :repository, :wiki_repo, :design_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::Hashed.new(project) }
-
- subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
-
- describe '#execute' do
- let(:old_disk_path) { hashed_storage.disk_path }
- let(:new_disk_path) { legacy_storage.disk_path }
-
- before do
- allow(service).to receive(:gitlab_shell) { gitlab_shell }
- end
-
- context 'repository lock' do
- it 'tries to lock the repository' do
- expect(service).to receive(:try_to_set_repository_read_only!)
-
- service.execute
- end
-
- it 'fails when a git operation is in progress' do
- allow(project).to receive(:git_transfer_in_progress?) { true }
-
- expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
- end
- end
-
- context 'when repository doesnt exist on disk' do
- let(:project) { create(:project) }
-
- it 'skips the disk change but decrease the version' do
- service.execute
-
- expect(project.legacy_storage?).to be_truthy
- end
- end
-
- context 'when succeeds' do
- it 'renames project, wiki and design repositories' do
- service.execute
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_truthy
- end
-
- it 'updates project to be legacy and not read-only' do
- service.execute
-
- expect(project.legacy_storage?).to be_truthy
- expect(project.repository_read_only).to be_falsey
- end
-
- it 'move operation is called for both repositories' do
- expect_move_repository(old_disk_path, new_disk_path)
- expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
- expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
-
- service.execute
- end
-
- it 'writes project full path to gitaly' do
- service.execute
-
- expect(project.repository.full_path).to eq project.full_path
- end
- end
-
- context 'when exception happens' do
- it 'handles OpenSSL::Cipher::CipherError' do
- expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
-
- expect { service.execute }.not_to raise_exception
- end
-
- it 'ensures rollback when OpenSSL::Cipher::CipherError' do
- expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
- project.reload
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
-
- it 'handles Gitlab::Git::CommandError' do
- expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
-
- expect { service.execute }.not_to raise_exception
- end
-
- it 'ensures rollback when Gitlab::Git::CommandError' do
- expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
- project.reload
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
- end
-
- context 'when one move fails' do
- it 'rolls repositories back to original name' do
- allow(service).to receive(:move_repository).and_call_original
- allow(service).to receive(:move_repository).with(old_disk_path, new_disk_path).once { false } # will disable first move only
-
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_falsey
- expect(project.repository_read_only?).to be_falsey
- end
-
- context 'when rollback fails' do
- before do
- gitlab_shell.mv_repository(project.repository_storage, old_disk_path, new_disk_path)
- end
-
- it 'does not try to move nil repository over existing' do
- expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
- expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
- expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
-
- service.execute
- end
- end
- end
-
- it 'works even when project validation fails' do
- allow(project).to receive(:valid?) { false }
-
- expect { service.execute }.to change { project.legacy_storage? }.to(true)
- end
-
- def expect_move_repository(from_name, to_name)
- expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
- end
- end
-end
diff --git a/spec/services/projects/hashed_storage/rollback_service_spec.rb b/spec/services/projects/hashed_storage/rollback_service_spec.rb
deleted file mode 100644
index 088eb9d2734..00000000000
--- a/spec/services/projects/hashed_storage/rollback_service_spec.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::HashedStorage::RollbackService, feature_category: :groups_and_projects do
- let(:project) { create(:project, :empty_repo, :wiki_repo) }
- let(:logger) { double }
- let!(:project_attachment) { build(:file_uploader, project: project) }
- let(:project_hashed_path) { Storage::Hashed.new(project).disk_path }
- let(:project_legacy_path) { Storage::LegacyProject.new(project).disk_path }
- let(:wiki_hashed_path) { "#{project_hashed_path}.wiki" }
- let(:wiki_legacy_path) { "#{project_legacy_path}.wiki" }
-
- subject(:service) { described_class.new(project, project.disk_path, logger: logger) }
-
- describe '#execute' do
- context 'attachments rollback' do
- let(:attachments_service_class) { Projects::HashedStorage::RollbackAttachmentsService }
- let(:attachments_service) { attachments_service_class.new(project: project, old_disk_path: project.disk_path, logger: logger) }
-
- it 'delegates rollback to Projects::HashedStorage::RollbackAttachmentsService' do
- expect(service).to receive(:rollback_attachments_service).and_return(attachments_service)
- expect(attachments_service).to receive(:execute)
-
- service.execute
- end
-
- it 'does not delegate rollback if repository is in legacy storage already' do
- project.storage_version = nil
- expect(attachments_service_class).not_to receive(:new)
-
- service.execute
- end
-
- it 'rollbacks to legacy storage' do
- hashed_attachments_path = FileUploader.absolute_base_dir(project)
- legacy_project = project.dup
- legacy_project.storage_version = nil
- legacy_attachments_path = FileUploader.absolute_base_dir(legacy_project)
-
- expect(logger).to receive(:info).with(/Project attachments moved from '#{hashed_attachments_path}' to '#{legacy_attachments_path}'/)
-
- expect(logger).to receive(:info).with(/Repository moved from '#{project_hashed_path}' to '#{project_legacy_path}'/)
- expect(logger).to receive(:info).with(/Repository moved from '#{wiki_hashed_path}' to '#{wiki_legacy_path}'/)
-
- expect { service.execute }.to change { project.storage_version }.from(2).to(nil)
- end
- end
-
- context 'repository rollback' do
- let(:project) { create(:project, :empty_repo, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
- let(:repository_service_class) { Projects::HashedStorage::RollbackRepositoryService }
- let(:repository_service) { repository_service_class.new(project: project, old_disk_path: project.disk_path, logger: logger) }
-
- it 'delegates rollback to RollbackRepositoryService' do
- expect(service).to receive(:rollback_repository_service).and_return(repository_service)
- expect(repository_service).to receive(:execute)
-
- service.execute
- end
-
- it 'does not delegate rollback if repository is in legacy storage already' do
- project.storage_version = nil
-
- expect(repository_service_class).not_to receive(:new)
-
- service.execute
- end
-
- it 'rollbacks to legacy storage' do
- expect(logger).to receive(:info).with(/Repository moved from '#{project_hashed_path}' to '#{project_legacy_path}'/)
- expect(logger).to receive(:info).with(/Repository moved from '#{wiki_hashed_path}' to '#{wiki_legacy_path}'/)
-
- expect { service.execute }.to change { project.storage_version }.from(1).to(nil)
- end
- end
- end
-end
diff --git a/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb b/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
deleted file mode 100644
index fab8cafd1a0..00000000000
--- a/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
+++ /dev/null
@@ -1,136 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_category: :experimentation_adoption do
- describe '#execute' do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:campaign) { Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE }
-
- before do
- allow(Notify)
- .to receive(:build_ios_app_guide_email)
- .and_return(instance_double(ActionMailer::MessageDelivery, deliver_later: true))
- end
-
- subject(:execute) do
- described_class.new(project, campaign).execute
- end
-
- context 'users can receive marketing emails' do
- let(:maintainer) { create(:user) }
- let(:developer) { create(:user) }
-
- before do
- project.add_developer(developer)
- project.add_maintainer(maintainer)
- end
-
- it 'sends the email to all project members with access_level >= Developer', :aggregate_failures do
- [project.owner, maintainer, developer].each do |user|
- email = user.notification_email_or_default
-
- expect(Notify).to receive(:build_ios_app_guide_email).with(email)
- end
-
- execute
- end
-
- it 'records sent emails', :aggregate_failures do
- expect { execute }.to change { Users::InProductMarketingEmail.count }.from(0).to(3)
-
- [project.owner, maintainer, developer].each do |user|
- expect(
- Users::InProductMarketingEmail.where(
- user: user,
- campaign: campaign
- )
- ).to exist
- end
- end
-
- it 'tracks experiment :email_sent event', :experiment do
- expect(experiment(:build_ios_app_guide_email)).to track(:email_sent)
- .on_next_instance
- .with_context(project: project)
-
- execute
- end
- end
-
- shared_examples 'does not send the email' do
- it do
- email = user.notification_email_or_default
- expect(Notify).not_to receive(:build_ios_app_guide_email).with(email)
- execute
- end
- end
-
- shared_examples 'does not create a record of the sent email' do
- it do
- expect(
- Users::InProductMarketingEmail.where(
- user: user,
- campaign: campaign
- )
- ).not_to exist
-
- execute
- end
- end
-
- context "when user can't receive marketing emails" do
- before do
- project.add_developer(user)
- end
-
- context 'when user.can?(:receive_notifications) is false' do
- it 'does not send the email' do
- allow_next_found_instance_of(User) do |user|
- allow(user).to receive(:can?).with(:receive_notifications) { false }
-
- email = user.notification_email_or_default
- expect(Notify).not_to receive(:build_ios_app_guide_email).with(email)
-
- expect(
- Users::InProductMarketingEmail.where(
- user: user,
- campaign: campaign
- )
- ).not_to exist
- end
-
- execute
- end
- end
- end
-
- context 'when campaign email has already been sent to the user' do
- before do
- project.add_developer(user)
- create(:in_product_marketing_email, :campaign, user: user, campaign: campaign)
- end
-
- it_behaves_like 'does not send the email'
- end
-
- context "when user is a reporter" do
- before do
- project.add_reporter(user)
- end
-
- it_behaves_like 'does not send the email'
- it_behaves_like 'does not create a record of the sent email'
- end
-
- context "when user is a guest" do
- before do
- project.add_guest(user)
- end
-
- it_behaves_like 'does not send the email'
- it_behaves_like 'does not create a record of the sent email'
- end
- end
-end
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 00c156ba538..ef2a89a15b1 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService, feature_category: :sou
it 'streams the download' do
expected_options = { headers: anything, stream_body: true }
- expect(Gitlab::HTTP).to receive(:perform_request).with(Net::HTTP::Get, anything, expected_options)
+ expect(Gitlab::HTTP).to receive(:get).with(anything, expected_options)
subject.execute
end
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index b01e64439ec..692f43eb205 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -18,6 +18,14 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
described_class.new(project, user).execute(noteable)
end
+ it 'returns results in correct order' do
+ group = create(:group).tap { |g| g.add_owner(user) }
+
+ expect(run_service.pluck(:username)).to eq([
+ noteable.author.username, 'all', user.username, group.full_path
+ ])
+ end
+
it 'includes `All Project and Group Members`' do
expect(run_service).to include(a_hash_including({ username: "all", name: "All Project and Group Members" }))
end
@@ -104,6 +112,24 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
expect(group_items.first[:avatar_url]).to eq("/gitlab/uploads/-/system/group/avatar/#{group.id}/dk.png")
end
end
+
+ context 'with subgroups' do
+ let(:group_1) { create(:group, path: 'bb') }
+ let(:group_2) { create(:group, path: 'zz') }
+ let(:subgroup) { create(:group, path: 'aa', parent: group_1) }
+
+ before do
+ group_1.add_owner(user)
+ group_2.add_owner(user)
+ subgroup.add_owner(user)
+ end
+
+ it 'returns results ordered by full path' do
+ expect(group_items.pluck(:username)).to eq([
+ group_1.full_path, subgroup.full_path, group_2.full_path
+ ])
+ end
+ end
end
context 'when `disable_all_mention` FF is enabled' do
diff --git a/spec/services/projects/record_target_platforms_service_spec.rb b/spec/services/projects/record_target_platforms_service_spec.rb
index 7c6907c7a95..bf87b763341 100644
--- a/spec/services/projects/record_target_platforms_service_spec.rb
+++ b/spec/services/projects/record_target_platforms_service_spec.rb
@@ -51,52 +51,6 @@ RSpec.describe Projects::RecordTargetPlatformsService, '#execute', feature_categ
end
end
end
-
- describe 'Build iOS guide email experiment' do
- shared_examples 'tracks experiment assignment event' do
- it 'tracks the assignment event', :experiment do
- expect(experiment(:build_ios_app_guide_email))
- .to track(:assignment)
- .with_context(project: project)
- .on_next_instance
-
- execute
- end
- end
-
- context 'experiment candidate' do
- before do
- stub_experiments(build_ios_app_guide_email: :candidate)
- end
-
- it 'executes a Projects::InProductMarketingCampaignEmailsService' do
- service_double = instance_double(Projects::InProductMarketingCampaignEmailsService, execute: true)
-
- expect(Projects::InProductMarketingCampaignEmailsService)
- .to receive(:new).with(project, Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE)
- .and_return service_double
- expect(service_double).to receive(:execute)
-
- execute
- end
-
- it_behaves_like 'tracks experiment assignment event'
- end
-
- context 'experiment control' do
- before do
- stub_experiments(build_ios_app_guide_email: :control)
- end
-
- it 'does not execute a Projects::InProductMarketingCampaignEmailsService' do
- expect(Projects::InProductMarketingCampaignEmailsService).not_to receive(:new)
-
- execute
- end
-
- it_behaves_like 'tracks experiment assignment event'
- end
- end
end
context 'when project is not an XCode project' do
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 1ddf6168c07..22264819e3b 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -425,28 +425,6 @@ RSpec.describe Projects::TransferService, feature_category: :groups_and_projects
end
end
- context 'namespace which contains orphan repository with same projects path name' do
- let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(group.full_path, "#{project.path}.git"), nil, nil) }
-
- before do
- group.add_owner(user)
-
- raw_fake_repo.create_repository
- end
-
- after do
- raw_fake_repo.remove
- end
-
- it 'does not allow the project transfer' do
- transfer_result = execute_transfer
-
- expect(transfer_result).to eq false
- expect(project.namespace).to eq(user.namespace)
- expect(project.errors[:new_namespace]).to include('Cannot move project')
- end
- end
-
context 'target namespace containing the same project name' do
before do
group.add_owner(user)
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 6c767876d05..0ad7693a047 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -22,6 +22,20 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
subject(:service) { described_class.new(project, build) }
+ RSpec.shared_examples 'old deployments' do
+ it 'deactivates old deployments from the same project with the same path prefix', :freeze_time do
+ other_project = create(:pages_deployment)
+ same_project_other_path_prefix = create(:pages_deployment, project: project, path_prefix: 'other')
+ same_project = create(:pages_deployment, project: project)
+
+ expect { expect(service.execute[:status]).to eq(:success) }
+ .to not_change { other_project.reload.deleted_at }
+ .and not_change { same_project_other_path_prefix.reload.deleted_at }
+ .and change { same_project.reload.deleted_at }
+ .from(nil).to(described_class::OLD_DEPLOYMENTS_DESTRUCTION_DELAY.from_now)
+ end
+ end
+
RSpec.shared_examples 'pages size limit is' do |size_limit|
context "when size is below the limit" do
before do
@@ -36,6 +50,8 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
expect(deploy_status.description).not_to be_present
expect(project.pages_metadatum).to be_deployed
end
+
+ it_behaves_like 'old deployments'
end
context "when size is above the limit" do
@@ -95,6 +111,8 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
build.reload
end
+ it_behaves_like 'old deployments'
+
it "doesn't delete artifacts after deploying" do
expect(service.execute[:status]).to eq(:success)
@@ -146,31 +164,6 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
expect(project.pages_metadatum.reload.pages_deployment).to eq(project.pages_deployments.last)
end
- context 'when there is an old pages deployment' do
- let!(:old_deployment_from_another_project) { create(:pages_deployment) }
- let!(:old_deployment) { create(:pages_deployment, project: project) }
-
- it 'schedules a destruction of older deployments' do
- expect(DestroyPagesDeploymentsWorker).to(
- receive(:perform_in).with(
- described_class::OLD_DEPLOYMENTS_DESTRUCTION_DELAY,
- project.id,
- instance_of(Integer)
- )
- )
-
- service.execute
- end
-
- it 'removes older deployments', :sidekiq_inline do
- expect do
- service.execute
- end.not_to change { PagesDeployment.count } # it creates one and deletes one
-
- expect(PagesDeployment.find_by_id(old_deployment.id)).to be_nil
- end
- end
-
context 'when archive does not have pages directory' do
let(:file) { empty_file }
let(:metadata_filename) { empty_metadata_filename }
@@ -291,20 +284,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
expect(deployment.ci_build_id).to eq(build.id)
end
- context 'when old deployment present' do
- let!(:old_build) { create(:ci_build, name: 'pages', pipeline: old_pipeline, ref: 'HEAD') }
- let!(:old_deployment) { create(:pages_deployment, ci_build: old_build, project: project) }
-
- before do
- project.update_pages_deployment!(old_deployment)
- end
-
- it 'deactivates old deployments' do
- expect(service.execute[:status]).to eq(:success)
-
- expect(old_deployment.reload.deleted_at).not_to be_nil
- end
- end
+ it_behaves_like 'old deployments'
context 'when newer deployment present' do
before do
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index b30c1d30044..d173d23a1d6 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -103,6 +103,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
.and_raise(Gitlab::Git::CommandError)
+ expect(project_repository_double).to receive(:remove)
expect do
subject.execute
@@ -140,10 +141,11 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
.and_return('not matching checksum')
+ expect(project_repository_double).to receive(:remove)
expect do
subject.execute
- end.to raise_error(UpdateRepositoryStorageMethods::Error, /Failed to verify project repository checksum/)
+ end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify project repository checksum/)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
@@ -316,10 +318,14 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
context 'when object pool checksum does not match' do
let(:new_object_pool_checksum) { 'not_match' }
- it 'raises an error and does not change state' do
+ it 'raises an error and removes the new object pool repository' do
+ expect(object_pool_repository_double).to receive(:remove)
+
original_count = PoolRepository.count
- expect { subject.execute }.to raise_error(UpdateRepositoryStorageMethods::Error)
+ expect do
+ subject.execute
+ end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify object_pool repository/)
project.reload
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 195cfe78b3f..7ab85d8253a 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -356,7 +356,7 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
context 'when changes project features' do
# Using some sample features for testing.
# Not using all the features because some of them must be enabled/disabled together
- %w[issues wiki forking].each do |feature_name|
+ %w[issues wiki forking model_experiments].each do |feature_name|
context "with feature_name:#{feature_name}" do
let(:feature) { "#{feature_name}_access_level" }
let(:params) do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 5e7fb8397e3..2c34d6a59be 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -2478,6 +2478,26 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
it_behaves_like 'quick actions that change work item type'
+
+ context '/set_parent command' do
+ let_it_be(:parent) { create(:work_item, :issue, project: project) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project) }
+ let_it_be(:parent_ref) { parent.to_reference(project) }
+
+ let(:content) { "/set_parent #{parent_ref}" }
+
+ it 'returns success message' do
+ _, _, message = service.execute(content, work_item)
+
+ expect(message).to eq('Work item parent set successfully')
+ end
+
+ it 'sets correct update params' do
+ _, updates, _ = service.execute(content, work_item)
+
+ expect(updates).to eq(set_parent: parent)
+ end
+ end
end
describe '#explain' do
@@ -3022,6 +3042,104 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
end
+
+ describe '/set_parent command' do
+ let_it_be(:parent) { create(:work_item, :issue, project: project) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project) }
+ let_it_be(:parent_ref) { parent.to_reference(project) }
+
+ let(:command) { "/set_parent #{parent_ref}" }
+
+ shared_examples 'command is available' do
+ it 'explanation contains correct message' do
+ _, explanations = service.explain(command, work_item)
+
+ expect(explanations)
+ .to contain_exactly("Change work item's parent to #{parent_ref}.")
+ end
+
+ it 'contains command' do
+ expect(service.available_commands(work_item)).to include(a_hash_including(name: :set_parent))
+ end
+ end
+
+ shared_examples 'command is not available' do
+ it 'explanation is empty' do
+ _, explanations = service.explain(command, work_item)
+
+ expect(explanations).to eq([])
+ end
+
+ it 'does not contain command' do
+ expect(service.available_commands(work_item)).not_to include(a_hash_including(name: :set_parent))
+ end
+ end
+
+ context 'when user can admin link' do
+ it_behaves_like 'command is available'
+
+ context 'when work item type does not support a parent' do
+ let_it_be(:work_item) { build(:work_item, :incident, project: project) }
+
+ it_behaves_like 'command is not available'
+ end
+ end
+
+ context 'when user cannot admin link' do
+ subject(:service) { described_class.new(project, create(:user)) }
+
+ it_behaves_like 'command is not available'
+ end
+ end
+
+ describe '/add_child command' do
+ let_it_be(:child) { create(:work_item, :issue, project: project) }
+ let_it_be(:work_item) { create(:work_item, :objective, project: project) }
+ let_it_be(:child_ref) { child.to_reference(project) }
+
+ let(:command) { "/add_child #{child_ref}" }
+
+ shared_examples 'command is available' do
+ it 'explanation contains correct message' do
+ _, explanations = service.explain(command, work_item)
+
+ expect(explanations)
+ .to contain_exactly("Add #{child_ref} to this work item as child(ren).")
+ end
+
+ it 'contains command' do
+ expect(service.available_commands(work_item)).to include(a_hash_including(name: :add_child))
+ end
+ end
+
+ shared_examples 'command is not available' do
+ it 'explanation is empty' do
+ _, explanations = service.explain(command, work_item)
+
+ expect(explanations).to eq([])
+ end
+
+ it 'does not contain command' do
+ expect(service.available_commands(work_item)).not_to include(a_hash_including(name: :add_child))
+ end
+ end
+
+ context 'when user can admin link' do
+ it_behaves_like 'command is available'
+
+ context 'when work item type does not support children' do
+ let_it_be(:work_item) { build(:work_item, :key_result, project: project) }
+
+ it_behaves_like 'command is not available'
+ end
+ end
+
+ context 'when user cannot admin link' do
+ subject(:service) { described_class.new(project, create(:user)) }
+
+ it_behaves_like 'command is not available'
+ end
+ end
end
describe '#available_commands' do
diff --git a/spec/services/releases/destroy_service_spec.rb b/spec/services/releases/destroy_service_spec.rb
index 2b6e96a781e..de3ce2b6206 100644
--- a/spec/services/releases/destroy_service_spec.rb
+++ b/spec/services/releases/destroy_service_spec.rb
@@ -83,5 +83,11 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
expect(milestone.reload).to be_persisted
end
end
+
+ it 'executes hooks' do
+ expect(service.release).to receive(:execute_hooks).with('delete')
+
+ service.execute
+ end
end
end
diff --git a/spec/services/repositories/replicate_service_spec.rb b/spec/services/repositories/replicate_service_spec.rb
new file mode 100644
index 00000000000..b4fbc478d2f
--- /dev/null
+++ b/spec/services/repositories/replicate_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Repositories::ReplicateService, feature_category: :source_code_management do
+ let(:new_checksum) { 'match' }
+ let(:repository) { instance_double('Gitlab::Git::Repository', checksum: 'match') }
+ let(:new_repository) { instance_double('Gitlab::Git::Repository', checksum: new_checksum) }
+
+ subject { described_class.new(repository) }
+
+ it 'replicates repository' do
+ expect(new_repository).to receive(:replicate).with(repository)
+ expect(new_repository).not_to receive(:remove)
+
+ expect { subject.execute(new_repository, :project) }.not_to raise_error
+ end
+
+ context 'when checksum does not match' do
+ let(:new_checksum) { 'does not match' }
+
+ it 'raises an error and removes new repository' do
+ expect(new_repository).to receive(:replicate).with(repository)
+ expect(new_repository).to receive(:remove)
+
+ expect do
+ subject.execute(new_repository, :project)
+ end.to raise_error(described_class::Error, /Failed to verify project repository/)
+ end
+ end
+
+ context 'when an error is raised during checksum calculation' do
+ it 'raises the error and removes new repository' do
+ error = StandardError.new
+
+ expect(new_repository).to receive(:replicate).with(repository)
+ expect(new_repository).to receive(:checksum).and_raise(error)
+ expect(new_repository).to receive(:remove)
+
+ expect do
+ subject.execute(new_repository, :project)
+ end.to raise_error(error)
+ end
+ end
+end
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 28b345f8191..89974360154 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -125,7 +125,7 @@ RSpec.describe ResourceEvents::ChangeLabelsService, feature_category: :team_plan
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
let(:user) { author }
let(:namespace) { project.namespace }
diff --git a/spec/services/snippets/destroy_service_spec.rb b/spec/services/snippets/destroy_service_spec.rb
index ace9847185e..29898e3ab09 100644
--- a/spec/services/snippets/destroy_service_spec.rb
+++ b/spec/services/snippets/destroy_service_spec.rb
@@ -70,7 +70,6 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme
it 'does not schedule anything and return success' do
allow(snippet).to receive(:repository).and_return(nil)
- expect(GitlabShellWorker).not_to receive(:perform_in)
expect_next_instance_of(Repositories::DestroyService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
@@ -151,7 +150,6 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme
expect(snippet.repository).not_to be_nil
expect(snippet.repository.exists?).to be_falsey
- expect(GitlabShellWorker).not_to receive(:perform_in)
expect_next_instance_of(Repositories::DestroyService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
diff --git a/spec/services/snippets/update_repository_storage_service_spec.rb b/spec/services/snippets/update_repository_storage_service_spec.rb
index c417fbfd8b1..66847a43335 100644
--- a/spec/services/snippets/update_repository_storage_service_spec.rb
+++ b/spec/services/snippets/update_repository_storage_service_spec.rb
@@ -67,6 +67,7 @@ RSpec.describe Snippets::UpdateRepositoryStorageService, feature_category: :sour
expect(snippet_repository_double).to receive(:replicate)
.with(snippet.repository.raw)
.and_raise(Gitlab::Git::CommandError)
+ expect(snippet_repository_double).to receive(:remove)
expect do
subject.execute
@@ -101,10 +102,11 @@ RSpec.describe Snippets::UpdateRepositoryStorageService, feature_category: :sour
.with(snippet.repository.raw)
expect(snippet_repository_double).to receive(:checksum)
.and_return('not matching checksum')
+ expect(snippet_repository_double).to receive(:remove)
expect do
subject.execute
- end.to raise_error(UpdateRepositoryStorageMethods::Error, /Failed to verify snippet repository checksum from \w+ to not matching checksum/)
+ end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify snippet repository checksum from \w+ to not matching checksum/)
expect(snippet).not_to be_repository_read_only
expect(snippet.repository_storage).to eq('default')
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 70f43d82ead..361742699b0 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
end
let(:check_for_spam) { true }
- let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:user) { create(:user) }
let_it_be(:issue) { create(:issue, author: user) }
let_it_be(:snippet) { create(:personal_snippet, :public, author: user) }
@@ -136,15 +136,9 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
end
end
- context 'if allow_possible_spam user custom attribute is set' do
+ context 'if user is trusted to create possible spam' do
before do
- UserCustomAttribute.upsert_custom_attributes(
- [{
- user_id: user.id,
- key: 'allow_possible_spam',
- value: 'does not matter'
- }]
- )
+ user.custom_attributes.create!(key: 'trusted_by', value: 'does not matter')
end
context 'and a service returns a verdict that should be overridden' do
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 4a795f2db20..bcca1ed0b23 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -15,17 +15,34 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
let(:service) { described_class.new(noteable: noteable, project: project, author: author) }
describe '#relate_issuable' do
- let(:noteable_ref) { create(:issue) }
+ let_it_be(:issue1) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project) }
- subject { service.relate_issuable(noteable_ref) }
+ let(:noteable_ref) { issue1 }
- it_behaves_like 'a system note' do
- let(:action) { 'relate' }
- end
+ subject(:system_note) { service.relate_issuable(noteable_ref) }
context 'when issue marks another as related' do
+ it_behaves_like 'a system note' do
+ let(:action) { 'relate' }
+ end
+
it 'sets the note text' do
- expect(subject.note).to eq "marked this issue as related to #{noteable_ref.to_reference(project)}"
+ expect(system_note.note).to eq "marked this issue as related to #{issue1.to_reference(project)}"
+ end
+ end
+
+ context 'when issue marks several other issues as related' do
+ let(:noteable_ref) { [issue1, issue2] }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'relate' }
+ end
+
+ it 'sets the note text' do
+ expect(system_note.note).to eq(
+ "marked this issue as related to #{issue1.to_reference(project)} and #{issue2.to_reference(project)}"
+ )
end
end
@@ -695,7 +712,7 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
let(:user) { author }
let(:namespace) { project.namespace }
end
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index 52b99a6976d..3242ae9e533 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
let(:user) { author }
let(:namespace) { project.namespace }
end
@@ -232,7 +232,7 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
let(:user) { author }
let(:namespace) { project.namespace }
end
@@ -364,7 +364,7 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
let(:user) { author }
let(:namespace) { project.namespace }
end
diff --git a/spec/services/tasks_to_be_done/base_service_spec.rb b/spec/services/tasks_to_be_done/base_service_spec.rb
deleted file mode 100644
index 32b07cab095..00000000000
--- a/spec/services/tasks_to_be_done/base_service_spec.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe TasksToBeDone::BaseService, feature_category: :team_planning do
- let_it_be(:project) { create(:project) }
- let_it_be(:current_user) { create(:user) }
- let_it_be(:assignee_one) { create(:user) }
- let_it_be(:assignee_two) { create(:user) }
- let_it_be(:assignee_ids) { [assignee_one.id] }
- let_it_be(:label) { create(:label, title: 'tasks to be done:ci', project: project) }
-
- before do
- project.add_maintainer(current_user)
- project.add_developer(assignee_one)
- project.add_developer(assignee_two)
- end
-
- subject(:service) do
- TasksToBeDone::CreateCiTaskService.new(
- container: project,
- current_user: current_user,
- assignee_ids: assignee_ids
- )
- end
-
- context 'no existing task issue', :aggregate_failures do
- it 'creates an issue' do
- params = {
- assignee_ids: assignee_ids,
- title: 'Set up CI/CD',
- description: anything,
- add_labels: label.title
- }
-
- expect(Issues::CreateService)
- .to receive(:new)
- .with(container: project, current_user: current_user, params: params, perform_spam_check: false)
- .and_call_original
-
- expect { service.execute }.to change(Issue, :count).by(1)
-
- expect(project.issues.last).to have_attributes(
- author: current_user,
- title: params[:title],
- assignees: [assignee_one],
- labels: [label]
- )
- end
- end
-
- context 'an open issue with the same label already exists', :aggregate_failures do
- let_it_be(:assignee_ids) { [assignee_two.id] }
-
- it 'assigns the user to the existing issue' do
- issue = create(:labeled_issue, project: project, labels: [label], assignees: [assignee_one])
- params = { add_assignee_ids: assignee_ids }
-
- expect(Issues::UpdateService)
- .to receive(:new)
- .with(container: project, current_user: current_user, params: params)
- .and_call_original
-
- expect { service.execute }.not_to change(Issue, :count)
-
- expect(issue.reload.assignees).to match_array([assignee_one, assignee_two])
- end
- end
-end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 0888c27aab2..0b4cf9e53db 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe TodoService, feature_category: :team_planning do
include AfterNextHelpers
+ let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:author) { create(:user) }
let_it_be(:assignee) { create(:user) }
@@ -31,11 +32,18 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
shared_examples 'reassigned target' do
+ let(:additional_todo_attributes) { {} }
+
it 'creates a pending todo for new assignee' do
target_unassigned.assignees = [john_doe]
service.send(described_method, target_unassigned, author)
- should_create_todo(user: john_doe, target: target_unassigned, action: Todo::ASSIGNED)
+ should_create_todo(
+ user: john_doe,
+ target: target_unassigned,
+ action: Todo::ASSIGNED,
+ **additional_todo_attributes
+ )
end
it 'does not create a todo if unassigned' do
@@ -48,7 +56,13 @@ RSpec.describe TodoService, feature_category: :team_planning do
target_assigned.assignees = [john_doe]
service.send(described_method, target_assigned, john_doe)
- should_create_todo(user: john_doe, target: target_assigned, author: john_doe, action: Todo::ASSIGNED)
+ should_create_todo(
+ user: john_doe,
+ target: target_assigned,
+ author: john_doe,
+ action: Todo::ASSIGNED,
+ **additional_todo_attributes
+ )
end
it 'does not create a todo for guests' do
@@ -657,11 +671,27 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
describe '#mark_todo' do
- it 'creates a todo from a issue' do
+ it 'creates a todo from an issue' do
service.mark_todo(unassigned_issue, author)
should_create_todo(user: author, target: unassigned_issue, action: Todo::MARKED)
end
+
+ context 'when issue belongs to a group' do
+ it 'creates a todo from an issue' do
+ group_issue = create(:issue, :group_level, namespace: group)
+ service.mark_todo(group_issue, group_issue.author)
+
+ should_create_todo(
+ user: group_issue.author,
+ author: group_issue.author,
+ target: group_issue,
+ action: Todo::MARKED,
+ project: nil,
+ group: group
+ )
+ end
+ end
end
describe '#todo_exists?' do
@@ -726,6 +756,22 @@ RSpec.describe TodoService, feature_category: :team_planning do
should_create_todo(user: author, target: work_item, action: Todo::MARKED)
end
+
+ context 'when work item belongs to a group' do
+ it 'creates a todo from a work item' do
+ group_work_item = create(:work_item, :group_level, namespace: group)
+ service.mark_todo(group_work_item, group_work_item.author)
+
+ should_create_todo(
+ user: group_work_item.author,
+ author: group_work_item.author,
+ target: group_work_item,
+ action: Todo::MARKED,
+ project: nil,
+ group: group
+ )
+ end
+ end
end
describe '#todo_exists?' do
@@ -779,7 +825,7 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
end
- context 'assignable is an issue' do
+ context 'assignable is a project level issue' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:issue, project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:issue, project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
@@ -787,6 +833,32 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
end
+ context 'assignable is a project level work_item' do
+ it_behaves_like 'reassigned target' do
+ let(:target_assigned) { create(:work_item, project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
+ let(:addressed_target_assigned) { create(:work_item, project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:target_unassigned) { create(:work_item, project: project, author: author, assignees: []) }
+ end
+ end
+
+ context 'assignable is a group level issue' do
+ it_behaves_like 'reassigned target' do
+ let(:additional_todo_attributes) { { project: nil, group: group } }
+ let(:target_assigned) { create(:issue, :group_level, namespace: group, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
+ let(:addressed_target_assigned) { create(:issue, :group_level, namespace: group, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:target_unassigned) { create(:issue, :group_level, namespace: group, author: author, assignees: []) }
+ end
+ end
+
+ context 'assignable is a group level work item' do
+ it_behaves_like 'reassigned target' do
+ let(:additional_todo_attributes) { { project: nil, group: group } }
+ let(:target_assigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
+ let(:addressed_target_assigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:target_unassigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: []) }
+ end
+ end
+
context 'assignable is an alert' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:alert_management_alert, project: project, assignees: [john_doe]) }
diff --git a/spec/services/update_container_registry_info_service_spec.rb b/spec/services/update_container_registry_info_service_spec.rb
index 416b08bd04b..b21e3f4bd13 100644
--- a/spec/services/update_container_registry_info_service_spec.rb
+++ b/spec/services/update_container_registry_info_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
it 'uses a token with no access permissions' do
expect(Auth::ContainerRegistryAuthenticationService)
- .to receive(:access_token).with([], []).and_return(token)
+ .to receive(:access_token).with({}).and_return(token)
expect(ContainerRegistry::Client)
.to receive(:new).with(api_url, token: token).and_return(client)
@@ -72,13 +72,14 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
expect(application_settings.container_registry_vendor).to be_blank
expect(application_settings.container_registry_version).to be_blank
expect(application_settings.container_registry_features).to eq([])
+ expect(application_settings.container_registry_db_enabled).to be_falsey
end
end
context 'when able to detect the container registry type' do
context 'when using the GitLab container registry' do
it 'updates application settings accordingly' do
- stub_registry_info(vendor: 'gitlab', version: '2.9.1-gitlab', features: %w[a b c])
+ stub_registry_info(vendor: 'gitlab', version: '2.9.1-gitlab', features: %w[a b c], db_enabled: true)
stub_supports_gitlab_api(true)
subject
@@ -88,12 +89,13 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
expect(application_settings.container_registry_version).to eq('2.9.1-gitlab')
expect(application_settings.container_registry_features)
.to match_array(%W[a b c #{ContainerRegistry::GitlabApiClient::REGISTRY_GITLAB_V1_API_FEATURE}])
+ expect(application_settings.container_registry_db_enabled).to be_truthy
end
end
context 'when using a third-party container registry' do
it 'updates application settings accordingly' do
- stub_registry_info(vendor: 'other', version: nil, features: nil)
+ stub_registry_info(vendor: 'other', version: nil, features: nil, db_enabled: false)
stub_supports_gitlab_api(false)
subject
@@ -102,6 +104,7 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
expect(application_settings.container_registry_vendor).to eq('other')
expect(application_settings.container_registry_version).to be_blank
expect(application_settings.container_registry_features).to eq([])
+ expect(application_settings.container_registry_db_enabled).to be_falsey
end
end
end
@@ -109,7 +112,7 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
def stub_access_token
allow(Auth::ContainerRegistryAuthenticationService)
- .to receive(:access_token).with([], []).and_return('foo')
+ .to receive(:access_token).with({}).and_return('foo')
end
def stub_registry_info(output)
diff --git a/spec/services/users/auto_ban_service_spec.rb b/spec/services/users/auto_ban_service_spec.rb
new file mode 100644
index 00000000000..b989cec6a9d
--- /dev/null
+++ b/spec/services/users/auto_ban_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::AutoBanService, feature_category: :instance_resiliency do
+ let_it_be_with_reload(:user) { create(:user) }
+ let(:reason) { :auto_ban_reason }
+
+ context 'when auto banning a user', :aggregate_failures do
+ subject(:auto_ban_user) { described_class.new(user: user, reason: reason).execute }
+
+ context 'when successful' do
+ it 'returns success status' do
+ response = auto_ban_user
+
+ expect(response[:status]).to eq(:success)
+ end
+
+ it 'bans the user' do
+ expect { auto_ban_user }.to change { user.state }.from('active').to('banned')
+ end
+
+ it 'creates a BannedUser' do
+ expect { auto_ban_user }.to change { Users::BannedUser.count }.by(1)
+ expect(Users::BannedUser.last.user_id).to eq(user.id)
+ end
+
+ describe 'recording a custom attribute' do
+ it 'records a custom attribute' do
+ expect { auto_ban_user }.to change { UserCustomAttribute.count }.by(1)
+ expect(user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY).first.value).to eq(reason.to_s)
+ end
+ end
+ end
+
+ context 'when failed' do
+ context 'when user is blocked' do
+ before do
+ user.block!
+ end
+
+ it 'returns state error message' do
+ response = auto_ban_user
+
+ expect(response[:status]).to eq(:error)
+ expect(response[:message]).to match('State cannot transition via \"ban\"')
+ end
+
+ it 'does not modify the BannedUser record or user state' do
+ expect { auto_ban_user }.not_to change { Users::BannedUser.count }
+ expect { auto_ban_user }.not_to change { user.state }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/users/in_product_marketing_email_records_spec.rb b/spec/services/users/in_product_marketing_email_records_spec.rb
index 059f0890b53..d214560b2a6 100644
--- a/spec/services/users/in_product_marketing_email_records_spec.rb
+++ b/spec/services/users/in_product_marketing_email_records_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe Users::InProductMarketingEmailRecords, feature_category: :onboard
records.add(user, track: :team_short, series: 0)
records.add(user, track: :create, series: 1)
- records.add(user, campaign: Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE)
end
it 'bulk inserts added records' do
@@ -36,30 +35,20 @@ RSpec.describe Users::InProductMarketingEmailRecords, feature_category: :onboard
freeze_time do
records.add(user, track: :team_short, series: 0)
records.add(user, track: :create, series: 1)
- records.add(user, campaign: Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE)
- first, second, third = records.records
+ first, second = records.records
expect(first).to be_a Users::InProductMarketingEmail
- expect(first.campaign).to be_nil
expect(first.track.to_sym).to eq :team_short
expect(first.series).to eq 0
expect(first.created_at).to eq Time.zone.now
expect(first.updated_at).to eq Time.zone.now
expect(second).to be_a Users::InProductMarketingEmail
- expect(second.campaign).to be_nil
expect(second.track.to_sym).to eq :create
expect(second.series).to eq 1
expect(second.created_at).to eq Time.zone.now
expect(second.updated_at).to eq Time.zone.now
-
- expect(third).to be_a Users::InProductMarketingEmail
- expect(third.campaign).to eq Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE
- expect(third.track).to be_nil
- expect(third.series).to be_nil
- expect(third.created_at).to eq Time.zone.now
- expect(third.updated_at).to eq Time.zone.now
end
end
end
diff --git a/spec/services/users/signup_service_spec.rb b/spec/services/users/signup_service_spec.rb
deleted file mode 100644
index 29663411346..00000000000
--- a/spec/services/users/signup_service_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Users::SignupService, feature_category: :system_access do
- let(:user) { create(:user, setup_for_company: true) }
-
- describe '#execute' do
- context 'when updating name' do
- it 'updates the name attribute' do
- result = update_user(user, name: 'New Name')
-
- expect(result.success?).to be(true)
- expect(user.reload.name).to eq('New Name')
- end
-
- it 'returns an error result when name is missing' do
- result = update_user(user, name: '')
-
- expect(user.reload.name).not_to be_blank
- expect(result.success?).to be(false)
- expect(result.message).to include("Name can't be blank")
- end
- end
-
- context 'when updating role' do
- it 'updates the role attribute' do
- result = update_user(user, role: 'development_team_lead')
-
- expect(result.success?).to be(true)
- expect(user.reload.role).to eq('development_team_lead')
- end
-
- it 'returns an error result when role is missing' do
- result = update_user(user, role: '')
-
- expect(user.reload.role).not_to be_blank
- expect(result.success?).to be(false)
- expect(result.message).to eq("Role can't be blank")
- end
- end
-
- context 'when updating setup_for_company' do
- it 'updates the setup_for_company attribute' do
- result = update_user(user, setup_for_company: 'false')
-
- expect(result.success?).to be(true)
- expect(user.reload.setup_for_company).to be(false)
- end
-
- context 'when on SaaS', :saas do
- it 'returns an error result when setup_for_company is missing' do
- result = update_user(user, setup_for_company: '')
-
- expect(user.reload.setup_for_company).not_to be_blank
- expect(result.success?).to be(false)
- expect(result.message).to eq("Setup for company can't be blank")
- end
- end
-
- context 'when not on .com' do
- it 'returns success when setup_for_company is blank' do
- result = update_user(user, setup_for_company: '')
-
- expect(result.success?).to be(true)
- expect(user.reload.setup_for_company).to be(nil)
- end
- end
- end
-
- def update_user(user, opts)
- described_class.new(user, opts).execute
- end
- end
-end
diff --git a/spec/services/users/allow_possible_spam_service_spec.rb b/spec/services/users/trust_service_spec.rb
index 53618f0c8e9..1f71992ce9b 100644
--- a/spec/services/users/allow_possible_spam_service_spec.rb
+++ b/spec/services/users/trust_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Users::AllowPossibleSpamService, feature_category: :user_management do
+RSpec.describe Users::TrustService, feature_category: :user_management do
let_it_be(:current_user) { create(:admin) }
subject(:service) { described_class.new(current_user) }
@@ -18,7 +18,7 @@ RSpec.describe Users::AllowPossibleSpamService, feature_category: :user_manageme
operation
user.reload
- expect(user.custom_attributes.by_key(UserCustomAttribute::ALLOW_POSSIBLE_SPAM)).to be_present
+ expect(user.custom_attributes.by_key(UserCustomAttribute::TRUSTED_BY)).to be_present
end
end
end
diff --git a/spec/services/users/disallow_possible_spam_service_spec.rb b/spec/services/users/untrust_service_spec.rb
index 32a47e05525..054cb9b82dc 100644
--- a/spec/services/users/disallow_possible_spam_service_spec.rb
+++ b/spec/services/users/untrust_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Users::DisallowPossibleSpamService, feature_category: :user_management do
+RSpec.describe Users::UntrustService, feature_category: :user_management do
let_it_be(:current_user) { create(:admin) }
subject(:service) { described_class.new(current_user) }
@@ -16,19 +16,19 @@ RSpec.describe Users::DisallowPossibleSpamService, feature_category: :user_manag
UserCustomAttribute.upsert_custom_attributes(
[{
user_id: user.id,
- key: :allow_possible_spam,
+ key: UserCustomAttribute::TRUSTED_BY,
value: 'not important'
}]
)
end
it 'updates the custom attributes', :aggregate_failures do
- expect(user.custom_attributes.by_key(UserCustomAttribute::ALLOW_POSSIBLE_SPAM)).to be_present
+ expect(user.trusted_with_spam_attribute).to be_present
operation
user.reload
- expect(user.custom_attributes).to be_empty
+ expect(user.trusted_with_spam_attribute).to be nil
end
end
end
diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb
index d66d584d3d0..9f6e37ec10d 100644
--- a/spec/services/verify_pages_domain_service_spec.rb
+++ b/spec/services/verify_pages_domain_service_spec.rb
@@ -312,20 +312,4 @@ RSpec.describe VerifyPagesDomainService, feature_category: :pages do
def disallow_resolver!
expect(Resolv::DNS).not_to receive(:open)
end
-
- def stub_resolver(stubbed_lookups = {})
- resolver = instance_double('Resolv::DNS')
- allow(resolver).to receive(:timeouts=)
-
- expect(Resolv::DNS).to receive(:open).and_yield(resolver)
-
- allow(resolver).to receive(:getresources) { [] }
- stubbed_lookups.each do |domain, records|
- records = Array(records).map { |txt| Resolv::DNS::Resource::IN::TXT.new(txt) }
- # Append '.' to domain_name, indicating absolute FQDN
- allow(resolver).to receive(:getresources).with(domain + '.', Resolv::DNS::Resource::IN::TXT) { records }
- end
-
- resolver
- end
end
diff --git a/spec/services/vs_code/settings/create_or_update_service_spec.rb b/spec/services/vs_code/settings/create_or_update_service_spec.rb
new file mode 100644
index 00000000000..aab8b2c95c6
--- /dev/null
+++ b/spec/services/vs_code/settings/create_or_update_service_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VsCode::Settings::CreateOrUpdateService, feature_category: :web_ide do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+
+ let(:opts) do
+ {
+ setting_type: "settings",
+ content: '{ "editor.fontSize": 12 }'
+ }
+ end
+
+ subject { described_class.new(current_user: user, params: opts).execute }
+
+ context 'when setting_type is machines' do
+ it 'returns default machine as a successful response' do
+ opts = { setting_type: "machines", machines: '[]' }
+ result = described_class.new(current_user: user, params: opts).execute
+
+ expect(result.payload).to eq(VsCode::Settings::DEFAULT_MACHINE)
+ end
+ end
+
+ it 'creates a new record when a record with the setting does not exist' do
+ expect { subject }.to change { User.find(user.id).vscode_settings.count }.from(0).to(1)
+ record = User.find(user.id).vscode_settings.by_setting_type('settings').first
+ expect(record.content).to eq('{ "editor.fontSize": 12 }')
+ end
+
+ it 'updates the existing record if setting exists' do
+ setting = create(:vscode_setting, user: user)
+
+ expect { subject }.to change {
+ VsCode::Settings::VsCodeSetting.find(setting.id).content
+ }.from(setting.content).to(opts[:content])
+ end
+
+ it 'fails if an invalid value is passed' do
+ invalid_opts = { setting_type: nil, content: nil }
+ result = described_class.new(current_user: user, params: invalid_opts).execute
+
+ expect(result.status).to eq(:error)
+ end
+ end
+end
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 259f5156d42..89346353db2 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -171,6 +171,23 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state,
end
end
+ context 'with SystemHook' do
+ let_it_be(:system_hook) { create(:system_hook) }
+ let(:service_instance) { described_class.new(system_hook, data, :push_hooks) }
+
+ before do
+ stub_full_request(system_hook.url, method: :post)
+ end
+
+ it 'POSTs to the webhook URL with correct headers' do
+ service_instance.execute
+
+ expect(WebMock).to have_requested(:post, stubbed_hostname(system_hook.url)).with(
+ headers: headers.merge({ 'X-Gitlab-Event' => 'System Hook' })
+ ).once
+ end
+ end
+
it 'POSTs the data as JSON and returns expected headers' do
stub_full_request(project_hook.url, method: :post)
diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb
index 41ae6398614..1ff48f4e269 100644
--- a/spec/services/work_items/parent_links/create_service_spec.rb
+++ b/spec/services/work_items/parent_links/create_service_spec.rb
@@ -200,7 +200,7 @@ RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfol
it 'returns error status' do
error = "#{issue.to_reference} cannot be added: is not allowed to add this type of parent. " \
- "#{other_project_task.to_reference} cannot be added: parent must be in the same project as child."
+ "#{other_project_task.to_reference} cannot be added: parent must be in the same project or group as child."
is_expected.to eq(service_error(error, http_status: 422))
end
diff --git a/spec/services/work_items/related_work_item_links/create_service_spec.rb b/spec/services/work_items/related_work_item_links/create_service_spec.rb
index 992beb705aa..62d60280902 100644
--- a/spec/services/work_items/related_work_item_links/create_service_spec.rb
+++ b/spec/services/work_items/related_work_item_links/create_service_spec.rb
@@ -28,7 +28,8 @@ RSpec.describe WorkItems::RelatedWorkItemLinks::CreateService, feature_category:
it_behaves_like 'issuable link creation', use_references: false do
let(:response_keys) { [:status, :created_references, :message] }
- let(:already_assigned_error_msg) { "Work items are already linked" }
+ let(:async_notes) { true }
+ let(:already_assigned_error_msg) { "Items are already linked" }
let(:no_found_error_msg) do
'No matching work item found. Make sure you are adding a valid ID and you have access to the item.'
end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 38e5d4dc153..557617f61bb 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
let(:user) { current_user }
let(:namespace) { project.namespace }
subject(:service_action) { update_work_item[:status] }
diff --git a/spec/services/work_items/widgets/labels_service/update_service_spec.rb b/spec/services/work_items/widgets/labels_service/update_service_spec.rb
index 17daec2b1ea..43d9d46a268 100644
--- a/spec/services/work_items/widgets/labels_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/labels_service/update_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe WorkItems::Widgets::LabelsService::UpdateService, feature_categor
let_it_be(:label1) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
let_it_be(:label3) { create(:label, project: project) }
- let_it_be(:current_user) { create(:user) }
+ let_it_be(:current_user) { create(:user).tap { |user| project.add_reporter(user) } }
let(:work_item) { create(:work_item, project: project, labels: [label1, label2]) }
let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Labels) } }
@@ -26,6 +26,14 @@ RSpec.describe WorkItems::Widgets::LabelsService::UpdateService, feature_categor
}
)
end
+
+ context "and user doesn't have permissions to update labels" do
+ let_it_be(:current_user) { create(:user) }
+
+ it 'removes label params' do
+ expect(service.prepare_update_params(params: params)).to be_nil
+ end
+ end
end
context 'when widget does not exist in new type' do
diff --git a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb b/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
index 0196e7c2b02..f9708afd313 100644
--- a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, feature_category: :portfolio_management do
- let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user).tap { |user| project.add_reporter(user) } }
let_it_be_with_reload(:work_item) { create(:work_item, project: project) }
let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::StartAndDueDate) } }
@@ -26,6 +26,14 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur
change(work_item, :due_date).from(nil).to(due_date)
)
end
+
+ context "and user doesn't have permissions to update start and due date" do
+ let_it_be(:user) { create(:user) }
+
+ it 'removes start and due date params' do
+ expect(update_params).to be_nil
+ end
+ end
end
context 'when date params are not present' do