gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/admin/abuse_report_labels/create_service_spec.rb | 51
-rw-r--r--  spec/services/admin/abuse_reports/moderate_user_service_spec.rb | 17
-rw-r--r--  spec/services/admin/abuse_reports/update_service_spec.rb | 85
-rw-r--r--  spec/services/application_settings/update_service_spec.rb | 8
-rw-r--r--  spec/services/auto_merge/base_service_spec.rb | 5
-rw-r--r--  spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb | 176
-rw-r--r--  spec/services/bulk_imports/create_service_spec.rb | 22
-rw-r--r--  spec/services/bulk_imports/file_download_service_spec.rb | 14
-rw-r--r--  spec/services/ci/components/fetch_service_spec.rb | 37
-rw-r--r--  spec/services/ci/create_commit_status_service_spec.rb | 461
-rw-r--r--  spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb | 17
-rw-r--r--  spec/services/ci/create_pipeline_service/environment_spec.rb | 22
-rw-r--r--  spec/services/ci/create_pipeline_service/logger_spec.rb | 69
-rw-r--r--  spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb | 32
-rw-r--r--  spec/services/ci/create_pipeline_service/rules_spec.rb | 40
-rw-r--r--  spec/services/ci/create_pipeline_service/variables_spec.rb | 33
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 174
-rw-r--r--  spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb | 14
-rw-r--r--  spec/services/ci/register_job_service_spec.rb | 15
-rw-r--r--  spec/services/ci/runners/set_runner_associated_projects_service_spec.rb | 4
-rw-r--r--  spec/services/concerns/rate_limited_service_spec.rb | 2
-rw-r--r--  spec/services/concerns/services/return_service_responses_spec.rb | 32
-rw-r--r--  spec/services/deployments/update_environment_service_spec.rb | 21
-rw-r--r--  spec/services/design_management/delete_designs_service_spec.rb | 8
-rw-r--r--  spec/services/design_management/save_designs_service_spec.rb | 27
-rw-r--r--  spec/services/discussions/resolve_service_spec.rb | 8
-rw-r--r--  spec/services/draft_notes/publish_service_spec.rb | 9
-rw-r--r--  spec/services/environments/stop_service_spec.rb | 3
-rw-r--r--  spec/services/environments/stop_stale_service_spec.rb | 6
-rw-r--r--  spec/services/files/delete_service_spec.rb | 7
-rw-r--r--  spec/services/files/update_service_spec.rb | 6
-rw-r--r--  spec/services/git/branch_push_service_spec.rb | 24
-rw-r--r--  spec/services/google_cloud/create_cloudsql_instance_service_spec.rb | 30
-rw-r--r--  spec/services/google_cloud/fetch_google_ip_list_service_spec.rb | 2
-rw-r--r--  spec/services/google_cloud/generate_pipeline_service_spec.rb | 48
-rw-r--r--  spec/services/google_cloud/get_cloudsql_instances_service_spec.rb | 38
-rw-r--r--  spec/services/gpg_keys/destroy_service_spec.rb | 22
-rw-r--r--  spec/services/groups/destroy_service_spec.rb | 11
-rw-r--r--  spec/services/groups/group_links/create_service_spec.rb | 3
-rw-r--r--  spec/services/groups/update_service_spec.rb | 72
-rw-r--r--  spec/services/import_export_clean_up_service_spec.rb | 16
-rw-r--r--  spec/services/incident_management/incidents/create_service_spec.rb | 2
-rw-r--r--  spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb | 4
-rw-r--r--  spec/services/issuable/process_assignees_spec.rb | 80
-rw-r--r--  spec/services/issue_links/destroy_service_spec.rb | 9
-rw-r--r--  spec/services/issue_links/list_service_spec.rb | 55
-rw-r--r--  spec/services/issues/close_service_spec.rb | 4
-rw-r--r--  spec/services/issues/create_service_spec.rb | 8
-rw-r--r--  spec/services/issues/export_csv_service_spec.rb | 32
-rw-r--r--  spec/services/issues/move_service_spec.rb | 3
-rw-r--r--  spec/services/issues/resolve_discussions_spec.rb | 26
-rw-r--r--  spec/services/issues/update_service_spec.rb | 13
-rw-r--r--  spec/services/labels/available_labels_service_spec.rb | 24
-rw-r--r--  spec/services/labels/update_service_spec.rb | 8
-rw-r--r--  spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb | 20
-rw-r--r--  spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb | 4
-rw-r--r--  spec/services/members/invitation_reminder_email_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/approval_service_spec.rb | 45
-rw-r--r--  spec/services/merge_requests/base_service_spec.rb | 28
-rw-r--r--  spec/services/merge_requests/create_ref_service_spec.rb | 183
-rw-r--r--  spec/services/merge_requests/ff_merge_service_spec.rb | 144
-rw-r--r--  spec/services/merge_requests/merge_service_spec.rb | 895
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 7
-rw-r--r--  spec/services/merge_requests/update_service_spec.rb | 52
-rw-r--r--  spec/services/metrics/global_metrics_update_service_spec.rb | 14
-rw-r--r--  spec/services/metrics/sample_metrics_service_spec.rb | 45
-rw-r--r--  spec/services/namespaces/in_product_marketing_emails_service_spec.rb | 216
-rw-r--r--  spec/services/note_summary_spec.rb | 9
-rw-r--r--  spec/services/notes/create_service_spec.rb | 5
-rw-r--r--  spec/services/notes/destroy_service_spec.rb | 7
-rw-r--r--  spec/services/notes/quick_actions_service_spec.rb | 39
-rw-r--r--  spec/services/notes/update_service_spec.rb | 12
-rw-r--r--  spec/services/notification_service_spec.rb | 134
-rw-r--r--  spec/services/packages/ml_model/create_package_file_service_spec.rb | 4
-rw-r--r--  spec/services/packages/npm/generate_metadata_service_spec.rb | 11
-rw-r--r--  spec/services/packages/nuget/check_duplicates_service_spec.rb | 155
-rw-r--r--  spec/services/packages/nuget/extract_metadata_file_service_spec.rb | 14
-rw-r--r--  spec/services/packages/nuget/extract_remote_metadata_file_service_spec.rb | 126
-rw-r--r--  spec/services/packages/nuget/metadata_extraction_service_spec.rb | 4
-rw-r--r--  spec/services/packages/nuget/odata_package_entry_service_spec.rb | 69
-rw-r--r--  spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb | 24
-rw-r--r--  spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb | 3
-rw-r--r--  spec/services/preview_markdown_service_spec.rb | 21
-rw-r--r--  spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb | 19
-rw-r--r--  spec/services/projects/create_service_spec.rb | 26
-rw-r--r--  spec/services/projects/import_service_spec.rb | 102
-rw-r--r--  spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb | 54
-rw-r--r--  spec/services/projects/prometheus/alerts/notify_service_spec.rb | 4
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 128
-rw-r--r--  spec/services/projects/update_repository_storage_service_spec.rb | 47
-rw-r--r--  spec/services/projects/update_service_spec.rb | 74
-rw-r--r--  spec/services/protected_branches/api_service_spec.rb | 20
-rw-r--r--  spec/services/push_event_payload_service_spec.rb | 4
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 33
-rw-r--r--  spec/services/releases/create_service_spec.rb | 2
-rw-r--r--  spec/services/releases/destroy_service_spec.rb | 28
-rw-r--r--  spec/services/resource_access_tokens/revoke_service_spec.rb | 3
-rw-r--r--  spec/services/resource_events/change_labels_service_spec.rb | 12
-rw-r--r--  spec/services/resource_events/merge_into_notes_service_spec.rb | 3
-rw-r--r--  spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb | 2
-rw-r--r--  spec/services/security/merge_reports_service_spec.rb | 105
-rw-r--r--  spec/services/service_desk/custom_email_verifications/create_service_spec.rb | 23
-rw-r--r--  spec/services/service_desk/custom_email_verifications/update_service_spec.rb | 34
-rw-r--r--  spec/services/service_desk/custom_emails/create_service_spec.rb | 13
-rw-r--r--  spec/services/service_desk/custom_emails/destroy_service_spec.rb | 7
-rw-r--r--  spec/services/service_desk_settings/update_service_spec.rb | 25
-rw-r--r--  spec/services/spam/spam_action_service_spec.rb | 11
-rw-r--r--  spec/services/system_notes/alert_management_service_spec.rb | 4
-rw-r--r--  spec/services/system_notes/issuables_service_spec.rb | 13
-rw-r--r--  spec/services/system_notes/time_tracking_service_spec.rb | 21
-rw-r--r--  spec/services/users/authorized_build_service_spec.rb | 8
-rw-r--r--  spec/services/users/build_service_spec.rb | 51
-rw-r--r--  spec/services/users/migrate_records_to_ghost_user_service_spec.rb | 10
-rw-r--r--  spec/services/users/upsert_credit_card_validation_service_spec.rb | 11
-rw-r--r--  spec/services/work_items/related_work_item_links/destroy_service_spec.rb | 82
-rw-r--r--  spec/services/work_items/update_service_spec.rb | 5
116 files changed, 3163 insertions(+), 2072 deletions(-)
diff --git a/spec/services/admin/abuse_report_labels/create_service_spec.rb b/spec/services/admin/abuse_report_labels/create_service_spec.rb
new file mode 100644
index 00000000000..168229d6ed9
--- /dev/null
+++ b/spec/services/admin/abuse_report_labels/create_service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::AbuseReportLabels::CreateService, feature_category: :insider_threat do
+ describe '#execute' do
+ let(:color) { 'red' }
+ let(:color_in_hex) { ::Gitlab::Color.of(color) }
+ let(:params) { { title: 'FancyLabel', color: color } }
+
+ subject(:execute) { described_class.new(params).execute }
+
+ shared_examples 'creates a label with the correct values' do
+ it 'creates a label with the correct values', :aggregate_failures do
+ expect { execute }.to change { Admin::AbuseReportLabel.count }.from(0).to(1)
+
+ label = Admin::AbuseReportLabel.last
+ expect(label.title).to eq params[:title]
+ expect(label.color).to eq color_in_hex
+ end
+
+ it 'returns the persisted label' do
+ result = execute
+ expect(result).to be_an_instance_of(Admin::AbuseReportLabel)
+ expect(result.persisted?).to eq true
+ end
+ end
+
+ it_behaves_like 'creates a label with the correct values'
+
+ context 'without color param' do
+ let(:params) { { title: 'FancyLabel' } }
+ let(:color_in_hex) { ::Gitlab::Color.of(Label::DEFAULT_COLOR) }
+
+ it_behaves_like 'creates a label with the correct values'
+ end
+
+ context 'with errors' do
+ let!(:existing_label) { create(:abuse_report_label, title: params[:title]) }
+
+ it 'does not create the label' do
+ expect { execute }.not_to change { Admin::AbuseReportLabel.count }
+ end
+
+ it 'returns the label with errors' do
+ label = execute
+ expect(label.errors.messages).to include({ title: ["has already been taken"] })
+ end
+ end
+ end
+end
diff --git a/spec/services/admin/abuse_reports/moderate_user_service_spec.rb b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
index 6e8a59f4e49..7e08db2b612 100644
--- a/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
+++ b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
@@ -4,6 +4,10 @@ require 'spec_helper'
RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :instance_resiliency do
let_it_be_with_reload(:abuse_report) { create(:abuse_report) }
+ let_it_be_with_reload(:similar_abuse_report) do
+ create(:abuse_report, user: abuse_report.user, category: abuse_report.category)
+ end
+
let(:action) { 'ban_user' }
let(:close) { true }
let(:reason) { 'spam' }
@@ -26,6 +30,12 @@ RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :inst
it 'closes the report' do
expect { subject }.to change { abuse_report.closed? }.from(false).to(true)
end
+
+ context 'when similar open reports for the user exist' do
+ it 'closes the similar report' do
+ expect { subject }.to change { similar_abuse_report.reload.closed? }.from(false).to(true)
+ end
+ end
end
shared_examples 'does not close the report' do
@@ -33,6 +43,13 @@ RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :inst
subject
expect(abuse_report.closed?).to be(false)
end
+
+ context 'when similar open reports for the user exist' do
+ it 'does not close the similar report' do
+ subject
+ expect(similar_abuse_report.reload.closed?).to be(false)
+ end
+ end
end
shared_examples 'does not record an event' do
diff --git a/spec/services/admin/abuse_reports/update_service_spec.rb b/spec/services/admin/abuse_reports/update_service_spec.rb
new file mode 100644
index 00000000000..e53b40979ec
--- /dev/null
+++ b/spec/services/admin/abuse_reports/update_service_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::AbuseReports::UpdateService, feature_category: :instance_resiliency do
+ let_it_be(:current_user) { create(:admin) }
+ let_it_be(:abuse_report) { create(:abuse_report) }
+ let_it_be(:label) { create(:abuse_report_label) }
+
+ let(:params) { {} }
+ let(:service) { described_class.new(abuse_report, current_user, params) }
+
+ describe '#execute', :enable_admin_mode do
+ subject { service.execute }
+
+ shared_examples 'returns an error response' do |error|
+ it 'returns an error response' do
+ expect(subject).to be_error
+ expect(subject.message).to eq error
+ end
+ end
+
+ context 'with invalid parameters' do
+ describe 'invalid user' do
+ describe 'when no user is given' do
+ let_it_be(:current_user) { nil }
+
+ it_behaves_like 'returns an error response', 'Admin is required'
+ end
+
+ describe 'when given user is not an admin' do
+ let_it_be(:current_user) { create(:user) }
+
+ it_behaves_like 'returns an error response', 'Admin is required'
+ end
+ end
+
+ describe 'invalid label_ids' do
+ let(:params) { { label_ids: ['invalid_global_id', non_existing_record_id] } }
+
+ it 'does not update the abuse report' do
+ expect { subject }.not_to change { abuse_report.labels }
+ end
+
+ it { is_expected.to be_success }
+ end
+ end
+
+ describe 'with valid parameters' do
+ context 'when label_ids is empty' do
+ let(:params) { { label_ids: [] } }
+
+ context 'when abuse report has existing labels' do
+ before do
+ abuse_report.labels = [label]
+ end
+
+ it 'clears the abuse report labels' do
+ expect { subject }.to change { abuse_report.labels.count }.from(1).to(0)
+ end
+
+ it { is_expected.to be_success }
+ end
+
+ context 'when abuse report has no existing labels' do
+ it 'does not update the abuse report' do
+ expect { subject }.not_to change { abuse_report.labels }
+ end
+
+ it { is_expected.to be_success }
+ end
+ end
+
+ context 'when label_ids is not empty' do
+ let(:params) { { label_ids: [Gitlab::GlobalId.build(label, id: label.id).to_s] } }
+
+ it 'updates the abuse report' do
+ expect { subject }.to change { abuse_report.label_ids }.from([]).to([label.id])
+ end
+
+ it { is_expected.to be_success }
+ end
+ end
+ end
+end
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 9d73a4a6cee..0b5ba1db9d4 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe ApplicationSettings::UpdateService do
+RSpec.describe ApplicationSettings::UpdateService, feature_category: :shared do
include ExternalAuthorizationServiceHelpers
- let(:application_settings) { create(:application_setting) }
+ let(:application_settings) { ::Gitlab::CurrentSettings.current_application_settings }
let(:admin) { create(:user, :admin) }
let(:params) { {} }
@@ -331,7 +331,8 @@ RSpec.describe ApplicationSettings::UpdateService do
throttle_protected_paths_enabled: 1,
throttle_protected_paths_period_in_seconds: 600,
throttle_protected_paths_requests_per_period: 100,
- protected_paths_raw: "/users/password\r\n/users/sign_in\r\n"
+ protected_paths_raw: "/users/password\r\n/users/sign_in\r\n",
+ protected_paths_for_get_request_raw: "/users/password\r\n/users/sign_up\r\n"
}
end
@@ -344,6 +345,7 @@ RSpec.describe ApplicationSettings::UpdateService do
expect(application_settings.throttle_protected_paths_period_in_seconds).to eq(600)
expect(application_settings.throttle_protected_paths_requests_per_period).to eq(100)
expect(application_settings.protected_paths).to eq(['/users/password', '/users/sign_in'])
+ expect(application_settings.protected_paths_for_get_request).to match_array(['/users/password', '/users/sign_up'])
end
end
diff --git a/spec/services/auto_merge/base_service_spec.rb b/spec/services/auto_merge/base_service_spec.rb
index d14470df9ee..be5b753f484 100644
--- a/spec/services/auto_merge/base_service_spec.rb
+++ b/spec/services/auto_merge/base_service_spec.rb
@@ -296,4 +296,9 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
end
end
end
+
+ describe '#process' do
+ specify { expect(service).to respond_to :process }
+ specify { expect { service.process(nil) }.to raise_error NotImplementedError }
+ end
end
diff --git a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
deleted file mode 100644
index 9a74f5ca07a..00000000000
--- a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
+++ /dev/null
@@ -1,176 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::CreatePipelineTrackersService, feature_category: :importers do
- describe '#execute!' do
- context 'when entity is group' do
- it 'creates trackers for group entity' do
- bulk_import = create(:bulk_import)
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- described_class.new(entity).execute!
-
- expect(entity.trackers.to_a).to include(
- have_attributes(
- stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
- ),
- have_attributes(
- stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
- )
- )
- end
- end
-
- context 'when entity is project' do
- it 'creates trackers for project entity' do
- bulk_import = create(:bulk_import)
- entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
-
- described_class.new(entity).execute!
-
- expect(entity.trackers.to_a).to include(
- have_attributes(
- stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
- ),
- have_attributes(
- stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
- )
- )
- end
- end
-
- context 'when tracker configuration has a minimum version defined' do
- before do
- allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
- allow(stage).to receive(:config).and_return(
- {
- pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
- pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
- pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
- pipeline5: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
- pipeline6: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
- }
- )
- end
- end
-
- context 'when the source instance version is older than the tracker mininum version' do
- let_it_be(:bulk_import) { create(:bulk_import, source_version: '15.0.0') }
- let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }
-
- it 'creates trackers as skipped if version requirement does not meet' do
- described_class.new(entity).execute!
-
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:created, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:skipped, 'PipelineClass4'],
- [:skipped, 'PipelineClass5']
- )
- end
-
- it 'logs an info message for the skipped pipelines' do
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:info).with({
- message: 'Pipeline skipped as source instance version not compatible with pipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
- importer: 'gitlab_migration',
- pipeline_name: 'PipelineClass4',
- minimum_source_version: '15.1.0',
- maximum_source_version: nil,
- source_version: '15.0.0'
- })
-
- expect(logger).to receive(:info).with({
- message: 'Pipeline skipped as source instance version not compatible with pipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
- importer: 'gitlab_migration',
- pipeline_name: 'PipelineClass5',
- minimum_source_version: '16.0.0',
- maximum_source_version: nil,
- source_version: '15.0.0'
- })
- end
-
- described_class.new(entity).execute!
- end
- end
-
- context 'when the source instance version is undefined' do
- it 'creates trackers as created' do
- bulk_import = create(:bulk_import, source_version: nil)
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- described_class.new(entity).execute!
-
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:created, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:created, 'PipelineClass4'],
- [:created, 'PipelineClass5']
- )
- end
- end
- end
-
- context 'when tracker configuration has a maximum version defined' do
- before do
- allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
- allow(stage).to receive(:config).and_return(
- {
- pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
- pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
- pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
- pipeline5: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
- pipeline6: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
- }
- )
- end
- end
-
- context 'when the source instance version is older than the tracker maximum version' do
- it 'creates trackers as skipped if version requirement does not meet' do
- bulk_import = create(:bulk_import, source_version: '15.0.0')
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- described_class.new(entity).execute!
-
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:skipped, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:created, 'PipelineClass4'],
- [:created, 'PipelineClass5']
- )
- end
- end
-
- context 'when the source instance version is a patch version' do
- it 'creates trackers with the same status as the non-patch source version' do
- bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
- entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)
-
- bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
- entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)
-
- described_class.new(entity_1).execute!
- described_class.new(entity_2).execute!
-
- trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
- trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
-
- expect(trackers_1).to eq(trackers_2)
- end
- end
- end
- end
-end
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index 93feab97f44..20872623802 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -62,6 +62,24 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
end
end
+ # response when authorize_admin_project in API endpoint fails
+ context 'when direct transfer status query returns a 403' do
+ it 'raises a ServiceResponse::Error' do
+ expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ expect(client).to receive(:validate_instance_version!).and_return(true)
+ expect(client).to receive(:get)
+ .with("/groups/full%2Fpath%2Fto%2Fgroup1/export_relations/status")
+ .and_raise(BulkImports::NetworkError, '403 Forbidden')
+ end
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message).to eq("403 Forbidden")
+ end
+ end
+
context 'when direct transfer setting query returns a 404' do
it 'raises a ServiceResponse::Error' do
stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
@@ -313,7 +331,7 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
"Source full path must have a relative path structure with " \
"no HTTP protocol characters, or leading or trailing forward slashes. " \
"Path segments must not start or end with a special character, and " \
- "must not contain consecutive special characters.")
+ "must not contain consecutive special characters")
end
describe '#user-role' do
@@ -470,7 +488,7 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
context 'when the source_full_path contains only integer characters' do
let(:query_string) { BulkImports::Projects::Graphql::GetProjectQuery.new(context: nil).to_s }
let(:graphql_response) do
- double(original_hash: { 'data' => { 'project' => { 'id' => entity_source_id } } }) # rubocop:disable RSpec/VerifiedDoubles
+ double(original_hash: { 'data' => { 'project' => { 'id' => entity_source_id } } }) # rubocop:disable RSpec/VerifiedDoubles
end
let(:params) do
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index c035eabf767..2197b0b4fac 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -82,8 +82,20 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
context 'when content-type is not valid' do
let(:content_type) { 'invalid' }
+ let(:import_logger) { instance_double(Gitlab::Import::Logger) }
+
+ before do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ allow(import_logger).to receive(:warn)
+ end
+
+ it 'logs and raises an error' do
+ expect(import_logger).to receive(:warn).once.with(
+ message: 'Invalid content type',
+ response_headers: headers,
+ importer: 'gitlab_migration'
+ )
- it 'raises an error' do
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content type')
end
end
diff --git a/spec/services/ci/components/fetch_service_spec.rb b/spec/services/ci/components/fetch_service_spec.rb
index 532098b3b20..21b7df19f4a 100644
--- a/spec/services/ci/components/fetch_service_spec.rb
+++ b/spec/services/ci/components/fetch_service_spec.rb
@@ -3,15 +3,35 @@
require 'spec_helper'
RSpec.describe Ci::Components::FetchService, feature_category: :pipeline_composition do
- let_it_be(:project) { create(:project, :repository, create_tag: 'v1.0') }
let_it_be(:user) { create(:user) }
let_it_be(:current_user) { user }
let_it_be(:current_host) { Gitlab.config.gitlab.host }
+ let_it_be(:content) do
+ <<~COMPONENT
+ job:
+ script: echo
+ COMPONENT
+ end
let(:service) do
described_class.new(address: address, current_user: current_user)
end
+ let_it_be(:project) do
+ project = create(
+ :project, :custom_repo,
+ files: {
+ 'template.yml' => content,
+ 'my-component/template.yml' => content,
+ 'my-dir/my-component/template.yml' => content
+ }
+ )
+
+ project.repository.add_tag(project.creator, 'v0.1', project.repository.commit.sha)
+
+ project
+ end
+
before do
project.add_developer(user)
end
@@ -22,19 +42,6 @@ RSpec.describe Ci::Components::FetchService, feature_category: :pipeline_composi
shared_examples 'an external component' do
shared_examples 'component address' do
context 'when content exists' do
- let(:sha) { project.commit(version).id }
-
- let(:content) do
- <<~COMPONENT
- job:
- script: echo
- COMPONENT
- end
-
- before do
- stub_project_blob(sha, component_yaml_path, content)
- end
-
it 'returns the content' do
expect(result).to be_success
expect(result.payload[:content]).to eq(content)
@@ -42,6 +49,8 @@ RSpec.describe Ci::Components::FetchService, feature_category: :pipeline_composi
end
context 'when content does not exist' do
+ let(:address) { "#{current_host}/#{component_path}@~version-does-not-exist" }
+
it 'returns an error' do
expect(result).to be_error
expect(result.reason).to eq(:content_not_found)
diff --git a/spec/services/ci/create_commit_status_service_spec.rb b/spec/services/ci/create_commit_status_service_spec.rb
new file mode 100644
index 00000000000..ec200e24c8f
--- /dev/null
+++ b/spec/services/ci/create_commit_status_service_spec.rb
@@ -0,0 +1,461 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreateCommitStatusService, :clean_gitlab_redis_cache, feature_category: :continuous_integration do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:response) { execute_service(params) }
+
+ let_it_be_with_refind(:project) { create(:project, :repository) }
+ let_it_be(:commit) { project.repository.commit }
+ let_it_be(:guest) { create_user(:guest) }
+ let_it_be(:reporter) { create_user(:reporter) }
+ let_it_be(:developer) { create_user(:developer) }
+
+ let(:user) { developer }
+ let(:sha) { commit.id }
+ let(:params) { { state: 'pending' } }
+ let(:job) { response.payload[:job] }
+
+ %w[pending running success failed canceled].each do |status|
+ context "for #{status}" do
+ let(:params) { { state: status } }
+
+ context 'when pipeline for sha does not exists' do
+ it 'creates commit status and sets pipeline iid' do
+ expect(response).to be_success
+ expect(job.sha).to eq(commit.id)
+ expect(job.status).to eq(status)
+ expect(job.name).to eq('default')
+ expect(job.ref).not_to be_empty
+ expect(job.target_url).to be_nil
+ expect(job.description).to be_nil
+ expect(job.pipeline_id).not_to be_nil
+
+ expect(CommitStatus.find(job.id)).to be_api_failure if status == 'failed'
+
+ expect(::Ci::Pipeline.last.iid).not_to be_nil
+ end
+ end
+ end
+ end
+
+ context 'when status transitions from pending' do
+ before do
+ execute_service(state: 'pending')
+ end
+
+ %w[running success failed canceled].each do |status|
+ context "for #{status}" do
+ let(:params) { { state: status } }
+
+ it "changes to #{status}" do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(1)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_success
+ expect(job.status).to eq(status)
+ end
+ end
+ end
+
+ context 'for invalid transition' do
+ let(:params) { { state: 'pending' } }
+
+ it 'returns bad request and error message' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(1)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq(
+ "Cannot transition status via :enqueue from :pending (Reason(s): Status cannot transition via \"enqueue\")"
+ )
+ end
+ end
+ end
+
+ context 'with all optional parameters' do
+ context 'when creating a commit status' do
+ let(:params) do
+ {
+ sha: sha,
+ state: 'success',
+ context: 'coverage',
+ ref: 'master',
+ description: 'test',
+ coverage: 80.0,
+ target_url: 'http://gitlab.com/status'
+ }
+ end
+
+ it 'creates commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(response).to be_success
+ expect(job.sha).to eq(commit.id)
+ expect(job.status).to eq('success')
+ expect(job.name).to eq('coverage')
+ expect(job.ref).to eq('master')
+ expect(job.coverage).to eq(80.0)
+ expect(job.description).to eq('test')
+ expect(job.target_url).to eq('http://gitlab.com/status')
+ end
+
+ context 'when merge request exists for given branch' do
+ let!(:merge_request) do
+ create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'develop')
+ end
+
+ it 'sets head pipeline' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(response).to be_success
+ expect(merge_request.reload.head_pipeline).not_to be_nil
+ end
+ end
+ end
+
+ context 'when updating a commit status' do
+ let(:parameters) do
+ {
+ state: 'success',
+ name: 'coverage',
+ ref: 'master'
+ }
+ end
+
+ let(:updatable_optional_attributes) do
+ {
+ description: 'new description',
+ coverage: 90.0
+ }
+ end
+
+ let(:params) { parameters.merge(updatable_optional_attributes) }
+
+ # creating the initial commit status
+ before do
+ execute_service(
+ sha: sha,
+ state: 'running',
+ context: 'coverage',
+ ref: 'master',
+ description: 'coverage test',
+ coverage: 10.0,
+ target_url: 'http://gitlab.com/status'
+ )
+ end
+
+ it 'updates a commit status' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(1)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_success
+ expect(job.sha).to eq(commit.id)
+ expect(job.status).to eq('success')
+ expect(job.name).to eq('coverage')
+ expect(job.ref).to eq('master')
+ expect(job.coverage).to eq(90.0)
+ expect(job.description).to eq('new description')
+ expect(job.target_url).to eq('http://gitlab.com/status')
+ end
+
+ context 'when the `state` parameter is sent the same' do
+ let(:parameters) do
+ {
+ sha: sha,
+ state: 'running',
+ name: 'coverage',
+ ref: 'master'
+ }
+ end
+
+ it 'does not update the commit status' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(1)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq(
+ "Cannot transition status via :run from :running (Reason(s): Status cannot transition via \"run\")"
+ )
+
+ commit_status = project.commit_statuses.find_by!(name: 'coverage')
+
+ expect(commit_status.description).to eq('coverage test')
+ expect(commit_status.coverage).to eq(10.0)
+ end
+ end
+ end
+
+ context 'when a pipeline id is specified' do
+ let!(:first_pipeline) do
+ project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'master', status: 'created').tap do |p|
+ p.ensure_project_iid! # Necessary to avoid cross-database modification error
+ p.save!
+ end
+ end
+
+ let!(:other_pipeline) do
+ project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'master', status: 'created').tap do |p|
+ p.ensure_project_iid! # Necessary to avoid cross-database modification error
+ p.save!
+ end
+ end
+
+ let(:params) do
+ {
+ sha: sha,
+ pipeline_id: other_pipeline.id,
+ state: 'success',
+ ref: 'master'
+ }
+ end
+
+ it 'update the correct pipeline', :sidekiq_might_not_need_inline do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(2)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(first_pipeline.reload.status).to eq('created')
+ expect(other_pipeline.reload.status).to eq('success')
+ end
+ end
+ end
+
+ context 'when retrying a commit status' do
+ subject(:response) do
+ execute_service(state: 'failed', name: 'test', ref: 'master')
+
+ execute_service(state: 'success', name: 'test', ref: 'master')
+ end
+
+ it 'correctly posts a new commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(2)
+
+ expect(response).to be_success
+ expect(job.sha).to eq(commit.id)
+ expect(job.status).to eq('success')
+ end
+
+ it 'retries the commit status', :sidekiq_might_not_need_inline do
+ response
+
+ expect(CommitStatus.count).to eq 2
+ expect(CommitStatus.first).to be_retried
+ expect(CommitStatus.last.pipeline).to be_success
+ end
+ end
+
+ context 'when status is invalid' do
+ let(:params) { { state: 'invalid' } }
+
+ it 'does not create commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq('invalid state')
+ end
+ end
+
+ context 'when request without a state made' do
+ let(:params) { {} }
+
+ it 'does not create commit status' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(0)
+ .and not_change { ::Ci::Stage.count }.from(0)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq('State is required')
+ end
+ end
+
+ context 'when updating a protected ref' do
+ let(:params) { { state: 'running', ref: 'master' } }
+
+ before do
+ create(:protected_branch, project: project, name: 'master')
+ end
+
+ context 'with user as developer' do
+ let(:user) { developer }
+
+ it 'does not create commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and not_change { ::Ci::Stage.count }.from(0)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:forbidden)
+ expect(response.message).to eq('403 Forbidden')
+ end
+ end
+
+ context 'with user as maintainer' do
+ let(:user) { create_user(:maintainer) }
+
+ it 'creates commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(response).to be_success
+ end
+ end
+ end
+
+ context 'when commit SHA is invalid' do
+ let(:sha) { 'invalid_sha' }
+ let(:params) { { state: 'running', sha: sha } }
+
+ it 'returns not found error' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(0)
+ .and not_change { ::Ci::Stage.count }.from(0)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:not_found)
+ expect(response.message).to eq('404 Commit Not Found')
+ end
+ end
+
+ context 'when target URL is an invalid address' do
+ let(:params) { { state: 'pending', target_url: 'invalid url' } }
+
+ it 'responds with bad request status and validation errors' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message[:target_url])
+ .to include 'is blocked: Only allowed schemes are http, https'
+ end
+ end
+
+ context 'when target URL is an unsupported scheme' do
+ let(:params) { { state: 'pending', target_url: 'git://example.com' } }
+
+ it 'responds with bad request status and validation errors' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message[:target_url])
+ .to include 'is blocked: Only allowed schemes are http, https'
+ end
+ end
+
+ context 'when trying to update a status of a different type' do
+ let!(:pipeline) { create(:ci_pipeline, project: project, sha: sha, ref: 'ref') }
+ let!(:ci_build) { create(:ci_build, pipeline: pipeline, name: 'test-job') }
+ let(:params) { { state: 'pending', name: 'test-job' } }
+
+ before do
+ execute_service(params)
+ end
+
+ it 'responds with bad request status and validation errors' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(2)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message[:name])
+ .to include 'has already been taken'
+ end
+ end
+
+ context 'with partitions', :ci_partitionable do
+ let(:current_partition_id) { ci_testing_partition_id }
+ let(:params) { { state: 'running' } }
+
+ before do
+ allow(Ci::Pipeline)
+ .to receive(:current_partition_value) { current_partition_id }
+ end
+
+ it 'creates records in the current partition' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(response).to be_success
+
+ status = CommitStatus.find(job.id)
+
+ expect(status.partition_id).to eq(current_partition_id)
+ expect(status.pipeline.partition_id).to eq(current_partition_id)
+ end
+ end
+
+ context 'for race condition' do
+ let(:licenses_snyk_params) { { state: 'running', name: 'licenses', description: 'testing' } }
+ let(:security_snyk_params) { { state: 'running', name: 'security', description: 'testing' } }
+ let(:snyk_params_list) { [licenses_snyk_params, security_snyk_params] }
+
+ it 'creates one pipeline and two jobs (one for licenses, one for security)' do
+ expect do
+ snyk_params_list.map do |snyk_params|
+ Thread.new do
+ response = execute_service(snyk_params)
+ expect(response).to be_success
+ end
+ end.each(&:join)
+ end
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(2)
+ end
+ end
+
+ def create_user(access_level_trait)
+ user = create(:user)
+ create(:project_member, access_level_trait, user: user, project: project)
+ user
+ end
+
+ def execute_service(params = self.params)
+ described_class
+ .new(project, user, params)
+ .execute(optional_commit_status_params: params.slice(*%i[target_url description coverage]))
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
index e6bdb2a3fc6..07bc3aa28cf 100644
--- a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
@@ -21,6 +21,23 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
create_gitlab_ci_yml(downstream_project, downstream_config)
end
+ it_behaves_like 'creating a pipeline with environment keyword' do
+ let(:execute_service) { service.execute(:push) }
+ let(:upstream_config) { config }
+ let(:expected_deployable_class) { Ci::Bridge }
+ let(:expected_deployment_status) { 'running' }
+ let(:expected_job_status) { 'running' }
+ let(:downstream_config) { YAML.dump({ deploy: { script: 'deploy' } }) }
+ let(:base_config) do
+ {
+ trigger: {
+ project: downstream_project.full_path,
+ strategy: 'depend'
+ }
+ }
+ end
+ end
+
context 'with resource group', :aggregate_failures do
let(:upstream_config) do
<<~YAML
diff --git a/spec/services/ci/create_pipeline_service/environment_spec.rb b/spec/services/ci/create_pipeline_service/environment_spec.rb
index 96e54af43cd..e900f4ba10c 100644
--- a/spec/services/ci/create_pipeline_service/environment_spec.rb
+++ b/spec/services/ci/create_pipeline_service/environment_spec.rb
@@ -14,6 +14,26 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
project.add_developer(developer)
end
+ it_behaves_like 'creating a pipeline with environment keyword' do
+ let!(:project) { create(:project, :repository) }
+ let(:execute_service) { service.execute(:push) }
+ let(:expected_deployable_class) { Ci::Build }
+ let(:expected_deployment_status) { 'created' }
+ let(:expected_job_status) { 'pending' }
+ let(:expected_tag_names) { %w[hello] }
+ let(:base_config) do
+ {
+ script: 'deploy',
+ tags: ['hello']
+ }
+ end
+
+ before do
+ project.add_developer(developer) # rubocop:disable RSpec/BeforeAllRoleAssignment
+ project.repository.create_file(developer, '.gitlab-ci.yml', config, branch_name: 'master', message: 'test')
+ end
+ end
+
describe '#execute' do
subject { service.execute(:push).payload }
@@ -104,6 +124,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(pipeline).to be_created_successfully
expect(Environment.find_by_name('test/deploy/2')).to be_persisted
expect(pipeline.builds.size).to eq(1)
+ # Clearing cache of BatchLoader in `build.persisted_environment` for fetching fresh data.
+ BatchLoader::Executor.clear_current
expect(build.persisted_environment.name).to eq('test/deploy/2')
expect(build.name).to eq('deploy-review-app-2')
expect(build.environment).to eq('test/$CI_JOB_STAGE/2')
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
index 6a1987fcc7c..6b4a1809d9a 100644
--- a/spec/services/ci/create_pipeline_service/logger_spec.rb
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -139,74 +139,5 @@ RSpec.describe Ci::CreatePipelineService, # rubocop: disable RSpec/FilePath
expect(pipeline).to be_created_successfully
end
end
-
- describe 'pipeline includes count' do
- before do
- stub_const('Gitlab::Ci::Config::External::Context::TEMP_MAX_INCLUDES', 2)
- end
-
- context 'when the includes count exceeds the maximum' do
- before do
- allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:config_metadata)
- .and_return({ includes: [{ file: 1 }, { file: 2 }, { file: 3 }] })
- end
- end
-
- it 'creates a log entry' do
- expect(Gitlab::AppJsonLogger)
- .to receive(:info)
- .with(a_hash_including({ 'pipeline_includes_count' => 3 }))
- .and_call_original
-
- expect(pipeline).to be_created_successfully
- end
- end
-
- context 'when the includes count does not exceed the maximum' do
- before do
- allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:config_metadata)
- .and_return({ includes: [{ file: 1 }, { file: 2 }] })
- end
- end
-
- it 'does not create a log entry but it collects the data' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
- expect(pipeline).to be_created_successfully
-
- expect(service.logger.observations_hash)
- .to match(a_hash_including({ 'pipeline_includes_count' => 2 }))
- end
- end
-
- context 'when the includes data is nil' do
- before do
- allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:config_metadata)
- .and_return({})
- end
- end
-
- it 'does not create a log entry' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
- expect(pipeline).to be_created_successfully
- end
- end
-
- context 'when the pipeline config_metadata is nil' do
- before do
- allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:config_metadata)
- .and_return(nil)
- end
- end
-
- it 'does not create a log entry but it collects the data' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
- expect(pipeline).to be_created_successfully
- end
- end
- end
end
end
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index e644273df9a..65180ac055f 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -20,7 +20,29 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
before do
project.add_developer(user)
- stub_ci_pipeline_yaml_file(config)
+ end
+
+ it_behaves_like 'creating a pipeline with environment keyword' do
+ let!(:project) { create(:project, :repository) }
+ let(:execute_service) { service.execute(:push) }
+ let(:expected_deployable_class) { Ci::Bridge }
+ let(:expected_deployment_status) { 'running' }
+ let(:expected_job_status) { 'running' }
+ let(:child_config) { YAML.dump({ deploy: { script: 'deploy' } }) }
+ let(:base_config) do
+ {
+ trigger: {
+ include: [{ local: '.child.yml' }],
+ strategy: 'depend'
+ }
+ }
+ end
+
+ before do
+ project.add_developer(user)
+ project.repository.create_file(user, '.gitlab-ci.yml', config, branch_name: 'master', message: 'ok')
+ project.repository.create_file(user, '.child.yml', child_config, branch_name: 'master', message: 'ok')
+ end
end
shared_examples 'successful creation' do
@@ -67,6 +89,10 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
YAML
end
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
it_behaves_like 'successful creation' do
let(:expected_bridge_options) do
{
@@ -158,6 +184,10 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
end
describe 'child pipeline triggers' do
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
context 'when YAML is valid' do
let(:config) do
<<~YAML
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index a81d1487fab..05fa3cfeba3 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -298,6 +298,46 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
+ context 'with CI_ENVIRONMENT_* predefined variables' do
+ let(:config) do
+ <<-EOY
+ deploy:
+ script: "deploy"
+ environment:
+ name: review/$CI_COMMIT_REF_NAME
+ deployment_tier: development
+ url: https://gitlab.com
+ rules:
+ - if: $CI_ENVIRONMENT_NAME =~ /^review\// && $CI_ENVIRONMENT_ACTION == "start" && $CI_ENVIRONMENT_TIER == "development" && $CI_ENVIRONMENT_URL == "https://gitlab.com"
+
+ teardown:
+ script: "teardown"
+ environment:
+ name: review/$CI_COMMIT_REF_NAME
+ deployment_tier: development
+ url: https://gitlab.com
+ action: stop
+ rules:
+ - if: $CI_ENVIRONMENT_NAME =~ /^review\// && $CI_ENVIRONMENT_ACTION == "stop" && $CI_ENVIRONMENT_TIER == "development" && $CI_ENVIRONMENT_URL == "https://gitlab.com"
+ when: manual
+ EOY
+ end
+
+ it 'assigns correct attributes to the jobs' do
+ expect(pipeline).to be_persisted
+
+ BatchLoader::Executor.clear_current
+
+ expect(build_names).to contain_exactly('deploy', 'teardown')
+ expect(find_job('deploy').when).to eq('on_success')
+ expect(find_job('teardown').when).to eq('manual')
+ expect(find_job('deploy').allow_failure).to eq(false)
+ expect(find_job('teardown').allow_failure).to eq(false)
+ expect(find_job('deploy').actual_persisted_environment.name).to eq('review/master')
+ expect(find_job('teardown').actual_persisted_environment.name).to eq('review/master')
+ end
+ end
+
context 'with simple if: clauses' do
let(:config) do
<<-EOY
diff --git a/spec/services/ci/create_pipeline_service/variables_spec.rb b/spec/services/ci/create_pipeline_service/variables_spec.rb
index aac9a0c9c2d..3039ffb2751 100644
--- a/spec/services/ci/create_pipeline_service/variables_spec.rb
+++ b/spec/services/ci/create_pipeline_service/variables_spec.rb
@@ -90,6 +90,39 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
)
end
end
+
+ context 'when trigger variables have CI_ENVIRONMENT_* predefined variables' do
+ let(:config) do
+ <<-YAML
+ child:
+ variables:
+ UPSTREAM_ENVIRONMENT_NAME: $CI_ENVIRONMENT_NAME
+ UPSTREAM_ENVIRONMENT_TIER: $CI_ENVIRONMENT_TIER
+ UPSTREAM_ENVIRONMENT_URL: $CI_ENVIRONMENT_URL
+ UPSTREAM_ENVIRONMENT_ACTION: $CI_ENVIRONMENT_ACTION
+ environment:
+ name: review/$CI_COMMIT_REF_NAME
+ deployment_tier: testing
+ url: https://gitlab.com
+ action: start
+ trigger:
+ include: child.yml
+ YAML
+ end
+
+ let(:child) { find_job('child') }
+
+ it 'creates the pipeline with a trigger job that has downstream_variables' do
+ expect(pipeline).to be_created_successfully
+
+ expect(child.downstream_variables).to include(
+ { key: 'UPSTREAM_ENVIRONMENT_NAME', value: 'review/master' },
+ { key: 'UPSTREAM_ENVIRONMENT_TIER', value: 'testing' },
+ { key: 'UPSTREAM_ENVIRONMENT_URL', value: 'https://gitlab.com' },
+ { key: 'UPSTREAM_ENVIRONMENT_ACTION', value: 'start' }
+ )
+ end
+ end
end
private
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index a28ede89cee..a28dd9e7a55 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -748,131 +748,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
- context 'with environment' do
- before do
- config = YAML.dump(
- deploy: {
- environment: { name: "review/$CI_COMMIT_REF_NAME" },
- script: 'ls',
- tags: ['hello']
- })
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'creates the environment with tags', :sidekiq_inline do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(Environment.find_by(name: "review/master")).to be_present
- expect(result.builds.first.tag_list).to contain_exactly('hello')
- expect(result.builds.first.deployment).to be_persisted
- expect(result.builds.first.deployment.deployable).to be_a(Ci::Build)
- end
- end
-
- context 'with environment with auto_stop_in' do
- before do
- config = YAML.dump(
- deploy: {
- environment: { name: "review/$CI_COMMIT_REF_NAME", auto_stop_in: '1 day' },
- script: 'ls'
- })
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'creates the environment with auto stop in' do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(result.builds.first.options[:environment][:auto_stop_in]).to eq('1 day')
- end
- end
-
- context 'with environment name including persisted variables' do
- before do
- config = YAML.dump(
- deploy: {
- environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_JOB_ID" },
- script: 'ls'
- }
- )
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'skips persisted variables in environment name' do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(Environment.find_by(name: "review/id1/id2")).to be_present
- end
- end
-
- context 'environment with Kubernetes configuration' do
- let(:kubernetes_namespace) { 'custom-namespace' }
-
- before do
- config = YAML.dump(
- deploy: {
- environment: {
- name: "environment-name",
- kubernetes: { namespace: kubernetes_namespace }
- },
- script: 'ls'
- }
- )
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'stores the requested namespace' do
- result = execute_service.payload
- build = result.builds.first
-
- expect(result).to be_persisted
- expect(build.options.dig(:environment, :kubernetes, :namespace)).to eq(kubernetes_namespace)
- end
- end
-
- context 'when environment with invalid name' do
- before do
- config = YAML.dump(deploy: { environment: { name: 'name,with,commas' }, script: 'ls' })
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'does not create an environment' do
- expect do
- result = execute_service.payload
-
- expect(result).to be_persisted
- end.not_to change { Environment.count }
- end
- end
-
- context 'when environment with duplicate names' do
- let(:ci_yaml) do
- {
- deploy: { environment: { name: 'production' }, script: 'ls' },
- deploy_2: { environment: { name: 'production' }, script: 'ls' }
- }
- end
-
- before do
- stub_ci_pipeline_yaml_file(YAML.dump(ci_yaml))
- end
-
- it 'creates a pipeline with the environment', :sidekiq_inline do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(Environment.find_by(name: 'production')).to be_present
- expect(result.builds.first.deployment).to be_persisted
- expect(result.builds.first.deployment.deployable).to be_a(Ci::Build)
- end
- end
-
context 'when builds with auto-retries are configured' do
let(:pipeline) { execute_service.payload }
let(:rspec_job) { pipeline.builds.find_by(name: 'rspec') }
@@ -1294,55 +1169,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
- context 'when pipeline has a job with environment' do
- let(:pipeline) { execute_service.payload }
-
- before do
- stub_ci_pipeline_yaml_file(YAML.dump(config))
- end
-
- context 'when environment name is valid' do
- let(:config) do
- {
- review_app: {
- script: 'deploy',
- environment: {
- name: 'review/${CI_COMMIT_REF_NAME}',
- url: 'http://${CI_COMMIT_REF_SLUG}-staging.example.com'
- }
- }
- }
- end
-
- it 'has a job with environment', :sidekiq_inline do
- expect(pipeline.builds.count).to eq(1)
- expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
- expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
- expect(pipeline.builds.first.deployment).to be_created
- end
- end
-
- context 'when environment name is invalid' do
- let(:config) do
- {
- 'job:deploy-to-test-site': {
- script: 'deploy',
- environment: {
- name: '${CI_JOB_NAME}',
- url: 'https://$APP_URL'
- }
- }
- }
- end
-
- it 'has a job without environment' do
- expect(pipeline.builds.count).to eq(1)
- expect(pipeline.builds.first.persisted_environment).to be_nil
- expect(pipeline.builds.first.deployment).to be_nil
- end
- end
- end
-
describe 'Pipeline for external pull requests' do
let(:response) do
execute_service(
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index 82a8e425cd0..fffac0fd64b 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -35,20 +35,6 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
expect(build_statuses(pipeline)).to contain_exactly('pending')
expect(build_statuses(old_pipeline)).to contain_exactly('pending')
end
-
- context 'with lower_interval_for_canceling_redundant_pipelines disabled' do
- before do
- stub_feature_flags(lower_interval_for_canceling_redundant_pipelines: false)
- end
-
- it 'cancels pipelines created more than 3 days ago' do
- execute
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- expect(build_statuses(old_pipeline)).to contain_exactly('canceled')
- end
- end
end
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 61fec82c688..83bae16a30e 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -26,7 +26,9 @@ module Ci
end
      it 'result is valid if the replica has caught up', :aggregate_failures do
- expect(ApplicationRecord.sticking).to receive(:all_caught_up?).with(:runner, runner.id) { true }
+ expect(ApplicationRecord.sticking).to receive(:find_caught_up_replica)
+ .with(:runner, runner.id, use_primary_on_failure: false)
+ .and_return(true)
expect { execute }.not_to change { Ci::RunnerManagerBuild.count }.from(0)
expect(execute).to be_valid
@@ -35,8 +37,9 @@ module Ci
end
      it 'result is invalid if the replica has not caught up', :aggregate_failures do
- expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
- .with(:runner, shared_runner.id) { false }
+ expect(ApplicationRecord.sticking).to receive(:find_caught_up_replica)
+ .with(:runner, shared_runner.id, use_primary_on_failure: false)
+ .and_return(false)
expect(subject).not_to be_valid
expect(subject.build).to be_nil
@@ -948,8 +951,8 @@ module Ci
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
let(:expected_shared_runner) { true }
let(:expected_shard) { ::Gitlab::Ci::Queue::Metrics::DEFAULT_METRICS_SHARD }
- let(:expected_jobs_running_for_project_first_job) { 0 }
- let(:expected_jobs_running_for_project_third_job) { 2 }
+ let(:expected_jobs_running_for_project_first_job) { '0' }
+ let(:expected_jobs_running_for_project_third_job) { '2' }
it_behaves_like 'metrics collector'
@@ -969,7 +972,7 @@ module Ci
context 'when max running jobs bucket size is exceeded' do
before do
- stub_const('Gitlab::Ci::Queue::Metrics::JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET', 1)
+ stub_const('Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET', 1)
end
let(:expected_jobs_running_for_project_third_job) { '1+' }
diff --git a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
index d952fca25a5..8d612174a0b 100644
--- a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
+++ b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute', fe
end
expect(execute).to be_error
- expect(runner.reload.projects).to eq(original_projects)
+ expect(runner.reload.projects.order(:id)).to eq(original_projects)
end
end
@@ -117,7 +117,7 @@ RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute', fe
it 'returns error response and rolls back transaction' do
expect(execute).to be_error
expect(execute.errors).to contain_exactly('user is not authorized to add runners to project')
- expect(runner.reload.projects).to eq(original_projects)
+ expect(runner.reload.projects.order(:id)).to eq(original_projects)
end
end
end
diff --git a/spec/services/concerns/rate_limited_service_spec.rb b/spec/services/concerns/rate_limited_service_spec.rb
index 2172c756ecf..2cfc6692f23 100644
--- a/spec/services/concerns/rate_limited_service_spec.rb
+++ b/spec/services/concerns/rate_limited_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe RateLimitedService, feature_category: :rate_limiting do
let(:key) { :issues_create }
let(:scope) { [:container, :current_user] }
- let(:opts) { { scope: scope, users_allowlist: -> { [User.support_bot.username] } } }
+ let(:opts) { { scope: scope, users_allowlist: -> { [Users::Internal.support_bot.username] } } }
let(:rate_limiter) { ::Gitlab::ApplicationRateLimiter }
describe 'RateLimitedError' do
diff --git a/spec/services/concerns/services/return_service_responses_spec.rb b/spec/services/concerns/services/return_service_responses_spec.rb
new file mode 100644
index 00000000000..3589b952e87
--- /dev/null
+++ b/spec/services/concerns/services/return_service_responses_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Services::ReturnServiceResponses, feature_category: :rate_limiting do
+ subject(:object) { Class.new { include Services::ReturnServiceResponses }.new }
+
+ let(:message) { 'a delivering message' }
+ let(:payload) { 'string payload' }
+
+ describe '#success' do
+ it 'returns a ServiceResponse instance' do
+ response = object.success(payload)
+ expect(response).to be_an(ServiceResponse)
+ expect(response).to be_success
+ expect(response.message).to be_nil
+ expect(response.payload).to eq(payload)
+ expect(response.http_status).to eq(:ok)
+ end
+ end
+
+ describe '#error' do
+ it 'returns a ServiceResponse instance' do
+ response = object.error(message, :not_found, pass_back: payload)
+ expect(response).to be_an(ServiceResponse)
+ expect(response).to be_error
+ expect(response.message).to eq(message)
+ expect(response.payload).to eq(payload)
+ expect(response.http_status).to eq(:not_found)
+ end
+ end
+end
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 0a93e300eb6..79bf0d972d4 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -79,6 +79,27 @@ RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continu
expect(subject.execute).to eq(deployment)
end
+ context 'when deployable is bridge job' do
+ let(:job) do
+ create(:ci_bridge,
+ :with_deployment,
+ pipeline: pipeline,
+ ref: 'master',
+ tag: false,
+ environment: environment_name,
+ options: { environment: options },
+ project: project)
+ end
+
+ it 'creates ref' do
+ expect_any_instance_of(Repository)
+ .to receive(:create_ref)
+ .with(deployment.sha, "refs/environments/production/deployments/#{deployment.iid}")
+
+ service.execute
+ end
+ end
+
context 'when start action is defined' do
let(:options) { { name: 'production', action: 'start' } }
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index 22570a14443..b6a80cf26cc 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
subject(:service) { described_class.new(project, user, issue: issue, designs: designs) }
- # Defined as a method so that the reponse is not cached. We also construct
+ # Defined as a method so that the response is not cached. We also construct
# a new service executor each time to avoid the intermediate cached values
# it constructs during its execution.
def run_service(delenda = nil)
@@ -173,8 +173,10 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
run_service
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
+ let(:namespace) { project.namespace }
+
subject(:service_action) { run_service }
end
end
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index ea53fcc3b12..8e5065184ca 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -11,9 +11,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
let(:project) { issue.project }
let(:user) { developer }
let(:files) { [rails_sample] }
- let(:design_repository) do
- ::Gitlab::GlRepository::DESIGN.repository_resolver.call(project)
- end
+ let(:design_repository) { project.find_or_create_design_management_repository.repository }
let(:rails_sample_name) { 'rails_sample.jpg' }
let(:rails_sample) { sample_image(rails_sample_name) }
@@ -43,9 +41,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
design_files = files_to_upload || files
design_files.each(&:rewind)
- service = described_class.new(project, user,
- issue: issue,
- files: design_files)
+ service = described_class.new(project, user, issue: issue, files: design_files)
service.execute
end
@@ -123,8 +119,9 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
)
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
+ let(:namespace) { project.namespace }
subject(:service_action) { run_service }
end
@@ -221,8 +218,9 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
run_service
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
+ let(:namespace) { project.namespace }
subject(:service_action) { run_service }
end
@@ -390,9 +388,12 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
before do
path = File.join(build(:design, issue: issue, filename: filename).full_path)
design_repository.create_if_not_exists
- design_repository.create_file(user, path, 'something fake',
- branch_name: project.default_branch_or_main,
- message: 'Somehow created without being tracked in db')
+ design_repository.create_file(
+ user,
+ path, 'something fake',
+ branch_name: project.default_branch_or_main,
+ message: 'Somehow created without being tracked in db'
+ )
end
it 'creates the design and a new version for it' do
diff --git a/spec/services/discussions/resolve_service_spec.rb b/spec/services/discussions/resolve_service_spec.rb
index a6e1bad30ce..88ef390bb02 100644
--- a/spec/services/discussions/resolve_service_spec.rb
+++ b/spec/services/discussions/resolve_service_spec.rb
@@ -94,9 +94,11 @@ RSpec.describe Discussions::ResolveService, feature_category: :code_review_workf
it 'raises an argument error if discussions do not belong to the same noteable' do
other_merge_request = create(:merge_request)
- other_discussion = create(:diff_note_on_merge_request,
- noteable: other_merge_request,
- project: other_merge_request.source_project).to_discussion
+ other_discussion = create(
+ :diff_note_on_merge_request,
+ noteable: other_merge_request,
+ project: other_merge_request.source_project
+ ).to_discussion
expect do
described_class.new(project, user, one_or_more_discussions: [discussion, other_discussion])
end.to raise_error(
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index dab06637c1a..48959baeaa5 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -292,9 +292,12 @@ RSpec.describe DraftNotes::PublishService, feature_category: :code_review_workfl
other_user = create(:user)
project.add_developer(other_user)
- create(:draft_note, merge_request: merge_request,
- author: user,
- note: "thanks\n/assign #{other_user.to_reference}")
+ create(
+ :draft_note,
+ merge_request: merge_request,
+ author: user,
+ note: "thanks\n/assign #{other_user.to_reference}"
+ )
expect { publish }.to change { DraftNote.count }.by(-1).and change { Note.count }.by(2)
expect(merge_request.reload.assignees).to match_array([other_user])
diff --git a/spec/services/environments/stop_service_spec.rb b/spec/services/environments/stop_service_spec.rb
index 6e3b36b5636..04116c5238f 100644
--- a/spec/services/environments/stop_service_spec.rb
+++ b/spec/services/environments/stop_service_spec.rb
@@ -135,8 +135,7 @@ RSpec.describe Environments::StopService, feature_category: :continuous_delivery
context 'when branch for stop action is protected' do
before do
project.add_developer(user)
- create(:protected_branch, :no_one_can_push,
- name: 'master', project: project)
+ create(:protected_branch, :no_one_can_push, name: 'master', project: project)
end
it 'does not stop environment' do
diff --git a/spec/services/environments/stop_stale_service_spec.rb b/spec/services/environments/stop_stale_service_spec.rb
index 46d770c30cc..0aa5659f81d 100644
--- a/spec/services/environments/stop_stale_service_spec.rb
+++ b/spec/services/environments/stop_stale_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Environments::StopStaleService,
- :clean_gitlab_redis_shared_state,
- :sidekiq_inline,
- feature_category: :continuous_delivery do
+ :clean_gitlab_redis_shared_state,
+ :sidekiq_inline,
+ feature_category: :continuous_delivery do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
diff --git a/spec/services/files/delete_service_spec.rb b/spec/services/files/delete_service_spec.rb
index dd99e5f9742..982e1bda5ac 100644
--- a/spec/services/files/delete_service_spec.rb
+++ b/spec/services/files/delete_service_spec.rb
@@ -56,9 +56,10 @@ RSpec.describe Files::DeleteService, feature_category: :source_code_management d
let(:last_commit_sha) { Gitlab::Git::Commit.last_for_path(project.repository, project.default_branch, file_path).parent_id }
it "returns a hash with the correct error message and a :error status" do
- expect { subject.execute }
- .to raise_error(Files::UpdateService::FileChangedError,
- "You are attempting to delete a file that has been previously updated.")
+ expect { subject.execute }.to raise_error(
+ Files::UpdateService::FileChangedError,
+ "You are attempting to delete a file that has been previously updated."
+ )
end
end
diff --git a/spec/services/files/update_service_spec.rb b/spec/services/files/update_service_spec.rb
index 6a9f9d6b86f..be424d5cb0d 100644
--- a/spec/services/files/update_service_spec.rb
+++ b/spec/services/files/update_service_spec.rb
@@ -36,8 +36,10 @@ RSpec.describe Files::UpdateService, feature_category: :source_code_management d
it "returns a hash with the correct error message and a :error status" do
expect { subject.execute }
- .to raise_error(Files::UpdateService::FileChangedError,
- "You are attempting to update a file that has changed since you started editing it.")
+ .to raise_error(
+ Files::UpdateService::FileChangedError,
+ "You are attempting to update a file that has changed since you started editing it."
+ )
end
end
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 5e43426b9dd..74f1f4bc7ac 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -81,18 +81,18 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
end
it 'creates a pipeline with the right parameters' do
- expect(Ci::CreatePipelineService)
- .to receive(:new)
- .with(project,
- user,
- {
- before: oldrev,
- after: newrev,
- ref: ref,
- checkout_sha: SeedRepo::Commit::ID,
- variables_attributes: [],
- push_options: {}
- }).and_call_original
+ expect(Ci::CreatePipelineService).to receive(:new).with(
+ project,
+ user,
+ {
+ before: oldrev,
+ after: newrev,
+ ref: ref,
+ checkout_sha: SeedRepo::Commit::ID,
+ variables_attributes: [],
+ push_options: {}
+ }
+ ).and_call_original
subject
end
diff --git a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb b/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
index 4f2e0bea623..c31e76170d5 100644
--- a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
+++ b/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
@@ -28,13 +28,14 @@ RSpec.describe GoogleCloud::CreateCloudsqlInstanceService, feature_category: :de
it 'triggers creation of a cloudsql instance' do
expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
expected_instance_name = "gitlab-#{project.id}-postgres-8000-test-env-42"
- expect(client).to receive(:create_cloudsql_instance)
- .with(gcp_project_id,
- expected_instance_name,
- String,
- database_version,
- 'us-east1',
- tier)
+ expect(client).to receive(:create_cloudsql_instance).with(
+ gcp_project_id,
+ expected_instance_name,
+ String,
+ database_version,
+ 'us-east1',
+ tier
+ )
end
result = service.execute
@@ -74,13 +75,14 @@ RSpec.describe GoogleCloud::CreateCloudsqlInstanceService, feature_category: :de
it 'uses defined region' do
expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
- expect(client).to receive(:create_cloudsql_instance)
- .with(gcp_project_id,
- String,
- String,
- database_version,
- 'user-defined-region',
- tier)
+ expect(client).to receive(:create_cloudsql_instance).with(
+ gcp_project_id,
+ String,
+ String,
+ database_version,
+ 'user-defined-region',
+ tier
+ )
end
service.execute
diff --git a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb b/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
index e5f06824b9f..f8d5ba99bf6 100644
--- a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
+++ b/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
-:clean_gitlab_redis_rate_limiting, feature_category: :build_artifacts do
+ :clean_gitlab_redis_rate_limiting, feature_category: :build_artifacts do
include StubRequests
let(:google_cloud_ips) { File.read(Rails.root.join('spec/fixtures/cdn/google_cloud.json')) }
diff --git a/spec/services/google_cloud/generate_pipeline_service_spec.rb b/spec/services/google_cloud/generate_pipeline_service_spec.rb
index b363b7b17b6..26a1ccb7e3b 100644
--- a/spec/services/google_cloud/generate_pipeline_service_spec.rb
+++ b/spec/services/google_cloud/generate_pipeline_service_spec.rb
@@ -84,11 +84,13 @@ test-java:
stage: test
script: mvn clean test
EOF
- project.repository.create_file(maintainer,
- file_name,
- file_content,
- message: 'Pipeline with three stages and two jobs',
- branch_name: project.default_branch)
+ project.repository.create_file(
+ maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch
+ )
end
it 'introduces a `deploy` stage and includes the deploy-to-cloud-run job' do
@@ -138,11 +140,13 @@ test-java:
stage: test
script: mvn clean test
EOF
- project.repository.create_file(maintainer,
- file_name,
- file_content,
- message: 'Pipeline with three stages and two jobs',
- branch_name: project.default_branch)
+ project.repository.create_file(
+ maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch
+ )
end
it 'includes the deploy-to-cloud-run job' do
@@ -178,11 +182,13 @@ stages:
include:
local: 'some-pipeline.yml'
EOF
- project.repository.create_file(maintainer,
- file_name,
- file_content,
- message: 'Pipeline with three stages and two jobs',
- branch_name: project.default_branch)
+ project.repository.create_file(
+ maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch
+ )
end
it 'includes the deploy-to-cloud-run job' do
@@ -309,11 +315,13 @@ stages:
include:
local: 'some-pipeline.yml'
EOF
- project.repository.create_file(maintainer,
- file_name,
- file_content,
- message: 'Pipeline with three stages and two jobs',
- branch_name: project.default_branch)
+ project.repository.create_file(
+ maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch
+ )
end
it 'includes the vision ai pipeline' do
diff --git a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb b/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
index ed41d0fd487..cd2ad00ac3f 100644
--- a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
+++ b/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
@@ -39,24 +39,26 @@ RSpec.describe GoogleCloud::GetCloudsqlInstancesService, feature_category: :depl
end
it 'result is grouped by environment', :aggregate_failures do
- expect(service.execute).to contain_exactly({
- ref: '*',
- gcp_project: 'value-GCP_PROJECT_ID-*',
- instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-*',
- version: 'value-GCP_CLOUDSQL_VERSION-*'
- },
- {
- ref: 'STG',
- gcp_project: 'value-GCP_PROJECT_ID-STG',
- instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-STG',
- version: 'value-GCP_CLOUDSQL_VERSION-STG'
- },
- {
- ref: 'PRD',
- gcp_project: 'value-GCP_PROJECT_ID-PRD',
- instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-PRD',
- version: 'value-GCP_CLOUDSQL_VERSION-PRD'
- })
+ expect(service.execute).to contain_exactly(
+ {
+ ref: '*',
+ gcp_project: 'value-GCP_PROJECT_ID-*',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-*',
+ version: 'value-GCP_CLOUDSQL_VERSION-*'
+ },
+ {
+ ref: 'STG',
+ gcp_project: 'value-GCP_PROJECT_ID-STG',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-STG',
+ version: 'value-GCP_CLOUDSQL_VERSION-STG'
+ },
+ {
+ ref: 'PRD',
+ gcp_project: 'value-GCP_PROJECT_ID-PRD',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-PRD',
+ version: 'value-GCP_CLOUDSQL_VERSION-PRD'
+ }
+ )
end
end
end
diff --git a/spec/services/gpg_keys/destroy_service_spec.rb b/spec/services/gpg_keys/destroy_service_spec.rb
index b9aa3e351c9..85c1fc2893b 100644
--- a/spec/services/gpg_keys/destroy_service_spec.rb
+++ b/spec/services/gpg_keys/destroy_service_spec.rb
@@ -2,14 +2,28 @@
require 'spec_helper'
-RSpec.describe GpgKeys::DestroyService do
- let(:user) { create(:user) }
+RSpec.describe GpgKeys::DestroyService, feature_category: :source_code_management do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:gpg_key) { create(:gpg_key) }
subject { described_class.new(user) }
it 'destroys the GPG key' do
- gpg_key = create(:gpg_key)
-
expect { subject.execute(gpg_key) }.to change(GpgKey, :count).by(-1)
end
+
+ it 'nullifies the related signatures in batches' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ first_signature = create(:gpg_signature, gpg_key: gpg_key)
+ second_signature = create(:gpg_signature, gpg_key: gpg_key)
+ third_signature = create(:gpg_signature, gpg_key: create(:another_gpg_key))
+
+ control = ActiveRecord::QueryRecorder.new { subject.execute(gpg_key) }
+ expect(control.count).to eq(5)
+
+ expect(first_signature.reload.gpg_key).to be_nil
+ expect(second_signature.reload.gpg_key).to be_nil
+ expect(third_signature.reload.gpg_key).not_to be_nil
+ end
end
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index 929f7d5b4e3..ebdce07d03c 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -44,8 +44,7 @@ RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
destroy_group(group, user, async)
expect(
- Users::GhostUserMigration.where(user: bot,
- initiator_user: user)
+ Users::GhostUserMigration.where(user: bot, initiator_user: user)
).to be_exists
end
end
@@ -70,10 +69,6 @@ RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
end
it 'verifies that paths have been deleted' do
- Gitlab::GitalyClient::NamespaceService.allow do
- expect(Gitlab::GitalyClient::NamespaceService.new(project.repository_storage)
- .exists?(group.path)).to be_falsey
- end
expect(removed_repo).not_to exist
end
end
@@ -101,10 +96,6 @@ RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
end
it 'verifies original paths and projects still exist' do
- Gitlab::GitalyClient::NamespaceService.allow do
- expect(Gitlab::GitalyClient::NamespaceService.new(project.repository_storage)
- .exists?(group.path)).to be_truthy
- end
expect(removed_repo).not_to exist
expect(Project.unscoped.count).to eq(1)
expect(Group.unscoped.count).to eq(2)
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
index 9ba664212b8..913d72eff9c 100644
--- a/spec/services/groups/group_links/create_service_spec.rb
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -55,8 +55,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute', feature_category:
context 'when sharing outside the hierarchy is disabled' do
let_it_be_with_refind(:group_parent) do
- create(:group,
- namespace_settings: create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: true))
+ create(:group, namespace_settings: create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: true))
end
it_behaves_like 'not shareable'
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 861728f00c6..5e37f33e4f2 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -264,18 +264,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'not allowing a path update'
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
context 'updating the subgroup' do
@@ -283,18 +271,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
end
@@ -306,18 +282,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
context 'updating the subgroup' do
@@ -325,18 +289,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
end
@@ -348,18 +300,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
context 'updating the subgroup' do
@@ -367,18 +307,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
end
end
diff --git a/spec/services/import_export_clean_up_service_spec.rb b/spec/services/import_export_clean_up_service_spec.rb
index 7b638b4948b..138dee3d975 100644
--- a/spec/services/import_export_clean_up_service_spec.rb
+++ b/spec/services/import_export_clean_up_service_spec.rb
@@ -53,9 +53,11 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
context 'with uploader exports' do
it 'removes old files and logs' do
- upload = create(:import_export_upload,
- updated_at: 2.days.ago,
- export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
+ upload = create(
+ :import_export_upload,
+ updated_at: 2.days.ago,
+ export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
+ )
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
@@ -73,9 +75,11 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
end
it 'does not remove new files or logs' do
- upload = create(:import_export_upload,
- updated_at: 1.hour.ago,
- export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
+ upload = create(
+ :import_export_upload,
+ updated_at: 1.hour.ago,
+ export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
+ )
expect(Gitlab::Import::Logger).not_to receive(:new)
diff --git a/spec/services/incident_management/incidents/create_service_spec.rb b/spec/services/incident_management/incidents/create_service_spec.rb
index e6ded379434..d0f9d414044 100644
--- a/spec/services/incident_management/incidents/create_service_spec.rb
+++ b/spec/services/incident_management/incidents/create_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe IncidentManagement::Incidents::CreateService, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
- let_it_be(:user) { User.alert_bot }
+ let_it_be(:user) { Users::Internal.alert_bot }
let(:description) { 'Incident description' }
diff --git a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
index caa5ee495b7..5bbca91cdcd 100644
--- a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService, feature_category: :incident_management do
let_it_be(:project, reload: true) { create(:project) }
- let_it_be(:user) { User.alert_bot }
+ let_it_be(:user) { Users::Internal.alert_bot }
let(:webhook_payload) { Gitlab::Json.parse(fixture_file('pager_duty/webhook_incident_trigger.json')) }
let(:parsed_payload) { ::PagerDuty::WebhookPayloadParser.call(webhook_payload) }
@@ -29,7 +29,7 @@ RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService, featur
end
it 'the issue author is Alert bot' do
- expect(execute.payload[:issue].author).to eq(User.alert_bot)
+ expect(execute.payload[:issue].author).to eq(Users::Internal.alert_bot)
end
it 'issue has a correct title' do
diff --git a/spec/services/issuable/process_assignees_spec.rb b/spec/services/issuable/process_assignees_spec.rb
index 2751267c08b..fac7ef9ce77 100644
--- a/spec/services/issuable/process_assignees_spec.rb
+++ b/spec/services/issuable/process_assignees_spec.rb
@@ -5,75 +5,89 @@ require 'spec_helper'
RSpec.describe Issuable::ProcessAssignees, feature_category: :team_planning do
describe '#execute' do
it 'returns assignee_ids when add_assignee_ids and remove_assignee_ids are not specified' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: nil,
- remove_assignee_ids: nil,
- existing_assignee_ids: %w(1 3 9),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: nil,
+ remove_assignee_ids: nil,
+ existing_assignee_ids: %w(1 3 9),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(5, 7, 9)
end
it 'combines other ids when assignee_ids is nil' do
- process = described_class.new(assignee_ids: nil,
- add_assignee_ids: nil,
- remove_assignee_ids: nil,
- existing_assignee_ids: %w(1 3 11),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: nil,
+ add_assignee_ids: nil,
+ remove_assignee_ids: nil,
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 3, 5, 11, 12)
end
it 'combines other ids when both add_assignee_ids and remove_assignee_ids are not empty' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: %w(2 4 6),
- remove_assignee_ids: %w(4 7 11),
- existing_assignee_ids: %w(1 3 11),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 3, 5, 6, 12)
end
it 'combines other ids when remove_assignee_ids is not empty' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: nil,
- remove_assignee_ids: %w(4 7 11),
- existing_assignee_ids: %w(1 3 11),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: nil,
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 3, 5, 12)
end
it 'combines other ids when add_assignee_ids is not empty' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: %w(2 4 6),
- remove_assignee_ids: nil,
- existing_assignee_ids: %w(1 3 11),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: nil,
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 4, 3, 5, 6, 11, 12)
end
it 'combines ids when existing_assignee_ids and extra_assignee_ids are omitted' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: %w(2 4 6),
- remove_assignee_ids: %w(4 7 11))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: %w(4 7 11)
+ )
result = process.execute
expect(result.sort).to eq([2, 6].sort)
end
it 'handles mixed string and integer arrays' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: [2, 4, 6],
- remove_assignee_ids: %w(4 7 11),
- existing_assignee_ids: [1, 3, 11],
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: [2, 4, 6],
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: [1, 3, 11],
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 3, 5, 6, 12)
diff --git a/spec/services/issue_links/destroy_service_spec.rb b/spec/services/issue_links/destroy_service_spec.rb
index 5c4814f5ad1..c367b0157cb 100644
--- a/spec/services/issue_links/destroy_service_spec.rb
+++ b/spec/services/issue_links/destroy_service_spec.rb
@@ -5,25 +5,22 @@ require 'spec_helper'
RSpec.describe IssueLinks::DestroyService, feature_category: :team_planning do
describe '#execute' do
let_it_be(:project) { create(:project_empty_repo, :private) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:reporter) { create(:user).tap { |user| project.add_reporter(user) } }
let_it_be(:issue_a) { create(:issue, project: project) }
let_it_be(:issue_b) { create(:issue, project: project) }
let!(:issuable_link) { create(:issue_link, source: issue_a, target: issue_b) }
+ let(:user) { reporter }
subject { described_class.new(issuable_link, user).execute }
it_behaves_like 'a destroyable issuable link'
context 'when target is an incident' do
- before do
- project.add_reporter(user)
- end
-
let(:issue_b) { create(:incident, project: project) }
it_behaves_like 'an incident management tracked event', :incident_management_incident_unrelate do
- let(:current_user) { user }
+ let(:current_user) { reporter }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
diff --git a/spec/services/issue_links/list_service_spec.rb b/spec/services/issue_links/list_service_spec.rb
index bfb6127ed56..b5cc8c4dcdc 100644
--- a/spec/services/issue_links/list_service_spec.rb
+++ b/spec/services/issue_links/list_service_spec.rb
@@ -21,18 +21,15 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do
let(:issue_d) { create :issue, project: project }
let!(:issue_link_c) do
- create(:issue_link, source: issue_d,
- target: issue)
+ create(:issue_link, source: issue_d, target: issue)
end
let!(:issue_link_b) do
- create(:issue_link, source: issue,
- target: issue_c)
+ create(:issue_link, source: issue, target: issue_c)
end
let!(:issue_link_a) do
- create(:issue_link, source: issue,
- target: issue_b)
+ create(:issue_link, source: issue, target: issue_b)
end
it 'ensures no N+1 queries are made' do
@@ -53,26 +50,32 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do
it 'returns related issues JSON' do
expect(subject.size).to eq(3)
- expect(subject).to include(include(id: issue_b.id,
- title: issue_b.title,
- state: issue_b.state,
- reference: issue_b.to_reference(project),
- path: "/#{project.full_path}/-/issues/#{issue_b.iid}",
- relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_a.id}"))
-
- expect(subject).to include(include(id: issue_c.id,
- title: issue_c.title,
- state: issue_c.state,
- reference: issue_c.to_reference(project),
- path: "/#{project.full_path}/-/issues/#{issue_c.iid}",
- relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_b.id}"))
-
- expect(subject).to include(include(id: issue_d.id,
- title: issue_d.title,
- state: issue_d.state,
- reference: issue_d.to_reference(project),
- path: "/#{project.full_path}/-/issues/#{issue_d.iid}",
- relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_c.id}"))
+ expect(subject).to include(include(
+ id: issue_b.id,
+ title: issue_b.title,
+ state: issue_b.state,
+ reference: issue_b.to_reference(project),
+ path: "/#{project.full_path}/-/issues/#{issue_b.iid}",
+ relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_a.id}"
+ ))
+
+ expect(subject).to include(include(
+ id: issue_c.id,
+ title: issue_c.title,
+ state: issue_c.state,
+ reference: issue_c.to_reference(project),
+ path: "/#{project.full_path}/-/issues/#{issue_c.iid}",
+ relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_b.id}"
+ ))
+
+ expect(subject).to include(include(
+ id: issue_d.id,
+ title: issue_d.title,
+ state: issue_d.state,
+ reference: issue_d.to_reference(project),
+ path: "/#{project.full_path}/-/issues/#{issue_d.iid}",
+ relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_c.id}"
+ ))
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 47925236a74..dabbd4bfa84 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -321,7 +321,7 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do
alert = create(:alert_management_alert, issue: issue, project: project)
expect(SystemNoteService).to receive(:change_alert_status)
- .with(alert, User.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
+ .with(alert, Users::Internal.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
close_issue
@@ -356,7 +356,7 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do
alerts.each do |alert|
expect(SystemNoteService).to receive(:change_alert_status)
- .with(alert, User.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
+ .with(alert, Users::Internal.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
end
close_issue
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 2daba8e359d..7cd2cd8f564 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -577,8 +577,10 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
context "when issuable feature is private" do
before do
- project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE,
- merge_requests_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(
+ issues_access_level: ProjectFeature::PRIVATE,
+ merge_requests_access_level: ProjectFeature::PRIVATE
+ )
end
levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC]
@@ -680,7 +682,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
end
context 'with alert bot author' do
- let_it_be(:user) { User.alert_bot }
+ let_it_be(:user) { Users::Internal.alert_bot }
let_it_be(:label) { create(:label, project: project) }
let(:opts) do
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
index 1ac64c0301d..31eaa72255d 100644
--- a/spec/services/issues/export_csv_service_spec.rb
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -43,18 +43,20 @@ RSpec.describe Issues::ExportCsvService, :with_license, feature_category: :team_
# so create these first.
issue.timelogs.create!(time_spent: 360, user: user)
issue.timelogs.create!(time_spent: 200, user: user)
- issue.update!(milestone: milestone,
- assignees: [user],
- description: 'Issue with details',
- state: :opened,
- due_date: DateTime.new(2014, 3, 2),
- created_at: DateTime.new(2015, 4, 3, 2, 1, 0),
- updated_at: DateTime.new(2016, 5, 4, 3, 2, 1),
- closed_at: DateTime.new(2017, 6, 5, 4, 3, 2),
- weight: 4,
- discussion_locked: true,
- labels: [feature_label, idea_label],
- time_estimate: 72000)
+ issue.update!(
+ milestone: milestone,
+ assignees: [user],
+ description: 'Issue with details',
+ state: :opened,
+ due_date: DateTime.new(2014, 3, 2),
+ created_at: DateTime.new(2015, 4, 3, 2, 1, 0),
+ updated_at: DateTime.new(2016, 5, 4, 3, 2, 1),
+ closed_at: DateTime.new(2017, 6, 5, 4, 3, 2),
+ weight: 4,
+ discussion_locked: true,
+ labels: [feature_label, idea_label],
+ time_estimate: 72000
+ )
end
shared_examples 'exports CSVs for issues' do
@@ -158,9 +160,9 @@ RSpec.describe Issues::ExportCsvService, :with_license, feature_category: :team_
context 'with issues filtered by labels and project' do
subject do
described_class.new(
- IssuesFinder.new(user,
- project_id: project.id,
- label_name: %w(Idea Feature)).execute, project)
+ IssuesFinder.new(user, project_id: project.id, label_name: %w(Idea Feature)).execute,
+ project
+ )
end
it 'returns only filtered objects' do
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 12924df3200..55f912fb703 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -390,8 +390,7 @@ RSpec.describe Issues::MoveService, feature_category: :team_planning do
let(:moved_to_issue) { create(:issue) }
let(:old_issue) do
- create(:issue, project: old_project, author: author,
- moved_to: moved_to_issue)
+ create(:issue, project: old_project, author: author, moved_to: moved_to_issue)
end
it { expect { move }.to raise_error(StandardError, /permissions/) }
diff --git a/spec/services/issues/resolve_discussions_spec.rb b/spec/services/issues/resolve_discussions_spec.rb
index c2111bffdda..ea4ad0440ec 100644
--- a/spec/services/issues/resolve_discussions_spec.rb
+++ b/spec/services/issues/resolve_discussions_spec.rb
@@ -60,10 +60,14 @@ RSpec.describe Issues::ResolveDiscussions, feature_category: :team_planning do
end
it "contains all discussions when only a merge request is passed" do
- second_discussion = Discussion.new([create(:diff_note_on_merge_request,
- noteable: merge_request,
- project: merge_request.target_project,
- line_number: 15)])
+ second_discussion = Discussion.new([
+ create(
+ :diff_note_on_merge_request,
+ noteable: merge_request,
+ project: merge_request.target_project,
+ line_number: 15
+ )
+ ])
service = DummyService.new(
container: project,
current_user: user,
@@ -77,11 +81,15 @@ RSpec.describe Issues::ResolveDiscussions, feature_category: :team_planning do
end
it "contains only unresolved discussions" do
- _second_discussion = Discussion.new([create(:diff_note_on_merge_request, :resolved,
- noteable: merge_request,
- project: merge_request.target_project,
- line_number: 15
- )])
+ _second_discussion = Discussion.new([
+ create(
+ :diff_note_on_merge_request,
+ :resolved,
+ noteable: merge_request,
+ project: merge_request.target_project,
+ line_number: 15
+ )
+ ])
service = DummyService.new(
container: project,
current_user: user,
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index c677dc0315c..eb9fe2b4ed7 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -15,11 +15,14 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
let_it_be(:milestone) { create(:milestone, project: project) }
let(:issue) do
- create(:issue, title: 'Old title',
- description: "for #{user2.to_reference}",
- assignee_ids: [user3.id],
- project: project,
- author: create(:user))
+ create(
+ :issue,
+ title: 'Old title',
+ description: "for #{user2.to_reference}",
+ assignee_ids: [user3.id],
+ project: project,
+ author: create(:user)
+ )
end
before_all do
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
index c9f75283c75..2b398210034 100644
--- a/spec/services/labels/available_labels_service_spec.rb
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -108,36 +108,24 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
end
end
- describe '#filter_locked_labels_ids_in_param' do
- let(:label_ids) { labels.map(&:id).push(non_existing_record_id) }
+ describe '#filter_locked_label_ids' do
+ let(:label_ids) { labels.map(&:id) }
context 'when parent is a project' do
- it 'returns only locked label ids' do
- result = described_class.new(user, project, ids: label_ids).filter_locked_labels_ids_in_param(:ids)
+ it 'returns only relevant label ids' do
+ result = described_class.new(user, project, ids: label_ids).filter_locked_label_ids(label_ids)
expect(result).to match_array([project_label_locked.id, group_label_locked.id])
end
-
- it 'returns labels in preserved order' do
- result = described_class.new(user, project, ids: label_ids.reverse).filter_locked_labels_ids_in_param(:ids)
-
- expect(result).to eq([group_label_locked.id, project_label_locked.id])
- end
end
context 'when parent is a group' do
- it 'returns only locked label ids' do
- result = described_class.new(user, group, ids: label_ids).filter_locked_labels_ids_in_param(:ids)
+ it 'returns only relevant label ids' do
+ result = described_class.new(user, group, ids: label_ids).filter_locked_label_ids(label_ids)
expect(result).to match_array([group_label_locked.id])
end
end
-
- it 'accepts a single id parameter' do
- result = described_class.new(user, project, label_id: project_label_locked.id).filter_locked_labels_ids_in_param(:label_id)
-
- expect(result).to match_array([project_label_locked.id])
- end
end
describe '#available_labels' do
diff --git a/spec/services/labels/update_service_spec.rb b/spec/services/labels/update_service_spec.rb
index 9a8868dac10..61e229e3138 100644
--- a/spec/services/labels/update_service_spec.rb
+++ b/spec/services/labels/update_service_spec.rb
@@ -99,6 +99,14 @@ RSpec.describe Labels::UpdateService, feature_category: :team_planning do
expect(label.reload.lock_on_merge).to be_truthy
end
+ it 'does not allow lock_on_merge to be unset' do
+ label_locked = Labels::CreateService.new(title: 'Initial', lock_on_merge: true).execute(project: project)
+ label = described_class.new(title: 'test', lock_on_merge: false).execute(label_locked)
+
+ expect(label.reload.lock_on_merge).to be_truthy
+ expect(label.reload.title).to eq 'test'
+ end
+
it 'does not allow setting lock_on_merge for templates' do
template_label = Labels::CreateService.new(title: 'Initial').execute(template: true)
label = described_class.new(params).execute(template_label)
diff --git a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
index 86f528d1ea7..d9982b664e5 100644
--- a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
+++ b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
@@ -88,10 +88,11 @@ RSpec.describe LooseForeignKeys::BatchCleanerService, feature_category: :databas
expect(loose_fk_child_table_1.count).to eq(4)
expect(loose_fk_child_table_2.count).to eq(4)
- described_class.new(parent_table: '_test_loose_fk_parent_table',
- loose_foreign_key_definitions: loose_foreign_key_definitions,
- deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100)
- ).execute
+ described_class.new(
+ parent_table: '_test_loose_fk_parent_table',
+ loose_foreign_key_definitions: loose_foreign_key_definitions,
+ deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100)
+ ).execute
end
it 'cleans up the child records' do
@@ -125,11 +126,12 @@ RSpec.describe LooseForeignKeys::BatchCleanerService, feature_category: :databas
let(:deleted_records_incremented_counter) { Gitlab::Metrics.registry.get(:loose_foreign_key_incremented_deleted_records) }
let(:cleaner) do
- described_class.new(parent_table: '_test_loose_fk_parent_table',
- loose_foreign_key_definitions: loose_foreign_key_definitions,
- deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100),
- modification_tracker: modification_tracker
- )
+ described_class.new(
+ parent_table: '_test_loose_fk_parent_table',
+ loose_foreign_key_definitions: loose_foreign_key_definitions,
+ deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100),
+ modification_tracker: modification_tracker
+ )
end
before do
diff --git a/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb b/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
index b59339b24b4..20a193e3b01 100644
--- a/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
+++ b/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
@@ -162,7 +162,9 @@ RSpec.describe LooseForeignKeys::ProcessDeletedRecordsService, feature_category:
end
before do
- stub_const('LooseForeignKeys::ModificationTracker::MAX_DELETES', 2)
+ allow_next_instance_of(LooseForeignKeys::ModificationTracker) do |instance|
+ allow(instance).to receive(:max_deletes).and_return(2)
+ end
stub_const('LooseForeignKeys::CleanerService::DELETE_LIMIT', 1)
end
diff --git a/spec/services/members/invitation_reminder_email_service_spec.rb b/spec/services/members/invitation_reminder_email_service_spec.rb
index 2b72a4919b4..a3c2e994c2e 100644
--- a/spec/services/members/invitation_reminder_email_service_spec.rb
+++ b/spec/services/members/invitation_reminder_email_service_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Members::InvitationReminderEmailService, feature_category: :group
with_them do
# Create an invitation today with an expiration date from 0 to 10 days in the future or without an expiration date
# We chose 10 days here, because we fetch invitations that were created at most 10 days ago.
- (0..10).each do |day|
+ 11.times do |day|
it 'sends an invitation reminder only on the expected days' do
next if day > (expires_at_days || 10) # We don't need to test after the invitation has already expired
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index 6140021c8d2..81fc5661032 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -78,6 +78,51 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
service.execute(merge_request)
end
+ context 'when generating a patch_id_sha' do
+ it 'records a value' do
+ service.execute(merge_request)
+
+ expect(merge_request.approvals.last.patch_id_sha).not_to be_nil
+ end
+
+ context 'when base_sha is nil' do
+ it 'records patch_id_sha as nil' do
+ expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
+ expect(diff_ref).to receive(:base_sha).at_least(:once).and_return(nil)
+ end
+
+ service.execute(merge_request)
+
+ expect(merge_request.approvals.last.patch_id_sha).to be_nil
+ end
+ end
+
+ context 'when head_sha is nil' do
+ it 'records patch_id_sha as nil' do
+ expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
+ expect(diff_ref).to receive(:head_sha).at_least(:once).and_return(nil)
+ end
+
+ service.execute(merge_request)
+
+ expect(merge_request.approvals.last.patch_id_sha).to be_nil
+ end
+ end
+
+ context 'when base_sha and head_sha match' do
+ it 'records patch_id_sha as nil' do
+ expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
+ expect(diff_ref).to receive(:base_sha).at_least(:once).and_return("abc123")
+ expect(diff_ref).to receive(:head_sha).at_least(:once).and_return("abc123")
+ end
+
+ service.execute(merge_request)
+
+ expect(merge_request.approvals.last.patch_id_sha).to be_nil
+ end
+ end
+ end
+
it 'publishes MergeRequests::ApprovedEvent' do
expect { service.execute(merge_request) }
.to publish_event(MergeRequests::ApprovedEvent)
diff --git a/spec/services/merge_requests/base_service_spec.rb b/spec/services/merge_requests/base_service_spec.rb
index 1ca4bfe622c..6a8758c8684 100644
--- a/spec/services/merge_requests/base_service_spec.rb
+++ b/spec/services/merge_requests/base_service_spec.rb
@@ -141,4 +141,32 @@ RSpec.describe MergeRequests::BaseService, feature_category: :code_review_workfl
describe '#constructor_container_arg' do
it { expect(described_class.constructor_container_arg("some-value")).to eq({ project: "some-value" }) }
end
+
+ describe '#inspect' do
+ context 'when #merge_request is defined' do
+ let(:klass) do
+ Class.new(described_class) do
+ def merge_request
+ params[:merge_request]
+ end
+ end
+ end
+
+ let(:params) { {} }
+
+ subject do
+ klass
+ .new(project: nil, current_user: nil, params: params)
+ .inspect
+ end
+
+ it { is_expected.to eq "#<#{klass}>" }
+
+ context 'when merge request is present' do
+ let(:params) { { merge_request: build(:merge_request) } }
+
+ it { is_expected.to eq "#<#{klass} #{params[:merge_request].to_reference(full: true)}>" }
+ end
+ end
+ end
end
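The #inspect examples above only pin the expected output format. A hedged sketch of an override consistent with those expectations; the real BaseService implementation may differ:

def inspect
  return "#<#{self.class}>" unless respond_to?(:merge_request) && merge_request

  "#<#{self.class} #{merge_request.to_reference(full: true)}>"
end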
diff --git a/spec/services/merge_requests/create_ref_service_spec.rb b/spec/services/merge_requests/create_ref_service_spec.rb
index 85ac651c1fa..5f7b9430416 100644
--- a/spec/services/merge_requests/create_ref_service_spec.rb
+++ b/spec/services/merge_requests/create_ref_service_spec.rb
@@ -6,13 +6,14 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
using RSpec::Parameterized::TableSyntax
describe '#execute' do
- let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be_with_reload(:project) { create(:project, :empty_repo) }
let_it_be(:user) { project.creator }
let_it_be(:first_parent_ref) { project.default_branch_or_main }
let_it_be(:source_branch) { 'branch' }
let(:target_ref) { "refs/merge-requests/#{merge_request.iid}/train" }
let(:source_sha) { project.commit(source_branch).sha }
let(:squash) { false }
+ let(:default_commit_message) { merge_request.default_merge_commit_message(user: user) }
let(:merge_request) do
create(
@@ -84,36 +85,40 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
)
end
- it 'writes the merged result into target_ref', :aggregate_failures do
- expect(result[:status]).to eq :success
- expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
- expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
- expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
- expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
- match(
- [
- a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
- 'Feature branch commit 2',
- 'Feature branch commit 1',
- 'Base parent commit 2',
- 'Base parent commit 1'
- ]
+ shared_examples_for 'writing with a merge commit' do
+ it 'merges with a merge commit', :aggregate_failures do
+ expect(result[:status]).to eq :success
+ expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(source_branch).sha)
+ expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(result[:merge_commit_sha]).to be_present
+ expect(result[:squash_commit_sha]).not_to be_present
+ expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
+ match(
+ [
+ expected_merge_commit,
+ 'Feature branch commit 2',
+ 'Feature branch commit 1',
+ 'Base parent commit 2',
+ 'Base parent commit 1'
+ ]
+ )
)
- )
+ end
end
- context 'when squash is requested' do
- let(:squash) { true }
-
+ shared_examples_for 'writing with a squash and merge commit' do
it 'writes the squashed result', :aggregate_failures do
expect(result[:status]).to eq :success
expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
- expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(source_branch).sha)
expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(result[:merge_commit_sha]).to be_present
+ expect(result[:squash_commit_sha]).to be_present
expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
match(
[
- a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
+ expected_merge_commit,
"#{merge_request.title}\n",
'Base parent commit 2',
'Base parent commit 1'
@@ -123,23 +128,18 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
end
end
- context 'when semi-linear merges are enabled' do
- before do
- project.merge_method = :rebase_merge
- project.save!
- end
-
- it 'writes the semi-linear merged result', :aggregate_failures do
+ shared_examples_for 'writing with a squash and no merge commit' do
+ it 'writes the squashed result without a merge commit', :aggregate_failures do
expect(result[:status]).to eq :success
expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
- expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(source_branch).sha)
expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(result[:merge_commit_sha]).not_to be_present
+ expect(result[:squash_commit_sha]).to be_present
expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
match(
[
- a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
- 'Feature branch commit 2',
- 'Feature branch commit 1',
+ "#{merge_request.title}\n",
'Base parent commit 2',
'Base parent commit 1'
]
@@ -148,17 +148,14 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
end
end
- context 'when fast-forward merges are enabled' do
- before do
- project.merge_method = :ff
- project.save!
- end
-
+ shared_examples_for 'writing without a merge commit' do
it 'writes the rebased merged result', :aggregate_failures do
expect(result[:status]).to eq :success
expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
- expect(result[:source_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(source_branch).sha)
expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(result[:merge_commit_sha]).not_to be_present
+ expect(result[:squash_commit_sha]).not_to be_present
expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
eq(
[
@@ -171,6 +168,114 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
)
end
end
+
+ shared_examples 'merge commits without squash' do
+ context 'with a custom template' do
+ let(:expected_merge_commit) { 'This is the merge commit' } # could also be default_commit_message
+
+ before do
+ project.project_setting.update!(merge_commit_template: expected_merge_commit)
+ end
+
+ it_behaves_like 'writing with a merge commit'
+ end
+
+ context 'with no custom template' do
+ let(:expected_merge_commit) { default_commit_message }
+
+ before do
+ project.project_setting.update!(merge_commit_template: nil)
+ end
+
+ it_behaves_like 'writing with a merge commit'
+ end
+ end
+
+ shared_examples 'merge commits with squash' do
+ context 'when squash set' do
+ let(:squash) { true }
+ let(:expected_merge_commit) { merge_request.default_merge_commit_message(user: user) }
+
+ before do
+ project.project_setting.update!(merge_commit_template: 'This is the merge commit')
+ end
+
+ it_behaves_like 'writing with a squash and merge commit'
+ end
+ end
+
+ context 'when the merge commit message is provided at time of merge' do
+ let(:expected_merge_commit) { 'something custom' }
+
+ before do
+ merge_request.merge_params['commit_message'] = expected_merge_commit
+ end
+
+ it 'writes the merged result', :aggregate_failures do
+ expect(result[:status]).to eq :success
+ expect(project.repository.commits(target_ref, limit: 1, order: 'topo').map(&:message)).to(
+ match([expected_merge_commit])
+ )
+ end
+
+ context 'when squash set' do
+ let(:squash) { true }
+
+ it_behaves_like 'writing with a squash and merge commit'
+ end
+ end
+
+ context 'when merge commit strategy' do
+ include_examples 'merge commits without squash'
+ include_examples 'merge commits with squash'
+ end
+
+ context 'when semi-linear merge strategy' do
+ before do
+ project.merge_method = :rebase_merge
+ project.save!
+ end
+
+ include_examples 'merge commits without squash'
+ include_examples 'merge commits with squash'
+
+ context 'when the target ref changes between rebase and merge' do
+ # this tests internal handling of expected_old_oid
+
+ it 'returns an error', :aggregate_failures do
+ expect_next_instance_of(described_class) do |instance|
+ original = instance.method(:maybe_merge!)
+
+ expect(instance).to receive(:maybe_merge!) do |*args|
+ # Corrupt target_ref before the merge, simulating a race with
+ # another instance of the service for the same MR. source_sha is
+ # just an arbitrary valid commit that differs from what was just
+ # written.
+ project.repository.write_ref(target_ref, source_sha)
+ original.call(*args)
+ end
+ end
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq "9:Could not update #{target_ref}. Please refresh and try again."
+ end
+ end
+ end
+
+ context 'when fast-forward merge strategy' do
+ before do
+ project.merge_method = :ff
+ project.save!
+ end
+
+ it_behaves_like 'writing without a merge commit'
+
+ context 'when squash set' do
+ let(:squash) { true }
+
+ it_behaves_like 'writing with a squash and no merge commit'
+ end
+ end
end
end
end
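The rewrite above replaces per-strategy examples with shared examples parameterised through let(:expected_merge_commit) and let(:squash). A minimal, self-contained sketch of that RSpec pattern, with illustrative names rather than the service's real behaviour:

RSpec.describe 'merge commit message selection' do
  shared_examples_for 'writing with a merge commit' do
    it 'uses the expected message' do
      expect(commit_message).to eq(expected_merge_commit)
    end
  end

  let(:default_message) { "Merge branch 'feature' into 'main'" }
  let(:commit_message)  { custom_template || default_message }

  context 'with a custom template' do
    let(:custom_template) { 'This is the merge commit' }
    let(:expected_merge_commit) { custom_template }

    it_behaves_like 'writing with a merge commit'
  end

  context 'with no custom template' do
    let(:custom_template) { nil }
    let(:expected_merge_commit) { default_message }

    it_behaves_like 'writing with a merge commit'
  end
end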
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
deleted file mode 100644
index c48ed19e40d..00000000000
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ /dev/null
@@ -1,144 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MergeRequests::FfMergeService, feature_category: :code_review_workflow do
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:merge_request) do
- create(
- :merge_request,
- source_branch: 'flatten-dir',
- target_branch: 'improve/awesome',
- assignees: [user2],
- author: create(:user)
- )
- end
-
- let(:project) { merge_request.project }
- let(:valid_merge_params) { { sha: merge_request.diff_head_sha } }
-
- before do
- stub_feature_flags(refactor_merge_service: false)
- project.add_maintainer(user)
- project.add_developer(user2)
- end
-
- describe '#execute' do
- context 'valid params' do
- let(:service) { described_class.new(project: project, current_user: user, params: valid_merge_params) }
-
- def execute_ff_merge
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
- end
-
- before do
- allow(service).to receive(:execute_hooks)
- end
-
- it "does not create merge commit" do
- execute_ff_merge
-
- source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
- target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
-
- expect(source_branch_sha).to eq(target_branch_sha)
- end
-
- it 'keeps the merge request valid' do
- expect { execute_ff_merge }
- .not_to change { merge_request.valid? }
- end
-
- it 'updates the merge request to merged' do
- expect { execute_ff_merge }
- .to change { merge_request.merged? }
- .from(false)
- .to(true)
- end
-
- it 'sends email to user2 about merge of new merge_request' do
- execute_ff_merge
-
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
-
- it 'creates resource event about merge_request merge' do
- execute_ff_merge
-
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
- end
-
- it 'does not update squash_commit_sha if it is not a squash' do
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
-
- expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
-
- it 'updates squash_commit_sha if it is a squash' do
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
-
- merge_request.update!(squash: true)
-
- expect { execute_ff_merge }
- .to change { merge_request.squash_commit_sha }
- .from(nil)
-
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
- end
-
- context 'error handling' do
- let(:service) { described_class.new(project: project, current_user: user, params: valid_merge_params.merge(commit_message: 'Awesome message')) }
-
- before do
- allow(Gitlab::AppLogger).to receive(:error)
- end
-
- it 'logs and saves error if there is an exception' do
- error_message = 'error message'
-
- allow(service).to receive(:repository).and_raise("error message")
- allow(service).to receive(:execute_hooks)
-
- service.execute(merge_request)
-
- expect(Gitlab::AppLogger).to have_received(:error)
- .with(hash_including(message: a_string_matching(error_message)))
- end
-
- it 'logs and saves error if there is an PreReceiveError exception' do
- error_message = 'error message'
- raw_message = 'The truth is out there'
-
- pre_receive_error = Gitlab::Git::PreReceiveError.new(raw_message, fallback_message: error_message)
- allow(service).to receive(:repository).and_raise(pre_receive_error)
- allow(service).to receive(:execute_hooks)
-
- service.execute(merge_request)
-
- expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error)
- .with(hash_including(message: a_string_matching(error_message)))
- end
-
- it 'does not update squash_commit_sha if squash merge is not successful' do
- merge_request.update!(squash: true)
-
- expect(project.repository.raw).to receive(:ff_merge) do
- raise 'Merge error'
- end
-
- expect { service.execute(merge_request) }.not_to change { merge_request.squash_commit_sha }
- end
- end
- end
-end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 1faa1fd3644..6e34f4362c1 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -8,437 +8,506 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
- where(:ff_refactor_merge_service_enabled) { [true, false] }
+ let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
+ let(:project) { merge_request.project }
- with_them do
- let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
- let(:project) { merge_request.project }
-
- before do
- stub_feature_flags(refactor_merge_service: ff_refactor_merge_service_enabled)
+ before do
+ project.add_maintainer(user)
+ project.add_developer(user2)
+ end
- project.add_maintainer(user)
- project.add_developer(user2)
+ describe '#execute' do
+ let(:service) { described_class.new(project: project, current_user: user, params: merge_params) }
+ let(:merge_params) do
+ { commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
end
- describe '#execute' do
- let(:service) { described_class.new(project: project, current_user: user, params: merge_params) }
- let(:merge_params) do
- { commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
- end
+ let(:lease_key) { "merge_requests_merge_service:#{merge_request.id}" }
+ let!(:lease) { stub_exclusive_lease(lease_key) }
- let(:lease_key) { "merge_requests_merge_service:#{merge_request.id}" }
- let!(:lease) { stub_exclusive_lease(lease_key) }
+ shared_examples 'with valid params' do
+ before do
+ allow(service).to receive(:execute_hooks)
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- shared_examples 'with valid params' do
- before do
- allow(service).to receive(:execute_hooks)
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
-
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
+ perform_enqueued_jobs do
+ service.execute(merge_request)
end
+ end
- it { expect(merge_request).to be_valid }
- it { expect(merge_request).to be_merged }
+ it { expect(merge_request).to be_valid }
+ it { expect(merge_request).to be_merged }
- it 'does not update squash_commit_sha if it is not a squash' do
- expect(merge_request.squash_commit_sha).to be_nil
- end
+ it 'does not update squash_commit_sha if it is not a squash' do
+ expect(merge_request.squash_commit_sha).to be_nil
+ end
- it 'sends email to user2 about merge of new merge_request' do
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
+ it 'sends email to user2 about merge of new merge_request' do
+ email = ActionMailer::Base.deliveries.last
+ expect(email.to.first).to eq(user2.email)
+ expect(email.subject).to include(merge_request.title)
+ end
- context 'note creation' do
- it 'creates resource state event about merge_request merge' do
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
- end
+ context 'note creation' do
+ it 'creates resource state event about merge_request merge' do
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('merged')
end
end
+ end
- shared_examples 'squashing' do
- # A merge request with 5 commits
- let(:merge_request) do
- create(
- :merge_request,
- :simple,
- author: user2,
- assignees: [user2],
- squash: true,
- source_branch: 'improve/awesome',
- target_branch: 'fix'
- )
- end
+ shared_examples 'squashing' do
+ # A merge request with 5 commits
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :simple,
+ author: user2,
+ assignees: [user2],
+ squash: true,
+ source_branch: 'improve/awesome',
+ target_branch: 'fix'
+ )
+ end
- let(:merge_params) do
- { commit_message: 'Merge commit message',
- squash_commit_message: 'Squash commit message',
- sha: merge_request.diff_head_sha }
- end
+ let(:merge_params) do
+ { commit_message: 'Merge commit message',
+ squash_commit_message: 'Squash commit message',
+ sha: merge_request.diff_head_sha }
+ end
- before do
- allow(service).to receive(:execute_hooks)
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
+ before do
+ allow(service).to receive(:execute_hooks)
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
+ perform_enqueued_jobs do
+ service.execute(merge_request)
end
+ end
- it 'merges the merge request with squashed commits' do
- expect(merge_request).to be_merged
+ it 'merges the merge request with squashed commits' do
+ expect(merge_request).to be_merged
- merge_commit = merge_request.merge_commit
- squash_commit = merge_request.merge_commit.parents.last
+ merge_commit = merge_request.merge_commit
+ squash_commit = merge_request.merge_commit.parents.last
- expect(merge_commit.message).to eq('Merge commit message')
- expect(squash_commit.message).to eq("Squash commit message\n")
- end
+ expect(merge_commit.message).to eq('Merge commit message')
+ expect(squash_commit.message).to eq("Squash commit message\n")
+ end
- it 'persists squash_commit_sha' do
- squash_commit = merge_request.merge_commit.parents.last
+ it 'persists squash_commit_sha' do
+ squash_commit = merge_request.merge_commit.parents.last
- expect(merge_request.squash_commit_sha).to eq(squash_commit.id)
- end
+ expect(merge_request.squash_commit_sha).to eq(squash_commit.id)
end
+ end
- context 'when merge strategy is merge commit' do
- it 'persists merge_commit_sha and nullifies in_progress_merge_commit_sha' do
- service.execute(merge_request)
+ context 'when merge strategy is merge commit' do
+ it 'persists merge_commit_sha and merged_commit_sha and nullifies in_progress_merge_commit_sha' do
+ service.execute(merge_request)
- expect(merge_request.merge_commit_sha).not_to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ expect(merge_request.merge_commit_sha).not_to be_nil
+ expect(merge_request.merged_commit_sha).to eq merge_request.merge_commit_sha
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
- it_behaves_like 'with valid params'
+ it_behaves_like 'with valid params'
- it_behaves_like 'squashing'
+ it_behaves_like 'squashing'
+ end
+
+ context 'when merge strategy is fast forward' do
+ before do
+ project.update!(merge_requests_ff_only_enabled: true)
end
- context 'when merge strategy is fast forward' do
- before do
- project.update!(merge_requests_ff_only_enabled: true)
- end
+ let(:merge_request) do
+ create(
+ :merge_request,
+ source_branch: 'flatten-dir',
+ target_branch: 'improve/awesome',
+ assignees: [user2],
+ author: create(:user)
+ )
+ end
- let(:merge_request) do
- create(
- :merge_request,
- source_branch: 'flatten-dir',
- target_branch: 'improve/awesome',
- assignees: [user2],
- author: create(:user)
- )
- end
+ it 'does not create merge_commit_sha, but persists merged_commit_sha and nullifies in_progress_merge_commit_sha' do
+ service.execute(merge_request)
- it 'does not create merge_commit_sha and nullifies in_progress_merge_commit_sha' do
- service.execute(merge_request)
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.merged_commit_sha).not_to be_nil
+ expect(merge_request.merged_commit_sha).to eq merge_request.diff_head_sha
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ it_behaves_like 'with valid params'
- it_behaves_like 'with valid params'
+ it 'updates squash_commit_sha and merged_commit_sha if it is a squash' do
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- it 'updates squash_commit_sha if it is a squash' do
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
+ merge_request.update!(squash: true)
- merge_request.update!(squash: true)
+ expect { service.execute(merge_request) }
+ .to change { merge_request.squash_commit_sha }
+ .from(nil)
- expect { service.execute(merge_request) }
- .to change { merge_request.squash_commit_sha }
- .from(nil)
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.merged_commit_sha).to eq merge_request.squash_commit_sha
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
+ end
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ context 'running the service once' do
+ let(:ref) { merge_request.to_reference(full: true) }
+ let(:jid) { SecureRandom.hex }
+
+ let(:messages) do
+ [
+ /#{ref} - Git merge started on JID #{jid}/,
+ /#{ref} - Git merge finished on JID #{jid}/,
+ /#{ref} - Post merge started on JID #{jid}/,
+ /#{ref} - Post merge finished on JID #{jid}/,
+ /#{ref} - Merge process finished on JID #{jid}/
+ ]
+ end
+
+ before do
+ merge_request.update!(merge_jid: jid)
+ ::Gitlab::ApplicationContext.push(caller_id: 'MergeWorker')
end
- context 'running the service once' do
- let(:ref) { merge_request.to_reference(full: true) }
- let(:jid) { SecureRandom.hex }
+ it 'logs status messages' do
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
- let(:messages) do
- [
- /#{ref} - Git merge started on JID #{jid}/,
- /#{ref} - Git merge finished on JID #{jid}/,
- /#{ref} - Post merge started on JID #{jid}/,
- /#{ref} - Post merge finished on JID #{jid}/,
- /#{ref} - Merge process finished on JID #{jid}/
- ]
+ messages.each do |message|
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ 'meta.caller_id' => 'MergeWorker',
+ message: message,
+ merge_request_info: ref
+ )
+ ).and_call_original
end
- before do
- merge_request.update!(merge_jid: jid)
- ::Gitlab::ApplicationContext.push(caller_id: 'MergeWorker')
- end
+ service.execute(merge_request)
+ end
+ end
- it 'logs status messages' do
- allow(Gitlab::AppLogger).to receive(:info).and_call_original
+ context 'running the service multiple times' do
+ it 'is idempotent' do
+ 2.times { service.execute(merge_request) }
- messages.each do |message|
- expect(Gitlab::AppLogger).to receive(:info).with(
- hash_including(
- 'meta.caller_id' => 'MergeWorker',
- message: message,
- merge_request_info: ref
- )
- ).and_call_original
- end
+ expect(merge_request.merge_error).to be_falsey
+ expect(merge_request).to be_valid
+ expect(merge_request).to be_merged
- service.execute(merge_request)
- end
+ commit_messages = project.repository.commits('master', limit: 2).map(&:message)
+ expect(commit_messages.uniq.size).to eq(2)
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
+ end
- context 'running the service multiple time' do
- it 'is idempotent' do
- 2.times { service.execute(merge_request) }
+ context 'when an invalid sha is passed' do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :simple,
+ author: user2,
+ assignees: [user2],
+ squash: true,
+ source_branch: 'improve/awesome',
+ target_branch: 'fix'
+ )
+ end
- expect(merge_request.merge_error).to be_falsey
- expect(merge_request).to be_valid
- expect(merge_request).to be_merged
+ let(:merge_params) do
+ { sha: merge_request.commits.second.sha }
+ end
- commit_messages = project.repository.commits('master', limit: 2).map(&:message)
- expect(commit_messages.uniq.size).to eq(2)
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ it 'does not merge the MR' do
+ service.execute(merge_request)
+
+ expect(merge_request).not_to be_merged
+ expect(merge_request.merge_error).to match(/Branch has been updated/)
end
+ end
- context 'when an invalid sha is passed' do
- let(:merge_request) do
- create(
- :merge_request,
- :simple,
- author: user2,
- assignees: [user2],
- squash: true,
- source_branch: 'improve/awesome',
- target_branch: 'fix'
- )
- end
+ context 'when the `sha` param is missing' do
+ let(:merge_params) { {} }
- let(:merge_params) do
- { sha: merge_request.commits.second.sha }
- end
+ it 'returns the error' do
+ merge_error = 'Branch has been updated since the merge was requested. '\
+ 'Please review the changes.'
- it 'does not merge the MR' do
- service.execute(merge_request)
+ expect { service.execute(merge_request) }
+ .to change { merge_request.merge_error }
+ .from(nil).to(merge_error)
+ end
+ end
- expect(merge_request).not_to be_merged
- expect(merge_request.merge_error).to match(/Branch has been updated/)
- end
+ context 'closes related issues' do
+ before do
+ allow(project).to receive(:default_branch).and_return(merge_request.target_branch)
end
- context 'when the `sha` param is missing' do
- let(:merge_params) { {} }
+ it 'closes GitLab issue tracker issues', :sidekiq_inline do
+ issue = create :issue, project: project
+ commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
+ allow(merge_request).to receive(:commits).and_return([commit])
+ merge_request.cache_merge_request_closes_issues!
- it 'returns the error' do
- merge_error = 'Branch has been updated since the merge was requested. '\
- 'Please review the changes.'
+ service.execute(merge_request)
- expect { service.execute(merge_request) }
- .to change { merge_request.merge_error }
- .from(nil).to(merge_error)
- end
+ expect(issue.reload.closed?).to be_truthy
end
- context 'closes related issues' do
+ context 'with Jira integration' do
+ include JiraIntegrationHelpers
+
+ let(:jira_tracker) { project.create_jira_integration }
+ let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
+ let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
+
before do
- allow(project).to receive(:default_branch).and_return(merge_request.target_branch)
+ stub_jira_integration_test
+ project.update!(has_external_issue_tracker: true)
+ jira_integration_settings
+ stub_jira_urls(jira_issue.id)
+ allow(merge_request).to receive(:commits).and_return([commit])
end
- it 'closes GitLab issue tracker issues', :sidekiq_inline do
- issue = create :issue, project: project
- commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
+ it 'closes issues on Jira issue tracker' do
+ jira_issue = ExternalIssue.new('JIRA-123', project)
+ stub_jira_urls(jira_issue)
+ commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
- merge_request.cache_merge_request_closes_issues!
- service.execute(merge_request)
+ expect_any_instance_of(Integrations::Jira).to receive(:close_issue).with(merge_request, jira_issue, user).once
- expect(issue.reload.closed?).to be_truthy
+ service.execute(merge_request)
end
- context 'with Jira integration' do
- include JiraIntegrationHelpers
-
- let(:jira_tracker) { project.create_jira_integration }
- let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
- let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
-
- before do
- stub_jira_integration_test
- project.update!(has_external_issue_tracker: true)
- jira_integration_settings
- stub_jira_urls(jira_issue.id)
- allow(merge_request).to receive(:commits).and_return([commit])
- end
-
- it 'closes issues on Jira issue tracker' do
- jira_issue = ExternalIssue.new('JIRA-123', project)
+ context 'wrong issue markdown' do
+ it 'does not close issues on Jira issue tracker' do
+ jira_issue = ExternalIssue.new('#JIRA-123', project)
stub_jira_urls(jira_issue)
commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
- expect_any_instance_of(Integrations::Jira).to receive(:close_issue).with(merge_request, jira_issue, user).once
+ expect_any_instance_of(Integrations::Jira).not_to receive(:close_issue)
service.execute(merge_request)
end
+ end
+ end
+ end
- context 'wrong issue markdown' do
- it 'does not close issues on Jira issue tracker' do
- jira_issue = ExternalIssue.new('#JIRA-123', project)
- stub_jira_urls(jira_issue)
- commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
- allow(merge_request).to receive(:commits).and_return([commit])
+ context 'closes related todos' do
+ let(:merge_request) { create(:merge_request, assignees: [user], author: user) }
+ let(:project) { merge_request.project }
- expect_any_instance_of(Integrations::Jira).not_to receive(:close_issue)
+ let!(:todo) do
+ create(:todo, :assigned,
+ project: project,
+ author: user,
+ user: user,
+ target: merge_request)
+ end
- service.execute(merge_request)
- end
- end
+ before do
+ allow(service).to receive(:execute_hooks)
+
+ perform_enqueued_jobs do
+ service.execute(merge_request)
+ todo.reload
end
end
- context 'closes related todos' do
- let(:merge_request) { create(:merge_request, assignees: [user], author: user) }
- let(:project) { merge_request.project }
+ it { expect(todo).to be_done }
+ end
- let!(:todo) do
- create(:todo, :assigned,
- project: project,
- author: user,
- user: user,
- target: merge_request)
+ context 'source branch removal' do
+ context 'when the source branch is protected' do
+ let(:service) do
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
end
before do
- allow(service).to receive(:execute_hooks)
-
- perform_enqueued_jobs do
- service.execute(merge_request)
- todo.reload
- end
+ create(:protected_branch, project: project, name: merge_request.source_branch)
end
- it { expect(todo).to be_done }
+ it 'does not delete the source branch' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+
+ service.execute(merge_request)
+ end
end
- context 'source branch removal' do
- context 'when the source branch is protected' do
- let(:service) do
- described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
- end
+ context 'when the source branch is the default branch' do
+ let(:service) do
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
+ end
+ before do
+ allow(project).to receive(:root_ref?).with(merge_request.source_branch).and_return(true)
+ end
+
+ it 'does not delete the source branch' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+ service.execute(merge_request)
+ end
+ end
+
+ context 'when the source branch can be removed' do
+ context 'when MR author set the source branch to be removed' do
before do
- create(:protected_branch, project: project, name: merge_request.source_branch)
+ merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
end
- it 'does not delete the source branch' do
- expect(::Branches::DeleteService).not_to receive(:new)
+ # Not a real use case. When a merger merges an MR, merge param 'should_remove_source_branch' is defined
+ it 'removes the source branch using the author user' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be nil
+ end
+
+ context 'when the merger set the source branch not to be removed' do
+ let(:service) { described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => false)) }
+
+ it 'does not delete the source branch' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
+
+ service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be false
+ end
end
end
- context 'when the source branch is the default branch' do
+ context 'when MR merger set the source branch to be removed' do
let(:service) do
described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
end
- before do
- allow(project).to receive(:root_ref?).with(merge_request.source_branch).and_return(true)
- end
+ it 'removes the source branch using the current user' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
- it 'does not delete the source branch' do
- expect(::Branches::DeleteService).not_to receive(:new)
service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be true
end
end
+ end
+ end
- context 'when the source branch can be removed' do
- context 'when MR author set the source branch to be removed' do
- before do
- merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
- end
+ context 'error handling' do
+ before do
+ allow(Gitlab::AppLogger).to receive(:error)
+ end
- # Not a real use case. When a merger merges a MR , merge param 'should_remove_source_branch' is defined
- it 'removes the source branch using the author user' do
- expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
+ context 'when source is missing' do
+ it 'logs and saves error' do
+ allow(merge_request).to receive(:diff_head_sha) { nil }
- service.execute(merge_request)
+ error_message = 'No source for merge'
- expect(merge_request.reload.should_remove_source_branch?).to be nil
- end
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error).to eq(error_message)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
+ end
- context 'when the merger set the source branch not to be removed' do
- let(:service) { described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => false)) }
+ it 'logs and saves error if there is an exception' do
+ error_message = 'error message'
- it 'does not delete the source branch' do
- expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(error_message)
+ end
- service.execute(merge_request)
+ service.execute(merge_request)
- expect(merge_request.reload.should_remove_source_branch?).to be false
- end
- end
- end
+ expect(merge_request.merge_error).to eq(described_class::GENERIC_ERROR_MESSAGE)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
- context 'when MR merger set the source branch to be removed' do
- let(:service) do
- described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
- end
+ it 'logs and saves error if user is not authorized' do
+ stub_exclusive_lease
- it 'removes the source branch using the current user' do
- expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
+ unauthorized_user = create(:user)
+ project.add_reporter(unauthorized_user)
- service.execute(merge_request)
+ service = described_class.new(project: project, current_user: unauthorized_user)
- expect(merge_request.reload.should_remove_source_branch?).to be true
- end
- end
- end
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error)
+ .to eq('You are not allowed to merge this merge request')
end
- context 'error handling' do
- before do
- allow(Gitlab::AppLogger).to receive(:error)
+ it 'logs and saves error if there is a PreReceiveError exception' do
+ error_message = 'error message'
+
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
end
- context 'when source is missing' do
- it 'logs and saves error' do
- allow(merge_request).to receive(:diff_head_sha) { nil }
+ service.execute(merge_request)
- error_message = 'No source for merge'
+ expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
- service.execute(merge_request)
+ it 'logs and saves error if commit is not created' do
+ allow_any_instance_of(Repository).to receive(:merge).and_return(false)
+ allow(service).to receive(:execute_hooks)
- expect(merge_request.merge_error).to eq(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
- end
+ service.execute(merge_request)
- it 'logs and saves error if there is an exception' do
- error_message = 'error message'
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(described_class::GENERIC_ERROR_MESSAGE)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(described_class::GENERIC_ERROR_MESSAGE)
+ )
+ )
+ end
- allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
- allow(strategy).to receive(:execute_git_merge!).and_raise(error_message)
- end
- # we can remove these allows upon refactor_merge_service cleanup
- allow(service).to receive(:repository).and_raise(error_message)
- allow(service).to receive(:execute_hooks)
+ context 'when squashing is required' do
+ before do
+ merge_request.update!(source_branch: 'master', target_branch: 'feature')
+ merge_request.target_project.project_setting.squash_always!
+ end
+
+ it 'raises an error if squashing is not done' do
+ error_message = 'requires squashing commits'
service.execute(merge_request)
- expect(merge_request.merge_error).to eq(described_class::GENERIC_ERROR_MESSAGE)
+ expect(merge_request).to be_open
+
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -446,34 +515,25 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
)
end
+ end
- it 'logs and saves error if user is not authorized' do
- stub_exclusive_lease
-
- unauthorized_user = create(:user)
- project.add_reporter(unauthorized_user)
-
- service = described_class.new(project: project, current_user: unauthorized_user)
-
- service.execute(merge_request)
-
- expect(merge_request.merge_error)
- .to eq('You are not allowed to merge this merge request')
+ context 'when squashing' do
+ before do
+ merge_request.update!(source_branch: 'master', target_branch: 'feature')
end
- it 'logs and saves error if there is an PreReceiveError exception' do
- error_message = 'error message'
+ it 'logs and saves error if there is an error when squashing' do
+ error_message = 'Squashing failed: Squash the commits locally, resolve any conflicts, then push the branch.'
- allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
- allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- end
- # we can remove these allows upon refactor_merge_service cleanup
- allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- allow(service).to receive(:execute_hooks)
+ allow_any_instance_of(MergeRequests::SquashService).to receive(:squash!).and_return(nil)
+ merge_request.update!(squash: true)
service.execute(merge_request)
- expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -482,65 +542,69 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
end
- it 'logs and saves error if commit is not created' do
- allow_any_instance_of(Repository).to receive(:merge).and_return(false)
- allow(service).to receive(:execute_hooks)
+ it 'logs and saves error if there is a PreReceiveError exception' do
+ error_message = 'error message'
+
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ end
+ merge_request.update!(squash: true)
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(described_class::GENERIC_ERROR_MESSAGE)
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(described_class::GENERIC_ERROR_MESSAGE)
+ message: a_string_matching(error_message)
)
)
end
- context 'when squashing is required' do
+ context 'when fast-forward merge is not allowed' do
before do
- merge_request.update!(source_branch: 'master', target_branch: 'feature')
- merge_request.target_project.project_setting.squash_always!
+ allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
end
- it 'raises an error if squashing is not done' do
- error_message = 'requires squashing commits'
+ %w(semi-linear ff).each do |merge_method|
+ it "logs and saves error if merge is #{merge_method} only" do
+ merge_method = 'rebase_merge' if merge_method == 'semi-linear'
+ merge_request.project.update!(merge_method: merge_method)
+ error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
+ allow(service).to receive(:execute_hooks)
+ expect(lease).to receive(:cancel)
- service.execute(merge_request)
-
- expect(merge_request).to be_open
+ service.execute(merge_request)
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
)
- )
+ end
end
end
+ end
- context 'when squashing' do
+ context 'when not mergeable' do
+ let!(:error_message) { 'Merge request is not mergeable' }
+
+ context 'with failing CI' do
before do
- merge_request.update!(source_branch: 'master', target_branch: 'feature')
+ allow(merge_request).to receive(:mergeable_ci_state?) { false }
end
- it 'logs and saves error if there is an error when squashing' do
- error_message = 'Squashing failed: Squash the commits locally, resolve any conflicts, then push the branch.'
-
- allow_any_instance_of(MergeRequests::SquashService).to receive(:squash!).and_return(nil)
- merge_request.update!(squash: true)
-
+ it 'logs and saves error' do
service.execute(merge_request)
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -548,24 +612,16 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
)
end
+ end
- it 'logs and saves error if there is an PreReceiveError exception' do
- error_message = 'error message'
-
- allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
- allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- end
- # we can remove these allows upon refactor_merge_service cleanup
- allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- allow(service).to receive(:execute_hooks)
- merge_request.update!(squash: true)
+ context 'with unresolved discussions' do
+ before do
+ allow(merge_request).to receive(:mergeable_discussions_state?) { false }
+ end
+ it 'logs and saves error' do
service.execute(merge_request)
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -574,102 +630,35 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
end
- context 'when fast-forward merge is not allowed' do
- before do
- allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
- end
-
- %w(semi-linear ff).each do |merge_method|
- it "logs and saves error if merge is #{merge_method} only" do
- merge_method = 'rebase_merge' if merge_method == 'semi-linear'
- merge_request.project.update!(merge_method: merge_method)
- error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
- allow(service).to receive(:execute_hooks)
- expect(lease).to receive(:cancel)
-
- service.execute(merge_request)
-
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
- end
- end
- end
-
- context 'when not mergeable' do
- let!(:error_message) { 'Merge request is not mergeable' }
-
- context 'with failing CI' do
- before do
- allow(merge_request).to receive(:mergeable_ci_state?) { false }
- end
-
- it 'logs and saves error' do
- service.execute(merge_request)
-
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
- end
-
- context 'with unresolved discussions' do
- before do
- allow(merge_request).to receive(:mergeable_discussions_state?) { false }
- end
-
- it 'logs and saves error' do
- service.execute(merge_request)
-
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
-
- context 'when passing `skip_discussions_check: true` as `options` parameter' do
- it 'merges the merge request' do
- service.execute(merge_request, skip_discussions_check: true)
+ context 'when passing `skip_discussions_check: true` as `options` parameter' do
+ it 'merges the merge request' do
+ service.execute(merge_request, skip_discussions_check: true)
- expect(merge_request).to be_valid
- expect(merge_request).to be_merged
- end
+ expect(merge_request).to be_valid
+ expect(merge_request).to be_merged
end
end
end
+ end
- context 'when passing `check_mergeability_retry_lease: true` as `options` parameter' do
- it 'call mergeable? with check_mergeability_retry_lease' do
- expect(merge_request).to receive(:mergeable?).with(hash_including(check_mergeability_retry_lease: true)).and_call_original
+ context 'when passing `check_mergeability_retry_lease: true` as `options` parameter' do
+ it 'calls mergeable? with check_mergeability_retry_lease' do
+ expect(merge_request).to receive(:mergeable?).with(hash_including(check_mergeability_retry_lease: true)).and_call_original
- service.execute(merge_request, check_mergeability_retry_lease: true)
- end
+ service.execute(merge_request, check_mergeability_retry_lease: true)
end
end
+ end
- context 'when the other sidekiq worker has already been running' do
- before do
- stub_exclusive_lease_taken(lease_key)
- end
+ context 'when the other sidekiq worker has already been running' do
+ before do
+ stub_exclusive_lease_taken(lease_key)
+ end
- it 'does not execute service' do
- expect(service).not_to receive(:commit)
+ it 'does not execute service' do
+ expect(service).not_to receive(:commit)
- service.execute(merge_request)
- end
+ service.execute(merge_request)
end
end
end
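The 'other sidekiq worker' examples above rely on stub_exclusive_lease_taken, i.e. on the service guarding itself with an exclusive lease keyed on the merge request. A hedged sketch of that guard, assuming Gitlab::ExclusiveLease's try_obtain/cancel API; the timeout value, the merge! step, and the merge_request variable are illustrative, not taken from this diff:

lease_key = "merge_requests_merge_service:#{merge_request.id}"
lease = Gitlab::ExclusiveLease.new(lease_key, timeout: 15.minutes)

if (uuid = lease.try_obtain)
  begin
    merge!(merge_request) # hypothetical: the actual merge work
  ensure
    Gitlab::ExclusiveLease.cancel(lease_key, uuid)
  end
else
  # another worker already holds the lease; do nothing so the service stays idempotent
end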
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 06932af26dc..d5b7b56ccdd 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -913,7 +913,7 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
subject { service.execute(oldrev, newrev, 'refs/heads/merge-commit-analyze-before') }
context 'feature enabled' do
- it "updates merge requests' merge_commits" do
+ it "updates merge requests' merge_commit and merged_commit values", :aggregate_failures do
expect(Gitlab::BranchPushMergeCommitAnalyzer).to receive(:new).and_wrap_original do |original_method, commits|
expect(commits.map(&:id)).to eq(%w{646ece5cfed840eca0a4feb21bcd6a81bb19bda3 29284d9bcc350bcae005872d0be6edd016e2efb5 5f82584f0a907f3b30cfce5bb8df371454a90051 8a994512e8c8f0dfcf22bb16df6e876be7a61036 689600b91aabec706e657e38ea706ece1ee8268f db46a1c5a5e474aa169b6cdb7a522d891bc4c5f9})
@@ -927,6 +927,11 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
expect(merge_request.merge_commit.id).to eq('646ece5cfed840eca0a4feb21bcd6a81bb19bda3')
expect(merge_request_side_branch.merge_commit.id).to eq('29284d9bcc350bcae005872d0be6edd016e2efb5')
+ # we need to use read_attribute to bypass the overridden
+ # #merged_commit_sha method, which contains a fallback to
+ # #merge_commit_sha
+ expect(merge_request.read_attribute(:merged_commit_sha)).to eq('646ece5cfed840eca0a4feb21bcd6a81bb19bda3')
+ expect(merge_request_side_branch.read_attribute(:merged_commit_sha)).to eq('29284d9bcc350bcae005872d0be6edd016e2efb5')
end
end
end
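The read_attribute calls above exist because, per the new comment, the model's #merged_commit_sha reader falls back to #merge_commit_sha. A hedged sketch of what such an override could look like; the actual MergeRequest implementation may differ:

class MergeRequest < ApplicationRecord
  # Return the recorded merged commit, falling back to the merge commit --
  # which is why the spec reads the raw column via read_attribute.
  def merged_commit_sha
    super || merge_commit_sha
  end
end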
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 2f6db13a041..72e41f7b814 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -1301,41 +1301,39 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
end
context 'updating labels' do
- let(:label_a) { label }
- let(:label_b) { create(:label, title: 'b', project: project) }
- let(:label_c) { create(:label, title: 'c', project: project) }
- let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) }
- let(:issuable) { merge_request }
+ context 'when merge request is not merged' do
+ let(:label_a) { label }
+ let(:label_b) { create(:label, title: 'b', project: project) }
+ let(:label_c) { create(:label, title: 'c', project: project) }
+ let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) }
+ let(:issuable) { merge_request }
- it_behaves_like 'updating issuable labels'
- it_behaves_like 'keeps issuable labels sorted after update'
- it_behaves_like 'broadcasting issuable labels updates'
+ it_behaves_like 'updating issuable labels'
+ it_behaves_like 'keeps issuable labels sorted after update'
+ it_behaves_like 'broadcasting issuable labels updates'
+ end
context 'when merge request has been merged' do
- context 'when remove_label_ids contains a locked label' do
- let(:params) { { remove_label_ids: [label_locked.id] } }
+ let(:label_a) { create(:label, title: 'a', project: project, lock_on_merge: true) }
+ let(:label_b) { create(:label, title: 'b', project: project, lock_on_merge: true) }
+ let(:label_c) { create(:label, title: 'c', project: project, lock_on_merge: true) }
+ let(:label_unlocked) { create(:label, title: 'unlocked', project: project) }
+ let(:issuable) { merge_request }
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(enforce_locked_labels_on_merge: false)
- end
+ before do
+ merge_request.update!(state: 'merged')
+ end
- it 'removes locked labels' do
- merge_request.update!(state: 'merged', labels: [label_a, label_locked])
- update_issuable(params)
+ it_behaves_like 'updating merged MR with locked labels'
- expect(merge_request.label_ids).to contain_exactly(label_a.id)
- end
- end
-
- context 'when feature flag is enabled' do
- it 'does not remove locked labels' do
- merge_request.update!(state: 'merged', labels: [label_a, label_locked])
- update_issuable(params)
+ context 'when feature flag is disabled' do
+ let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) }
- expect(merge_request.label_ids).to contain_exactly(label_a.id, label_locked.id)
- end
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
end
+
+ it_behaves_like 'updating issuable labels'
end
end
diff --git a/spec/services/metrics/global_metrics_update_service_spec.rb b/spec/services/metrics/global_metrics_update_service_spec.rb
deleted file mode 100644
index 38c7f9282d9..00000000000
--- a/spec/services/metrics/global_metrics_update_service_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::GlobalMetricsUpdateService, :prometheus, feature_category: :metrics do
- describe '#execute' do
- it 'sets gitlab_maintenance_mode gauge metric' do
- metric = subject.maintenance_mode_metric
- expect(Gitlab).to receive(:maintenance_mode?).and_return(true)
-
- expect { subject.execute }.to change { metric.get }.from(0).to(1)
- end
- end
-end
diff --git a/spec/services/metrics/sample_metrics_service_spec.rb b/spec/services/metrics/sample_metrics_service_spec.rb
deleted file mode 100644
index 3442b4303db..00000000000
--- a/spec/services/metrics/sample_metrics_service_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::SampleMetricsService, feature_category: :metrics do
- describe 'query' do
- let(:range_start) { '2019-12-02T23:31:45.000Z' }
- let(:range_end) { '2019-12-03T00:01:45.000Z' }
-
- subject { described_class.new(identifier, range_start: range_start, range_end: range_end).query }
-
- context 'when the file is not found' do
- let(:identifier) { nil }
-
- it { is_expected.to be_nil }
- end
-
- context 'when the file is found' do
- let(:identifier) { 'sample_metric_query_result' }
- let(:source) { File.join(Rails.root, 'spec/fixtures/gitlab/sample_metrics', "#{identifier}.yml") }
- let(:destination) { File.join(Rails.root, Metrics::SampleMetricsService::DIRECTORY, "#{identifier}.yml") }
-
- around do |example|
- FileUtils.mkdir_p(Metrics::SampleMetricsService::DIRECTORY)
- FileUtils.cp(source, destination)
-
- example.run
- ensure
- FileUtils.rm(destination)
- end
-
- subject { described_class.new(identifier, range_start: range_start, range_end: range_end).query }
-
- it 'loads data from the sample file correctly' do
- expect(subject).to eq(YAML.load_file(source)[30])
- end
- end
-
- context 'when the identifier is for a path outside of sample_metrics' do
- let(:identifier) { '../config/secrets' }
-
- it { is_expected.to be_nil }
- end
- end
-end
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
deleted file mode 100644
index 8a2ecd5c3e0..00000000000
--- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
+++ /dev/null
@@ -1,216 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute', feature_category: :purchase do
- subject(:execute_service) { described_class.new(track, interval).execute }
-
- let(:track) { :create }
- let(:interval) { 1 }
-
- let(:frozen_time) { Time.zone.parse('23 Mar 2021 10:14:40 UTC') }
- let(:previous_action_completed_at) { frozen_time - 2.days }
- let(:current_action_completed_at) { nil }
- let(:user_can_perform_current_track_action) { true }
- let(:actions_completed) { { created_at: previous_action_completed_at, git_write_at: current_action_completed_at } }
-
- let_it_be(:group) { create(:group) }
- let_it_be(:user) { create(:user, email_opted_in: true) }
-
- before do
- travel_to(frozen_time)
- create(:onboarding_progress, namespace: group, **actions_completed)
- group.add_developer(user)
- allow(Ability).to receive(:allowed?).with(user, anything, anything).and_return(user_can_perform_current_track_action)
- allow(Notify).to receive(:in_product_marketing_email).and_return(double(deliver_later: nil))
- end
-
- RSpec::Matchers.define :send_in_product_marketing_email do |*args|
- match do
- expect(Notify).to have_received(:in_product_marketing_email).with(*args).once
- end
-
- match_when_negated do
- expect(Notify).not_to have_received(:in_product_marketing_email)
- end
- end
-
- context 'for each track and series with the right conditions' do
- using RSpec::Parameterized::TableSyntax
-
- where(:track, :interval, :actions_completed) do
- :create | 1 | { created_at: frozen_time - 2.days }
- :create | 5 | { created_at: frozen_time - 6.days }
- :create | 10 | { created_at: frozen_time - 11.days }
- :team_short | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days }
- :trial_short | 2 | { created_at: frozen_time - 3.days, git_write_at: frozen_time - 3.days }
- :admin_verify | 3 | { created_at: frozen_time - 4.days, git_write_at: frozen_time - 4.days }
- :verify | 4 | { created_at: frozen_time - 5.days, git_write_at: frozen_time - 5.days }
- :verify | 8 | { created_at: frozen_time - 9.days, git_write_at: frozen_time - 9.days }
- :verify | 13 | { created_at: frozen_time - 14.days, git_write_at: frozen_time - 14.days }
- :trial | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days }
- :trial | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days }
- :trial | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days }
- :team | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days, trial_started_at: frozen_time - 2.days }
- :team | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days, trial_started_at: frozen_time - 6.days }
- :team | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days, trial_started_at: frozen_time - 11.days }
- end
-
- with_them do
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, track, described_class::TRACKS[track][:interval_days].index(interval)) }
- end
- end
-
- context 'when initialized with a different track' do
- let(:track) { :team_short }
-
- it { is_expected.not_to send_in_product_marketing_email }
-
- context 'when the previous track actions have been completed' do
- let(:current_action_completed_at) { frozen_time - 2.days }
-
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, track, 0) }
- end
- end
-
- context 'when initialized with a different interval' do
- let(:interval) { 5 }
-
- it { is_expected.not_to send_in_product_marketing_email }
-
- context 'when the previous track action was completed within the intervals range' do
- let(:previous_action_completed_at) { frozen_time - 6.days }
-
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, :create, 1) }
- end
- end
-
- context 'when the previous track action is not yet completed' do
- let(:previous_action_completed_at) { nil }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context 'when the previous track action is completed outside the intervals range' do
- let(:previous_action_completed_at) { frozen_time - 3.days }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context 'when the current track action is completed' do
- let(:current_action_completed_at) { frozen_time }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context "when the user cannot perform the current track's action" do
- let(:user_can_perform_current_track_action) { false }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context 'when the user has not opted into marketing emails' do
- let(:user) { create(:user, email_opted_in: false) }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- describe 'do not send emails twice' do
- subject { described_class.send_for_all_tracks_and_intervals }
-
- let(:user) { create(:user, email_opted_in: true) }
-
- context 'when user already got a specific email' do
- before do
- create(:in_product_marketing_email, user: user, track: track, series: 0)
- end
-
- it { is_expected.not_to send_in_product_marketing_email(user.id, anything, track, 0) }
- end
-
- context 'when user already got sent the whole track' do
- before do
- 0.upto(2) do |series|
- create(:in_product_marketing_email, user: user, track: track, series: series)
- end
- end
-
- it 'does not send any of the emails anymore', :aggregate_failures do
- 0.upto(2) do |series|
- expect(subject).not_to send_in_product_marketing_email(user.id, anything, track, series)
- end
- end
- end
-
- context 'when user is in two groups' do
- let(:other_group) { create(:group) }
-
- before do
- other_group.add_developer(user)
- end
-
- context 'when both groups would get the same email' do
- before do
- create(:onboarding_progress, namespace: other_group, **actions_completed)
- end
-
- it 'does not send the same email twice' do
- subject
-
- expect(Notify).to have_received(:in_product_marketing_email).with(user.id, anything, :create, 0).once
- end
- end
-
- context 'when other group gets a different email' do
- before do
- create(:onboarding_progress, namespace: other_group, created_at: previous_action_completed_at, git_write_at: frozen_time - 2.days)
- end
-
- it 'sends both emails' do
- subject
-
- expect(Notify).to have_received(:in_product_marketing_email).with(user.id, group.id, :create, 0)
- expect(Notify).to have_received(:in_product_marketing_email).with(user.id, other_group.id, :team_short, 0)
- end
- end
- end
- end
-
- it 'records sent emails' do
- expect { subject }.to change { Users::InProductMarketingEmail.count }.by(1)
-
- expect(
- Users::InProductMarketingEmail.where(
- user: user,
- track: Users::InProductMarketingEmail::ACTIVE_TRACKS[:create],
- series: 0
- )
- ).to exist
- end
-
- context 'when invoked with a non existing track' do
- let(:track) { :foo }
-
- before do
- stub_const("#{described_class}::TRACKS", { bar: {} })
- end
-
- it { expect { subject }.to raise_error(ArgumentError, 'Track foo not defined') }
- end
-
- context 'when group is a sub-group' do
- let(:root_group) { create(:group) }
- let(:group) { create(:group) }
-
- before do
- group.parent = root_group
- group.save!
-
- allow(Ability).to receive(:allowed?).and_call_original
- end
-
- it 'does not raise an exception' do
- expect { execute_service }.not_to raise_error
- end
- end
-end
diff --git a/spec/services/note_summary_spec.rb b/spec/services/note_summary_spec.rb
index 1cbbb68205d..f3233ef925e 100644
--- a/spec/services/note_summary_spec.rb
+++ b/spec/services/note_summary_spec.rb
@@ -24,8 +24,13 @@ RSpec.describe NoteSummary, feature_category: :code_review_workflow do
describe '#note' do
it 'returns note hash' do
freeze_time do
- expect(create_note_summary.note).to eq(noteable: noteable, project: project, author: user, note: 'note',
- created_at: Time.current)
+ expect(create_note_summary.note).to eq(
+ noteable: noteable,
+ project: project,
+ author: user,
+ note: 'note',
+ created_at: Time.current
+ )
end
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 22509885c92..b5eb5f8037a 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -180,8 +180,9 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
execute_create_service
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
+ let(:namespace) { project.namespace }
subject(:service_action) { execute_create_service }
end
end
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index 396e23351c9..54782774b4e 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Notes::DestroyService, feature_category: :team_planning do
end
describe 'comment removed event tracking', :snowplow do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
let(:note) { create(:note, project: project, noteable: issue) }
let(:service_action) { described_class.new(project, user).execute(note) }
@@ -39,11 +39,12 @@ RSpec.describe Notes::DestroyService, feature_category: :team_planning do
expect do
service_action
end.to change {
- counter.unique_events(event_names: property, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now)
+ counter.unique_events(event_names: action, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now)
}.by(1)
end
- it_behaves_like 'issue_edit snowplow tracking' do
+ it_behaves_like 'internal event tracking' do
+ let(:namespace) { project.namespace }
subject(:execute_service_action) { service_action }
end
end
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index 0065fd639b8..b6e29299fdd 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -188,6 +188,45 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do
end
end
+ describe '/confidential' do
+ let_it_be_with_reload(:noteable) { create(:work_item, :issue, project: project) }
+ let_it_be(:note_text) { '/confidential' }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project, note: note_text) }
+
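+ # The /confidential quick action only succeeds when the work item has no
+ # non-confidential children; the contexts below cover both outcomes.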
+ context 'when work item does not have children' do
+ it 'leaves the note empty' do
+ expect(execute(note)).to be_empty
+ end
+
+ it 'marks work item as confidential' do
+ expect { execute(note) }.to change { noteable.reload.confidential }.from(false).to(true)
+ end
+ end
+
+ context 'when work item has children' do
+ before do
+ create(:parent_link, work_item: task, work_item_parent: noteable)
+ end
+
+ context 'when children are not confidential' do
+ let(:task) { create(:work_item, :task, project: project) }
+
+ it 'does not mark parent work item as confidential' do
+ expect { execute(note) }.to not_change { noteable.reload.confidential }.from(false)
+ expect(noteable.errors[:base]).to include('A confidential work item cannot have a parent that already has non-confidential children.')
+ end
+ end
+
+ context 'when children are confidential' do
+ let(:task) { create(:work_item, :confidential, :task, project: project) }
+
+ it 'marks parent work item as confidential' do
+ expect { execute(note) }.to change { noteable.reload.confidential }.from(false).to(true)
+ end
+ end
+ end
+ end
+
describe 'note with command & text' do
describe '/close, /label, /assign & /milestone' do
let(:note_text) do
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index e109bfbcd0b..8389db000b8 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -75,6 +75,13 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
update_note({})
end
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
+ let(:namespace) { project.namespace }
+
+ subject(:service_action) { update_note(note: 'new text') }
+ end
+
it 'tracks issue usage data', :clean_gitlab_redis_shared_state do
counter = Gitlab::UsageDataCounters::HLLRedisCounter
@@ -85,11 +92,6 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
update_note(note: 'new text')
end.to change { counter.unique_events(event_names: event, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now) }.by(1)
end
-
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
- subject(:service_action) { update_note(note: 'new text') }
- end
end
context 'when note text was changed' do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 028c3ea6610..40597c30c4a 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -324,7 +324,7 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
describe "never emails the ghost user" do
- let(:key_options) { { user: User.ghost } }
+ let(:key_options) { { user: Users::Internal.ghost } }
it "does not send email to key owner" do
expect { subject }.not_to have_enqueued_email(key.id, mail: "new_ssh_key_email")
@@ -345,7 +345,7 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
describe "never emails the ghost user" do
- let(:key_options) { { user: User.ghost } }
+ let(:key_options) { { user: Users::Internal.ghost } }
it "does not send email to key owner" do
expect { subject }.not_to have_enqueued_email(key.id, mail: "new_gpg_key_email")
@@ -376,6 +376,74 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
+ describe '#resource_access_tokens_about_to_expire' do
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:expiring_token) { create(:personal_access_token, user: project_bot, expires_at: 5.days.from_now) }
+
+ let_it_be(:owner1) { create(:user) }
+ let_it_be(:owner2) { create(:user) }
+
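+ # The expiring token belongs to a project bot user; the notification is
+ # expected to go to the owners/maintainers of the bot's resource, not to the bot.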
+ subject(:notification_service) do
+ notification.resource_access_tokens_about_to_expire(project_bot, [expiring_token.name])
+ end
+
+ context 'when the resource is a group' do
+ let(:group) { create(:group) }
+
+ before do
+ group.add_owner(owner1)
+ group.add_owner(owner2)
+ group.add_reporter(project_bot)
+ end
+
+ it 'sends emails to the group owners' do
+ expect { notification_service }.to(
+ have_enqueued_email(
+ owner1,
+ project_bot.resource_bot_resource,
+ [expiring_token.name],
+ mail: "resource_access_tokens_about_to_expire_email"
+ ).and(
+ have_enqueued_email(
+ owner2,
+ project_bot.resource_bot_resource,
+ [expiring_token.name],
+ mail: "resource_access_tokens_about_to_expire_email"
+ )
+ )
+ )
+ end
+ end
+
+ context 'when the resource is a project' do
+ let(:project) { create(:project) }
+
+ before do
+ project.add_maintainer(owner1)
+ project.add_maintainer(owner2)
+ project.add_reporter(project_bot)
+ end
+
+ it 'sends emails to the project maintainers' do
+ expect { notification_service }.to(
+ have_enqueued_email(
+ owner1,
+ project_bot.resource_bot_resource,
+ [expiring_token.name],
+ mail: "resource_access_tokens_about_to_expire_email"
+ ).and(
+ have_enqueued_email(
+ owner2,
+ project_bot.resource_bot_resource,
+ [expiring_token.name],
+ mail: "resource_access_tokens_about_to_expire_email"
+ )
+ )
+ )
+ end
+ end
+ end
+
describe '#access_token_about_to_expire' do
let_it_be(:user) { create(:user) }
let_it_be(:pat) { create(:personal_access_token, user: user, expires_at: 5.days.from_now) }
@@ -534,7 +602,7 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
let(:subject) { described_class.new }
let(:mailer) { double(deliver_later: true) }
- let(:issue) { create(:issue, author: User.support_bot) }
+ let(:issue) { create(:issue, author: Users::Internal.support_bot) }
let(:project) { issue.project }
let(:note) { create(:note, noteable: issue, project: project) }
@@ -576,6 +644,14 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
it_behaves_like 'notification with exact metric events', 1
+
+ context 'when service desk is disabled' do
+ before do
+ project.update!(service_desk_enabled: false)
+ end
+
+ it_behaves_like 'no participants are notified'
+ end
end
context 'do exist and note is confidential' do
@@ -1211,9 +1287,11 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
let_it_be(:member_and_not_mentioned) { create(:user, developer_projects: [project]) }
let_it_be(:non_member_and_mentioned) { create(:user) }
let_it_be(:note) do
- create(:diff_note_on_design,
- noteable: design,
- note: "Hello #{member_and_mentioned.to_reference}, G'day #{non_member_and_mentioned.to_reference}")
+ create(
+ :diff_note_on_design,
+ noteable: design,
+ note: "Hello #{member_and_mentioned.to_reference}, G'day #{non_member_and_mentioned.to_reference}"
+ )
end
let_it_be(:note_2) do
@@ -3498,12 +3576,14 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
let(:commit) { project.commit }
def create_pipeline(user, status)
- create(:ci_pipeline, status,
- project: project,
- user: user,
- ref: 'refs/heads/master',
- sha: commit.id,
- before_sha: '00000000')
+ create(
+ :ci_pipeline, status,
+ project: project,
+ user: user,
+ ref: 'refs/heads/master',
+ sha: commit.id,
+ before_sha: '00000000'
+ )
end
before_all do
@@ -4012,12 +4092,14 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
project.add_maintainer(reviewer)
merge_request.assignees.each { |assignee| project.add_maintainer(assignee) }
- create(:diff_note_on_merge_request,
- project: project,
- noteable: merge_request,
- author: reviewer,
- review: review,
- note: "cc @mention")
+ create(
+ :diff_note_on_merge_request,
+ project: project,
+ noteable: merge_request,
+ author: reviewer,
+ review: review,
+ note: "cc @mention"
+ )
end
it 'sends emails' do
@@ -4059,10 +4141,18 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
subject { notification.inactive_project_deletion_warning(project, deletion_date) }
it "sends email to project owners and maintainers" do
- expect { subject }.to have_enqueued_email(project, maintainer, deletion_date,
- mail: "inactive_project_deletion_warning_email")
- expect { subject }.not_to have_enqueued_email(project, developer, deletion_date,
- mail: "inactive_project_deletion_warning_email")
+ expect { subject }.to have_enqueued_email(
+ project,
+ maintainer,
+ deletion_date,
+ mail: "inactive_project_deletion_warning_email"
+ )
+ expect { subject }.not_to have_enqueued_email(
+ project,
+ developer,
+ deletion_date,
+ mail: "inactive_project_deletion_warning_email"
+ )
end
end
diff --git a/spec/services/packages/ml_model/create_package_file_service_spec.rb b/spec/services/packages/ml_model/create_package_file_service_spec.rb
index 32754279e17..30a6bedd07b 100644
--- a/spec/services/packages/ml_model/create_package_file_service_spec.rb
+++ b/spec/services/packages/ml_model/create_package_file_service_spec.rb
@@ -38,12 +38,12 @@ RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :m
it 'creates package file', :aggregate_failures do
expect { execute_service }
- .to change { project.packages.ml_model.count }.by(1)
+ .to change { Packages::MlModel::Package.count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
.and change { Packages::PackageFileBuildInfo.count }.by(0)
.and change { Ml::ModelVersion.count }.by(1)
- new_model = project.packages.ml_model.last
+ new_model = Packages::MlModel::Package.last
package_file = new_model.package_files.last
new_model_version = Ml::ModelVersion.last
diff --git a/spec/services/packages/npm/generate_metadata_service_spec.rb b/spec/services/packages/npm/generate_metadata_service_spec.rb
index fdd0ab0ccee..d8e794405e6 100644
--- a/spec/services/packages/npm/generate_metadata_service_spec.rb
+++ b/spec/services/packages/npm/generate_metadata_service_spec.rb
@@ -70,6 +70,17 @@ RSpec.describe ::Packages::Npm::GenerateMetadataService, feature_category: :pack
it { expect(subject.dig(package2.version, dependency_type)).to be nil }
end
+
+ context 'when generating dependencies' do
+ let(:packages) { ::Packages::Package.where(id: package1.id) }
+
+ it 'loads grouped dependency links', :aggregate_failures do
+ expect(::Packages::DependencyLink).to receive(:dependency_ids_grouped_by_type).and_call_original
+ expect(::Packages::Package).not_to receive(:including_dependency_links)
+
+ subject
+ end
+ end
end
context 'for metadatum' do
diff --git a/spec/services/packages/nuget/check_duplicates_service_spec.rb b/spec/services/packages/nuget/check_duplicates_service_spec.rb
new file mode 100644
index 00000000000..9675aa5f5e2
--- /dev/null
+++ b/spec/services/packages/nuget/check_duplicates_service_spec.rb
@@ -0,0 +1,155 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::CheckDuplicatesService, feature_category: :package_registry do
+ include PackagesManagerApiSpecHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:file_name) { 'package.nupkg' }
+
+ let(:params) do
+ {
+ file_name: file_name,
+ file: temp_file(file_name)
+ }
+ end
+
+ let(:service) { described_class.new(project, user, params) }
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ shared_examples 'returning error' do |reason:, message:|
+ it 'returns an error' do
+ response = execute
+
+ expect(response.status).to eq(:error)
+ expect(response.reason).to eq(reason)
+ expect(response.message).to eq(message)
+ end
+ end
+
+ shared_examples 'returning success' do
+ it 'returns success' do
+ response = execute
+
+ expect(response.status).to eq(:success)
+ end
+ end
+
+ shared_examples 'handling duplicates disallowed when package exists' do
+ it_behaves_like 'returning error', reason: :conflict,
+ message: 'A package with the same name and version already exists'
+
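+ # A package name matching nuget_duplicate_exception_regex is exempt from the
+ # duplicate check, so the same setup is expected to succeed instead.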
+ context 'with nuget_duplicate_exception_regex' do
+ before do
+ package_settings.update_column(:nuget_duplicate_exception_regex, ".*#{existing_package.name.last(3)}.*")
+ end
+
+ it_behaves_like 'returning success'
+ end
+ end
+
+ context 'with existing package' do
+ let_it_be(:existing_package) { create(:nuget_package, :with_metadatum, project: project, version: '1.7.15.0') }
+ let_it_be(:metadata) do
+ {
+ package_name: existing_package.name,
+ package_version: existing_package.version,
+ authors: 'authors',
+ description: 'description'
+ }
+ end
+
+ context 'when nuget duplicates are allowed' do
+ before do
+ allow_next_instance_of(Namespace::PackageSetting) do |instance|
+ allow(instance).to receive(:nuget_duplicates_allowed?).and_return(true)
+ end
+ end
+
+ it_behaves_like 'returning success'
+ end
+
+ context 'when nuget duplicates are not allowed' do
+ let!(:package_settings) do
+ create(:namespace_package_setting, :group, namespace: project.namespace, nuget_duplicates_allowed: false)
+ end
+
+ context 'when package file is in object storage' do
+ let(:params) { super().merge(remote_url: 'https://example.com') }
+
+ before do
+ allow_next_instance_of(::Packages::Nuget::ExtractRemoteMetadataFileService) do |instance|
+ allow(instance).to receive(:execute)
+ .and_return(ServiceResponse.success(payload: Nokogiri::XML::Document.new))
+ end
+ allow_next_instance_of(::Packages::Nuget::ExtractMetadataContentService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: metadata))
+ end
+ end
+
+ it_behaves_like 'handling duplicates disallowed when package exists'
+
+ context 'when ExtractRemoteMetadataFileService raises ExtractionError' do
+ before do
+ allow_next_instance_of(::Packages::Nuget::ExtractRemoteMetadataFileService) do |instance|
+ allow(instance).to receive(:execute).and_raise(
+ ::Packages::Nuget::ExtractRemoteMetadataFileService::ExtractionError, 'nuspec file not found'
+ )
+ end
+ end
+
+ it_behaves_like 'returning error', reason: :bad_request, message: 'nuspec file not found'
+ end
+
+ context 'when version is normalized' do
+ let(:metadata) { super().merge(package_version: '1.7.15') }
+
+ it_behaves_like 'handling duplicates disallowed when package exists'
+ end
+ end
+
+ context 'when package file is on disk' do
+ before do
+ allow_next_instance_of(::Packages::Nuget::MetadataExtractionService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: metadata))
+ end
+ end
+
+ it_behaves_like 'handling duplicates disallowed when package exists'
+ end
+ end
+ end
+
+ context 'with a non-existing package' do
+ let_it_be(:metadata) do
+ { package_name: 'foo', package_version: '1.0.0', authors: 'author', description: 'description' }
+ end
+
+ before do
+ allow_next_instance_of(::Packages::Nuget::MetadataExtractionService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: metadata))
+ end
+ end
+
+ context 'when nuget duplicates are allowed' do
+ let_it_be(:package_settings) do
+ create(:namespace_package_setting, :group, namespace: project.namespace, nuget_duplicates_allowed: true)
+ end
+
+ it_behaves_like 'returning success'
+ end
+
+ context 'when nuget duplicates are not allowed' do
+ let_it_be(:package_settings) do
+ create(:namespace_package_setting, :group, namespace: project.namespace, nuget_duplicates_allowed: false)
+ end
+
+ it_behaves_like 'returning success'
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/extract_metadata_file_service_spec.rb b/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
index 412c22fe8de..57b08f8773c 100644
--- a/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
+++ b/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :package_registry do
- let_it_be(:package_file) { create(:nuget_package).package_files.first }
+ let_it_be_with_reload(:package_file) { create(:nuget_package).package_files.first }
- let(:service) { described_class.new(package_file.id) }
+ let(:service) { described_class.new(package_file) }
describe '#execute' do
subject { service.execute }
@@ -14,7 +14,7 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
it { expect { subject }.to raise_error(described_class::ExtractionError, error_message) }
end
- context 'with valid package file id' do
+ context 'with valid package file' do
expected_metadata = <<~XML.squish
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
@@ -39,8 +39,8 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
end
end
- context 'with invalid package file id' do
- let(:package_file) { instance_double('Packages::PackageFile', id: 555) }
+ context 'with invalid package file' do
+ let(:package_file) { nil }
it_behaves_like 'raises an error', 'invalid package file'
end
@@ -53,7 +53,7 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
it_behaves_like 'raises an error', 'invalid package file'
end
- context 'with a 0 byte package file id' do
+ context 'with a 0 byte package file' do
before do
allow_next_instance_of(Packages::PackageFileUploader) do |instance|
allow(instance).to receive(:size).and_return(0)
@@ -76,7 +76,7 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
context 'with a too big nuspec file' do
before do
allow_next_instance_of(Zip::File) do |instance|
- allow(instance).to receive(:glob).and_return([instance_double('File', size: 6.megabytes)])
+ allow(instance).to receive(:glob).and_return([instance_double(File, size: 6.megabytes)])
end
end
diff --git a/spec/services/packages/nuget/extract_remote_metadata_file_service_spec.rb b/spec/services/packages/nuget/extract_remote_metadata_file_service_spec.rb
new file mode 100644
index 00000000000..b5aff6e7588
--- /dev/null
+++ b/spec/services/packages/nuget/extract_remote_metadata_file_service_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::ExtractRemoteMetadataFileService, feature_category: :package_registry do
+ let_it_be(:remote_url) { 'http://example.com/package.nupkg' }
+ let_it_be(:nupkg_filepath) { 'packages/nuget/package.nupkg' }
+
+ describe '#execute' do
+ subject(:service) { described_class.new(remote_url) }
+
+ context 'when the remote URL is blank' do
+ let(:remote_url) { '' }
+
+ it { expect { service.execute }.to raise_error(described_class::ExtractionError, 'invalid file url') }
+ end
+
+ context 'when the package file is corrupted' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield('corrupted data')
+ end
+
+ it { expect { service.execute }.to raise_error(described_class::ExtractionError, 'nuspec file not found') }
+ end
+
+ context 'when reaching the maximum received fragments' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield('Fragment 1').and_yield('Fragment 2').and_yield('Fragment 3').and_yield('Fragment 4')
+ .and_yield('Fragment 5').and_yield(fixture_file(nupkg_filepath))
+ end
+
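+ # Only a limited number of streamed fragments are scanned, so the nuspec
+ # entry in the final fragment is never reached and extraction fails.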
+ it { expect { service.execute }.to raise_error(described_class::ExtractionError, 'nuspec file not found') }
+ end
+
+ context 'when nuspec file is too big' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fixture_file(nupkg_filepath))
+ allow_next_instance_of(Zip::Entry) do |instance|
+ allow(instance).to receive(:size).and_return(6.megabytes)
+ end
+ end
+
+ it { expect { service.execute }.to raise_error(described_class::ExtractionError, 'nuspec file too big') }
+ end
+
+ context 'when nuspec file is fragmented' do
+ let_it_be(:nuspec_path) { expand_fixture_path('packages/nuget/with_metadata.nuspec') }
+ let_it_be(:tmp_zip) { Tempfile.new('nuget_zip') }
+ let_it_be(:zipped_nuspec) { zip_nuspec_file(nuspec_path, tmp_zip.path).get_raw_input_stream.read }
+ let_it_be(:fragments) { zipped_nuspec.chars.each_slice(zipped_nuspec.size / 2).map(&:join) }
+
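+ # Splitting the zipped nuspec across two yielded chunks simulates a
+ # fragmented streamed download from object storage.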
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fragments[0]).and_yield(fragments[1])
+ end
+
+ after do
+ tmp_zip.unlink
+ end
+
+ it 'ignores the Zip::DecompressionError and constructs the nuspec file from the fragments' do
+ response = service.execute
+
+ expect(response).to be_success
+ expect(response.payload).to include('<id>DummyProject.WithMetadata</id>')
+ .and include('<version>1.2.3</version>')
+ end
+ end
+
+ context 'when the remote URL is valid' do
+ let(:fragments) { fixture_file(nupkg_filepath).chars.each_slice(1.kilobyte).map(&:join) }
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fragments[0]).and_yield(fragments[1]).and_yield(fragments[2]).and_yield(fragments[3])
+ end
+
+ it 'returns a success response with the nuspec file content' do
+ response = service.execute
+
+ expect(response).to be_success
+ expect(response.payload).to include('<id>DummyProject.DummyPackage</id>')
+ .and include('<version>1.0.0</version>')
+ end
+ end
+
+ context 'with a corrupted nupkg file with a wrong entry size' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fixture_file(nupkg_filepath))
+ allow_next_instance_of(Zip::Entry) do |instance|
+ allow(instance).to receive(:extract).and_raise(Zip::EntrySizeError)
+ end
+ end
+
+ it {
+ expect do
+ service.execute
+ end.to raise_error(described_class::ExtractionError, /nuspec file has the wrong entry size/)
+ }
+ end
+
+ context 'with a Zip::Error exception' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fixture_file(nupkg_filepath))
+ allow(Zip::InputStream).to receive(:open).and_raise(Zip::Error)
+ end
+
+ it {
+ expect do
+ service.execute
+ end.to raise_error(described_class::ExtractionError, /Error opening zip stream/)
+ }
+ end
+ end
+
+ def zip_nuspec_file(nuspec_path, zip_path)
+ Zip::File.open(zip_path, Zip::File::CREATE) do |zipfile|
+ zipfile.add('package.nuspec', nuspec_path)
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/metadata_extraction_service_spec.rb b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
index c8c06414830..ea7557b6d64 100644
--- a/spec/services/packages/nuget/metadata_extraction_service_spec.rb
+++ b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :package_registry do
let_it_be(:package_file) { create(:nuget_package).package_files.first }
- subject { described_class.new(package_file.id) }
+ subject { described_class.new(package_file) }
describe '#execute' do
let(:nuspec_file_content) do
@@ -49,7 +49,7 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
end
it 'calls the necessary services and executes the metadata extraction' do
- expect(::Packages::Nuget::ExtractMetadataFileService).to receive(:new).with(package_file.id) do
+ expect(::Packages::Nuget::ExtractMetadataFileService).to receive(:new).with(package_file) do
double.tap do |service|
expect(service).to receive(:execute).and_return(double(payload: nuspec_file_content))
end
diff --git a/spec/services/packages/nuget/odata_package_entry_service_spec.rb b/spec/services/packages/nuget/odata_package_entry_service_spec.rb
new file mode 100644
index 00000000000..d4c47538ce2
--- /dev/null
+++ b/spec/services/packages/nuget/odata_package_entry_service_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :package_registry do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:params) { { package_name: 'dummy', package_version: '1.0.0' } }
+ let(:doc) { Nokogiri::XML(subject.payload) }
+
+ subject { described_class.new(project, params).execute }
+
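+ # The service returns an OData <entry> XML document; the examples parse it
+ # with Nokogiri and assert on the id, title, content src and Version nodes.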
+ describe '#execute' do
+ shared_examples 'returning a package entry with the correct attributes' do |pkg_version, content_url_pkg_version|
+ it 'returns a package entry with the correct attributes' do
+ expect(doc.root.name).to eq('entry')
+ expect(doc_node('id').text).to include(
+ id_url(project.id, params[:package_name], pkg_version)
+ )
+ expect(doc_node('title').text).to eq(params[:package_name])
+ expect(doc_node('content').attr('src')).to include(
+ content_url(project.id, params[:package_name], content_url_pkg_version)
+ )
+ expect(doc_node('Version').text).to eq(pkg_version)
+ end
+ end
+
+ context 'when package_version is present' do
+ it 'returns a success ServiceResponse' do
+ expect(subject).to be_success
+ end
+
+ it_behaves_like 'returning a package entry with the correct attributes', '1.0.0', '1.0.0'
+ end
+
+ context 'when package_version is nil' do
+ let(:params) { { package_name: 'dummy', package_version: nil } }
+
+ it 'returns a success ServiceResponse' do
+ expect(subject).to be_success
+ end
+
+ it_behaves_like 'returning a package entry with the correct attributes',
+ described_class::SEMVER_LATEST_VERSION_PLACEHOLDER, described_class::LATEST_VERSION_FOR_V2_DOWNLOAD_ENDPOINT
+ end
+
+ context 'when package_version is 0.0.0-latest-version' do
+ let(:params) { { package_name: 'dummy', package_version: described_class::SEMVER_LATEST_VERSION_PLACEHOLDER } }
+
+ it 'returns a success ServiceResponse' do
+ expect(subject).to be_success
+ end
+
+ it_behaves_like 'returning a package entry with the correct attributes',
+ described_class::SEMVER_LATEST_VERSION_PLACEHOLDER, described_class::LATEST_VERSION_FOR_V2_DOWNLOAD_ENDPOINT
+ end
+ end
+
+ def doc_node(name)
+ doc.css('*').detect { |el| el.name == name }
+ end
+
+ def id_url(id, package_name, package_version)
+ "api/v4/projects/#{id}/packages/nuget/v2/Packages(Id='#{package_name}',Version='#{package_version}')"
+ end
+
+ def content_url(id, package_name, package_version)
+ "api/v4/projects/#{id}/packages/nuget/v2/download/#{package_name}/#{package_version}"
+ end
+end
diff --git a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
index b18f62c1c28..e1cce2c87eb 100644
--- a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
+++ b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
@@ -21,9 +21,9 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category:
project.mark_pages_as_deployed
expect(project.pages_metadatum.reload.deployed).to eq(true)
- expect(service.execute).to(
- eq(status: :success,
- message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed")
+ expect(service.execute).to eq(
+ status: :success,
+ message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed"
)
expect(project.pages_metadatum.reload.deployed).to eq(false)
@@ -35,9 +35,9 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category:
project.mark_pages_as_deployed
expect(project.pages_metadatum.reload.deployed).to eq(true)
- expect(service.execute).to(
- eq(status: :success,
- message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed")
+ expect(service.execute).to eq(
+ status: :success,
+ message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed"
)
expect(project.pages_metadatum.reload.deployed).to eq(true)
@@ -49,9 +49,9 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category:
expect(project.pages_metadatum.reload.deployed).to eq(true)
- expect(service.execute).to(
- eq(status: :error,
- message: "Archive not created. Missing public directory in #{project.pages_path}")
+ expect(service.execute).to eq(
+ status: :error,
+ message: "Archive not created. Missing public directory in #{project.pages_path}"
)
expect(project.pages_metadatum.reload.deployed).to eq(true)
@@ -60,9 +60,9 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category:
it 'removes pages archive when can not save deployment' do
archive = fixture_file_upload("spec/fixtures/pages.zip")
expect_next_instance_of(::Pages::ZipDirectoryService) do |zip_service|
- expect(zip_service).to receive(:execute).and_return(status: :success,
- archive_path: archive.path,
- entries_count: 3)
+ expect(zip_service).to receive(:execute).and_return(
+ status: :success, archive_path: archive.path, entries_count: 3
+ )
end
expect_next_instance_of(PagesDeployment) do |deployment|
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 2377fbcf003..7f8992e8bbc 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -132,8 +132,7 @@ RSpec.describe PagesDomains::ObtainLetsEncryptCertificateService, feature_catego
ef.create_extension("basicConstraints", "CA:TRUE", true),
ef.create_extension("subjectKeyIdentifier", "hash")
]
- cert.add_extension ef.create_extension("authorityKeyIdentifier",
- "keyid:always,issuer:always")
+ cert.add_extension ef.create_extension("authorityKeyIdentifier", "keyid:always,issuer:always")
cert.sign key, OpenSSL::Digest.new('SHA256')
diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb
index 6fa44310ae5..f6aca9970c8 100644
--- a/spec/services/preview_markdown_service_spec.rb
+++ b/spec/services/preview_markdown_service_spec.rb
@@ -28,9 +28,11 @@ RSpec.describe PreviewMarkdownService, feature_category: :team_planning do
let(:text) { "```suggestion\nfoo\n```" }
let(:params) do
- suggestion_params.merge(text: text,
- target_type: 'MergeRequest',
- target_id: merge_request.iid)
+ suggestion_params.merge(
+ text: text,
+ target_type: 'MergeRequest',
+ target_id: merge_request.iid
+ )
end
let(:service) { described_class.new(project, user, params) }
@@ -52,15 +54,16 @@ RSpec.describe PreviewMarkdownService, feature_category: :team_planning do
end
it 'returns suggestions referenced in text' do
- position = Gitlab::Diff::Position.new(new_path: path,
- new_line: line,
- diff_refs: diff_refs)
+ position = Gitlab::Diff::Position.new(new_path: path, new_line: line, diff_refs: diff_refs)
expect(Gitlab::Diff::SuggestionsParser)
.to receive(:parse)
- .with(text, position: position,
- project: merge_request.project,
- supports_suggestion: true)
+ .with(
+ text,
+ position: position,
+ project: merge_request.project,
+ supports_suggestion: true
+ )
.and_call_original
result = service.execute
diff --git a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
index ecabaa28119..0d7d1254428 100644
--- a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
@@ -48,7 +48,24 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService, feature
stub_delete_reference_requests('A' => 500, 'Ba' => 500)
end
- it { is_expected.to eq(status: :error, message: 'could not delete tags') }
+ it { is_expected.to eq(status: :error, message: "could not delete tags: #{tags.join(', ')}") }
+
+ context 'when a large list of tag delete fails' do
+ let(:tags) { Array.new(135) { |i| "tag#{i}" } }
+ let(:container_repository) { instance_double(ContainerRepository) }
+
+ before do
+ allow(ContainerRepository).to receive(:find).with(repository).and_return(container_repository)
+ tags.each do |tag|
+ stub_delete_reference_requests(tag => 500)
+ end
+ allow(container_repository).to receive(:delete_tag_by_name).and_return(false)
+ end
+
+ it 'truncates the log message' do
+ expect(subject).to eq(status: :error, message: "could not delete tags: #{tags.join(', ')}".truncate(1000))
+ end
+ end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 683e438eb08..ce7e5188c7b 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an
project = create_project(bot_user, opts)
expect(project.errors.errors.length).to eq 1
- expect(project.errors.messages[:namespace].first).to eq(("is not valid"))
+ expect(project.errors.messages[:namespace].first).to eq("is not valid")
end
end
end
@@ -640,6 +640,17 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an
expect(project.project_namespace).to be_in_sync_with_project(project)
end
+ it 'fails and adds an error when the repository fails to be created' do
+ expect_next_instance_of(Project) do |instance|
+ expect(instance).to receive(:create_repository).and_return(false)
+ end
+
+ project = create_project(user, opts)
+ expect(project).not_to be_persisted
+ expect(project.errors.messages).to have_key(:base)
+ expect(project.errors.messages[:base].first).to match('Failed to create repository')
+ end
+
context 'when another repository already exists on disk' do
let(:opts) do
{
@@ -1158,4 +1169,17 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an
expect_not_disabled_features(project, exclude: [:repository, :builds, :merge_requests])
end
end
+
+ it 'adds pages unique domain', feature_category: :pages do
+ stub_pages_setting(enabled: true)
+
+ expect(Gitlab::Pages)
+ .to receive(:add_unique_domain_to)
+ .and_call_original
+
+ project = create_project(user, opts)
+
+ expect(project.project_setting.pages_unique_domain_enabled).to eq(true)
+ expect(project.project_setting.pages_unique_domain).to be_present
+ end
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index 97a3b338069..16b9d2618ca 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -163,72 +163,100 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
context 'when importer does not support refmap' do
it 'succeeds if repository import is successful' do
- expect(project.repository).to receive(:import_repository).and_return(true)
- expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
+ expect_next_instance_of(Gitlab::BitbucketImport::ParallelImporter) do |importer|
expect(importer).to receive(:execute).and_return(true)
end
- expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
- expect(service).to receive(:execute).and_return(status: :success)
- end
-
result = subject.execute
expect(result[:status]).to eq :success
end
it 'fails if repository import fails' do
- expect(project.repository)
- .to receive(:import_repository)
- .with('https://bitbucket.org/vim/vim.git', resolved_address: '')
- .and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
+ expect_next_instance_of(Gitlab::BitbucketImport::ParallelImporter) do |importer|
+ expect(importer).to receive(:execute)
+ .and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
+ end
result = subject.execute
expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
- end
- context 'when lfs import fails' do
- it 'logs the error' do
- error_message = 'error message'
+ context 'when bitbucket_parallel_importer feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_parallel_importer: false)
+ end
- expect(project.repository).to receive(:import_repository).and_return(true)
+ it 'succeeds if repository import is successful' do
+ expect(project.repository).to receive(:import_repository).and_return(true)
+ expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
+ expect(importer).to receive(:execute).and_return(true)
+ end
- expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
- expect(importer).to receive(:execute).and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
+ expect(service).to receive(:execute).and_return(status: :success)
+ end
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :success
end
- expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
- expect(service).to receive(:execute).and_return(status: :error, message: error_message)
+ it 'fails if repository import fails' do
+ expect(project.repository)
+ .to receive(:import_repository)
+ .with('https://bitbucket.org/vim/vim.git', resolved_address: '')
+ .and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
- expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
+ context 'when lfs import fails' do
+ it 'logs the error' do
+ error_message = 'error message'
- subject.execute
- end
- end
+ expect(project.repository).to receive(:import_repository).and_return(true)
- context 'when repository import scheduled' do
- before do
- expect(project.repository).to receive(:import_repository).and_return(true)
- allow(subject).to receive(:import_data)
- end
+ expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
+ expect(importer).to receive(:execute).and_return(true)
+ end
- it 'downloads lfs objects if lfs_enabled is enabled for project' do
- allow(project).to receive(:lfs_enabled?).and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
+ expect(service).to receive(:execute).and_return(status: :error, message: error_message)
+ end
- expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute)
+ expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
- subject.execute
- end
+ subject.execute
+ end
+ end
- it 'does not download lfs objects if lfs_enabled is not enabled for project' do
- allow(project).to receive(:lfs_enabled?).and_return(false)
- expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
+ context 'when repository import scheduled' do
+ before do
+ expect(project.repository).to receive(:import_repository).and_return(true)
+ allow(subject).to receive(:import_data)
+ end
- subject.execute
+ it 'downloads lfs objects if lfs_enabled is enabled for project' do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+
+ expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute)
+
+ subject.execute
+ end
+
+ it 'does not download lfs objects if lfs_enabled is not enabled for project' do
+ allow(project).to receive(:lfs_enabled?).and_return(false)
+ expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
+
+ subject.execute
+ end
+ end
end
end
end
diff --git a/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb b/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
index 4ad6fd0edff..fab8cafd1a0 100644
--- a/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
+++ b/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
@@ -4,33 +4,34 @@ require 'spec_helper'
RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_category: :experimentation_adoption do
describe '#execute' do
- let(:user) { create(:user, email_opted_in: true) }
+ let(:user) { create(:user) }
let(:project) { create(:project) }
let(:campaign) { Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE }
+ before do
+ allow(Notify)
+ .to receive(:build_ios_app_guide_email)
+ .and_return(instance_double(ActionMailer::MessageDelivery, deliver_later: true))
+ end
+
subject(:execute) do
described_class.new(project, campaign).execute
end
context 'users can receive marketing emails' do
- let(:owner) { create(:user, email_opted_in: true) }
- let(:maintainer) { create(:user, email_opted_in: true) }
- let(:developer) { create(:user, email_opted_in: true) }
+ let(:maintainer) { create(:user) }
+ let(:developer) { create(:user) }
before do
- project.add_owner(owner)
project.add_developer(developer)
project.add_maintainer(maintainer)
end
it 'sends the email to all project members with access_level >= Developer', :aggregate_failures do
- double = instance_double(ActionMailer::MessageDelivery, deliver_later: true)
-
- [owner, maintainer, developer].each do |user|
+ [project.owner, maintainer, developer].each do |user|
email = user.notification_email_or_default
- expect(Notify).to receive(:build_ios_app_guide_email).with(email) { double }
- expect(double).to receive(:deliver_later)
+ expect(Notify).to receive(:build_ios_app_guide_email).with(email)
end
execute
@@ -39,7 +40,7 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
it 'records sent emails', :aggregate_failures do
expect { execute }.to change { Users::InProductMarketingEmail.count }.from(0).to(3)
- [owner, maintainer, developer].each do |user|
+ [project.owner, maintainer, developer].each do |user|
expect(
Users::InProductMarketingEmail.where(
user: user,
@@ -58,15 +59,24 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
end
end
- shared_examples 'does nothing' do
- it 'does not send the email' do
+ shared_examples 'does not send the email' do
+ it do
email = user.notification_email_or_default
expect(Notify).not_to receive(:build_ios_app_guide_email).with(email)
execute
end
+ end
+
+ shared_examples 'does not create a record of the sent email' do
+ it do
+ execute
+ expect(
+ Users::InProductMarketingEmail.where(
+ user: user,
+ campaign: campaign
+ )
+ ).not_to exist
- it 'does not create a record of the sent email' do
- expect { execute }.not_to change { Users::InProductMarketingEmail.count }
end
end
@@ -94,12 +104,6 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
execute
end
end
-
- context 'when user is not opted in to receive marketing emails' do
- let(:user) { create(:user, email_opted_in: false) }
-
- it_behaves_like 'does nothing'
- end
end
context 'when campaign email has already been sent to the user' do
@@ -108,7 +112,7 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
create(:in_product_marketing_email, :campaign, user: user, campaign: campaign)
end
- it_behaves_like 'does nothing'
+ it_behaves_like 'does not send the email'
end
context "when user is a reporter" do
@@ -116,7 +120,8 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
project.add_reporter(user)
end
- it_behaves_like 'does nothing'
+ it_behaves_like 'does not send the email'
+ it_behaves_like 'does not create a record of the sent email'
end
context "when user is a guest" do
@@ -124,7 +129,8 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
project.add_guest(user)
end
- it_behaves_like 'does nothing'
+ it_behaves_like 'does not send the email'
+ it_behaves_like 'does not create a record of the sent email'
end
end
end
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 73932887cd9..77e7be6cc0a 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -86,9 +86,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :i
end
context 'with simultaneous manual configuration' do
- let_it_be(:integration) { create(:alert_management_prometheus_integration, :legacy, project: project) }
+ let_it_be(:alerting_setting) { create(:project_alerting_setting, :with_http_integration, project: project) }
let_it_be(:old_prometheus_integration) { create(:prometheus_integration, project: project) }
- let_it_be(:alerting_setting) { create(:project_alerting_setting, project: project, token: integration.token) }
+ let_it_be(:integration) { project.alert_management_http_integrations.last! }
subject { service.execute(integration.token, integration) }
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index a113f3506e1..6c767876d05 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -20,13 +20,45 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:custom_root_file_metadata) { "spec/fixtures/pages_with_custom_root.zip.meta" }
let(:metadata) { fixture_file_upload(metadata_filename) if File.exist?(metadata_filename) }
- subject { described_class.new(project, build) }
+ subject(:service) { described_class.new(project, build) }
+
+ RSpec.shared_examples 'pages size limit is' do |size_limit|
+ context "when size is below the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(size_limit - 1.megabyte)
+ allow(metadata).to receive(:entries).and_return([])
+ end
+
+ it 'updates pages correctly' do
+ subject.execute
+
+ deploy_status = GenericCommitStatus.last
+ expect(deploy_status.description).not_to be_present
+ expect(project.pages_metadatum).to be_deployed
+ end
+ end
+
+ context "when size is above the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(size_limit + 1.megabyte)
+ allow(metadata).to receive(:entries).and_return([])
+ end
+
+ it 'limits the maximum size of gitlab pages' do
+ subject.execute
+
+ deploy_status = GenericCommitStatus.last
+ expect(deploy_status.description).to match(/artifacts for pages are too large/)
+ expect(deploy_status).to be_script_failure
+ end
+ end
+ end
context 'when a deploy stage already exists', :aggregate_failures do
let!(:stage) { create(:ci_stage, name: 'deploy', pipeline: pipeline) }
it 'assigns the deploy stage' do
- expect { subject.execute }
+ expect { service.execute }
.to change(GenericCommitStatus, :count).by(1)
.and change(Ci::Stage.where(name: 'deploy'), :count).by(0)
@@ -41,7 +73,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
context 'when a deploy stage does not exists' do
it 'assigns the deploy stage' do
- expect { subject.execute }
+ expect { service.execute }
.to change(GenericCommitStatus, :count).by(1)
.and change(Ci::Stage.where(name: 'deploy'), :count).by(1)
@@ -64,7 +96,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
it "doesn't delete artifacts after deploying" do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
expect(project.pages_metadatum).to be_deployed
expect(build.artifacts?).to eq(true)
@@ -72,7 +104,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
expect(project.pages_metadatum).to be_deployed
expect(project.pages_deployed?).to be_truthy
end
@@ -84,12 +116,12 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
root_namespace_id: project.root_namespace.id
}
- expect { subject.execute }.to publish_event(Pages::PageDeployedEvent).with(expected_data)
+ expect { service.execute }.to publish_event(Pages::PageDeployedEvent).with(expected_data)
end
it 'creates pages_deployment and saves it in the metadata' do
expect do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
end.to change { project.pages_deployments.count }.by(1)
deployment = project.pages_deployments.last
@@ -108,7 +140,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
project.reload
expect do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
end.to change { project.pages_deployments.count }.by(1)
expect(project.pages_metadatum.reload.pages_deployment).to eq(project.pages_deployments.last)
@@ -127,12 +159,12 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
)
)
- execute
+ service.execute
end
it 'removes older deployments', :sidekiq_inline do
expect do
- execute
+ service.execute
end.not_to change { PagesDeployment.count } # it creates one and deletes one
expect(PagesDeployment.find_by_id(old_deployment.id)).to be_nil
@@ -144,7 +176,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:metadata_filename) { empty_metadata_filename }
it 'returns an error' do
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`")
end
@@ -158,7 +190,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:options) { { publish: 'foo' } }
it 'creates pages_deployment and saves it in the metadata' do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
deployment = project.pages_deployments.last
expect(deployment.root_directory).to eq(options[:publish])
@@ -169,7 +201,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:options) { { publish: 'bar' } }
it 'returns an error' do
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`")
end
@@ -181,7 +213,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:metadata_filename) { "spec/fixtures/pages.zip.meta" }
it 'returns an error' do
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`")
end
@@ -190,13 +222,13 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
end
it 'limits pages file count' do
create(:plan_limits, :default_plan, pages_file_entries: 2)
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("pages site contains 3 file entries, while limit is set to 2")
end
@@ -209,9 +241,11 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
it 'raises an error' do
- expect { execute }.to raise_error(SocketError)
+ expect { service.execute }.to raise_error(SocketError)
build.reload
+
+ deploy_status = GenericCommitStatus.last
expect(deploy_status).to be_failed
expect(project.pages_metadatum).not_to be_deployed
end
@@ -223,9 +257,11 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
it 'does not raise an error as failed job' do
- execute
+ service.execute
build.reload
+
+ deploy_status = GenericCommitStatus.last
expect(deploy_status).to be_failed
expect(project.pages_metadatum).not_to be_deployed
end
@@ -234,7 +270,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
context 'with background jobs running', :sidekiq_inline do
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
end
end
@@ -246,41 +282,43 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
end
- shared_examples 'successfully deploys' do
- it 'succeeds' do
- expect do
- expect(execute).to eq(:success)
- end.to change { project.pages_deployments.count }.by(1)
+  it 'creates a new pages deployment and marks it as deployed' do
+ expect do
+ expect(service.execute[:status]).to eq(:success)
+ end.to change { project.pages_deployments.count }.by(1)
- deployment = project.pages_deployments.last
- expect(deployment.ci_build_id).to eq(build.id)
- end
+ deployment = project.pages_deployments.last
+ expect(deployment.ci_build_id).to eq(build.id)
end
- include_examples 'successfully deploys'
-
context 'when old deployment present' do
+ let!(:old_build) { create(:ci_build, name: 'pages', pipeline: old_pipeline, ref: 'HEAD') }
+ let!(:old_deployment) { create(:pages_deployment, ci_build: old_build, project: project) }
+
before do
- old_build = create(:ci_build, pipeline: old_pipeline, ref: 'HEAD')
- old_deployment = create(:pages_deployment, ci_build: old_build, project: project)
project.update_pages_deployment!(old_deployment)
end
- include_examples 'successfully deploys'
+ it 'deactivates old deployments' do
+ expect(service.execute[:status]).to eq(:success)
+
+ expect(old_deployment.reload.deleted_at).not_to be_nil
+ end
end
context 'when newer deployment present' do
before do
new_pipeline = create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha)
- new_build = create(:ci_build, pipeline: new_pipeline, ref: 'HEAD')
+ new_build = create(:ci_build, name: 'pages', pipeline: new_pipeline, ref: 'HEAD')
new_deployment = create(:pages_deployment, ci_build: new_build, project: project)
project.update_pages_deployment!(new_deployment)
end
it 'fails with outdated reference message' do
- expect(execute).to eq(:error)
+ expect(service.execute[:status]).to eq(:error)
expect(project.reload.pages_metadatum).not_to be_deployed
+ deploy_status = GenericCommitStatus.last
expect(deploy_status).to be_failed
expect(deploy_status.description).to eq('build SHA is outdated for this ref')
end
@@ -294,7 +332,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
.and_return(file.size + 1)
end
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq('The uploaded artifact size does not match the expected value')
project.pages_metadatum.reload
@@ -318,18 +356,18 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
it 'fails with exception raised' do
expect do
- execute
+ service.execute
end.to raise_error("Validation failed: File sha256 can't be blank")
end
end
it 'fails if no artifacts' do
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
end
it 'fails for invalid archive' do
create(:ci_job_artifact, :archive, file: invalid_file, job: build)
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
end
describe 'maximum pages artifacts size' do
@@ -383,21 +421,13 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
it 'marks older pages:deploy jobs retried' do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
expect(older_deploy_job.reload).to be_retried
+
+ deploy_status = GenericCommitStatus.last
expect(deploy_status.ci_stage).to eq(stage)
expect(deploy_status.stage_idx).to eq(stage.position)
end
end
-
- private
-
- def deploy_status
- GenericCommitStatus.where(name: 'pages:deploy').last
- end
-
- def execute
- subject.execute[:status]
- end
end
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index d3972009d38..b30c1d30044 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -229,6 +229,27 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
end
end
+ context 'when new shard has a repository pool without the root project' do
+ let!(:new_pool_repository) { create(:pool_repository, :ready, shard: shard_to, disk_path: pool_repository.disk_path) }
+
+ before do
+ pool_repository.update!(source_project: nil)
+ new_pool_repository.update!(source_project: nil)
+ end
+
+      it 'connects the project to it' do
+ result = subject.execute
+ expect(result).to be_success
+
+ project.reload.cleanup
+
+ project_pool_repository = project.pool_repository
+
+ expect(project_pool_repository).to eq(new_pool_repository)
+ expect(object_pool_double).to have_received(:link).with(project.repository.raw)
+ end
+ end
+
context 'when repository does not exist' do
let(:project) { create(:project) }
let(:checksum) { nil }
@@ -266,6 +287,32 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
end
end
+ context 'when project belongs to the repository pool without a root project' do
+ let!(:pool_repository) { create(:pool_repository, :ready, shard: shard_from) }
+
+ before do
+ pool_repository.update!(source_project: nil)
+ project.update!(pool_repository: pool_repository)
+ end
+
+    it 'creates a new repository pool without a root project and connects the project to it' do
+ result = subject.execute
+ expect(result).to be_success
+
+ project.reload.cleanup
+
+ new_pool_repository = project.pool_repository
+
+ expect(new_pool_repository).not_to eq(pool_repository)
+ expect(new_pool_repository.shard).to eq(shard_second_storage)
+ expect(new_pool_repository.state).to eq('ready')
+ expect(new_pool_repository.source_project).to eq(nil)
+ expect(new_pool_repository.disk_path).to eq(pool_repository.disk_path)
+
+ expect(object_pool_double).to have_received(:link).with(project.repository.raw)
+ end
+ end
+
context 'when object pool checksum does not match' do
let(:new_object_pool_checksum) { 'not_match' }
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index d9090b87514..195cfe78b3f 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -791,72 +791,26 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
end
describe 'when updating pages unique domain', feature_category: :pages do
- let(:group) { create(:group, path: 'group') }
- let(:project) { create(:project, path: 'project', group: group) }
-
- it 'updates project pages unique domain' do
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
- end.to change { project.project_setting.pages_unique_domain_enabled }
-
- expect(project.project_setting.pages_unique_domain_enabled).to eq true
- expect(project.project_setting.pages_unique_domain).to match %r{project-group-\w+}
- end
-
- it 'does not changes unique domain when it already exists' do
- project.project_setting.update!(
- pages_unique_domain_enabled: false,
- pages_unique_domain: 'unique-domain'
- )
-
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
- end.to change { project.project_setting.pages_unique_domain_enabled }
-
- expect(project.project_setting.pages_unique_domain_enabled).to eq true
- expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
+ before do
+ stub_pages_setting(enabled: true)
end
- it 'does not changes unique domain when it disabling unique domain' do
- project.project_setting.update!(
- pages_unique_domain_enabled: true,
- pages_unique_domain: 'unique-domain'
- )
-
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: false
- })
- end.not_to change { project.project_setting.pages_unique_domain }
+ context 'when turning it on' do
+ it 'adds pages unique domain' do
+ expect(Gitlab::Pages).to receive(:add_unique_domain_to)
- expect(project.project_setting.pages_unique_domain_enabled).to eq false
- expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
+ expect { update_project(project, user, project_setting_attributes: { pages_unique_domain_enabled: true }) }
+ .to change { project.project_setting.pages_unique_domain_enabled }
+ .from(false).to(true)
+ end
end
- context 'when there is another project with the unique domain' do
- it 'fails pages unique domain already exists' do
- create(
- :project_setting,
- pages_unique_domain_enabled: true,
- pages_unique_domain: 'unique-domain'
- )
-
- allow(Gitlab::Pages::RandomDomain)
- .to receive(:generate)
- .and_return('unique-domain')
+ context 'when turning it off' do
+      it 'does not add pages unique domain' do
+ expect(Gitlab::Pages).not_to receive(:add_unique_domain_to)
- result = update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
-
- expect(result).to eq(
- status: :error,
- message: 'Project setting pages unique domain has already been taken'
- )
+ expect { update_project(project, user, project_setting_attributes: { pages_unique_domain_enabled: false }) }
+ .not_to change { project.project_setting.pages_unique_domain_enabled }
end
end
end
diff --git a/spec/services/protected_branches/api_service_spec.rb b/spec/services/protected_branches/api_service_spec.rb
index f7f5f451a49..d7717f08efb 100644
--- a/spec/services/protected_branches/api_service_spec.rb
+++ b/spec/services/protected_branches/api_service_spec.rb
@@ -6,10 +6,12 @@ RSpec.describe ProtectedBranches::ApiService, feature_category: :compliance_mana
shared_examples 'execute with entity' do
it 'creates a protected branch with prefilled defaults' do
expect(::ProtectedBranches::CreateService).to receive(:new).with(
- entity, user, hash_including(
- push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
- merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
- )
+ entity,
+ user,
+ hash_including(
+ push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
+ merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
+ )
).and_call_original
expect(described_class.new(entity, user, { name: 'new name' }).create).to be_valid
@@ -17,10 +19,12 @@ RSpec.describe ProtectedBranches::ApiService, feature_category: :compliance_mana
it 'updates a protected branch without prefilled defaults' do
expect(::ProtectedBranches::UpdateService).to receive(:new).with(
- entity, user, hash_including(
- push_access_levels_attributes: [],
- merge_access_levels_attributes: []
- )
+ entity,
+ user,
+ hash_including(
+ push_access_levels_attributes: [],
+ merge_access_levels_attributes: []
+ )
).and_call_original
expect do
diff --git a/spec/services/push_event_payload_service_spec.rb b/spec/services/push_event_payload_service_spec.rb
index 50da5ca9b24..999b71ff754 100644
--- a/spec/services/push_event_payload_service_spec.rb
+++ b/spec/services/push_event_payload_service_spec.rb
@@ -190,9 +190,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
end
it 'returns :removed when removing an existing ref' do
- service = described_class.new(event,
- before: '123',
- after: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: '123', after: Gitlab::Git::BLANK_SHA)
expect(service.action).to eq(:removed)
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 30a3c212ba5..5e7fb8397e3 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -29,9 +29,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
before do
- stub_licensed_features(multiple_issue_assignees: false,
- multiple_merge_request_reviewers: false,
- multiple_merge_request_assignees: false)
+ stub_licensed_features(
+ multiple_issue_assignees: false,
+ multiple_merge_request_reviewers: false,
+ multiple_merge_request_assignees: false
+ )
end
describe '#execute' do
@@ -1394,6 +1396,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:issuable) { merge_request }
end
+ it_behaves_like 'subscribe command' do
+ let(:content) { '/subscribe' }
+ let(:issuable) { work_item }
+ end
+
it_behaves_like 'unsubscribe command' do
let(:content) { '/unsubscribe' }
let(:issuable) { issue }
@@ -1404,6 +1411,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:issuable) { merge_request }
end
+ it_behaves_like 'unsubscribe command' do
+ let(:content) { '/unsubscribe' }
+ let(:issuable) { work_item }
+ end
+
it_behaves_like 'failed command', 'Could not apply due command.' do
let(:content) { '/due 2016-08-28' }
let(:issuable) { merge_request }
@@ -1860,11 +1872,21 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:issuable) { merge_request }
end
+ it_behaves_like 'award command' do
+ let(:content) { '/award :100:' }
+ let(:issuable) { work_item }
+ end
+
context 'ignores command with no argument' do
it_behaves_like 'failed command' do
let(:content) { '/award' }
let(:issuable) { issue }
end
+
+ it_behaves_like 'failed command' do
+ let(:content) { '/award' }
+ let(:issuable) { work_item }
+ end
end
context 'ignores non-existing / invalid emojis' do
@@ -1877,6 +1899,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:content) { '/award :lorem_ipsum:' }
let(:issuable) { issue }
end
+
+ it_behaves_like 'failed command' do
+ let(:content) { '/award :lorem_ipsum:' }
+ let(:issuable) { work_item }
+ end
end
context 'if issuable is a Commit' do
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index ca5dd912e77..0170c3abcaf 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe Releases::CreateService, feature_category: :continuous_integratio
context 'when project is a catalog resource' do
let(:ref) { 'master' }
- let!(:catalog_resource) { create(:catalog_resource, project: project) }
+ let!(:ci_catalog_resource) { create(:ci_catalog_resource, project: project) }
context 'and it is valid' do
let_it_be(:project) { create(:project, :repository, description: 'our components') }
diff --git a/spec/services/releases/destroy_service_spec.rb b/spec/services/releases/destroy_service_spec.rb
index 953490ac379..2b6e96a781e 100644
--- a/spec/services/releases/destroy_service_spec.rb
+++ b/spec/services/releases/destroy_service_spec.rb
@@ -28,6 +28,26 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
it 'returns the destroyed object' do
is_expected.to include(status: :success, release: release)
end
+
+ context 'when the release is for a catalog resource' do
+ let!(:catalog_resource) { create(:ci_catalog_resource, project: project, state: 'published') }
+ let!(:version) { create(:ci_catalog_resource_version, catalog_resource: catalog_resource, release: release) }
+
+    it 'does not update the catalog resource if there are still releases' do
+ second_release = create(:release, project: project, tag: 'v1.2.0')
+ create(:ci_catalog_resource_version, catalog_resource: catalog_resource, release: second_release)
+
+ subject
+
+ expect(catalog_resource.reload.state).to eq('published')
+ end
+
+ it 'updates the catalog resource if there are no more releases' do
+ subject
+
+ expect(catalog_resource.reload.state).to eq('draft')
+ end
+ end
end
context 'when tag does not exist in the repository' do
@@ -42,9 +62,7 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
let!(:release) {}
it 'returns an error' do
- is_expected.to include(status: :error,
- message: 'Release does not exist',
- http_status: 404)
+ is_expected.to include(status: :error, message: 'Release does not exist', http_status: 404)
end
end
@@ -52,9 +70,7 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
let(:user) { repoter }
it 'returns an error' do
- is_expected.to include(status: :error,
- message: 'Access Denied',
- http_status: 403)
+ is_expected.to include(status: :error, message: 'Access Denied', http_status: 403)
end
end
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
index c00146961e3..060697cd1df 100644
--- a/spec/services/resource_access_tokens/revoke_service_spec.rb
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -33,8 +33,7 @@ RSpec.describe ResourceAccessTokens::RevokeService, feature_category: :system_ac
subject
expect(
- Users::GhostUserMigration.where(user: resource_bot,
- initiator_user: user)
+ Users::GhostUserMigration.where(user: resource_bot, initiator_user: user)
).to be_exists
end
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 8393ce78df8..28b345f8191 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -49,10 +49,8 @@ RSpec.describe ResourceEvents::ChangeLabelsService, feature_category: :team_plan
expect(event.action).to eq(action)
end
- it 'expires resource note etag cache' do
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(
- "/#{resource.project.namespace.to_param}/#{resource.project.to_param}/noteable/issue/#{resource.id}/notes"
- )
+ it 'broadcasts resource note change' do
+ expect(resource).to receive(:broadcast_notes_changed)
described_class.new(resource, author).execute(added_labels: [labels[0]])
end
@@ -126,9 +124,11 @@ RSpec.describe ResourceEvents::ChangeLabelsService, feature_category: :team_plan
change_labels
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
let(:user) { author }
+ let(:namespace) { project.namespace }
+
subject(:service_action) { change_labels }
end
end
diff --git a/spec/services/resource_events/merge_into_notes_service_spec.rb b/spec/services/resource_events/merge_into_notes_service_spec.rb
index 6eb6780d704..39dd2508728 100644
--- a/spec/services/resource_events/merge_into_notes_service_spec.rb
+++ b/spec/services/resource_events/merge_into_notes_service_spec.rb
@@ -61,8 +61,7 @@ RSpec.describe ResourceEvents::MergeIntoNotesService, feature_category: :team_pl
create_event(created_at: 4.days.ago)
event = create_event(created_at: 1.day.ago)
- notes = described_class.new(resource, user,
- last_fetched_at: 2.days.ago).execute
+ notes = described_class.new(resource, user, last_fetched_at: 2.days.ago).execute
expect(notes.count).to eq 1
expect(notes.first.discussion_id).to eq event.reload.discussion_id
diff --git a/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
index 7ac2249642a..abd412097d2 100644
--- a/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
+++ b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Security::CiConfiguration::DependencyScanningCreateService, :snowplow,
- feature_category: :software_composition_analysis do
+ feature_category: :software_composition_analysis do
subject(:result) { described_class.new(project, user).execute }
let(:branch_name) { 'set-dependency-scanning-config-1' }
diff --git a/spec/services/security/merge_reports_service_spec.rb b/spec/services/security/merge_reports_service_spec.rb
index 809d0b27c20..c141bbe5b5a 100644
--- a/spec/services/security/merge_reports_service_spec.rb
+++ b/spec/services/security/merge_reports_service_spec.rb
@@ -16,78 +16,87 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
let(:identifier_wasc) { build(:ci_reports_security_identifier, external_id: '13', external_type: 'wasc') }
let(:finding_id_1) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_1_primary, identifier_1_cve],
- scanner: scanner_1,
- severity: :low
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_1_primary, identifier_1_cve],
+ scanner: scanner_1,
+ severity: :low
+ )
end
let(:finding_id_1_extra) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_1_primary, identifier_1_cve],
- scanner: scanner_1,
- severity: :low
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_1_primary, identifier_1_cve],
+ scanner: scanner_1,
+ severity: :low
+ )
end
let(:finding_id_2_loc_1) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_2_primary, identifier_2_cve],
- location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
- scanner: scanner_2,
- severity: :medium
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
+ scanner: scanner_2,
+ severity: :medium
+ )
end
let(:finding_id_2_loc_1_extra) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_2_primary, identifier_2_cve],
- location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
- scanner: scanner_2,
- severity: :medium
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
+ scanner: scanner_2,
+ severity: :medium
+ )
end
let(:finding_id_2_loc_2) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_2_primary, identifier_2_cve],
- location: build(:ci_reports_security_locations_sast, start_line: 42, end_line: 44),
- scanner: scanner_2,
- severity: :medium
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 42, end_line: 44),
+ scanner: scanner_2,
+ severity: :medium
+ )
end
let(:finding_cwe_1) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_cwe],
- scanner: scanner_3,
- severity: :high
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_cwe],
+ scanner: scanner_3,
+ severity: :high
+ )
end
let(:finding_cwe_2) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_cwe],
- scanner: scanner_1,
- severity: :critical
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_cwe],
+ scanner: scanner_1,
+ severity: :critical
+ )
end
let(:finding_wasc_1) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_wasc],
- scanner: scanner_1,
- severity: :medium
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_wasc],
+ scanner: scanner_1,
+ severity: :medium
+ )
end
let(:finding_wasc_2) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_wasc],
- scanner: scanner_2,
- severity: :critical
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_wasc],
+ scanner: scanner_2,
+ severity: :critical
+ )
end
let(:report_1_findings) { [finding_id_1, finding_id_2_loc_1, finding_id_2_loc_1_extra, finding_cwe_2, finding_wasc_1] }
diff --git a/spec/services/service_desk/custom_email_verifications/create_service_spec.rb b/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
index fceb6fc78b4..0046213e0b2 100644
--- a/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
+++ b/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
@@ -14,6 +14,12 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
let(:service) { described_class.new(project: project, current_user: user) }
+ let(:error_feature_flag_disabled) { 'Feature flag service_desk_custom_email is not enabled' }
+ let(:error_user_not_authorized) { s_('ServiceDesk|User cannot manage project.') }
+ let(:error_settings_missing) { s_('ServiceDesk|Service Desk setting missing') }
+ let(:expected_error_message) { error_settings_missing }
+ let(:logger_params) { { category: 'custom_email_verification' } }
+
before do
allow(message_delivery).to receive(:deliver_later)
allow(Notify).to receive(:service_desk_verification_triggered_email).and_return(message_delivery)
@@ -29,6 +35,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
expect(service).to receive(:setup_and_deliver_verification_email).exactly(0).times
expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -48,6 +58,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
# Correct amount of result notification emails were sent
expect(Notify).to receive(:service_desk_verification_result_email).exactly(project.owners.size + 1).times
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params.merge(
+ error_message: error_identifier.to_s
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -67,6 +81,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
it_behaves_like 'a verification process that exits early'
context 'when feature flag :service_desk_custom_email is disabled' do
+ let(:expected_error_message) { error_feature_flag_disabled }
+
before do
stub_feature_flags(service_desk_custom_email: false)
end
@@ -77,12 +93,17 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
context 'when service desk setting exists' do
let(:settings) { create(:service_desk_setting, project: project, custom_email: 'user@example.com') }
let(:service) { described_class.new(project: settings.project, current_user: user) }
+ let(:expected_error_message) { error_user_not_authorized }
it 'aborts verification process and exits early', :aggregate_failures do
# Because we exit early it should not send any verification or notification emails
expect(service).to receive(:setup_and_deliver_verification_email).exactly(0).times
expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
settings.reload
@@ -105,6 +126,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
# Check whether the correct amount of notification emails were sent
expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(project.owners.size + 1).times
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params).once
+
response = service.execute
settings.reload
diff --git a/spec/services/service_desk/custom_email_verifications/update_service_spec.rb b/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
index f1e683c0185..d882cd8635a 100644
--- a/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
+++ b/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
@@ -14,6 +14,16 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
let(:service) { described_class.new(project: settings.project, params: { mail: mail_object }) }
+ let(:error_feature_flag_disabled) { 'Feature flag service_desk_custom_email is not enabled' }
+ let(:error_parameter_missing) { s_('ServiceDesk|Service Desk setting or verification object missing') }
+ let(:error_already_finished) { s_('ServiceDesk|Custom email address has already been verified.') }
+ let(:error_already_failed) do
+ s_('ServiceDesk|Custom email address verification has already been processed and failed.')
+ end
+
+ let(:expected_error_message) { error_parameter_missing }
+ let(:logger_params) { { category: 'custom_email_verification' } }
+
before do
allow(message_delivery).to receive(:deliver_later)
allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
@@ -23,6 +33,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'refuses to verify and sends result emails' do
expect(Notify).to receive(:service_desk_verification_result_email).twice
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params.merge(
+ error_message: expected_error_identifier.to_s
+ )).once
+
response = described_class.new(project: settings.project, params: { mail: mail_object }).execute
settings.reset
@@ -41,6 +55,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'exits early' do
expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
settings.reset
@@ -55,6 +73,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'exits early' do
expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
settings.reset
@@ -64,6 +86,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
end
context 'when feature flag :service_desk_custom_email is disabled' do
+ let(:expected_error_message) { error_feature_flag_disabled }
+
before do
stub_feature_flags(service_desk_custom_email: false)
end
@@ -71,6 +95,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'exits early' do
expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -85,6 +113,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it_behaves_like 'a failing verification process', 'mail_not_received_within_timeframe'
context 'when already verified' do
+ let(:expected_error_message) { error_already_finished }
+
before do
verification.mark_as_finished!
end
@@ -93,6 +123,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
end
context 'when we already have an error' do
+ let(:expected_error_message) { error_already_failed }
+
before do
verification.mark_as_failed!(:smtp_host_issue)
end
@@ -112,6 +144,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'verifies and sends result emails' do
expect(Notify).to receive(:service_desk_verification_result_email).twice
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params).once
+
response = service.execute
settings.reset
diff --git a/spec/services/service_desk/custom_emails/create_service_spec.rb b/spec/services/service_desk/custom_emails/create_service_spec.rb
index 0d9582ba235..2029c9a0c3f 100644
--- a/spec/services/service_desk/custom_emails/create_service_spec.rb
+++ b/spec/services/service_desk/custom_emails/create_service_spec.rb
@@ -17,9 +17,14 @@ RSpec.describe ServiceDesk::CustomEmails::CreateService, feature_category: :serv
let(:params) { {} }
let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
let(:message) { instance_double(Mail::Message) }
+ let(:logger_params) { { category: 'custom_email' } }
shared_examples 'a service that exits with error' do
it 'exits early' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -29,6 +34,10 @@ RSpec.describe ServiceDesk::CustomEmails::CreateService, feature_category: :serv
shared_examples 'a failing service that does not create records' do
it 'exits with error and does not create records' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
project.reset
@@ -148,6 +157,10 @@ RSpec.describe ServiceDesk::CustomEmails::CreateService, feature_category: :serv
end
it 'creates all records returns a successful response' do
+ # Because we also log in ServiceDesk::CustomEmailVerifications::CreateService
+ expect(Gitlab::AppLogger).to receive(:info).with({ category: 'custom_email_verification' }).once
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params).once
+
response = service.execute
project.reset
diff --git a/spec/services/service_desk/custom_emails/destroy_service_spec.rb b/spec/services/service_desk/custom_emails/destroy_service_spec.rb
index f5a22e26865..7f53a941d4e 100644
--- a/spec/services/service_desk/custom_emails/destroy_service_spec.rb
+++ b/spec/services/service_desk/custom_emails/destroy_service_spec.rb
@@ -12,9 +12,14 @@ RSpec.describe ServiceDesk::CustomEmails::DestroyService, feature_category: :ser
let(:error_user_not_authorized) { s_('ServiceDesk|User cannot manage project.') }
let(:error_does_not_exist) { s_('ServiceDesk|Custom email does not exist') }
let(:expected_error_message) { nil }
+ let(:logger_params) { { category: 'custom_email' } }
shared_examples 'a service that exits with error' do
it 'exits early' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -24,6 +29,8 @@ RSpec.describe ServiceDesk::CustomEmails::DestroyService, feature_category: :ser
shared_examples 'a successful service that destroys all custom email records' do
it 'ensures no custom email records exist' do
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params).once
+
project.reset
response = service.execute
diff --git a/spec/services/service_desk_settings/update_service_spec.rb b/spec/services/service_desk_settings/update_service_spec.rb
index ff564963677..27978225bcf 100644
--- a/spec/services/service_desk_settings/update_service_spec.rb
+++ b/spec/services/service_desk_settings/update_service_spec.rb
@@ -3,7 +3,12 @@ require 'spec_helper'
RSpec.describe ServiceDeskSettings::UpdateService, feature_category: :service_desk do
describe '#execute' do
- let_it_be(:settings) { create(:service_desk_setting, outgoing_name: 'original name') }
+ let_it_be(:settings) do
+ create(:service_desk_setting, outgoing_name: 'original name', custom_email: 'user@example.com')
+ end
+
+ let_it_be(:credential) { create(:service_desk_custom_email_credential, project: settings.project) }
+ let_it_be(:verification) { create(:service_desk_custom_email_verification, :finished, project: settings.project) }
let_it_be(:user) { create(:user) }
context 'with valid params' do
@@ -16,6 +21,24 @@ RSpec.describe ServiceDeskSettings::UpdateService, feature_category: :service_de
expect(settings.reload.outgoing_name).to eq 'some name'
expect(settings.reload.project_key).to eq 'foo'
end
+
+ context 'with custom email verification in finished state' do
+ let(:params) { { custom_email_enabled: true } }
+
+ before do
+ allow(Gitlab::AppLogger).to receive(:info)
+ end
+
+      it 'allows enabling custom email' do
+ settings.project.reset
+
+ response = described_class.new(settings.project, user, params).execute
+
+ expect(response).to be_success
+ expect(settings.reset.custom_email_enabled).to be true
+ expect(Gitlab::AppLogger).to have_received(:info).with({ category: 'custom_email' })
+ end
+ end
end
context 'when project_key is an empty string' do
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index fc86ecfe7f2..4133609d9ae 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -222,6 +222,9 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
end
context 'spam verdict service advises to block the user' do
+ # create a fresh user to ensure it is in the unbanned state
+ let(:user) { create(:user) }
+
before do
allow(fake_verdict_service).to receive(:execute).and_return(BLOCK_USER)
end
@@ -234,6 +237,14 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
expect(response.message).to match(expected_service_check_response_message)
expect(target).to be_spam
end
+
+ it 'bans the user' do
+ subject
+
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_SPAM_LOG_ID).first
+ expect(custom_attribute.value).to eq(target.spam_log.id.to_s)
+ expect(user).to be_banned
+ end
end
context 'when spam verdict service conditionally allows' do
diff --git a/spec/services/system_notes/alert_management_service_spec.rb b/spec/services/system_notes/alert_management_service_spec.rb
index 1e3be24b05f..ac5682a35bb 100644
--- a/spec/services/system_notes/alert_management_service_spec.rb
+++ b/spec/services/system_notes/alert_management_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe ::SystemNotes::AlertManagementService, feature_category: :groups_
subject { described_class.new(noteable: noteable, project: project).create_new_alert('Some Service') }
it_behaves_like 'a system note' do
- let(:author) { User.alert_bot }
+ let(:author) { Users::Internal.alert_bot }
let(:action) { 'new_alert_added' }
end
@@ -62,7 +62,7 @@ RSpec.describe ::SystemNotes::AlertManagementService, feature_category: :groups_
subject { described_class.new(noteable: noteable, project: project).log_resolving_alert('Some Service') }
it_behaves_like 'a system note' do
- let(:author) { User.alert_bot }
+ let(:author) { Users::Internal.alert_bot }
let(:action) { 'new_alert_added' }
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index af660a9b72e..4a795f2db20 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -28,6 +28,14 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
expect(subject.note).to eq "marked this issue as related to #{noteable_ref.to_reference(project)}"
end
end
+
+ context 'with work items' do
+ let_it_be(:noteable) { create(:work_item, :task, project: project) }
+
+ it 'sets the note text with the correct work item type' do
+ expect(subject.note).to eq "marked this task as related to #{noteable_ref.to_reference(project)}"
+ end
+ end
end
describe '#unrelate_issuable' do
@@ -686,9 +694,10 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
subject
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
let(:user) { author }
+ let(:namespace) { project.namespace }
end
end
end
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index a3793880ff1..52b99a6976d 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -118,10 +118,10 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
subject
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
let(:user) { author }
- subject(:service_action) { note }
+ let(:namespace) { project.namespace }
end
context 'when only start_date is added' do
@@ -231,10 +231,10 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
subject
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
let(:user) { author }
- let(:service_action) { subject }
+ let(:namespace) { project.namespace }
end
end
@@ -363,13 +363,10 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
subject
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
let(:user) { author }
- let(:service_action) do
- spend_time!(277200)
- subject
- end
+ let(:namespace) { project.namespace }
end
end
diff --git a/spec/services/users/authorized_build_service_spec.rb b/spec/services/users/authorized_build_service_spec.rb
index 7eed6833cba..a96854f33d9 100644
--- a/spec/services/users/authorized_build_service_spec.rb
+++ b/spec/services/users/authorized_build_service_spec.rb
@@ -12,5 +12,13 @@ RSpec.describe Users::AuthorizedBuildService, feature_category: :user_management
it_behaves_like 'common user build items'
it_behaves_like 'current user not admin build items'
+
+ context 'for additional authorized build allowed params' do
+ before do
+ params.merge!(external: true)
+ end
+
+ it { expect(user).to be_external }
+ end
end
end
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index f3236d40412..5f1949adc32 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -16,6 +16,57 @@ RSpec.describe Users::BuildService, feature_category: :user_management do
it_behaves_like 'common user build items'
it_behaves_like 'current user not admin build items'
+
+ context 'with "user_default_external" application setting' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:user_default_external, :external, :email, :user_default_internal_regex, :result) do
+ true | nil | 'fl@example.com' | nil | true
+ true | true | 'fl@example.com' | nil | true
+ true | false | 'fl@example.com' | nil | true # admin difference
+
+ true | nil | 'fl@example.com' | '' | true
+ true | true | 'fl@example.com' | '' | true
+ true | false | 'fl@example.com' | '' | true # admin difference
+
+ true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
+ true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+
+ true | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ true | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ true | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true # admin difference
+
+ false | nil | 'fl@example.com' | nil | false
+ false | true | 'fl@example.com' | nil | false # admin difference
+ false | false | 'fl@example.com' | nil | false
+
+ false | nil | 'fl@example.com' | '' | false
+ false | true | 'fl@example.com' | '' | false # admin difference
+ false | false | 'fl@example.com' | '' | false
+
+ false | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ false | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
+ false | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+
+ false | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ false | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
+ false | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(user_default_external: user_default_external)
+ stub_application_setting(user_default_internal_regex: user_default_internal_regex)
+
+ params.merge!({ external: external, email: email }.compact)
+ end
+
+      it 'sets user.external based on the application settings' do
+ expect(user.external).to eq(result)
+ end
+ end
+ end
end
context 'with non admin current_user' do
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
index 36b2730a2de..d6fb7a2954d 100644
--- a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -100,7 +100,11 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
context "when the awardable already has an award emoji of the same name assigned to the ghost user" do
let(:awardable) { create(:issue) }
- let!(:existing_award_emoji) { create(:award_emoji, user: User.ghost, name: "thumbsup", awardable: awardable) }
+
+ let!(:existing_award_emoji) do
+ create(:award_emoji, user: Users::Internal.ghost, name: "thumbsup", awardable: awardable)
+ end
+
let!(:award_emoji) { create(:award_emoji, user: user, name: "thumbsup", awardable: awardable) }
it "migrates the award emoji regardless" do
@@ -108,7 +112,7 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
migrated_record = AwardEmoji.find_by_id(award_emoji.id)
- expect(migrated_record.user).to eq(User.ghost)
+ expect(migrated_record.user).to eq(Users::Internal.ghost)
end
it "does not leave the migrated award emoji in an invalid state" do
@@ -322,7 +326,7 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
service.execute
expect(gitlab_shell.repository_exists?(repo.shard_name, "#{repo.disk_path}.git")).to be(true)
- expect(User.ghost.snippets).to include(repo.snippet)
+ expect(Users::Internal.ghost.snippets).to include(repo.snippet)
end
context 'when an error is raised deleting snippets' do
diff --git a/spec/services/users/upsert_credit_card_validation_service_spec.rb b/spec/services/users/upsert_credit_card_validation_service_spec.rb
index ebd2502398d..4e23b51cae2 100644
--- a/spec/services/users/upsert_credit_card_validation_service_spec.rb
+++ b/spec/services/users/upsert_credit_card_validation_service_spec.rb
@@ -101,6 +101,14 @@ RSpec.describe Users::UpsertCreditCardValidationService, feature_category: :user
end
context 'when unexpected exception happen' do
+ let(:exception) { StandardError.new }
+
+ before do
+ allow_next_instance_of(::Users::CreditCardValidation) do |instance|
+ allow(instance).to receive(:save).and_raise(exception)
+ end
+ end
+
it 'tracks the exception and returns an error' do
logged_params = {
credit_card_validated_at: credit_card_validated_time,
@@ -111,8 +119,7 @@ RSpec.describe Users::UpsertCreditCardValidationService, feature_category: :user
user_id: user_id
}
- expect(::Users::CreditCardValidation).to receive(:upsert).and_raise(e = StandardError.new('My exception!'))
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, class: described_class.to_s, params: logged_params)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception, class: described_class.to_s, params: logged_params)
result = service.execute
diff --git a/spec/services/work_items/related_work_item_links/destroy_service_spec.rb b/spec/services/work_items/related_work_item_links/destroy_service_spec.rb
new file mode 100644
index 00000000000..39381078c45
--- /dev/null
+++ b/spec/services/work_items/related_work_item_links/destroy_service_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::RelatedWorkItemLinks::DestroyService, feature_category: :portfolio_management do
+ describe '#execute' do
+ let_it_be(:project) { create(:project_empty_repo, :private) }
+ let_it_be(:other_project) { create(:project_empty_repo, :private) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:source) { create(:work_item, project: project) }
+ let_it_be(:linked_item1) { create(:work_item, project: project) }
+ let_it_be(:linked_item2) { create(:work_item, project: project) }
+ let_it_be(:no_access_item) { create(:work_item, project: other_project) }
+ let_it_be(:not_linked_item) { create(:work_item, project: project) }
+
+ let_it_be(:link1) { create(:work_item_link, source: source, target: linked_item1) }
+ let_it_be(:link2) { create(:work_item_link, source: source, target: linked_item2) }
+ let_it_be(:link3) { create(:work_item_link, source: source, target: no_access_item) }
+
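+ # A mix of IDs: two removable linked items, one linked item the user cannot access, and one item that was never linked.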
+ let(:ids_to_remove) { [linked_item1.id, linked_item2.id, no_access_item.id, not_linked_item.id] }
+
+ subject(:destroy_links) { described_class.new(source, user, { item_ids: ids_to_remove }).execute }
+
+ context 'when user can `admin_work_item_link` for the work item' do
+ before_all do
+ project.add_guest(user)
+ end
+
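+ # Only link1 and link2 can be destroyed; link3 targets an item in a project the user cannot access.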
+ it 'removes existing linked items with access' do
+ expect { destroy_links }.to change { WorkItems::RelatedWorkItemLink.count }.by(-2)
+ end
+
+ it 'creates notes for the source and target of each removed link' do
+ [linked_item1, linked_item2].each do |item|
+ expect(SystemNoteService).to receive(:unrelate_issuable).with(source, item, user)
+ expect(SystemNoteService).to receive(:unrelate_issuable).with(item, source, user)
+ end
+
+ destroy_links
+ end
+
+ it 'returns correct response message' do
+ message = "Successfully unlinked IDs: #{linked_item1.id} and #{linked_item2.id}. IDs with errors: " \
+ "#{no_access_item.id} could not be removed due to insufficient permissions, " \
+ "#{not_linked_item.id} could not be removed due to not being linked."
+
+ is_expected.to eq(
+ status: :success,
+ message: message,
+ items_removed: [linked_item1.id, linked_item2.id],
+ items_with_errors: [no_access_item.id]
+ )
+ end
+
+ context 'when all items fail' do
+ let(:ids_to_remove) { [no_access_item.id] }
+ let(:params) { { item_ids: [no_access_item.id] } }
+ let(:error_msg) { "IDs with errors: #{ids_to_remove[0]} could not be removed due to insufficient permissions." }
+
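+ # Requesting only the inaccessible item removes no links and returns an error.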
+ it 'returns an error response' do
+ expect { destroy_links }.not_to change { WorkItems::RelatedWorkItemLink.count }
+
+ is_expected.to eq(status: :error, message: error_msg)
+ end
+ end
+
+ context 'when item_ids is empty' do
+ let(:ids_to_remove) { [] }
+
+ it 'returns error response' do
+ is_expected.to eq(message: 'No work item IDs provided.', status: :error, http_status: 409)
+ end
+ end
+ end
+
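+ # Without membership in the private project the user cannot read the work item, so the service responds as if it does not exist.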
+ context 'when user cannot `admin_work_item_link` for the work item' do
+ it 'returns error response' do
+ is_expected.to eq(message: 'No work item found.', status: :error, http_status: 403)
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 8e19650d980..38e5d4dc153 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -76,9 +76,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
expect(update_work_item[:status]).to eq(:success)
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
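+ # The shared 'internal event tracking' example reads the action, user, and namespace defined below.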
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
let(:user) { current_user }
+ let(:namespace) { project.namespace }
subject(:service_action) { update_work_item[:status] }
end