gitlab.com/gitlab-org/gitlab-foss.git

Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/bulk_imports/file_download_service_spec.rb | 45
-rw-r--r--  spec/services/ci/abort_pipelines_service_spec.rb | 4
-rw-r--r--  spec/services/ci/cancel_pipeline_service_spec.rb | 76
-rw-r--r--  spec/services/ci/catalog/resources/create_service_spec.rb | 4
-rw-r--r--  spec/services/ci/catalog/resources/destroy_service_spec.rb | 4
-rw-r--r--  spec/services/ci/catalog/resources/versions/create_service_spec.rb | 9
-rw-r--r--  spec/services/ci/create_pipeline_service/partitioning_spec.rb | 2
-rw-r--r--  spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb | 86
-rw-r--r--  spec/services/ci/expire_pipeline_cache_service_spec.rb | 2
-rw-r--r--  spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb | 2
-rw-r--r--  spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb | 209
-rw-r--r--  spec/services/ci/retry_job_service_spec.rb | 4
-rw-r--r--  spec/services/ci/runners/unregister_runner_manager_service_spec.rb | 22
-rw-r--r--  spec/services/ci/unlock_pipeline_service_spec.rb | 22
-rw-r--r--  spec/services/ci/update_build_queue_service_spec.rb | 4
-rw-r--r--  spec/services/click_house/sync_strategies/base_sync_strategy_spec.rb | 169
-rw-r--r--  spec/services/click_house/sync_strategies/event_sync_strategy_spec.rb | 128
-rw-r--r--  spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb (renamed from spec/services/google_cloud/create_cloudsql_instance_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb (renamed from spec/services/google_cloud/create_service_accounts_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb (renamed from spec/services/google_cloud/enable_cloud_run_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb (renamed from spec/services/google_cloud/enable_cloudsql_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb (renamed from spec/services/google_cloud/enable_vision_ai_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb (renamed from spec/services/google_cloud/fetch_google_ip_list_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb (renamed from spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb (renamed from spec/services/google_cloud/generate_pipeline_service_spec.rb) | 22
-rw-r--r--  spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb (renamed from spec/services/google_cloud/get_cloudsql_instances_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb (renamed from spec/services/google_cloud/service_accounts_service_spec.rb) | 2
-rw-r--r--  spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb (renamed from spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb) | 2
-rw-r--r--  spec/services/design_management/save_designs_service_spec.rb | 2
-rw-r--r--  spec/services/event_create_service_spec.rb | 34
-rw-r--r--  spec/services/git/base_hooks_service_spec.rb | 2
-rw-r--r--  spec/services/git/branch_hooks_service_spec.rb | 18
-rw-r--r--  spec/services/git/branch_push_service_spec.rb | 2
-rw-r--r--  spec/services/git/process_ref_changes_service_spec.rb | 26
-rw-r--r--  spec/services/git/tag_hooks_service_spec.rb | 2
-rw-r--r--  spec/services/git/tag_push_service_spec.rb | 2
-rw-r--r--  spec/services/git/wiki_push_service_spec.rb | 2
-rw-r--r--  spec/services/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb (renamed from spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb) | 2
-rw-r--r--  spec/services/groups/create_service_spec.rb | 279
-rw-r--r--  spec/services/groups/participants_service_spec.rb | 16
-rw-r--r--  spec/services/groups/update_service_spec.rb | 54
-rw-r--r--  spec/services/import/github_service_spec.rb | 54
-rw-r--r--  spec/services/issuable/common_system_notes_service_spec.rb | 6
-rw-r--r--  spec/services/issue_email_participants/create_service_spec.rb | 10
-rw-r--r--  spec/services/issue_email_participants/destroy_service_spec.rb | 147
-rw-r--r--  spec/services/issue_links/list_service_spec.rb | 4
-rw-r--r--  spec/services/issues/export_csv_service_spec.rb | 4
-rw-r--r--  spec/services/issues/referenced_merge_requests_service_spec.rb | 16
-rw-r--r--  spec/services/issues/update_service_spec.rb | 12
-rw-r--r--  spec/services/labels/available_labels_service_spec.rb | 8
-rw-r--r--  spec/services/members/create_service_spec.rb | 28
-rw-r--r--  spec/services/members/update_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/approval_service_spec.rb | 56
-rw-r--r--  spec/services/merge_requests/conflicts/list_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/get_urls_service_spec.rb | 6
-rw-r--r--  spec/services/merge_requests/push_options_handler_service_spec.rb | 8
-rw-r--r--  spec/services/merge_requests/pushed_branches_service_spec.rb | 6
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 4
-rw-r--r--  spec/services/merge_requests/reload_diffs_service_spec.rb | 6
-rw-r--r--  spec/services/merge_requests/remove_approval_service_spec.rb | 60
-rw-r--r--  spec/services/merge_requests/request_review_service_spec.rb | 8
-rw-r--r--  spec/services/milestones/destroy_service_spec.rb | 4
-rw-r--r--  spec/services/milestones/promote_service_spec.rb | 16
-rw-r--r--  spec/services/ml/create_model_service_spec.rb | 5
-rw-r--r--  spec/services/ml/create_model_version_service_spec.rb | 55
-rw-r--r--  spec/services/namespaces/package_settings/update_service_spec.rb | 12
-rw-r--r--  spec/services/notification_recipients/build_service_spec.rb | 14
-rw-r--r--  spec/services/notification_service_spec.rb | 163
-rw-r--r--  spec/services/organizations/create_service_spec.rb | 2
-rw-r--r--  spec/services/organizations/update_service_spec.rb | 10
-rw-r--r--  spec/services/packages/npm/create_package_service_spec.rb | 66
-rw-r--r--  spec/services/packages/terraform_module/create_package_service_spec.rb | 75
-rw-r--r--  spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb | 24
-rw-r--r--  spec/services/post_receive_service_spec.rb | 2
-rw-r--r--  spec/services/preview_markdown_service_spec.rb | 2
-rw-r--r--  spec/services/projects/cleanup_service_spec.rb | 2
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 8
-rw-r--r--  spec/services/projects/fork_service_spec.rb | 698
-rw-r--r--  spec/services/projects/participants_service_spec.rb | 36
-rw-r--r--  spec/services/projects/unlink_fork_service_spec.rb | 8
-rw-r--r--  spec/services/projects/update_statistics_service_spec.rb | 34
-rw-r--r--  spec/services/push_event_payload_service_spec.rb | 12
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 129
-rw-r--r--  spec/services/repositories/changelog_service_spec.rb | 2
-rw-r--r--  spec/services/resource_access_tokens/revoke_service_spec.rb | 4
-rw-r--r--  spec/services/routes/rename_descendants_service_spec.rb | 208
-rw-r--r--  spec/services/security/merge_reports_service_spec.rb | 56
-rw-r--r--  spec/services/spam/spam_verdict_service_spec.rb | 11
-rw-r--r--  spec/services/system_note_service_spec.rb | 24
-rw-r--r--  spec/services/system_notes/issuables_service_spec.rb | 23
-rw-r--r--  spec/services/system_notes/time_tracking_service_spec.rb | 34
-rw-r--r--  spec/services/todo_service_spec.rb | 18
-rw-r--r--  spec/services/todos/destroy/destroyed_issuable_service_spec.rb | 4
-rw-r--r--  spec/services/user_project_access_changed_service_spec.rb | 4
-rw-r--r--  spec/services/users/migrate_records_to_ghost_user_service_spec.rb | 10
-rw-r--r--  spec/services/users/update_todo_count_cache_service_spec.rb | 4
-rw-r--r--  spec/services/work_items/callbacks/assignees_spec.rb (renamed from spec/services/work_items/widgets/assignees_service/update_service_spec.rb) | 27
-rw-r--r--  spec/services/work_items/callbacks/current_user_todos_spec.rb (renamed from spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb) | 20
-rw-r--r--  spec/services/work_items/callbacks/description_spec.rb (renamed from spec/services/work_items/widgets/description_service/update_service_spec.rb) | 14
-rw-r--r--  spec/services/work_items/callbacks/notifications_spec.rb (renamed from spec/services/work_items/widgets/notifications_service/update_service_spec.rb) | 8
-rw-r--r--  spec/services/work_items/callbacks/start_and_due_date_spec.rb (renamed from spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb) | 20
-rw-r--r--  spec/services/work_items/update_service_spec.rb | 13
102 files changed, 2639 insertions(+), 964 deletions(-)
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index 0c3eef69fa5..1a178ce5d60 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -12,11 +12,9 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
let_it_be(:filename) { 'file_download_service_spec' }
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filepath) { File.join(tmpdir, filename) }
- let_it_be(:content_length) { 1000 }
let(:headers) do
{
- 'content-length' => content_length,
'content-type' => content_type,
'content-disposition' => content_disposition
}
@@ -102,51 +100,27 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
end
end
- context 'when content-length is not valid' do
- context 'when content-length exceeds limit' do
+ context 'when file size is not valid' do
+ context 'when size exceeds limit' do
let(:file_size_limit) { 1 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
- 'File size 1000 B exceeds limit of 1 B'
- )
- end
- end
-
- context 'when content-length is missing' do
- let(:content_length) { nil }
-
- it 'raises an error' do
- expect { subject.execute }.to raise_error(
- described_class::ServiceError,
- 'Missing content-length header'
+ 'File size 100 B exceeds limit of 1 B'
)
end
end
end
- context 'when content-length is equals the file size limit' do
- let(:content_length) { 150 }
- let(:file_size_limit) { 150 }
+ context 'when size equals the file size limit' do
+ let(:file_size_limit) { 100 }
it 'does not raise an error' do
expect { subject.execute }.not_to raise_error
end
end
- context 'when partially downloaded file exceeds limit' do
- let(:content_length) { 151 }
- let(:file_size_limit) { 150 }
-
- it 'raises an error' do
- expect { subject.execute }.to raise_error(
- described_class::ServiceError,
- 'File size 151 B exceeds limit of 150 B'
- )
- end
- end
-
context 'when chunk code is not 200' do
let(:chunk_code) { 404 }
@@ -203,25 +177,23 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
context 'on redirect chunk' do
let(:chunk_code) { 303 }
- it 'does not run content type & length validations' do
+ it 'does not run content type validation' do
expect(service).not_to receive(:validate_content_type)
- expect(service).not_to receive(:validate_content_length)
service.execute
end
end
context 'when there is one data chunk' do
- it 'validates content type & length' do
+ it 'validates content type' do
expect(service).to receive(:validate_content_type)
- expect(service).to receive(:validate_content_length)
service.execute
end
end
context 'when there are multiple data chunks' do
- it 'validates content type & length only once' do
+ it 'validates content type only once' do
data_chunk = double(
'data chunk',
size: 1000,
@@ -237,7 +209,6 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
end
expect(service).to receive(:validate_content_type).once
- expect(service).to receive(:validate_content_length).once
service.execute
end
diff --git a/spec/services/ci/abort_pipelines_service_spec.rb b/spec/services/ci/abort_pipelines_service_spec.rb
index 60f3ee11442..af6a70989c9 100644
--- a/spec/services/ci/abort_pipelines_service_spec.rb
+++ b/spec/services/ci/abort_pipelines_service_spec.rb
@@ -70,12 +70,12 @@ RSpec.describe Ci::AbortPipelinesService, feature_category: :continuous_integrat
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new { abort_project_pipelines }.count
+ control = ActiveRecord::QueryRecorder.new { abort_project_pipelines }
pipelines = create_list(:ci_pipeline, 5, :running, project: project)
create_list(:ci_build, 5, :running, pipeline: pipelines.first)
- expect { abort_project_pipelines }.not_to exceed_query_limit(control_count)
+ expect { abort_project_pipelines }.not_to exceed_query_limit(control)
end
context 'with live build logs' do
diff --git a/spec/services/ci/cancel_pipeline_service_spec.rb b/spec/services/ci/cancel_pipeline_service_spec.rb
index 256d2db1ed2..6051485c4df 100644
--- a/spec/services/ci/cancel_pipeline_service_spec.rb
+++ b/spec/services/ci/cancel_pipeline_service_spec.rb
@@ -13,12 +13,14 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
current_user: current_user,
cascade_to_children: cascade_to_children,
auto_canceled_by_pipeline: auto_canceled_by_pipeline,
- execute_async: execute_async)
+ execute_async: execute_async,
+ safe_cancellation: safe_cancellation)
end
let(:cascade_to_children) { true }
let(:auto_canceled_by_pipeline) { nil }
let(:execute_async) { true }
+ let(:safe_cancellation) { false }
shared_examples 'force_execute' do
context 'when pipeline is not cancelable' do
@@ -30,9 +32,14 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
context 'when pipeline is cancelable' do
before do
- create(:ci_build, :running, pipeline: pipeline)
- create(:ci_build, :created, pipeline: pipeline)
- create(:ci_build, :success, pipeline: pipeline)
+ create(:ci_build, :running, pipeline: pipeline, name: 'build1')
+ create(:ci_build, :created, pipeline: pipeline, name: 'build2')
+ create(:ci_build, :success, pipeline: pipeline, name: 'build3')
+ create(:ci_build, :pending, :interruptible, pipeline: pipeline, name: 'build4')
+
+ create(:ci_bridge, :running, pipeline: pipeline, name: 'bridge1')
+ create(:ci_bridge, :running, :interruptible, pipeline: pipeline, name: 'bridge2')
+ create(:ci_bridge, :success, :interruptible, pipeline: pipeline, name: 'bridge3')
end
it 'logs the event' do
@@ -55,7 +62,15 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
it 'cancels all cancelable jobs' do
expect(response).to be_success
- expect(pipeline.all_jobs.pluck(:status)).to match_array(%w[canceled canceled success])
+ expect(pipeline.all_jobs.pluck(:name, :status)).to match_array([
+ %w[build1 canceled],
+ %w[build2 canceled],
+ %w[build3 success],
+ %w[build4 canceled],
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success]
+ ])
end
context 'when auto_canceled_by_pipeline is provided' do
@@ -74,6 +89,28 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
end
end
+ context 'when cascade_to_children: false and safe_cancellation: true' do
+ # We are testing the `safe_cancellation: true` case with only `cascade_to_children: false`
+ # because `safe_cancellation` is passed as `true` only when `cascade_to_children` is `false`
+ # from `CancelRedundantPipelinesService`.
+
+ let(:cascade_to_children) { false }
+ let(:safe_cancellation) { true }
+
+ it 'cancels only interruptible jobs' do
+ expect(response).to be_success
+ expect(pipeline.all_jobs.pluck(:name, :status)).to match_array([
+ %w[build1 running],
+ %w[build2 created],
+ %w[build3 success],
+ %w[build4 canceled],
+ %w[bridge1 running],
+ %w[bridge2 canceled],
+ %w[bridge3 success]
+ ])
+ end
+ end
+
context 'when pipeline has child pipelines' do
let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
let!(:child_job) { create(:ci_build, :running, pipeline: child_pipeline) }
@@ -81,8 +118,8 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
let!(:grandchild_job) { create(:ci_build, :running, pipeline: grandchild_pipeline) }
before do
- child_pipeline.source_bridge.update!(status: :running)
- grandchild_pipeline.source_bridge.update!(status: :running)
+ child_pipeline.source_bridge.update!(name: 'child_pipeline_bridge', status: :running)
+ grandchild_pipeline.source_bridge.update!(name: 'grandchild_pipeline_bridge', status: :running)
end
context 'when execute_async: false' do
@@ -91,8 +128,15 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
it 'cancels the bridge jobs and child jobs' do
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
- expect(child_pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
+ expect(child_pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[grandchild_pipeline_bridge canceled]
+ ])
expect(child_job.reload).to be_canceled
expect(grandchild_job.reload).to be_canceled
end
@@ -110,7 +154,12 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
end
end
@@ -124,7 +173,12 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
expect(child_job.reload).to be_running
end
end
diff --git a/spec/services/ci/catalog/resources/create_service_spec.rb b/spec/services/ci/catalog/resources/create_service_spec.rb
index 202c76acaec..5839b9ac2fe 100644
--- a/spec/services/ci/catalog/resources/create_service_spec.rb
+++ b/spec/services/ci/catalog/resources/create_service_spec.rb
@@ -8,10 +8,6 @@ RSpec.describe Ci::Catalog::Resources::CreateService, feature_category: :pipelin
let(:service) { described_class.new(project, user) }
- before do
- stub_licensed_features(ci_namespace_catalog: true)
- end
-
describe '#execute' do
context 'with an unauthorized user' do
it 'raises an AccessDeniedError' do
diff --git a/spec/services/ci/catalog/resources/destroy_service_spec.rb b/spec/services/ci/catalog/resources/destroy_service_spec.rb
index da5ba7ad0bc..4783506416d 100644
--- a/spec/services/ci/catalog/resources/destroy_service_spec.rb
+++ b/spec/services/ci/catalog/resources/destroy_service_spec.rb
@@ -9,10 +9,6 @@ RSpec.describe Ci::Catalog::Resources::DestroyService, feature_category: :pipeli
let(:service) { described_class.new(project, user) }
- before do
- stub_licensed_features(ci_namespace_catalog: true)
- end
-
describe '#execute' do
context 'with an unauthorized user' do
it 'raises an AccessDeniedError' do
diff --git a/spec/services/ci/catalog/resources/versions/create_service_spec.rb b/spec/services/ci/catalog/resources/versions/create_service_spec.rb
index e614a74a4a1..b57525fc8e1 100644
--- a/spec/services/ci/catalog/resources/versions/create_service_spec.rb
+++ b/spec/services/ci/catalog/resources/versions/create_service_spec.rb
@@ -115,6 +115,7 @@ RSpec.describe Ci::Catalog::Resources::Versions::CreateService, feature_category
expect(response).to be_success
version = Ci::Catalog::Resources::Version.last
+ base_path = "#{Settings.gitlab.host}/#{project.full_path}"
expect(project.ci_components.count).to eq(4)
expect(project.ci_components.first.name).to eq('blank-yaml')
@@ -122,25 +123,25 @@ RSpec.describe Ci::Catalog::Resources::Versions::CreateService, feature_category
expect(project.ci_components.first.inputs).to eq({})
expect(project.ci_components.first.catalog_resource).to eq(version.catalog_resource)
expect(project.ci_components.first.version).to eq(version)
- expect(project.ci_components.first.path).to eq('templates/blank-yaml.yml')
+ expect(project.ci_components.first.path).to eq("#{base_path}/blank-yaml@#{version.name}")
expect(project.ci_components.second.name).to eq('dast')
expect(project.ci_components.second.project).to eq(version.project)
expect(project.ci_components.second.inputs).to eq({})
expect(project.ci_components.second.catalog_resource).to eq(version.catalog_resource)
expect(project.ci_components.second.version).to eq(version)
- expect(project.ci_components.second.path).to eq('templates/dast/template.yml')
+ expect(project.ci_components.second.path).to eq("#{base_path}/dast@#{version.name}")
expect(project.ci_components.third.name).to eq('secret-detection')
expect(project.ci_components.third.project).to eq(version.project)
expect(project.ci_components.third.inputs).to eq({ "website" => nil })
expect(project.ci_components.third.catalog_resource).to eq(version.catalog_resource)
expect(project.ci_components.third.version).to eq(version)
- expect(project.ci_components.third.path).to eq('templates/secret-detection.yml')
+ expect(project.ci_components.third.path).to eq("#{base_path}/secret-detection@#{version.name}")
expect(project.ci_components.fourth.name).to eq('template')
expect(project.ci_components.fourth.project).to eq(version.project)
expect(project.ci_components.fourth.inputs).to eq({ "environment" => nil })
expect(project.ci_components.fourth.catalog_resource).to eq(version.catalog_resource)
expect(project.ci_components.fourth.version).to eq(version)
- expect(project.ci_components.fourth.path).to eq('templates/template.yml')
+ expect(project.ci_components.fourth.path).to eq("#{base_path}/template@#{version.name}")
end
end
end
diff --git a/spec/services/ci/create_pipeline_service/partitioning_spec.rb b/spec/services/ci/create_pipeline_service/partitioning_spec.rb
index 70c4eb49698..574bc05827a 100644
--- a/spec/services/ci/create_pipeline_service/partitioning_spec.rb
+++ b/spec/services/ci/create_pipeline_service/partitioning_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
let(:pipeline) { service.execute(:push).payload }
- let(:current_partition_id) { ci_testing_partition_id }
+ let(:current_partition_id) { ci_testing_partition_id_for_check_constraints }
before do
stub_ci_pipeline_yaml_file(config)
diff --git a/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb b/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
index 851c6f8fbea..7e8a3ef3d7b 100644
--- a/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
+++ b/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
@@ -57,7 +57,49 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'creates a pipeline with errors' do
expect(pipeline).to be_persisted
expect(pipeline.errors.full_messages).to include(
- 'workflow:auto_cancel on new commit must be one of: conservative, interruptible, disabled')
+ 'workflow:auto_cancel on new commit must be one of: conservative, interruptible, none')
+ end
+ end
+
+ context 'when using with workflow:rules' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_new_commit: interruptible
+ rules:
+ - if: $VAR123 == "valid value"
+ auto_cancel:
+ on_new_commit: none
+ - when: always
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ context 'when the rule matches' do
+ before do
+ create(:ci_variable, project: project, key: 'VAR123', value: 'valid value')
+ end
+
+ it 'creates a pipeline with on_new_commit' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('none')
+ end
+ end
+
+ context 'when the rule does not match' do
+ it 'creates a pipeline with on_new_commit' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ end
end
end
end
@@ -165,5 +207,47 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
'workflow:auto_cancel on job failure must be one of: none, all')
end
end
+
+ context 'when using with workflow:rules' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_job_failure: none
+ rules:
+ - if: $VAR123 == "valid value"
+ auto_cancel:
+ on_job_failure: all
+ - when: always
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ context 'when the rule matches' do
+ before do
+ create(:ci_variable, project: project, key: 'VAR123', value: 'valid value')
+ end
+
+ it 'creates a pipeline with on_job_failure' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ end
+ end
+
+ context 'when the rule does not match' do
+ it 'creates a pipeline with on_job_failure' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index 3d0ce456aa5..a74b820de09 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Ci::ExpirePipelineCacheService, feature_category: :continuous_int
create(:ci_sources_pipeline, pipeline: pipeline)
create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline))
- expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count)
+ expect { subject.execute(pipeline) }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index c060c72ffb2..bdb4ed182dc 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
more_artifacts
- expect { subject }.not_to exceed_query_limit(control.count)
+ expect { subject }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index 0d83187f9e4..7b5eef92f53 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
project.update!(auto_cancel_pending_pipelines: 'enabled')
end
- it 'cancels only previous interruptible builds' do
+ it 'cancels only previous non started builds' do
execute
expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
@@ -153,6 +153,36 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
end
+
+ context 'when the child pipeline auto_cancel_on_new_commit is `interruptible`' do
+ before do
+ child_pipeline.create_pipeline_metadata!(
+ project: child_pipeline.project, auto_cancel_on_new_commit: 'interruptible'
+ )
+ end
+
+ it 'cancels interruptible child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'success')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+ end
+ end
+ end
end
context 'when the child pipeline has non-interruptible non-started job' do
@@ -227,6 +257,37 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+
context 'when there are trigger jobs' do
before do
create(:ci_bridge, :created, pipeline: prev_pipeline)
@@ -246,6 +307,152 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
+ context 'when auto_cancel_on_new_commit is `interruptible`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'interruptible'
+ )
+ end
+
+ it 'cancels only interruptible jobs' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'created')
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'cancels non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'still cancels only interruptible jobs' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'canceled', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly(
+ 'created', 'success', 'running', 'failed', 'success'
+ )
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is `none`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'none'
+ )
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is `conservative`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'conservative'
+ )
+ end
+
+ it 'cancels only previous non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'cancels only previous non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is an invalid value' do
+ before do
+ allow(prev_pipeline).to receive(:auto_cancel_on_new_commit).and_return('invalid')
+ relation = Ci::Pipeline.id_in(prev_pipeline.id)
+ allow(relation).to receive(:each).and_yield(prev_pipeline)
+ allow(Ci::Pipeline).to receive(:id_in).and_return(relation)
+ end
+
+ it 'raises an error' do
+ expect { execute }.to raise_error(ArgumentError, 'Unknown auto_cancel_on_new_commit value: invalid')
+ end
+ end
+
it 'does not cancel future pipelines' do
expect(prev_pipeline.id).to be < pipeline.id
expect(build_statuses(pipeline)).to contain_exactly('pending')
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index 1646afde21d..1708f475e6b 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -403,11 +403,11 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
end
it 'does not cause an N+1 when updating the job ownership' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) }.count
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) }
create_list(:ci_build, 2, :skipped, pipeline: pipeline, ci_stage: deploy_stage)
- expect { service.execute(job) }.not_to exceed_all_query_limit(control_count)
+ expect { service.execute(job) }.not_to exceed_all_query_limit(control)
end
end
diff --git a/spec/services/ci/runners/unregister_runner_manager_service_spec.rb b/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
index 590df18469d..0fa2afdcdfc 100644
--- a/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
+++ b/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', feature_category: :fleet_visibility do
+RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', :freeze_time, feature_category: :fleet_visibility do
subject(:execute) { described_class.new(runner, 'some_token', system_id: system_id).execute }
context 'with runner registered with registration token' do
@@ -21,7 +21,7 @@ RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', featur
context 'with runner created in UI' do
let!(:runner_manager1) { create(:ci_runner_machine, runner: runner, system_xid: 'system_id_1') }
let!(:runner_manager2) { create(:ci_runner_machine, runner: runner, system_xid: 'system_id_2') }
- let!(:runner) { create(:ci_runner, registration_type: :authenticated_user) }
+ let!(:runner) { create(:ci_runner, registration_type: :authenticated_user, contacted_at: Time.current) }
context 'with system_id specified' do
let(:system_id) { runner_manager1.system_xid }
@@ -34,6 +34,24 @@ RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', featur
expect(runner[:errors]).to be_nil
expect(runner.runner_managers).to contain_exactly(runner_manager2)
end
+
+ it 'does not clear runner heartbeat' do
+ expect(runner).not_to receive(:clear_heartbeat)
+
+ expect(execute).to be_success
+ end
+
+ context "when there are no runner managers left after deletion" do
+ let!(:runner_manager2) { nil }
+
+ it 'clears the heartbeat attributes' do
+ expect(runner).to receive(:clear_heartbeat).and_call_original
+
+ expect do
+ expect(execute).to be_success
+ end.to change { runner.reload.read_attribute(:contacted_at) }.from(Time.current).to(nil)
+ end
+ end
end
context 'with unknown system_id' do
diff --git a/spec/services/ci/unlock_pipeline_service_spec.rb b/spec/services/ci/unlock_pipeline_service_spec.rb
index 1a1150dca9e..16537ce5eaa 100644
--- a/spec/services/ci/unlock_pipeline_service_spec.rb
+++ b/spec/services/ci/unlock_pipeline_service_spec.rb
@@ -39,6 +39,28 @@ RSpec.describe Ci::UnlockPipelineService, :unlock_pipelines, :clean_gitlab_redis
)
end
+ context 'when disable_ci_partition_pruning is disabled' do
+ before do
+ stub_feature_flags(disable_ci_partition_pruning: false)
+ end
+
+ it 'unlocks the pipeline and all its artifacts' do
+ expect { execute }
+ .to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ .and change { pipeline.reload.job_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }.to(true)
+
+ expect(execute).to eq(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: false,
+ exec_timeout: false,
+ unlocked_job_artifacts: pipeline.job_artifacts.count,
+ unlocked_pipeline_artifacts: pipeline.pipeline_artifacts.count
+ )
+ end
+ end
+
context 'and pipeline is already unlocked' do
before do
described_class.new(pipeline).execute
diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb
index 4fd4492278d..c5959127f34 100644
--- a/spec/services/ci/update_build_queue_service_spec.rb
+++ b/spec/services/ci/update_build_queue_service_spec.rb
@@ -331,11 +331,11 @@ RSpec.describe Ci::UpdateBuildQueueService, feature_category: :continuous_integr
let!(:project_runner) { create(:ci_runner, :project, :online, projects: [project], tag_list: %w[a b c]) }
it 'does execute the same amount of queries regardless of number of runners' do
- control_count = ActiveRecord::QueryRecorder.new { subject.tick(build) }.count
+ control = ActiveRecord::QueryRecorder.new { subject.tick(build) }
create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])
- expect { subject.tick(build) }.not_to exceed_all_query_limit(control_count)
+ expect { subject.tick(build) }.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/services/click_house/sync_strategies/base_sync_strategy_spec.rb b/spec/services/click_house/sync_strategies/base_sync_strategy_spec.rb
new file mode 100644
index 00000000000..eb9324fd24b
--- /dev/null
+++ b/spec/services/click_house/sync_strategies/base_sync_strategy_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::SyncStrategies::BaseSyncStrategy, feature_category: :value_stream_management do
+ let(:strategy) { described_class.new }
+
+ describe '#execute' do
+ subject(:execute) { strategy.execute }
+
+ context 'when clickhouse configuration database is available', :click_house do
+ before do
+ allow(strategy).to receive(:model_class).and_return(::Event)
+ allow(strategy).to receive(:projections).and_return([:id])
+ allow(strategy).to receive(:csv_mapping).and_return({ id: :id })
+ allow(strategy).to receive(:insert_query).and_return("INSERT INTO events (id) SETTINGS async_insert=1,
+ wait_for_async_insert=1 FORMAT CSV")
+ end
+
+ context 'when there is nothing to sync' do
+ it 'adds metadata for the worker' do
+ expect(execute).to eq({ status: :processed, records_inserted: 0, reached_end_of_table: true })
+
+ events = ClickHouse::Client.select('SELECT * FROM events', :main)
+ expect(events).to be_empty
+ end
+ end
+
+ context 'when syncing records' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:project_event2) { create(:event, :closed, project: project, target: issue) }
+ let_it_be(:event_without_parent) { create(:event, :joined, project: nil, group: nil) }
+ let_it_be(:group_event) { create(:event, :created, group: group, project: nil) }
+ let_it_be(:project_event1) { create(:event, :created, project: project, target: issue) }
+
+ it 'inserts all records' do
+ expect(execute).to eq({ status: :processed, records_inserted: 4, reached_end_of_table: true })
+
+ expected_records = [
+ hash_including('id' => project_event2.id),
+ hash_including('id' => event_without_parent.id),
+ hash_including('id' => group_event.id),
+ hash_including('id' => project_event1.id)
+ ]
+
+ events = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
+
+ expect(events).to match(expected_records)
+
+ last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
+ expect(last_processed_id).to eq(project_event1.id)
+ end
+
+ context 'when multiple batches are needed' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ stub_const("#{described_class}::INSERT_BATCH_SIZE", 1)
+ end
+
+ it 'inserts all records' do
+ expect(execute).to eq({ status: :processed, records_inserted: 4, reached_end_of_table: true })
+
+ events = ClickHouse::Client.select('SELECT * FROM events', :main)
+ expect(events.size).to eq(4)
+ end
+
+ context 'when new records are inserted while processing' do
+ it 'does not process new records created during the iteration' do
+ # Simulating the case when there is an insert during the iteration
+ call_count = 0
+ allow(strategy).to receive(:next_batch).and_wrap_original do |method|
+ call_count += 1
+ create(:event) if call_count == 3
+ method.call
+ end
+
+ expect(execute).to eq({ status: :processed, records_inserted: 4, reached_end_of_table: true })
+ end
+ end
+ end
+
+ context 'when time limit is reached' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'stops the processing' do
+ allow_next_instance_of(Analytics::CycleAnalytics::RuntimeLimiter) do |runtime_limiter|
+ allow(runtime_limiter).to receive(:over_time?).and_return(false, true)
+ end
+
+ expect(execute).to eq({ status: :processed, records_inserted: 2, reached_end_of_table: false })
+
+ last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
+ expect(last_processed_id).to eq(event_without_parent.id)
+ end
+ end
+
+ context 'when syncing from a certain point' do
+ before do
+ ClickHouse::SyncCursor.update_cursor_for(:events, project_event2.id)
+ end
+
+ it 'syncs records after the cursor' do
+ expect(execute).to eq({ status: :processed, records_inserted: 3, reached_end_of_table: true })
+
+ events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
+
+ expect(events).to eq([{ 'id' => event_without_parent.id }, { 'id' => group_event.id },
+ { 'id' => project_event1.id }])
+ end
+
+ context 'when there is nothing to sync' do
+ it 'does nothing' do
+ ClickHouse::SyncCursor.update_cursor_for(:events, project_event1.id)
+
+ expect(execute).to eq({ status: :processed, records_inserted: 0, reached_end_of_table: true })
+
+ events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
+ expect(events).to be_empty
+ end
+ end
+ end
+ end
+ end
+
+ context 'when clickhouse is not configured' do
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({})
+ end
+
+ it 'skips execution' do
+ expect(execute).to eq({ status: :disabled })
+ end
+ end
+
+ context 'when exclusive lease error happens' do
+ it 'skips execution' do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+
+ expect(strategy).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ expect(execute).to eq({ status: :skipped })
+ end
+ end
+ end
+
+ describe '#projections' do
+ it 'raises a NotImplementedError' do
+ expect { strategy.send(:projections) }.to raise_error(NotImplementedError,
+ "Subclasses must implement `projections`")
+ end
+ end
+
+ describe '#csv_mapping' do
+ it 'raises a NotImplementedError' do
+ expect { strategy.send(:csv_mapping) }.to raise_error(NotImplementedError,
+ "Subclasses must implement `csv_mapping`")
+ end
+ end
+
+ describe '#insert_query' do
+ it 'raises a NotImplementedError' do
+ expect { strategy.send(:insert_query) }.to raise_error(NotImplementedError,
+ "Subclasses must implement `insert_query`")
+ end
+ end
+end
diff --git a/spec/services/click_house/sync_strategies/event_sync_strategy_spec.rb b/spec/services/click_house/sync_strategies/event_sync_strategy_spec.rb
new file mode 100644
index 00000000000..05fcf6ddeb3
--- /dev/null
+++ b/spec/services/click_house/sync_strategies/event_sync_strategy_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::SyncStrategies::EventSyncStrategy, feature_category: :value_stream_management do
+ let(:strategy) { described_class.new }
+
+ describe '#execute' do
+ subject(:execute) { strategy.execute }
+
+ context 'when syncing records', :click_house do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:project_event2) { create(:event, :closed, project: project, target: issue) }
+ let_it_be(:event_without_parent) { create(:event, :joined, project: nil, group: nil) }
+ let_it_be(:group_event) { create(:event, :created, group: group, project: nil) }
+ let_it_be(:project_event1) { create(:event, :created, project: project, target: issue) }
+ # looks invalid but we have some records like this on PRD
+
+ it 'correctly inserts all records' do
+ expect(execute).to eq({ status: :processed, records_inserted: 4, reached_end_of_table: true })
+
+ expected_records = [
+ hash_including('id' => project_event2.id, 'path' => "#{group.id}/#{project.project_namespace.id}/",
+ 'target_type' => 'Issue'),
+ hash_including('id' => event_without_parent.id, 'path' => '', 'target_type' => ''),
+ hash_including('id' => group_event.id, 'path' => "#{group.id}/", 'target_type' => ''),
+ hash_including('id' => project_event1.id, 'path' => "#{group.id}/#{project.project_namespace.id}/",
+ 'target_type' => 'Issue')
+ ]
+
+ events = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
+
+ expect(events).to match(expected_records)
+
+ last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
+ expect(last_processed_id).to eq(project_event1.id)
+ end
+ end
+ end
+
+ describe '#projections' do
+ it 'returns correct projections' do
+ expect(strategy.send(:projections)).to match_array([
+ :id,
+ described_class::PATH_COLUMN,
+ :author_id,
+ :target_id,
+ :target_type,
+ 'action AS raw_action',
+ 'EXTRACT(epoch FROM created_at) AS casted_created_at',
+ 'EXTRACT(epoch FROM updated_at) AS casted_updated_at'
+ ])
+ end
+ end
+
+ describe '#csv_mapping' do
+ it 'returns correct csv mapping' do
+ expect(strategy.send(:csv_mapping)).to eq({
+ id: :id,
+ path: :path,
+ author_id: :author_id,
+ target_id: :target_id,
+ target_type: :target_type,
+ action: :raw_action,
+ created_at: :casted_created_at,
+ updated_at: :casted_updated_at
+ })
+ end
+ end
+
+ describe '#insert_query' do
+ let(:expected_query) do
+ <<~SQL.squish
+ INSERT INTO events (id, path, author_id,
+ target_id, target_type,
+ action, created_at, updated_at)
+ SETTINGS async_insert=1,
+ wait_for_async_insert=1 FORMAT CSV
+ SQL
+ end
+
+ it 'returns correct insert query' do
+ expect(strategy.send(:insert_query)).to eq(expected_query)
+ end
+ end
+
+ describe '#model_class' do
+ it 'returns the correct model class' do
+ expect(strategy.send(:model_class)).to eq(::Event)
+ end
+ end
+
+ describe '#enabled?' do
+ context 'when the clickhouse database is configured and the feature flag is enabled' do
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+ stub_feature_flags(event_sync_worker_for_click_house: true)
+ end
+
+ it 'returns true' do
+ expect(strategy.send(:enabled?)).to be_truthy
+ end
+ end
+
+ context 'when the clickhouse database is not configured' do
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({})
+ end
+
+ it 'returns false' do
+ expect(strategy.send(:enabled?)).to be_falsey
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+ stub_feature_flags(event_sync_worker_for_click_house: false)
+ end
+
+ it 'returns false' do
+ expect(strategy.send(:enabled?)).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb b/spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb
index c31e76170d5..f6f1206e753 100644
--- a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::CreateCloudsqlInstanceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::CreateCloudsqlInstanceService, feature_category: :deployment_management do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:gcp_project_id) { 'gcp_project_120' }
diff --git a/spec/services/google_cloud/create_service_accounts_service_spec.rb b/spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb
index 3b57f2a9e5f..da30037963b 100644
--- a/spec/services/google_cloud/create_service_accounts_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::CreateServiceAccountsService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::CreateServiceAccountsService, feature_category: :deployment_management do
describe '#execute' do
before do
mock_google_oauth2_creds = Struct.new(:app_id, :app_secret)
diff --git a/spec/services/google_cloud/enable_cloud_run_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb
index 3de9e7fcd5c..09f1b3460cc 100644
--- a/spec/services/google_cloud/enable_cloud_run_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableCloudRunService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableCloudRunService, feature_category: :deployment_management do
describe 'when a project does not have any gcp projects' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/google_cloud/enable_cloudsql_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb
index b14b827e8b8..137393e4544 100644
--- a/spec/services/google_cloud/enable_cloudsql_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableCloudsqlService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableCloudsqlService, feature_category: :deployment_management do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:params) do
diff --git a/spec/services/google_cloud/enable_vision_ai_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb
index 5adafcffe69..c37b5681a4b 100644
--- a/spec/services/google_cloud/enable_vision_ai_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableVisionAiService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableVisionAiService, feature_category: :deployment_management do
describe 'when a project does not have any gcp projects' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb b/spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb
index f8d5ba99bf6..c4a0be78213 100644
--- a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
+RSpec.describe CloudSeed::GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
:clean_gitlab_redis_rate_limiting, feature_category: :build_artifacts do
include StubRequests
diff --git a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb b/spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb
index a748fed7134..2af03291484 100644
--- a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GcpRegionAddOrReplaceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GcpRegionAddOrReplaceService, feature_category: :deployment_management do
it 'adds and replaces GCP region vars' do
project = create(:project, :public)
service = described_class.new(project)
diff --git a/spec/services/google_cloud/generate_pipeline_service_spec.rb b/spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb
index 8f49e1af901..14c1e6bae7f 100644
--- a/spec/services/google_cloud/generate_pipeline_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GeneratePipelineService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GeneratePipelineService, feature_category: :deployment_management do
describe 'for cloud-run' do
describe 'when there is no existing pipeline' do
let_it_be(:project) { create(:project, :repository) }
@@ -64,7 +64,10 @@ RSpec.describe GoogleCloud::GeneratePipelineService, feature_category: :deployme
describe 'when there is an existing pipeline without `deploy` stage' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before_all do
@@ -119,7 +122,10 @@ EOF
describe 'when there is an existing pipeline with `deploy` stage' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
@@ -166,7 +172,10 @@ EOF
describe 'when there is an existing pipeline with `includes`' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
@@ -210,7 +219,10 @@ EOF
describe 'when there is no existing pipeline' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_STORAGE } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_STORAGE }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
diff --git a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb b/spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb
index cd2ad00ac3f..fb17d578af7 100644
--- a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GetCloudsqlInstancesService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GetCloudsqlInstancesService, feature_category: :deployment_management do
let(:service) { described_class.new(project) }
let(:project) { create(:project) }
diff --git a/spec/services/google_cloud/service_accounts_service_spec.rb b/spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb
index c900bf7d300..62d58b3198a 100644
--- a/spec/services/google_cloud/service_accounts_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::ServiceAccountsService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::ServiceAccountsService, feature_category: :deployment_management do
let(:service) { described_class.new(project) }
describe 'find_for_project' do
diff --git a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb b/spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb
index 5095277f61a..ce02672e3fa 100644
--- a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::SetupCloudsqlInstanceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::SetupCloudsqlInstanceService, feature_category: :deployment_management do
let(:random_user) { create(:user) }
let(:project) { create(:project) }
let(:list_databases_empty) { Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: []) }
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index 4ab0080d8a2..5ad3fa1bca6 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
before do
if issue.design_collection.repository.exists?
issue.design_collection.repository.expire_all_method_caches
- issue.design_collection.repository.raw.delete_all_refs_except([Gitlab::Git::BLANK_SHA])
+ issue.design_collection.repository.raw.delete_all_refs_except([Gitlab::Git::SHA1_BLANK_SHA])
end
allow(DesignManagement::NewVersionWorker)
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index 6a4769d77d5..f7041fb818e 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -364,19 +364,37 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
end
end
- describe 'Project' do
- describe '#join_project' do
- subject { service.join_project(project, user) }
+ describe '#join_source' do
+ let(:source) { project }
+ subject(:join_source) { service.join_source(source, user) }
+
+ context 'when source is a group' do
+ let_it_be(:source) { create(:group) }
+
+ it { is_expected.to be_falsey }
+
+ specify do
+ expect { join_source }.not_to change { Event.count }
+ end
+ end
+
+ context 'when source is a project' do
it { is_expected.to be_truthy }
- it { expect { subject }.to change { Event.count }.from(0).to(1) }
+
+ specify do
+ expect { join_source }.to change { Event.count }.from(0).to(1)
+ end
end
+ end
- describe '#expired_leave_project' do
- subject { service.expired_leave_project(project, user) }
+ describe '#expired_leave_project' do
+ subject(:expired_leave_project) { service.expired_leave_project(project, user) }
- it { is_expected.to be_truthy }
- it { expect { subject }.to change { Event.count }.from(0).to(1) }
+ it { is_expected.to be_truthy }
+
+ specify do
+ expect { expired_leave_project }.to change { Event.count }.from(0).to(1)
end
end
diff --git a/spec/services/git/base_hooks_service_spec.rb b/spec/services/git/base_hooks_service_spec.rb
index e083c8d7316..2a041d9b3e2 100644
--- a/spec/services/git/base_hooks_service_spec.rb
+++ b/spec/services/git/base_hooks_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Git::BaseHooksService, feature_category: :source_code_management
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0
let(:ref) { 'refs/tags/v1.1.0' }
let(:checkout_sha) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index 8fd542542ae..39a5f28060c 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context "with a new default branch" do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'generates a push event with more than one commit' do
execute_service
@@ -178,7 +178,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context "with a new non-default branch" do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:branch) { 'fix' }
let(:commit_id) { project.commit(branch).id }
@@ -198,7 +198,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context 'removing a branch' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'generates a push event with no commits' do
execute_service
@@ -222,7 +222,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
)
end
- let(:blank_sha) { Gitlab::Git::BLANK_SHA }
+ let(:blank_sha) { Gitlab::Git::SHA1_BLANK_SHA }
def clears_cache(extended: [])
expect(service).to receive(:invalidated_file_types).and_return(extended)
@@ -361,7 +361,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context 'creating the default branch' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'processes a limited number of commit messages' do
expect(project.repository)
@@ -414,7 +414,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context 'removing the default branch' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'does not process commit messages' do
expect(project.repository).not_to receive(:commits)
@@ -429,7 +429,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
context 'creating a normal branch' do
let(:branch) { 'fix' }
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'processes a limited number of commit messages' do
expect(project.repository)
@@ -463,7 +463,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
context 'removing a normal branch' do
let(:branch) { 'fix' }
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'does not process commit messages' do
expect(project.repository).not_to receive(:commits)
@@ -530,7 +530,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
let(:branch) { 'fix' }
context 'oldrev is the blank SHA' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'is treated as a new branch' do
expect(service).to receive(:branch_create_hooks)
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index db4f3ace64b..bb5fe1b7b11 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
let_it_be(:user) { create(:user) }
let_it_be_with_refind(:project) { create(:project, :repository) }
- let(:blankrev) { Gitlab::Git::BLANK_SHA }
+ let(:blankrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:oldrev) { sample_commit.parent_id }
let(:newrev) { sample_commit.id }
let(:branch) { 'master' }
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index 93d65b0b344..c117988f0a1 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -21,9 +21,9 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
let(:changes) do
[
- { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" },
+ { index: 0, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" },
{ index: 1, oldrev: '123456', newrev: '789012', ref: "#{ref_prefix}/update" },
- { index: 2, oldrev: '123456', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/delete" }
+ { index: 2, oldrev: '123456', newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: "#{ref_prefix}/delete" }
]
end
@@ -71,9 +71,9 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
let(:changes) do
[
- { oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" },
+ { oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" },
{ oldrev: '123456', newrev: '789012', ref: "#{ref_prefix}/update" },
- { oldrev: '123456', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/delete" }
+ { oldrev: '123456', newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: "#{ref_prefix}/delete" }
].map do |change|
multiple_changes(change, push_event_activities_limit + 1)
end.flatten
@@ -216,19 +216,19 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
context 'when there are merge requests associated with branches' do
let(:tag_changes) do
[
- { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "refs/tags/v10.0.0" }
+ { index: 0, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789012', ref: "refs/tags/v10.0.0" }
]
end
let(:branch_changes) do
[
- { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" },
- { index: 1, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789013', ref: "#{ref_prefix}/create2" },
- { index: 2, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" },
+ { index: 0, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" },
+ { index: 1, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789013', ref: "#{ref_prefix}/create2" },
+ { index: 2, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" },
{ index: 3, oldrev: '789015', newrev: '789016', ref: "#{ref_prefix}/changed1" },
{ index: 4, oldrev: '789017', newrev: '789018', ref: "#{ref_prefix}/changed2" },
- { index: 5, oldrev: '789019', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/removed1" },
- { index: 6, oldrev: '789020', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/removed2" }
+ { index: 5, oldrev: '789019', newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: "#{ref_prefix}/removed1" },
+ { index: 6, oldrev: '789020', newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: "#{ref_prefix}/removed2" }
]
end
@@ -246,7 +246,7 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
project.id,
user.id,
- Gitlab::Git::BLANK_SHA,
+ Gitlab::Git::SHA1_BLANK_SHA,
'789012',
"#{ref_prefix}/create1",
{ 'push_options' => nil }).ordered
@@ -254,7 +254,7 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
project.id,
user.id,
- Gitlab::Git::BLANK_SHA,
+ Gitlab::Git::SHA1_BLANK_SHA,
'789013',
"#{ref_prefix}/create2",
{ 'push_options' => nil }).ordered
@@ -271,7 +271,7 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
project.id,
user.id,
'789020',
- Gitlab::Git::BLANK_SHA,
+ Gitlab::Git::SHA1_BLANK_SHA,
"#{ref_prefix}/removed2",
{ 'push_options' => nil }).ordered
diff --git a/spec/services/git/tag_hooks_service_spec.rb b/spec/services/git/tag_hooks_service_spec.rb
index 3e06443126b..afa8a4e72d3 100644
--- a/spec/services/git/tag_hooks_service_spec.rb
+++ b/spec/services/git/tag_hooks_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Git::TagHooksService, :service, feature_category: :source_code_ma
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0
let(:ref) { "refs/tags/#{tag_name}" }
let(:tag_name) { 'v1.1.0' }
diff --git a/spec/services/git/tag_push_service_spec.rb b/spec/services/git/tag_push_service_spec.rb
index 0d40c331d11..ba0f94d6fe6 100644
--- a/spec/services/git/tag_push_service_spec.rb
+++ b/spec/services/git/tag_push_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Git::TagPushService, feature_category: :source_code_management do
let(:project) { create(:project, :repository) }
let(:service) { described_class.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }) }
- let(:blankrev) { Gitlab::Git::BLANK_SHA }
+ let(:blankrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:oldrev) { blankrev }
let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0
let(:tag) { 'v1.1.0' }
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index b076b2d51ef..e584b0db63f 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -347,7 +347,7 @@ RSpec.describe Git::WikiPushService, services: true, feature_category: :wiki do
end
def current_sha
- repository.commit('master')&.id || Gitlab::Git::BLANK_SHA
+ repository.commit('master')&.id || Gitlab::Git::SHA1_BLANK_SHA
end
# It is important not to re-use the WikiPage services here, since they create
diff --git a/spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb b/spec/services/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb
index 3f57add10e3..f19cbaa21cd 100644
--- a/spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb
+++ b/spec/services/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Integrations::GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService, feature_category: :container_registry do
+RSpec.describe GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService, feature_category: :container_registry do
let_it_be(:project) { create(:project, :private) }
let(:user) { project.owner }
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index b2b27a1a075..8ce69d12b3f 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -3,45 +3,58 @@
require 'spec_helper'
RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_projects do
- let!(:user) { create(:user) }
- let!(:group_params) { { path: "group_path", visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
+ let_it_be(:user, reload: true) { create(:user) }
+ let(:current_user) { user }
+ let(:group_params) { { path: 'group_path', visibility_level: Gitlab::VisibilityLevel::PUBLIC }.merge(extra_params) }
+ let(:extra_params) { {} }
+ let(:created_group) { response }
- subject { service.execute }
+ subject(:response) { described_class.new(current_user, group_params).execute }
shared_examples 'has sync-ed traversal_ids' do
- specify { expect(subject.reload.traversal_ids).to eq([subject.parent&.traversal_ids, subject.id].flatten.compact) }
+ specify do
+ expect(created_group.traversal_ids).to eq([created_group.parent&.traversal_ids, created_group.id].flatten.compact)
+ end
+ end
+
+ shared_examples 'creating a group' do
+ specify do
+ expect { response }.to change { Group.count }
+ expect(created_group).to be_persisted
+ end
end
- describe 'visibility level restrictions' do
- let!(:service) { described_class.new(user, group_params) }
+ shared_examples 'does not create a group' do
+ specify do
+ expect { response }.not_to change { Group.count }
+ expect(created_group).not_to be_persisted
+ end
+ end
- context "create groups without restricted visibility level" do
- it { is_expected.to be_persisted }
+ context 'for visibility level restrictions' do
+ context 'without restricted visibility level' do
+ it_behaves_like 'creating a group'
end
- context "cannot create group with restricted visibility level" do
+ context 'with restricted visibility level' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:restricted_visibility_levels).and_return([Gitlab::VisibilityLevel::PUBLIC])
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
end
- it { is_expected.not_to be_persisted }
+ it_behaves_like 'does not create a group'
end
end
- context 'when `setup_for_company:true` is passed' do
- let(:params) { group_params.merge(setup_for_company: true) }
- let(:service) { described_class.new(user, params) }
- let(:created_group) { service.execute }
+ context 'with `setup_for_company` attribute' do
+ let(:extra_params) { { setup_for_company: true } }
- it 'creates group with the specified setup_for_company' do
+ it 'has the specified setup_for_company' do
expect(created_group.setup_for_company).to eq(true)
end
end
- context 'creating a group with `default_branch_protection` attribute' do
- let(:params) { group_params.merge(default_branch_protection: Gitlab::Access::PROTECTION_NONE) }
- let(:service) { described_class.new(user, params) }
- let(:created_group) { service.execute }
+ context 'with `default_branch_protection` attribute' do
+ let(:extra_params) { { default_branch_protection: Gitlab::Access::PROTECTION_NONE } }
context 'for users who have the ability to create a group with `default_branch_protection`' do
it 'creates group with the specified branch protection level' do
@@ -52,23 +65,22 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
context 'for users who do not have the ability to create a group with `default_branch_protection`' do
it 'does not create the group with the specified branch protection level' do
allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection) { false }
+ allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection).and_return(false)
expect(created_group.default_branch_protection).not_to eq(Gitlab::Access::PROTECTION_NONE)
end
end
end
- context 'creating a group with `default_branch_protection_defaults` attribute' do
+ context 'with `default_branch_protection_defaults` attribute' do
let(:branch_protection) { ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys }
- let(:params) { group_params.merge(default_branch_protection_defaults: branch_protection) }
- let(:service) { described_class.new(user, params) }
- let(:created_group) { service.execute }
+ let(:extra_params) { { default_branch_protection_defaults: branch_protection } }
context 'for users who have the ability to create a group with `default_branch_protection`' do
before do
allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :update_default_branch_protection, an_instance_of(Group)).and_return(true)
+ allow(Ability)
+ .to receive(:allowed?).with(user, :update_default_branch_protection, an_instance_of(Group)).and_return(true)
end
it 'creates group with the specified default branch protection settings' do
@@ -79,31 +91,26 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
context 'for users who do not have the ability to create a group with `default_branch_protection_defaults`' do
it 'does not create the group with the specified default branch protection settings' do
allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection) { false }
+ allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection).and_return(false)
expect(created_group.default_branch_protection_defaults).not_to eq(Gitlab::Access::PROTECTION_NONE)
end
end
end
- context 'creating a group with `allow_mfa_for_subgroups` attribute' do
- let(:params) { group_params.merge(allow_mfa_for_subgroups: false) }
- let(:service) { described_class.new(user, params) }
+ context 'with `allow_mfa_for_subgroups` attribute' do
+ let(:extra_params) { { allow_mfa_for_subgroups: false } }
- it 'creates group without error' do
- expect(service.execute).to be_persisted
- end
+ it_behaves_like 'creating a group'
end
- describe 'creating a top level group' do
- let(:service) { described_class.new(user, group_params) }
-
+ context 'for a top level group' do
context 'when user can create a group' do
before do
user.update_attribute(:can_create_group, true)
end
- it { is_expected.to be_persisted }
+ it_behaves_like 'creating a group'
context 'with before_commit callback' do
it_behaves_like 'has sync-ed traversal_ids'
@@ -115,144 +122,167 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
user.update_attribute(:can_create_group, false)
end
- it { is_expected.not_to be_persisted }
+ it_behaves_like 'does not create a group'
end
end
- describe 'creating subgroup' do
- let!(:group) { create(:group) }
- let!(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
+ context 'when creating a group within an organization' do
+ context 'when organization is provided' do
+ let_it_be(:organization) { create(:organization) }
+ let(:extra_params) { { organization_id: organization.id } }
- context 'as group owner' do
+ context 'when user can create the group' do
+ before do
+ create(:organization_user, user: user, organization: organization)
+ end
+
+ it_behaves_like 'creating a group'
+ end
+
+ context 'when user is an admin', :enable_admin_mode do
+ let(:current_user) { create(:admin) }
+
+ it_behaves_like 'creating a group'
+ end
+
+ context 'when user can not create the group' do
+ it_behaves_like 'does not create a group'
+
+ it 'returns an error and does not set organization_id' do
+ expect(created_group.errors[:organization_id].first)
+ .to eq(s_("CreateGroup|You don't have permission to create a group in the provided organization."))
+ expect(created_group.organization_id).to be_nil
+ end
+ end
+ end
+
+ context 'when organization is the default organization and not set by params' do
before do
- group.add_owner(user)
+ create(:organization, :default)
end
- it { is_expected.to be_persisted }
+ it_behaves_like 'creating a group'
+ end
+ end
+
+ context 'for a subgroup' do
+ let_it_be(:group) { create(:group) }
+ let(:extra_params) { { parent_id: group.id } }
+
+ context 'as group owner' do
+ before_all do
+ group.add_owner(user)
+ end
+ it_behaves_like 'creating a group'
it_behaves_like 'has sync-ed traversal_ids'
end
context 'as guest' do
- it 'does not save group and returns an error' do
- is_expected.not_to be_persisted
+ it_behaves_like 'does not create a group'
- expect(subject.errors[:parent_id].first).to eq(s_('CreateGroup|You don’t have permission to create a subgroup in this group.'))
- expect(subject.parent_id).to be_nil
+ it 'returns an error and does not set parent_id' do
+ expect(created_group.errors[:parent_id].first)
+ .to eq(s_('CreateGroup|You don’t have permission to create a subgroup in this group.'))
+ expect(created_group.parent_id).to be_nil
end
end
context 'as owner' do
- before do
+ before_all do
group.add_owner(user)
end
- it { is_expected.to be_persisted }
+ it_behaves_like 'creating a group'
end
context 'as maintainer' do
- before do
+ before_all do
group.add_maintainer(user)
end
- it { is_expected.to be_persisted }
+ it_behaves_like 'creating a group'
end
end
- describe "when visibility level is passed as a string" do
- let(:service) { described_class.new(user, group_params) }
- let(:group_params) { { path: 'group_path', visibility: 'public' } }
-
- it "assigns the correct visibility level" do
- group = service.execute
+ context 'when visibility level is passed as a string' do
+ let(:extra_params) { { visibility: 'public' } }
- expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ it 'assigns the correct visibility level' do
+ expect(created_group.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
end
end
- describe 'creating a mattermost team' do
- let!(:params) { group_params.merge(create_chat_team: "true") }
- let!(:service) { described_class.new(user, params) }
+ context 'for creating a mattermost team' do
+ let(:extra_params) { { create_chat_team: 'true' } }
before do
stub_mattermost_setting(enabled: true)
end
it 'create the chat team with the group' do
- allow_any_instance_of(::Mattermost::Team).to receive(:create)
- .and_return({ 'name' => 'tanuki', 'id' => 'lskdjfwlekfjsdifjj' })
+ allow_next_instance_of(::Mattermost::Team) do |instance|
+ allow(instance).to receive(:create).and_return({ 'name' => 'tanuki', 'id' => 'lskdjfwlekfjsdifjj' })
+ end
- expect { subject }.to change { ChatTeam.count }.from(0).to(1)
+ expect { response }.to change { ChatTeam.count }.from(0).to(1)
end
end
- describe 'creating a setting record' do
- let(:service) { described_class.new(user, group_params) }
-
+ context 'for creating a setting record' do
it 'create the settings record connected to the group' do
- group = subject
- expect(group.namespace_settings).to be_persisted
+ expect(created_group.namespace_settings).to be_persisted
end
end
- describe 'creating a details record' do
- let(:service) { described_class.new(user, group_params) }
-
+ context 'for creating a details record' do
it 'create the details record connected to the group' do
- group = subject
- expect(group.namespace_details).to be_persisted
+ expect(created_group.namespace_details).to be_persisted
end
end
- describe 'create service for the group' do
- let(:service) { described_class.new(user, group_params) }
- let(:created_group) { service.execute }
+ context 'with an active instance-level integration' do
+ let_it_be(:instance_integration) do
+ create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/')
+ end
+
+ it 'creates a service from the instance-level integration' do
+ expect(created_group.integrations.count).to eq(1)
+ expect(created_group.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ end
- context 'with an active instance-level integration' do
- let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
+ context 'with an active group-level integration' do
+ let(:extra_params) { { parent_id: group.id } }
+ let_it_be(:group) { create(:group) { |g| g.add_owner(user) } }
+ let_it_be(:group_integration) do
+ create(:prometheus_integration, :group, group: group, api_url: 'https://prometheus.group.com/')
+ end
- it 'creates a service from the instance-level integration' do
+ it 'creates a service from the group-level integration' do
expect(created_group.integrations.count).to eq(1)
- expect(created_group.integrations.first.api_url).to eq(instance_integration.api_url)
- expect(created_group.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ expect(created_group.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(group_integration.id)
end
- context 'with an active group-level integration' do
- let(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
- let!(:group_integration) { create(:prometheus_integration, :group, group: group, api_url: 'https://prometheus.group.com/') }
- let(:group) do
- create(:group).tap do |group|
- group.add_owner(user)
- end
+ context 'with an active subgroup' do
+ let(:extra_params) { { parent_id: subgroup.id } }
+ let_it_be(:subgroup) { create(:group, parent: group) { |g| g.add_owner(user) } }
+ let_it_be(:subgroup_integration) do
+ create(:prometheus_integration, :group, group: subgroup, api_url: 'https://prometheus.subgroup.com/')
end
- it 'creates a service from the group-level integration' do
+ it 'creates a service from the subgroup-level integration' do
expect(created_group.integrations.count).to eq(1)
- expect(created_group.integrations.first.api_url).to eq(group_integration.api_url)
- expect(created_group.integrations.first.inherit_from_id).to eq(group_integration.id)
- end
-
- context 'with an active subgroup' do
- let(:service) { described_class.new(user, group_params.merge(parent_id: subgroup.id)) }
- let!(:subgroup_integration) { create(:prometheus_integration, :group, group: subgroup, api_url: 'https://prometheus.subgroup.com/') }
- let(:subgroup) do
- create(:group, parent: group).tap do |subgroup|
- subgroup.add_owner(user)
- end
- end
-
- it 'creates a service from the subgroup-level integration' do
- expect(created_group.integrations.count).to eq(1)
- expect(created_group.integrations.first.api_url).to eq(subgroup_integration.api_url)
- expect(created_group.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
- end
+ expect(created_group.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
end
end
end
end
- context 'shared runners configuration' do
- context 'parent group present' do
+ context 'with shared runners configuration' do
+ context 'when parent group is present' do
using RSpec::Parameterized::TableSyntax
where(:shared_runners_config, :descendants_override_disabled_shared_runners_config) do
@@ -263,30 +293,31 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
end
with_them do
- let!(:group) { create(:group, shared_runners_enabled: shared_runners_config, allow_descendants_override_disabled_shared_runners: descendants_override_disabled_shared_runners_config) }
- let!(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
+ let(:extra_params) { { parent_id: group.id } }
+ let(:group) do
+ create(
+ :group,
+ shared_runners_enabled: shared_runners_config,
+ allow_descendants_override_disabled_shared_runners: descendants_override_disabled_shared_runners_config
+ )
+ end
before do
group.add_owner(user)
end
it 'creates group following the parent config' do
- new_group = service.execute
-
- expect(new_group.shared_runners_enabled).to eq(shared_runners_config)
- expect(new_group.allow_descendants_override_disabled_shared_runners).to eq(descendants_override_disabled_shared_runners_config)
+ expect(created_group.shared_runners_enabled).to eq(shared_runners_config)
+ expect(created_group.allow_descendants_override_disabled_shared_runners)
+ .to eq(descendants_override_disabled_shared_runners_config)
end
end
end
- context 'root group' do
- let!(:service) { described_class.new(user) }
-
+ context 'for root group' do
it 'follows default config' do
- new_group = service.execute
-
- expect(new_group.shared_runners_enabled).to eq(true)
- expect(new_group.allow_descendants_override_disabled_shared_runners).to eq(false)
+ expect(created_group.shared_runners_enabled).to eq(true)
+ expect(created_group.allow_descendants_override_disabled_shared_runners).to eq(false)
end
end
end
diff --git a/spec/services/groups/participants_service_spec.rb b/spec/services/groups/participants_service_spec.rb
index e934921317d..beab7311b93 100644
--- a/spec/services/groups/participants_service_spec.rb
+++ b/spec/services/groups/participants_service_spec.rb
@@ -10,7 +10,8 @@ RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projec
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:subproject) { create(:project, group: subgroup) }
- let(:service) { described_class.new(group, developer) }
+ let(:params) { {} }
+ let(:service) { described_class.new(group, developer, params) }
subject(:service_result) { service.execute(nil) }
@@ -74,6 +75,19 @@ RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projec
it { is_expected.to include(private_group_member.username) }
end
+
+ context 'when search param is given' do
+ let(:params) { { search: 'johnd' } }
+
+ let_it_be(:member_1) { create(:user, name: 'John Doe').tap { |u| group.add_guest(u) } }
+ let_it_be(:member_2) { create(:user, name: 'Jane Doe').tap { |u| group.add_guest(u) } }
+
+ it 'only returns matching members' do
+ users = service_result.select { |hash| hash[:type].eql?('User') }
+
+ expect(users.pluck(:username)).to eq([member_1.username])
+ end
+ end
end
def user_to_autocompletable(user)
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 78deb3cf254..f50163041f8 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -444,6 +444,60 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
end
end
+ context 'when setting enable_namespace_descendants_cache' do
+ let(:params) { { enable_namespace_descendants_cache: true } }
+
+ subject(:result) { described_class.new(public_group, user, params).execute }
+
+ context 'when the group_hierarchy_optimization feature flag is enabled' do
+ before do
+ stub_feature_flags(group_hierarchy_optimization: true)
+ end
+
+ context 'when enabling the setting' do
+ it 'creates the initial Namespaces::Descendants record' do
+ expect { result }.to change { public_group.reload.namespace_descendants.present? }.from(false).to(true)
+ end
+ end
+
+ context 'when accidentally enabling the setting again' do
+ it 'does nothing' do
+ namespace_descendants = create(:namespace_descendants, namespace: public_group)
+
+ expect { result }.not_to change { namespace_descendants.reload }
+ end
+ end
+
+ context 'when disabling the setting' do
+ before do
+ params[:enable_namespace_descendants_cache] = false
+ end
+
+ it 'removes the Namespaces::Descendants record' do
+ create(:namespace_descendants, namespace: public_group)
+
+ expect { result }.to change { public_group.reload.namespace_descendants }.to(nil)
+ end
+
+ context 'when the Namespaces::Descendants record is missing' do
+ it 'does not raise error' do
+ expect { result }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context 'when the group_hierarchy_optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(group_hierarchy_optimization: false)
+ end
+
+ it 'does nothing' do
+ expect { result }.not_to change { public_group.reload.namespace_descendants.present? }.from(false)
+ end
+ end
+ end
+
context 'EventStore' do
let(:service) { described_class.new(group, user, **params) }
let(:root_group) { create(:group, path: 'root') }
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index fc649b61426..6fe17a31f3e 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -21,16 +21,24 @@ RSpec.describe Import::GithubService, feature_category: :importers do
}
end
+ let(:headers) do
+ {
+ 'x-oauth-scopes' => 'read:org'
+ }
+ end
+
let(:client) { Gitlab::GithubImport::Client.new(token) }
let(:project_double) { instance_double(Project, persisted?: true) }
subject(:github_importer) { described_class.new(client, user, params) }
before do
+ allow(client).to receive_message_chain(:octokit, :last_response, :headers).and_return(headers)
allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings)
allow(settings)
.to receive(:write)
.with(
+ extended_events: true,
optional_stages: optional_stages,
timeout_strategy: timeout_strategy
)
@@ -92,6 +100,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(settings)
.to have_received(:write)
.with(optional_stages: nil,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -117,6 +126,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -149,6 +159,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -185,11 +196,30 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
end
end
+ context 'validates scopes when collaborator import is true' do
+ let(:optional_stages) do
+ {
+ collaborators_import: true
+ }
+ end
+
+ let(:headers) do
+ {
+ 'x-oauth-scopes' => 'read:user'
+ }
+ end
+
+ it 'returns error when scope is not adequate' do
+ expect(subject.execute(access_params, :github)).to include(scope_error)
+ end
+ end
+
context 'when timeout strategy param is present' do
let(:timeout_strategy) { 'pessimistic' }
@@ -200,6 +230,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
end
@@ -213,10 +244,25 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
end
end
+
+ context 'when `github_import_extended_events` feature flag is disabled' do
+ before do
+ stub_feature_flags(github_import_extended_events: false)
+ end
+
+ it 'saves extended_events to import_data' do
+ expect(settings)
+ .to receive(:write)
+ .with(a_hash_including(extended_events: false))
+
+ subject.execute(access_params, :github)
+ end
+ end
end
context 'when import source is disabled' do
@@ -309,6 +355,14 @@ RSpec.describe Import::GithubService, feature_category: :importers do
}
end
+ def scope_error
+ {
+ status: :error,
+ http_status: :unprocessable_entity,
+ message: 'Your GitHub access token does not have the correct scope to import collaborators.'
+ }
+ end
+
def blocked_url_error(url)
{
status: :error,
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index 3d83c9ec9c2..4ea7bb89d61 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Issuable::CommonSystemNotesService, feature_category: :team_plann
it_behaves_like 'system note creation', { title: 'New title' }, 'changed title'
it_behaves_like 'system note creation', { description: 'New description' }, 'changed the description'
it_behaves_like 'system note creation', { discussion_locked: true }, 'locked the discussion in this issue'
- it_behaves_like 'system note creation', { time_estimate: 5 }, 'changed time estimate'
+ it_behaves_like 'system note creation', { time_estimate: 5 }, 'added time estimate of 5s'
context 'when new label is added' do
let(:label) { create(:label, project: project) }
@@ -142,5 +142,9 @@ RSpec.describe Issuable::CommonSystemNotesService, feature_category: :team_plann
context 'when changing dates' do
it_behaves_like 'system note for issuable date changes'
end
+
+ context 'when setting an estimate' do
+ it_behaves_like 'system note creation', { time_estimate: 5 }, 'added time estimate of 5s', false
+ end
end
end
diff --git a/spec/services/issue_email_participants/create_service_spec.rb b/spec/services/issue_email_participants/create_service_spec.rb
index fcfdeeb08f3..dc8d5a6ea74 100644
--- a/spec/services/issue_email_participants/create_service_spec.rb
+++ b/spec/services/issue_email_participants/create_service_spec.rb
@@ -41,8 +41,8 @@ RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service
let(:expected_emails) { emails }
let(:error_feature_flag) { "Feature flag issue_email_participants is not enabled for this project." }
- let(:error_underprivileged) { _("You don't have permission to add email participants.") }
- let(:error_no_participants) do
+ let(:error_underprivileged) { _("You don't have permission to manage email participants.") }
+ let(:error_no_participants_added) do
_("No email participants were added. Either none were provided, or they already exist.")
end
@@ -58,7 +58,7 @@ RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service
end
context 'when no emails are provided' do
- let(:error_message) { error_no_participants }
+ let(:error_message) { error_no_participants_added }
it_behaves_like 'a failed service execution'
end
@@ -69,7 +69,7 @@ RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service
it_behaves_like 'a successful service execution'
context 'when email is already a participant of the issue' do
- let(:error_message) { error_no_participants }
+ let(:error_message) { error_no_participants_added }
before do
issue.issue_email_participants.create!(email: emails.first)
@@ -89,7 +89,7 @@ RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service
end
let(:emails) { ['over-max@example.com'] }
- let(:error_message) { error_no_participants }
+ let(:error_message) { error_no_participants_added }
it_behaves_like 'a failed service execution'
diff --git a/spec/services/issue_email_participants/destroy_service_spec.rb b/spec/services/issue_email_participants/destroy_service_spec.rb
new file mode 100644
index 00000000000..70e09bb8d3b
--- /dev/null
+++ b/spec/services/issue_email_participants/destroy_service_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IssueEmailParticipants::DestroyService, feature_category: :service_desk do
+ shared_examples 'a successful service execution' do
+ it 'removes participants', :aggregate_failures do
+ expect(response).to be_success
+
+ issue.reset
+ note = issue.notes.last
+ expect(note.system?).to be true
+ expect(note.author).to eq(user)
+
+ participants_emails = issue.email_participants_emails_downcase
+
+ expected_emails.each do |email|
+ expect(participants_emails).not_to include(email)
+ expect(response.message).to include(email)
+ expect(note.note).to include(email)
+ end
+ end
+ end
+
+ shared_examples 'a failed service execution' do
+ it 'returns error ServiceResponse with message', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq(error_message)
+ end
+ end
+
+ describe '#execute' do
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:issue) { create(:issue, project: project) }
+
+ let(:emails) { nil }
+ let(:service) { described_class.new(target: issue, current_user: user, emails: emails) }
+ let(:expected_emails) { emails }
+
+ let(:error_feature_flag) { "Feature flag issue_email_participants is not enabled for this project." }
+ let(:error_underprivileged) { _("You don't have permission to manage email participants.") }
+ let(:error_no_participants_removed) do
+ _("No email participants were removed. Either none were provided, or they don't exist.")
+ end
+
+ subject(:response) { service.execute }
+
+ context 'when the user is not a project member' do
+ let(:error_message) { error_underprivileged }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when user has reporter role in project' do
+ before_all do
+ project.add_reporter(user)
+ end
+
+ context 'when no emails are provided' do
+ let(:error_message) { error_no_participants_removed }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when one email is provided' do
+ let(:emails) { ['user@example.com'] }
+ let(:error_message) { error_no_participants_removed }
+
+ it_behaves_like 'a failed service execution'
+
+ context 'when email is a participant of the issue' do
+ before do
+ issue.issue_email_participants.create!(email: 'user@example.com')
+ end
+
+ it_behaves_like 'a successful service execution'
+
+ context 'when email is formatted in a different case' do
+ let(:emails) { ['USER@example.com'] }
+ let(:expected_emails) { emails.map(&:downcase) }
+ let(:error_message) { error_no_participants_removed }
+
+ it_behaves_like 'a successful service execution'
+ end
+ end
+ end
+
+ context 'when multiple emails are provided' do
+ let(:emails) { ['user@example.com', 'user2@example.com'] }
+ let(:error_message) { error_no_participants_removed }
+
+ it_behaves_like 'a failed service execution'
+
+ context 'when duplicate email provided' do
+ let(:emails) { ['user@example.com', 'user@example.com'] }
+ let(:expected_emails) { emails[...-1] }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when one email is a participant of the issue' do
+ let(:expected_emails) { emails[...-1] }
+
+ before do
+ issue.issue_email_participants.create!(email: emails.first)
+ end
+
+ it_behaves_like 'a successful service execution'
+ end
+
+ context 'when both emails are a participant of the issue' do
+ before do
+ emails.each do |email|
+ issue.issue_email_participants.create!(email: email)
+ end
+ end
+
+ it_behaves_like 'a successful service execution'
+ end
+ end
+
+ context 'when more than the allowed number of emails are provided' do
+ let(:emails) { (1..7).map { |i| "user#{i}@example.com" } }
+ let(:expected_emails) { emails[...-1] }
+
+ before do
+ emails.each do |email|
+ issue.issue_email_participants.create!(email: email)
+ end
+ end
+
+ it_behaves_like 'a successful service execution'
+ end
+ end
+
+ context 'when feature flag issue_email_participants is disabled' do
+ let(:error_message) { error_feature_flag }
+
+ before do
+ stub_feature_flags(issue_email_participants: false)
+ end
+
+ it_behaves_like 'a failed service execution'
+ end
+ end
+end
diff --git a/spec/services/issue_links/list_service_spec.rb b/spec/services/issue_links/list_service_spec.rb
index b5cc8c4dcdc..f9e5e88aff0 100644
--- a/spec/services/issue_links/list_service_spec.rb
+++ b/spec/services/issue_links/list_service_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do
end
it 'ensures no N+1 queries are made' do
- control_count = ActiveRecord::QueryRecorder.new { subject }.count
+ control = ActiveRecord::QueryRecorder.new { subject }
project = create :project, :public
milestone = create :milestone, project: project
@@ -44,7 +44,7 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do
create :issue_link, source: issue_x, target: issue_z
create :issue_link, source: issue_y, target: issue_z
- expect { subject }.not_to exceed_query_limit(control_count)
+ expect { subject }.not_to exceed_query_limit(control)
end
it 'returns related issues JSON' do
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
index 83dfca923fb..016174f9888 100644
--- a/spec/services/issues/export_csv_service_spec.rb
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -175,11 +175,11 @@ RSpec.describe Issues::ExportCsvService, :with_license, feature_category: :team_
let(:labeled_issues) { create_list(:labeled_issue, 2, project: project, author: user, labels: [feature_label, idea_label]) }
it 'does not run a query for each label link' do
- control_count = ActiveRecord::QueryRecorder.new { csv }.count
+ control = ActiveRecord::QueryRecorder.new { csv }
labeled_issues
- expect { csv }.not_to exceed_query_limit(control_count)
+ expect { csv }.not_to exceed_query_limit(control)
expect(csv.count).to eq(4)
end
diff --git a/spec/services/issues/referenced_merge_requests_service_spec.rb b/spec/services/issues/referenced_merge_requests_service_spec.rb
index 4781daf7688..6748292d389 100644
--- a/spec/services/issues/referenced_merge_requests_service_spec.rb
+++ b/spec/services/issues/referenced_merge_requests_service_spec.rb
@@ -39,13 +39,13 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
context 'performance' do
it 'does not run extra queries when extra namespaces are included', :use_clean_rails_memory_store_caching do
service.execute(issue) # warm cache
- control_count = ActiveRecord::QueryRecorder.new { service.execute(issue) }.count
+ control = ActiveRecord::QueryRecorder.new { service.execute(issue) }
third_project = create(:project, :public)
create_closing_mr(source_project: third_project)
service.execute(issue) # warm cache
- expect { service.execute(issue) }.not_to exceed_query_limit(control_count)
+ expect { service.execute(issue) }.not_to exceed_query_limit(control)
end
it 'preloads the head pipeline for each merge request, and its routes' do
@@ -58,12 +58,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
end
closing_mr_other_project.update!(head_pipeline: create(:ci_pipeline))
- control_count = ActiveRecord::QueryRecorder.new { service.execute(reloaded_issue).each(&pipeline_routes) }
+ control = ActiveRecord::QueryRecorder.new { service.execute(reloaded_issue).each(&pipeline_routes) }
closing_mr.update!(head_pipeline: create(:ci_pipeline))
expect { service.execute(issue).each(&pipeline_routes) }
- .not_to exceed_query_limit(control_count)
+ .not_to exceed_query_limit(control)
end
it 'only loads issue notes once' do
@@ -95,12 +95,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
context 'performance' do
it 'does not run a query for each note author', :use_clean_rails_memory_store_caching do
service.referenced_merge_requests(issue) # warm cache
- control_count = ActiveRecord::QueryRecorder.new { service.referenced_merge_requests(issue) }.count
+ control = ActiveRecord::QueryRecorder.new { service.referenced_merge_requests(issue) }
create(:note, project: project, noteable: issue, author: create(:user))
service.referenced_merge_requests(issue) # warm cache
- expect { service.referenced_merge_requests(issue) }.not_to exceed_query_limit(control_count)
+ expect { service.referenced_merge_requests(issue) }.not_to exceed_query_limit(control)
end
end
end
@@ -121,12 +121,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
context 'performance' do
it 'does not run a query for each note author', :use_clean_rails_memory_store_caching do
service.closed_by_merge_requests(issue) # warm cache
- control_count = ActiveRecord::QueryRecorder.new { service.closed_by_merge_requests(issue) }.count
+ control = ActiveRecord::QueryRecorder.new { service.closed_by_merge_requests(issue) }
create(:note, :system, project: project, noteable: issue, author: create(:user))
service.closed_by_merge_requests(issue) # warm cache
- expect { service.closed_by_merge_requests(issue) }.not_to exceed_query_limit(control_count)
+ expect { service.closed_by_merge_requests(issue) }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 0cb13bfb917..e8bcdc2c44b 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -592,11 +592,19 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
update_issue(confidential: true)
end
+ it 'allows assignment of guest users' do
+ update_issue(confidential: true)
+
+ update_issue(assignee_ids: [guest.id])
+
+ expect(issue.reload.assignees).to contain_exactly(guest)
+ end
+
it 'does not update assignee_id with unauthorized users' do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
update_issue(confidential: true)
+
non_member = create(:user)
- original_assignees = issue.assignees
+ original_assignees = issue.assignees.to_a
update_issue(assignee_ids: [non_member.id])
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
index 2b398210034..3a1474e4fef 100644
--- a/spec/services/labels/available_labels_service_spec.rb
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -42,11 +42,15 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'do not cause additional query for finding labels' do
label_titles = [project_label.title]
- control_count = ActiveRecord::QueryRecorder.new { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }
+ control = ActiveRecord::QueryRecorder.new do
+ described_class.new(user, project, labels: label_titles).find_or_create_by_titles
+ end
new_label = create(:label, project: project)
label_titles = [project_label.title, new_label.title]
- expect { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }.not_to exceed_query_limit(control_count)
+ expect do
+ described_class.new(user, project, labels: label_titles).find_or_create_by_titles
+ end.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index b977292bcf4..c08b40e9528 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'adds a user to members' do
expect(execute_service[:status]).to eq(:success)
- expect(source.users).to include member
+ expect(source).to have_user(member)
expect(Onboarding::Progress.completed?(source, :user_added)).to be(true)
end
@@ -119,14 +119,34 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
before do
# validations will fail because we try to invite them to the project as a guest
source.group.add_developer(member)
+ allow(Gitlab::EventStore).to receive(:publish)
end
- it 'triggers the members added and authorizations changed events' do
+ it 'triggers the authorizations changed events' do
expect(Gitlab::EventStore)
- .to receive(:publish)
- .with(an_instance_of(ProjectAuthorizations::AuthorizationsChangedEvent))
+ .to receive(:publish_group)
+ .with(array_including(an_instance_of(ProjectAuthorizations::AuthorizationsAddedEvent)))
.and_call_original
+ execute_service
+ end
+
+ context 'when feature flag "add_policy_approvers_to_rules" is disabled' do
+ before do
+ stub_feature_flags(add_policy_approvers_to_rules: false)
+ end
+
+ it 'triggers the authorizations changed event' do
+ expect(Gitlab::EventStore)
+ .to receive(:publish)
+ .with(an_instance_of(ProjectAuthorizations::AuthorizationsChangedEvent))
+ .and_call_original
+
+ execute_service
+ end
+ end
+
+ it 'triggers the members added event' do
expect(Gitlab::EventStore)
.to receive(:publish)
.with(an_instance_of(Members::MembersAddedEvent))
diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb
index 3860543a85e..b23f5856575 100644
--- a/spec/services/members/update_service_spec.rb
+++ b/spec/services/members/update_service_spec.rb
@@ -263,7 +263,7 @@ RSpec.describe Members::UpdateService, feature_category: :groups_and_projects do
it 'emails the users that their group membership expiry has changed' do
members.each do |member|
- expect(notification_service).to receive(:updated_group_member_expiration).with(member)
+ expect(notification_service).to receive(:updated_member_expiration).with(member)
end
subject
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index 8761aba432f..6e20c42c8f6 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -16,11 +16,7 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
stub_feature_flags ff_require_saml_auth_to_approve: false
end
- context 'with invalid approval' do
- before do
- allow(merge_request.approvals).to receive(:new).and_return(double(save: false))
- end
-
+ shared_examples 'no-op call' do
it 'does not reset approvals' do
expect(merge_request.approvals).not_to receive(:reset)
@@ -47,22 +43,34 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
end
end
+ context 'with invalid approval' do
+ before do
+ allow(merge_request.approvals).to receive(:new).and_return(double(save: false))
+ end
+
+ it_behaves_like 'no-op call'
+ end
+
context 'with an already approved MR' do
before do
merge_request.approvals.create!(user: user)
end
- it 'does not create an approval' do
- expect { service.execute(merge_request) }.not_to change { merge_request.approvals.size }
- end
+ it_behaves_like 'no-op call'
+ end
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
- let(:action) { service.execute(merge_request) }
- end
+ context 'with a merged MR' do
+ let(:merge_request) { create(:merge_request, :merged) }
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
- let(:action) { service.execute(merge_request) }
+ it_behaves_like 'no-op call'
+ end
+
+ context 'user cannot update the merge request' do
+ before do
+ project.add_guest(user)
end
+
+ it_behaves_like 'no-op call'
end
context 'with valid approval' do
@@ -115,27 +123,5 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
let(:action) { service.execute(merge_request) }
end
end
-
- context 'user cannot update the merge request' do
- before do
- project.add_guest(user)
- end
-
- it 'does not update approvals' do
- expect { service.execute(merge_request) }.not_to change { merge_request.approvals.size }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
- let(:action) { service.execute(merge_request) }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
- let(:action) { service.execute(merge_request) }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestApprovalStateUpdated' do
- let(:action) { service.execute(merge_request) }
- end
- end
end
end
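
Both approval specs in this diff collapse their duplicated negative assertions into a shared example group. A reduced sketch of the mechanism: the group is defined once inside the describe block, and every context that should behave as a no-op simply includes it with it_behaves_like.

shared_examples 'no-op call' do
  it 'does not reset approvals' do
    expect(merge_request.approvals).not_to receive(:reset)

    service.execute(merge_request)
  end
end

context 'with a merged MR' do
  let(:merge_request) { create(:merge_request, :merged) }

  it_behaves_like 'no-op call'
end
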
diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb
index 5eb53b1bcba..416b28bff05 100644
--- a/spec/services/merge_requests/conflicts/list_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/list_service_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe MergeRequests::Conflicts::ListService, feature_category: :code_re
it 'returns a falsey value when the MR has a missing revision after a force push' do
merge_request = create_merge_request('conflict-resolvable')
service = conflicts_service(merge_request)
- allow(merge_request).to receive_message_chain(:target_branch_head, :raw, :id).and_return(Gitlab::Git::BLANK_SHA)
+ allow(merge_request).to receive_message_chain(:target_branch_head, :raw, :id).and_return(Gitlab::Git::SHA1_BLANK_SHA)
expect(service.can_be_resolved_in_ui?).to be_falsey
end
diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb
index 31b3e513a51..85a84f07094 100644
--- a/spec/services/merge_requests/get_urls_service_spec.rb
+++ b/spec/services/merge_requests/get_urls_service_spec.rb
@@ -10,8 +10,8 @@ RSpec.describe MergeRequests::GetUrlsService, feature_category: :code_review_wor
let(:source_branch) { "merge-test" }
let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" }
let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/#{merge_request.iid}" }
- let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
- let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
+ let(:new_branch_changes) { "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
+ let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::SHA1_BLANK_SHA} refs/heads/#{source_branch}" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master" }
@@ -131,7 +131,7 @@ RSpec.describe MergeRequests::GetUrlsService, feature_category: :code_review_wor
context 'pushing new branch and existing branch (with merge request created) at once' do
let!(:merge_request) { create(:merge_request, source_project: project, source_branch: "markdown") }
- let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
+ let(:new_branch_changes) { "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/markdown" }
let(:changes) { "#{new_branch_changes}\n#{existing_branch_changes}" }
let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch" }
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 038977e4fd0..e34eb804a82 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -34,9 +34,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService, feature_category: :sour
let(:label1) { 'mylabel1' }
let(:label2) { 'mylabel2' }
let(:label3) { 'mylabel3' }
- let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
+ let(:new_branch_changes) { "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
- let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
+ let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::SHA1_BLANK_SHA} refs/heads/#{source_branch}" }
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{project.default_branch}" }
let(:error_mr_required) { "A merge_request.create push option is required to create a merge request for branch #{source_branch}" }
@@ -802,7 +802,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService, feature_category: :sour
let(:changes) do
[
new_branch_changes,
- "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/feature_conflict"
+ "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/feature_conflict"
]
end
@@ -814,7 +814,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService, feature_category: :sour
let(:limit) { MergeRequests::PushOptionsHandlerService::LIMIT }
let(:changes) do
TestEnv::BRANCH_SHA.to_a[0..limit].map do |x|
- "#{Gitlab::Git::BLANK_SHA} #{x.first} refs/heads/#{x.last}"
+ "#{Gitlab::Git::SHA1_BLANK_SHA} #{x.first} refs/heads/#{x.last}"
end
end
diff --git a/spec/services/merge_requests/pushed_branches_service_spec.rb b/spec/services/merge_requests/pushed_branches_service_spec.rb
index de99fb244d3..bcde2fd5165 100644
--- a/spec/services/merge_requests/pushed_branches_service_spec.rb
+++ b/spec/services/merge_requests/pushed_branches_service_spec.rb
@@ -37,11 +37,11 @@ RSpec.describe MergeRequests::PushedBranchesService, feature_category: :source_c
end
it 'returns empty result without any SQL query performed' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
expect(service.execute).to be_empty
- end.count
+ end
- expect(control_count).to be_zero
+ expect(control.count).to be_zero
end
end
end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index dd50dfa49e0..e2b1c91d6eb 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -712,10 +712,10 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
it 'refreshes the merge request' do
expect(refresh_service).to receive(:execute_hooks)
- .with(@fork_merge_request, 'update', old_rev: Gitlab::Git::BLANK_SHA)
+ .with(@fork_merge_request, 'update', old_rev: Gitlab::Git::SHA1_BLANK_SHA)
allow_any_instance_of(Repository).to receive(:merge_base).and_return(@oldrev)
- refresh_service.execute(Gitlab::Git::BLANK_SHA, @newrev, 'refs/heads/master')
+ refresh_service.execute(Gitlab::Git::SHA1_BLANK_SHA, @newrev, 'refs/heads/master')
reload_mrs
expect(@merge_request.notes).to be_empty
diff --git a/spec/services/merge_requests/reload_diffs_service_spec.rb b/spec/services/merge_requests/reload_diffs_service_spec.rb
index 77056cbe541..a6654989374 100644
--- a/spec/services/merge_requests/reload_diffs_service_spec.rb
+++ b/spec/services/merge_requests/reload_diffs_service_spec.rb
@@ -45,11 +45,11 @@ RSpec.describe MergeRequests::ReloadDiffsService, :use_clean_rails_memory_store_
current_user
merge_request
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
subject.execute
- end.count
+ end
- expect { subject.execute }.not_to exceed_query_limit(control_count)
+ expect { subject.execute }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/services/merge_requests/remove_approval_service_spec.rb b/spec/services/merge_requests/remove_approval_service_spec.rb
index e4e54db5013..b0109a022eb 100644
--- a/spec/services/merge_requests/remove_approval_service_spec.rb
+++ b/spec/services/merge_requests/remove_approval_service_spec.rb
@@ -19,6 +19,34 @@ RSpec.describe MergeRequests::RemoveApprovalService, feature_category: :code_rev
project.add_developer(user)
end
+ shared_examples 'no-op call' do
+ it 'does not create an unapproval note or trigger a web hook' do
+ expect(service).not_to receive(:execute_hooks)
+ expect(SystemNoteService).not_to receive(:unapprove_mr)
+
+ execute!
+ end
+
+ it 'does not track merge request unapprove action' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_unapprove_mr_action).with(user: user)
+
+ execute!
+ end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { execute! }
+ end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { execute! }
+ end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestApprovalStateUpdated' do
+ let(:action) { execute! }
+ end
+ end
+
context 'with a user who has approved' do
let!(:approval) { create(:approval, user: user, merge_request: merge_request) }
let(:notification_service) { NotificationService.new }
@@ -27,6 +55,12 @@ RSpec.describe MergeRequests::RemoveApprovalService, feature_category: :code_rev
allow(service).to receive(:notification_service).and_return(notification_service)
end
+ context 'when the merge request is merged' do
+ let(:merge_request) { create(:merge_request, :merged, source_project: project) }
+
+ it_behaves_like 'no-op call'
+ end
+
it 'removes the approval' do
expect { execute! }.to change { merge_request.approvals.size }.from(2).to(1)
end
@@ -60,31 +94,7 @@ RSpec.describe MergeRequests::RemoveApprovalService, feature_category: :code_rev
end
context 'with a user who has not approved' do
- it 'does not create an unapproval note and triggers web hook' do
- expect(service).not_to receive(:execute_hooks)
- expect(SystemNoteService).not_to receive(:unapprove_mr)
-
- execute!
- end
-
- it 'does not track merge request unapprove action' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .not_to receive(:track_unapprove_mr_action).with(user: user)
-
- execute!
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
- let(:action) { execute! }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
- let(:action) { execute! }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestApprovalStateUpdated' do
- let(:action) { execute! }
- end
+ it_behaves_like 'no-op call'
end
end
end
diff --git a/spec/services/merge_requests/request_review_service_spec.rb b/spec/services/merge_requests/request_review_service_spec.rb
index ef96bf11e0b..a5f0d5b5c5a 100644
--- a/spec/services/merge_requests/request_review_service_spec.rb
+++ b/spec/services/merge_requests/request_review_service_spec.rb
@@ -71,6 +71,14 @@ RSpec.describe MergeRequests::RequestReviewService, feature_category: :code_revi
service.execute(merge_request, user)
end
+ it 'creates a system note' do
+ expect(SystemNoteService)
+ .to receive(:request_review)
+ .with(merge_request, project, current_user, user)
+
+ service.execute(merge_request, user)
+ end
+
it_behaves_like 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
let(:action) { result }
end
diff --git a/spec/services/milestones/destroy_service_spec.rb b/spec/services/milestones/destroy_service_spec.rb
index 209177c348b..a05e11c34d7 100644
--- a/spec/services/milestones/destroy_service_spec.rb
+++ b/spec/services/milestones/destroy_service_spec.rb
@@ -6,13 +6,14 @@ RSpec.describe Milestones::DestroyService, feature_category: :team_planning do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:milestone) { create(:milestone, title: 'Milestone v1.0', project: project) }
+ let(:container) { project }
before do
project.add_maintainer(user)
end
def service
- described_class.new(project, user, {})
+ described_class.new(container, user, {})
end
describe '#execute' do
@@ -45,6 +46,7 @@ RSpec.describe Milestones::DestroyService, feature_category: :team_planning do
context 'group milestones' do
let(:group) { create(:group) }
let(:group_milestone) { create(:milestone, group: group) }
+ let(:container) { group }
before do
project.update!(namespace: group)
diff --git a/spec/services/milestones/promote_service_spec.rb b/spec/services/milestones/promote_service_spec.rb
index 203ac2d3f40..caaf6488e40 100644
--- a/spec/services/milestones/promote_service_spec.rb
+++ b/spec/services/milestones/promote_service_spec.rb
@@ -62,13 +62,20 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
it 'sets issuables with new promoted milestone' do
issue = create(:issue, milestone: milestone, project: project)
+ create(:resource_milestone_event, issue: issue, milestone: milestone)
+
merge_request = create(:merge_request, milestone: milestone, source_project: project)
+ create(:resource_milestone_event, merge_request: merge_request, milestone: milestone)
promoted_milestone = service.execute(milestone)
expect(promoted_milestone).to be_group_milestone
+
expect(issue.reload.milestone).to eq(promoted_milestone)
expect(merge_request.reload.milestone).to eq(promoted_milestone)
+
+ expect(ResourceMilestoneEvent.where(milestone_id: promoted_milestone).count).to eq(2)
+ expect(ResourceMilestoneEvent.where(milestone_id: milestone).count).to eq(0)
end
end
@@ -101,9 +108,14 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
it 'sets all issuables with new promoted milestone' do
issue = create(:issue, milestone: milestone, project: project)
+ create(:resource_milestone_event, issue: issue, milestone: milestone)
issue_2 = create(:issue, milestone: milestone_2, project: project_2)
+ create(:resource_milestone_event, issue: issue_2, milestone: milestone_2)
+
merge_request = create(:merge_request, milestone: milestone, source_project: project)
+ create(:resource_milestone_event, merge_request: merge_request, milestone: milestone)
merge_request_2 = create(:merge_request, milestone: milestone_2, source_project: project_2)
+ create(:resource_milestone_event, merge_request: merge_request_2, milestone: milestone_2)
promoted_milestone = service.execute(milestone)
@@ -111,6 +123,10 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
expect(issue_2.reload.milestone).to eq(promoted_milestone)
expect(merge_request.reload.milestone).to eq(promoted_milestone)
expect(merge_request_2.reload.milestone).to eq(promoted_milestone)
+
+ expect(ResourceMilestoneEvent.where(milestone_id: promoted_milestone).count).to eq(4)
+ expect(ResourceMilestoneEvent.where(milestone_id: milestone).count).to eq(0)
+ expect(ResourceMilestoneEvent.where(milestone_id: milestone_2).count).to eq(0)
end
end
end
diff --git a/spec/services/ml/create_model_service_spec.rb b/spec/services/ml/create_model_service_spec.rb
index 74c1dd5fec7..88e7c00d1f9 100644
--- a/spec/services/ml/create_model_service_spec.rb
+++ b/spec/services/ml/create_model_service_spec.rb
@@ -50,9 +50,10 @@ RSpec.describe ::Ml::CreateModelService, feature_category: :mlops do
let(:name) { existing_model.name }
let(:project) { existing_model.project }
- it 'raises an error', :aggregate_failures do
- expect { create_model }.to raise_error(ActiveRecord::RecordInvalid)
+ it 'returns a model with errors', :aggregate_failures do
+ expect(create_model).not_to be_persisted
expect(Gitlab::InternalEvents).not_to have_received(:track_event)
+ expect(create_model.errors.full_messages).to eq(["Name has already been taken"])
end
end
diff --git a/spec/services/ml/create_model_version_service_spec.rb b/spec/services/ml/create_model_version_service_spec.rb
index b3aead4a92c..be2bfc86b54 100644
--- a/spec/services/ml/create_model_version_service_spec.rb
+++ b/spec/services/ml/create_model_version_service_spec.rb
@@ -75,5 +75,60 @@ RSpec.describe ::Ml::CreateModelVersionService, feature_category: :mlops do
expect(model.reload.latest_version.package.name).to eq(model.name)
expect(model.latest_version.package.version).to eq(model.latest_version.version)
end
+
+ context 'when metadata is supplied' do
+ let(:metadata) { [{ key: 'key1', value: 'value1' }, { key: 'key2', value: 'value2' }] }
+ let(:params) { { metadata: metadata } }
+
+ it 'creates metadata records', :aggregate_failures do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1)
+
+ expect(service.metadata.count).to be 2
+ end
+ end
+
+ # TODO: Ensure consistent error responses https://gitlab.com/gitlab-org/gitlab/-/issues/429731
+ context 'for metadata with duplicate keys' do
+ let(:metadata) { [{ key: 'key1', value: 'value1' }, { key: 'key1', value: 'value2' }] }
+ let(:params) { { metadata: metadata } }
+
+ it 'raises an error', :aggregate_failures do
+ expect { service }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ # TODO: Ensure consistent error responses https://gitlab.com/gitlab-org/gitlab/-/issues/429731
+ context 'for metadata with invalid keys' do
+ let(:metadata) { [{ key: 'key1', value: 'value1' }, { key: '', value: 'value2' }] }
+ let(:params) { { metadata: metadata } }
+
+ it 'raises an error', :aggregate_failures do
+ expect { service }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
+ context 'when a version string is supplied during creation' do
+ let(:params) { { version: '1.2.3' } }
+
+ it 'creates a package' do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1).and change {
+ Packages::MlModel::Package.count
+ }.by(1)
+ expect(model.reload.latest_version.version).to eq('1.2.3')
+ expect(model.latest_version.package.version).to eq('1.2.3')
+ end
+ end
+
+ context 'when a nil version string is supplied during creation' do
+ let(:params) { { version: nil } }
+
+ it 'creates a package' do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1).and change {
+ Packages::MlModel::Package.count
+ }.by(1)
+ expect(model.reload.latest_version.version).to eq('1.0.0')
+ expect(model.latest_version.package.version).to eq('1.0.0')
+ end
end
end
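
The new version-string examples above chain two change matchers with .and, so one block expectation checks that both counters move together. A minimal sketch of that compound matcher, using the model classes named in the diff:

expect { service }
  .to change { Ml::ModelVersion.count }.by(1)
  .and change { Packages::MlModel::Package.count }.by(1)
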
diff --git a/spec/services/namespaces/package_settings/update_service_spec.rb b/spec/services/namespaces/package_settings/update_service_spec.rb
index 41f3499a1bb..002c7df9284 100644
--- a/spec/services/namespaces/package_settings/update_service_spec.rb
+++ b/spec/services/namespaces/package_settings/update_service_spec.rb
@@ -46,7 +46,9 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
lock_npm_package_requests_forwarding: false,
pypi_package_requests_forwarding: nil,
lock_pypi_package_requests_forwarding: false,
- nuget_symbol_server_enabled: false
+ nuget_symbol_server_enabled: false,
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: 'foo'
}, to: {
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'RELEASE',
@@ -60,7 +62,9 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
lock_pypi_package_requests_forwarding: true,
- nuget_symbol_server_enabled: true
+ nuget_symbol_server_enabled: true,
+ terraform_module_duplicates_allowed: true,
+ terraform_module_duplicate_exception_regex: 'bar'
}
it_behaves_like 'returning a success'
@@ -112,7 +116,9 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
lock_pypi_package_requests_forwarding: true,
- nuget_symbol_server_enabled: true
+ nuget_symbol_server_enabled: true,
+ terraform_module_duplicates_allowed: true,
+ terraform_module_duplicate_exception_regex: 'bar'
}
end
diff --git a/spec/services/notification_recipients/build_service_spec.rb b/spec/services/notification_recipients/build_service_spec.rb
index bfd1dcd7d80..b4788428f14 100644
--- a/spec/services/notification_recipients/build_service_spec.rb
+++ b/spec/services/notification_recipients/build_service_spec.rb
@@ -21,13 +21,13 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla
service.build_new_note_recipients(note)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
service.build_new_note_recipients(note)
end
create_user
- expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control_count).with_threshold(threshold)
+ expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control).with_threshold(threshold)
end
end
@@ -76,13 +76,15 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla
service.build_new_review_recipients(review)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
service.build_new_review_recipients(review)
end
create_user
- expect { service.build_new_review_recipients(review) }.not_to exceed_query_limit(control_count).with_threshold(threshold)
+ expect do
+ service.build_new_review_recipients(review)
+ end.not_to exceed_query_limit(control).with_threshold(threshold)
end
end
@@ -130,13 +132,13 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla
service.build_requested_review_recipients(note)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
service.build_requested_review_recipients(note)
end
create_user
- expect { service.build_requested_review_recipients(note) }.not_to exceed_query_limit(control_count)
+ expect { service.build_requested_review_recipients(note) }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 40597c30c4a..15e7f794795 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -3179,6 +3179,22 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
+ describe '#invite_member' do
+ let_it_be(:group_member) { create(:group_member) }
+
+ subject(:invite_member) { notification.invite_member(group_member, 'token') }
+
+ it 'sends exactly one email' do
+ expect(Notify)
+ .to receive(:member_invited_email).with('Group', group_member.id, 'token').at_least(:once).and_call_original
+
+ invite_member
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email('Group', group_member.id, 'token', mail: 'member_invited_email')
+ end
+ end
+
describe '#new_instance_access_request', :deliver_mails_inline do
let_it_be(:user) { create(:user, :blocked_pending_approval) }
let_it_be(:admins) { create_list(:admin, 12, :with_sign_ins) }
@@ -3278,43 +3294,6 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
let(:source) { group }
end
end
-
- describe '#new_group_member' do
- let(:group) { create(:group) }
-
- it 'sends a notification' do
- group.add_guest(added_user)
- should_only_email(added_user)
- end
-
- describe 'when notifications are disabled' do
- before do
- create_global_setting_for(added_user, :disabled)
- end
-
- it 'does not send a notification' do
- group.add_guest(added_user)
- should_not_email_anyone
- end
- end
-
- it_behaves_like 'group emails are disabled' do
- let(:notification_target) { group }
- let(:notification_trigger) { group.add_guest(added_user) }
- end
- end
-
- describe '#updated_group_member_expiration' do
- let_it_be(:group_member) { create(:group_member) }
-
- it 'emails the user that their group membership expiry has changed' do
- expect_next_instance_of(NotificationService) do |notification|
- allow(notification).to receive(:updated_group_member_expiration).with(group_member)
- end
-
- group_member.update!(expires_at: 5.days.from_now)
- end
- end
end
describe 'ProjectMember', :deliver_mails_inline do
@@ -3444,29 +3423,6 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
- describe '#new_project_member' do
- it do
- create_member!
- should_only_email(added_user)
- end
-
- it_behaves_like 'project emails are disabled' do
- let(:notification_target) { project }
- let(:notification_trigger) { create_member! }
- end
-
- context 'when notifications are disabled' do
- before do
- create_global_setting_for(added_user, :disabled)
- end
-
- it do
- create_member!
- should_not_email_anyone
- end
- end
- end
-
describe '#member_about_to_expire' do
let_it_be(:group_member) { create(:group_member, expires_at: 7.days.from_now.to_date) }
let_it_be(:project_member) { create(:project_member, expires_at: 7.days.from_now.to_date) }
@@ -3487,9 +3443,92 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
end
+ end
+
+ describe '#new_member', :deliver_mails_inline do
+ let_it_be(:source) { create(:group) }
+ let_it_be(:added_user) { create(:user) }
+
+ subject(:new_member) { notification.new_member(member) }
+
+ shared_examples_for 'new member added' do |source_type|
+ it 'triggers a notification about the added access', deliver_mails_inline: false do
+ new_member
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email(source_type, member.id, mail: 'member_access_granted_email')
+ end
+ end
+
+ context 'when source is a Group' do
+ it_behaves_like 'new member added', 'Group' do
+ let_it_be(:member) { create(:group_member, source: source) }
+ end
+
+ it_behaves_like 'group emails are disabled' do
+ let(:notification_target) { source }
+ let(:notification_trigger) { notification_target.add_guest(added_user) }
+ end
+ end
+
+ context 'when source is a Project' do
+ let_it_be(:source) { create(:project) }
+
+ it_behaves_like 'new member added', 'Project' do
+ let_it_be(:member) { create(:project_member, source: project) }
+ end
+
+ it_behaves_like 'project emails are disabled' do
+ let_it_be(:notification_target) { source }
+ let(:notification_trigger) { source.add_guest(added_user) }
+ end
+ end
- def create_member!
- create(:project_member, user: added_user, project: project)
+ context 'when notifications are disabled' do
+ before do
+ create_global_setting_for(added_user, :disabled)
+ end
+
+ it 'does not send a notification' do
+ source.add_guest(added_user)
+ should_not_email_anyone
+ end
+ end
+ end
+
+ describe '#updated_member_expiration' do
+ subject(:updated_member_expiration) { notification.updated_member_expiration(member) }
+
+ context 'for group member' do
+ let_it_be(:member) { create(:group_member) }
+
+ it 'triggers a notification about the expiration change' do
+ updated_member_expiration
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email('Group', member.id, mail: 'member_expiration_date_updated_email')
+ end
+ end
+
+ context 'for project member' do
+ let_it_be(:member) { create(:project_member) }
+
+ it 'does not trigger a notification' do
+ updated_member_expiration
+
+ expect_delivery_jobs_count(0)
+ end
+ end
+ end
+
+ describe '#updated_member_access_level' do
+ let_it_be(:member) { create(:group_member) }
+
+ it 'triggers a notification about the access_level change' do
+ notification.updated_member_access_level(member)
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email('Group', member.id, mail: 'member_access_granted_email')
end
end
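
The consolidated '#new_member' block above leans on parameterised shared examples: any argument passed to it_behaves_like after the group name is forwarded to the shared_examples_for block. A trimmed sketch of that shape, reusing names from this spec (notification, source, and the email helpers are assumed to be defined as they are here):

shared_examples_for 'new member added' do |source_type|
  it 'enqueues one access-granted email' do
    notification.new_member(member)

    expect_delivery_jobs_count(1)
    expect_enqueud_email(source_type, member.id, mail: 'member_access_granted_email')
  end
end

it_behaves_like 'new member added', 'Group' do
  let_it_be(:member) { create(:group_member, source: source) }
end
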
diff --git a/spec/services/organizations/create_service_spec.rb b/spec/services/organizations/create_service_spec.rb
index aae89517c15..bbc0f3d7515 100644
--- a/spec/services/organizations/create_service_spec.rb
+++ b/spec/services/organizations/create_service_spec.rb
@@ -29,11 +29,13 @@ RSpec.describe Organizations::CreateService, feature_category: :cell do
shared_examples 'creating an organization' do
it 'creates the organization' do
expect { response }.to change { Organizations::Organization.count }
+ .and change { Organizations::OrganizationUser.count }.by(1)
expect(response).to be_success
expect(created_organization.name).to eq(params[:name])
expect(created_organization.path).to eq(params[:path])
expect(created_organization.description).to eq(params[:description])
expect(created_organization.avatar.filename).to eq(avatar_filename)
+ expect(created_organization.owner?(current_user)).to be(true)
end
end
diff --git a/spec/services/organizations/update_service_spec.rb b/spec/services/organizations/update_service_spec.rb
index 148840770db..30c07ae1d13 100644
--- a/spec/services/organizations/update_service_spec.rb
+++ b/spec/services/organizations/update_service_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Organizations::UpdateService, feature_category: :cell do
context 'when user has permission' do
before_all do
- create(:organization_user, organization: organization, user: current_user)
+ create(:organization_user, :owner, organization: organization, user: current_user)
end
shared_examples 'updating an organization' do
@@ -60,6 +60,14 @@ RSpec.describe Organizations::UpdateService, feature_category: :cell do
it_behaves_like 'updating an organization'
end
+ context 'when avatar is set to nil' do
+ let_it_be(:organization_detail) { create(:organization_detail, organization: organization) }
+ let(:extra_params) { { avatar: nil } }
+ let(:description) { organization_detail.description }
+
+ it_behaves_like 'updating an organization'
+ end
+
include_examples 'updating an organization'
context 'when the organization is not updated' do
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index f02e53b67cb..7a91fdfc5b9 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -25,7 +25,13 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
let(:version_data) { params.dig('versions', version) }
let(:lease_key) { "packages:npm:create_package_service:packages:#{project.id}_#{package_name}_#{version}" }
+ shared_examples 'valid service response' do
+ it { is_expected.to be_success }
+ end
+
shared_examples 'valid package' do
+ let(:package) { subject[:package] }
+
it 'creates a package' do
expect { subject }
.to change { Packages::Package.count }.by(1)
@@ -34,30 +40,27 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
.and change { Packages::Npm::Metadatum.count }.by(1)
end
- it_behaves_like 'assigns the package creator' do
- let(:package) { subject }
- end
-
- it { is_expected.to be_valid }
+ it_behaves_like 'assigns the package creator'
- it 'creates a package with name and version' do
- package = subject
+ it 'returns a valid package' do
+ subject
- expect(package.name).to eq(package_name)
- expect(package.version).to eq(version)
+ expect(package).to be_valid
+ .and have_attributes name: package_name, version: version
+ expect(package.npm_metadatum.package_json).to eq(version_data)
end
- it { expect(subject.npm_metadatum.package_json).to eq(version_data) }
-
- it { expect(subject.name).to eq(package_name) }
- it { expect(subject.version).to eq(version) }
-
context 'with build info' do
let_it_be(:job) { create(:ci_build, user: user) }
let(:params) { super().merge(build: job) }
- it_behaves_like 'assigns build to package'
- it_behaves_like 'assigns status to package'
+ it_behaves_like 'assigns build to package' do
+ subject { super().payload.fetch(:package) }
+ end
+
+ it_behaves_like 'assigns status to package' do
+ subject { super().payload.fetch(:package) }
+ end
it 'creates a package file build info' do
expect { subject }.to change { Packages::PackageFileBuildInfo.count }.by(1)
@@ -163,31 +166,35 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
.and change { Packages::Package.npm.count }.by(1)
.and change { Packages::Tag.count }.by(1)
.and change { Packages::Npm::Metadatum.count }.by(1)
- expect(subject.npm_metadatum.package_json[field]).to be_blank
+ expect(package.npm_metadatum.package_json[field]).to be_blank
end
end
end
end
context 'scoped package' do
+ it_behaves_like 'valid service response'
it_behaves_like 'valid package'
end
context 'when user is no project member' do
let_it_be(:user) { create(:user) }
+ it_behaves_like 'valid service response'
it_behaves_like 'valid package'
end
context 'scoped package not following the naming convention' do
let(:package_name) { '@any-scope/package' }
+ it_behaves_like 'valid service response'
it_behaves_like 'valid package'
end
context 'unscoped package' do
let(:package_name) { 'unscoped-package' }
+ it_behaves_like 'valid service response'
it_behaves_like 'valid package'
end
@@ -195,8 +202,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
let(:package_name) { "@#{namespace.path}/my_package" }
let!(:existing_package) { create(:npm_package, project: project, name: package_name, version: '1.0.1') }
- it { expect(subject[:http_status]).to eq 403 }
- it { expect(subject[:message]).to be 'Package already exists.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Package already exists.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_PACKAGE_EXISTS }
context 'marked as pending_destruction' do
before do
@@ -217,10 +224,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
let(:max_file_size) { 5.bytes }
shared_examples_for 'max file size validation failure' do
- it 'returns a 400 error', :aggregate_failures do
- expect(subject[:http_status]).to eq 400
- expect(subject[:message]).to be 'File is too large.'
- end
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'File is too large.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_INVALID_PARAMETER }
end
before do
@@ -280,8 +285,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
context 'with empty versions' do
let(:params) { super().merge!({ versions: {} }) }
- it { expect(subject[:http_status]).to eq 400 }
- it { expect(subject[:message]).to eq 'Version is empty.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Version is empty.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_INVALID_PARAMETER }
end
context 'with invalid versions' do
@@ -303,8 +308,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
context 'with empty attachment data' do
let(:params) { super().merge({ _attachments: { "#{package_name}-#{version}.tgz" => { data: '' } } }) }
- it { expect(subject[:http_status]).to eq 400 }
- it { expect(subject[:message]).to eq 'Attachment data is empty.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Attachment data is empty.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_INVALID_PARAMETER }
end
it 'obtains a lease to create a new package' do
@@ -318,8 +323,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
stub_exclusive_lease_taken(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT)
end
- it { expect(subject[:http_status]).to eq 400 }
- it { expect(subject[:message]).to eq 'Could not obtain package lease. Please try again.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Could not obtain package lease. Please try again.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_PACKAGE_LEASE_TAKEN }
end
context 'when feature flag :packages_protected_packages disabled' do
@@ -364,7 +369,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
let(:service) { described_class.new(project, current_user, params) }
shared_examples 'protected package' do
- it { is_expected.to include http_status: 403, message: 'Package protected.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Package protected.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_PACKAGE_PROTECTED }
it 'does not create any npm-related package records' do
expect { subject }
diff --git a/spec/services/packages/terraform_module/create_package_service_spec.rb b/spec/services/packages/terraform_module/create_package_service_spec.rb
index 3355dfcf5ec..c1a41cd9676 100644
--- a/spec/services/packages/terraform_module/create_package_service_spec.rb
+++ b/spec/services/packages/terraform_module/create_package_service_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category: :package_registry do
- let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:namespace) { create(:group) }
let_it_be(:project) { create(:project, namespace: namespace) }
let_it_be(:user) { create(:user) }
let_it_be(:sha256) { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
let(:overrides) { {} }
@@ -36,10 +37,72 @@ RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category
context 'package already exists elsewhere' do
let(:project2) { create(:project, namespace: namespace) }
- let!(:existing_package) { create(:terraform_module_package, project: project2, name: 'foo/bar', version: '1.0.0') }
+ let!(:existing_package) do
+ create(:terraform_module_package, project: project2, name: 'foo/bar', version: '1.0.0')
+ end
+
+ context 'when duplicates not allowed' do
+ it { expect(subject.reason).to eq :forbidden }
+ it { expect(subject.message).to be 'A package with the same name already exists in the namespace' }
+ end
+
+ context 'when duplicates allowed' do
+ before do
+ package_settings.update_column(:terraform_module_duplicates_allowed, true)
+ end
+
+ it_behaves_like 'creating a package'
+ end
+
+ context 'with duplicate regex exception' do
+ before do
+ package_settings.update_columns(
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: regex
+ )
+ end
+
+ context 'when regex matches' do
+ let(:regex) { ".*#{existing_package.name.last(3)}.*" }
+
+ it_behaves_like 'creating a package'
+ end
- it { expect(subject[:http_status]).to eq 403 }
- it { expect(subject[:message]).to be 'Access Denied' }
+ context 'when regex does not match' do
+ let(:regex) { '.*not-a-match.*' }
+
+ it { expect(subject.reason).to eq :forbidden }
+ it { expect(subject.message).to be 'A package with the same name already exists in the namespace' }
+ end
+ end
+
+ context 'for ancestor namespace' do
+ let_it_be(:package_settings) { create(:namespace_package_setting, :group) }
+ let_it_be(:parent_namespace) { package_settings.namespace }
+
+ before do
+ namespace.update!(parent: parent_namespace)
+ end
+
+ context 'when duplicates allowed in an ancestor' do
+ before do
+ package_settings.update_column(:terraform_module_duplicates_allowed, true)
+ end
+
+ it_behaves_like 'creating a package'
+ end
+
+ context 'when duplicates allowed in an ancestor with exception' do
+ before do
+ package_settings.update_columns(
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: ".*#{existing_package.name.last(3)}.*"
+ )
+ end
+
+ it_behaves_like 'creating a package'
+ end
+ end
context 'marked as pending_destruction' do
before do
@@ -53,7 +116,7 @@ RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category
context 'version already exists' do
let!(:existing_version) { create(:terraform_module_package, project: project, name: 'foo/bar', version: '1.0.1') }
- it { expect(subject[:http_status]).to eq 403 }
+ it { expect(subject[:reason]).to eq :forbidden }
it { expect(subject[:message]).to be 'Package version already exists.' }
context 'marked as pending_destruction' do
@@ -68,7 +131,7 @@ RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category
context 'with empty version' do
let(:overrides) { { module_version: '' } }
- it { expect(subject[:http_status]).to eq 400 }
+ it { expect(subject[:reason]).to eq :bad_request }
it { expect(subject[:message]).to eq 'Version is empty.' }
end
end
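
The package-service hunks above move assertions off the raw result hash (subject[:http_status]) and onto ServiceResponse-style readers: the example checks be_error plus a machine-readable reason alongside the human-readable message. A compact sketch of the asserted shape, with the values taken from this diff and subject assumed to wrap the service call as it does in the spec:

expect(subject).to be_error
expect(subject.reason).to eq(:forbidden)
expect(subject.message).to eq('A package with the same name already exists in the namespace')
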
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 63b5d54a18d..0e46391c0ad 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -188,28 +188,4 @@ RSpec.describe PagesDomains::ObtainLetsEncryptCertificateService, feature_catego
service.execute
end
end
-
- context 'when the domain URL is longer than 64 characters' do
- let(:long_domain) { "a.b.c.#{'d' * 63}" }
- let(:pages_domain) { create(:pages_domain, :without_certificate, :without_key, domain: long_domain) }
- let(:service) { described_class.new(pages_domain) }
-
- it 'logs an error and does not proceed with certificate acquisition' do
- expect(Gitlab::AppLogger).to receive(:error).with(
- hash_including(
- message: "Domain name too long for Let's Encrypt certificate",
- pages_domain: long_domain,
- pages_domain_bytesize: long_domain.bytesize,
- max_allowed_bytesize: described_class::MAX_DOMAIN_LENGTH,
- project_id: pages_domain.project_id
- )
- )
-
- # Ensure that the certificate acquisition is not attempted
- expect(::PagesDomains::CreateAcmeOrderService).not_to receive(:new)
- expect(PagesDomainSslRenewalWorker).not_to receive(:perform_in)
-
- service.execute
- end
- end
end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 167baed06e7..c0bc8e1cc6e 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe PostReceiveService, feature_category: :team_planning do
let(:repository) { project.repository }
let(:changes) do
- "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
+ "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
end
let(:params) do
diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb
index f6aca9970c8..6c185024d28 100644
--- a/spec/services/preview_markdown_service_spec.rb
+++ b/spec/services/preview_markdown_service_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe PreviewMarkdownService, feature_category: :team_planning do
result = service.execute
- expect(result[:text]).to eq "Please do it\n\n/assign #{user.to_reference}"
+ expect(result[:text]).to eq "Please do it\n<p>/assign #{user.to_reference}</p>"
end
end
diff --git a/spec/services/projects/cleanup_service_spec.rb b/spec/services/projects/cleanup_service_spec.rb
index 533a09f7bc7..90f360c5dbd 100644
--- a/spec/services/projects/cleanup_service_spec.rb
+++ b/spec/services/projects/cleanup_service_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe Projects::CleanupService, feature_category: :source_code_manageme
Gitaly::ApplyBfgObjectMapStreamResponse::Entry.new(
type: :COMMIT,
old_oid: old_oid,
- new_oid: Gitlab::Git::BLANK_SHA
+ new_oid: Gitlab::Git::SHA1_BLANK_SHA
)
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 3aea329a45f..e5dd17a3c7c 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -166,6 +166,14 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
end
end
+ context 'deleting a project with deployments' do
+ let!(:deployment) { create(:deployment, project: project) }
+
+ it 'deletes deployments' do
+ expect { destroy_project(project, user, {}) }.to change(Deployment, :count).by(-1)
+ end
+ end
+
it_behaves_like 'deleting the project'
context 'personal projects count cache' do
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index e6418c7b4ea..949421c205f 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -3,165 +3,159 @@
require 'spec_helper'
RSpec.describe Projects::ForkService, feature_category: :source_code_management do
- include ProjectForksHelper
+ subject(:service) { described_class.new(project, user, params) }
+
+ let_it_be_with_reload(:project) { create(:project, :repository, star_count: 100, description: 'project') }
+ let_it_be_with_reload(:user) { create(:user) }
+
+ let(:params) { { namespace: namespace } }
+ let(:namespace) { user.namespace }
shared_examples 'forks count cache refresh' do
it 'flushes the forks count cache of the source project', :clean_gitlab_redis_cache do
expect(from_project.forks_count).to be_zero
- fork_project(from_project, to_user, using_service: true)
+ described_class.new(from_project, to_user, params).execute
+
BatchLoader::Executor.clear_current
- expect(from_project.forks_count).to eq(1)
+ expect(from_project.reload.forks_count).to eq(1)
end
end
- context 'when forking a new project' do
- describe 'fork by user' do
+ describe '#execute' do
+ subject(:fork_of_project) { service.execute }
+
+ before do
+ # NOTE: Avatar file is dropped after project reload. Explicitly re-add it for each test.
+ project.avatar = fixture_file_upload("spec/fixtures/dk.png", "image/png")
+ end
+
+ context 'when forker is a guest' do
before do
- @from_user = create(:user)
- @from_namespace = @from_user.namespace
- avatar = fixture_file_upload("spec/fixtures/dk.png", "image/png")
- @from_project = create(
- :project,
- :repository,
- creator_id: @from_user.id,
- namespace: @from_namespace,
- star_count: 107,
- avatar: avatar,
- description: 'wow such project',
- external_authorization_classification_label: 'classification-label'
- )
- @to_user = create(:user)
- @to_namespace = @to_user.namespace
- @from_project.add_member(@to_user, :developer)
+ project.add_member(user, :guest)
end
- context 'fork project' do
- context 'when forker is a guest' do
- before do
- @guest = create(:user)
- @from_project.add_member(@guest, :guest)
- end
- subject { fork_project(@from_project, @guest, using_service: true) }
+ it 'does not create a fork' do
+ is_expected.not_to be_persisted
+ expect(subject.errors[:forked_from_project_id]).to eq(['is forbidden'])
+ end
- it { is_expected.not_to be_persisted }
- it { expect(subject.errors[:forked_from_project_id]).to eq(['is forbidden']) }
+ it 'does not create a fork network' do
+ expect { subject }.not_to change { project.reload.fork_network }
+ end
+ end
- it 'does not create a fork network' do
- expect { subject }.not_to change { @from_project.reload.fork_network }
- end
- end
+ context 'when forker is a developer' do
+ before do
+ project.add_member(user, :developer)
+ end
- it_behaves_like 'forks count cache refresh' do
- let(:from_project) { @from_project }
- let(:to_user) { @to_user }
- end
-
- describe "successfully creates project in the user namespace" do
- let(:to_project) { fork_project(@from_project, @to_user, namespace: @to_user.namespace, using_service: true) }
-
- it { expect(to_project).to be_persisted }
- it { expect(to_project.errors).to be_empty }
- it { expect(to_project.first_owner).to eq(@to_user) }
- it { expect(to_project.namespace).to eq(@to_user.namespace) }
- it { expect(to_project.star_count).to be_zero }
- it { expect(to_project.description).to eq(@from_project.description) }
- it { expect(to_project.avatar.file).to be_exists }
- it { expect(to_project.ci_config_path).to eq(@from_project.ci_config_path) }
- it { expect(to_project.external_authorization_classification_label).to eq(@from_project.external_authorization_classification_label) }
- it { expect(to_project.suggestion_commit_message).to eq(@from_project.suggestion_commit_message) }
- it { expect(to_project.merge_commit_template).to eq(@from_project.merge_commit_template) }
- it { expect(to_project.squash_commit_template).to eq(@from_project.squash_commit_template) }
-
- # This test is here because we had a bug where the from-project lost its
- # avatar after being forked.
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/26158
- it "after forking the from-project still has its avatar" do
- # If we do not fork the project first we cannot detect the bug.
- expect(to_project).to be_persisted
-
- expect(@from_project.avatar.file).to be_exists
- end
+ it 'creates a fork of the project' do
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.errors).to be_empty
+ expect(fork_of_project.first_owner).to eq(user)
+ expect(fork_of_project.namespace).to eq(user.namespace)
+ expect(fork_of_project.star_count).to be_zero
+ expect(fork_of_project.description).to eq(project.description)
+ expect(fork_of_project.avatar.file).to be_exists
+ expect(fork_of_project.ci_config_path).to eq(project.ci_config_path)
+ expect(fork_of_project.external_authorization_classification_label).to eq(project.external_authorization_classification_label)
+ expect(fork_of_project.suggestion_commit_message).to eq(project.suggestion_commit_message)
+ expect(fork_of_project.merge_commit_template).to eq(project.merge_commit_template)
+ expect(fork_of_project.squash_commit_template).to eq(project.squash_commit_template)
+ end
- it_behaves_like 'forks count cache refresh' do
- let(:from_project) { @from_project }
- let(:to_user) { @to_user }
- end
+ # This test is here because we had a bug where the from-project lost its
+ # avatar after being forked.
+ # https://gitlab.com/gitlab-org/gitlab-foss/issues/26158
+ it 'after forking the original project still has its avatar' do
+ # If we do not fork the project first we cannot detect the bug.
+ expect(fork_of_project).to be_persisted
- it 'creates a fork network with the new project and the root project set' do
- to_project
- fork_network = @from_project.reload.fork_network
+ expect(project.avatar.file).to be_exists
+ end
- expect(fork_network).not_to be_nil
- expect(fork_network.root_project).to eq(@from_project)
- expect(fork_network.projects).to contain_exactly(@from_project, to_project)
- end
+ it_behaves_like 'forks count cache refresh' do
+ let(:from_project) { project }
+ let(:to_user) { user }
+ end
- it 'imports the repository of the forked project', :sidekiq_might_not_need_inline do
- to_project = fork_project(@from_project, @to_user, repository: true, using_service: true)
+ it 'creates a fork network with the new project and the root project set' do
+ subject
- expect(to_project.empty_repo?).to be_falsy
- end
- end
+ fork_network = project.reload.fork_network
- context 'creating a fork of a fork' do
- let(:from_forked_project) { fork_project(@from_project, @to_user, using_service: true) }
- let(:other_namespace) do
- group = create(:group)
- group.add_owner(@to_user)
- group
- end
+ expect(fork_network).not_to be_nil
+ expect(fork_network.root_project).to eq(project)
+ expect(fork_network.projects).to contain_exactly(project, fork_of_project)
+ end
- let(:to_project) { fork_project(from_forked_project, @to_user, namespace: other_namespace, using_service: true) }
+ it 'imports the repository of the forked project', :sidekiq_might_not_need_inline do
+ expect(fork_of_project).to be_persisted
- it 'sets the root of the network to the root project' do
- expect(to_project.fork_network.root_project).to eq(@from_project)
- end
+ # The call to project.repository.after_import in RepositoryForkWorker does
+ # not reset the @exists variable of this fork_of_project.repository, so we
+ # have to explicitly call this method to clear the @exists variable of the
+ # instance we're returning here.
+ fork_of_project.repository.expire_content_cache
- it 'sets the forked_from_project on the membership' do
- expect(to_project.fork_network_member.forked_from_project).to eq(from_forked_project)
- end
+ expect(fork_of_project.empty_repo?).to be_falsey
+ end
- context 'when the forked project has higher visibility than the root project' do
- let(:root_project) { create(:project, :public) }
+ context 'when creating fork of the fork' do
+ let_it_be(:other_namespace) { create(:group).tap { |group| group.add_owner(user) } }
- it 'successfully creates a fork of the fork with correct visibility' do
- forked_project = fork_project(root_project, @to_user, using_service: true)
+ it 'creates a new project' do
+ fork_of_project = described_class.new(project, user, params).execute
+ expect(fork_of_project).to be_persisted
- root_project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ fork_of_fork = described_class.new(fork_of_project, user, { namespace: other_namespace }).execute
+ expect(fork_of_fork).to be_persisted
- # Forked project visibility is not affected by root project visibility change
- expect(forked_project).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ expect(fork_of_fork).to be_valid
+ expect(fork_of_fork.fork_network.root_project).to eq(project)
+ expect(fork_of_fork.fork_network_member.forked_from_project).to eq(fork_of_project)
+ end
- fork_of_the_fork = fork_project(forked_project, @to_user, namespace: other_namespace, using_service: true)
+ context 'when the forked project has higher visibility than the root project' do
+ let_it_be(:root_project) { create(:project, :public) }
- expect(fork_of_the_fork).to be_valid
- expect(fork_of_the_fork).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- end
- end
+ it 'successfully creates a fork of the fork with correct visibility' do
+ fork_of_project = described_class.new(root_project, user, params).execute
+ expect(fork_of_project).to be_persisted
- it_behaves_like 'forks count cache refresh' do
- let(:from_project) { from_forked_project }
- let(:to_user) { @to_user }
+ root_project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+
+ # Forked project visibility is not affected by root project visibility change
+ expect(fork_of_project).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ fork_of_fork = described_class.new(fork_of_project, user, { namespace: other_namespace }).execute
+ expect(fork_of_fork).to be_persisted
+
+ expect(fork_of_fork).to be_valid
+ expect(fork_of_fork).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
end
end
+
+ it_behaves_like 'forks count cache refresh' do
+ let(:from_project) { described_class.new(project, user, { namespace: other_namespace }).execute }
+ let(:to_user) { user }
+ end
end
- context 'project already exists' do
- it "fails due to validation, not transaction failure" do
- @existing_project = create(:project, :repository, creator_id: @to_user.id, path: @from_project.path, namespace: @to_namespace)
- @to_project = fork_project(@from_project, @to_user, namespace: @to_namespace, using_service: true)
- expect(@existing_project).to be_persisted
+ context 'when project already exists' do
+ it 'fails due to validation, not transaction failure' do
+ existing_project = create(:project, namespace: namespace, path: project.path)
+ expect(existing_project).to be_persisted
- expect(@to_project).not_to be_persisted
- expect(@to_project.errors[:path]).to eq(['has already been taken'])
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:path]).to eq(['has already been taken'])
end
end
- context 'repository in legacy storage already exists' do
- let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(@to_user.namespace.full_path, "#{@from_project.path}.git"), nil, nil) }
- let(:params) { { namespace: @to_user.namespace, using_service: true } }
+ context 'when repository in legacy storage already exists' do
+ let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(user.namespace.full_path, "#{project.path}.git"), nil, nil) }
before do
stub_application_setting(hashed_storage_enabled: false)
@@ -172,59 +166,54 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
raw_fake_repo.remove
end
- subject { fork_project(@from_project, @to_user, params) }
-
it 'does not allow creation' do
- expect(subject).not_to be_persisted
- expect(subject.errors.messages).to have_key(:base)
- expect(subject.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ fork_of_project
+
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors.messages).to have_key(:base)
+ expect(fork_of_project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
end
context 'when repository disk validation is explicitly skipped' do
let(:params) { super().merge(skip_disk_validation: true) }
it 'allows fork project creation' do
- expect(subject).to be_persisted
- expect(subject.errors.messages).to be_empty
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.errors.messages).to be_empty
end
end
end
- context "CI/CD settings" do
- let(:to_project) { fork_project(@from_project, @to_user, using_service: true) }
+ context 'CI/CD settings' do
+ context 'when origin has git depth specified' do
+ it 'inherits default_git_depth from the origin project' do
+ project.update!(ci_default_git_depth: 42)
- context "when origin has git depth specified" do
- before do
- @from_project.update!(ci_default_git_depth: 42)
- end
-
- it "inherits default_git_depth from the origin project" do
- expect(to_project.ci_default_git_depth).to eq(42)
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.ci_default_git_depth).to eq(42)
end
end
- context "when origin does not define git depth" do
- before do
- @from_project.update!(ci_default_git_depth: nil)
- end
+ context 'when origin does not define git depth' do
+ it 'the fork has git depth set to 0' do
+ project.update!(ci_default_git_depth: nil)
- it "the fork has git depth set to 0" do
- expect(to_project.ci_default_git_depth).to eq(0)
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.ci_default_git_depth).to eq(0)
end
end
end
- context "when project has restricted visibility level" do
- context "and only one visibility level is restricted" do
+ context 'when project has restricted visibility level' do
+ context 'and only one visibility level is restricted' do
before do
- @from_project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
end
- it "creates fork with lowest level" do
- forked_project = fork_project(@from_project, @to_user, using_service: true)
-
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ it 'creates fork with lowest level' do
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
end
@@ -233,289 +222,284 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PRIVATE])
end
- it "creates fork with private visibility levels" do
- forked_project = fork_project(@from_project, @to_user, using_service: true)
-
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ it "doesn't create a fork" do
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:visibility_level]).to eq ['private has been restricted by your GitLab administrator']
end
end
end
context 'when forking is disabled' do
before do
- @from_project.project_feature.update_attribute(
- :forking_access_level, ProjectFeature::DISABLED)
+ project.project_feature.update_attribute(:forking_access_level, ProjectFeature::DISABLED)
end
- it 'fails' do
- to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace, using_service: true)
-
- expect(to_project.errors[:forked_from_project_id]).to eq(['is forbidden'])
+ it 'does not create a fork' do
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:forked_from_project_id]).to eq(['is forbidden'])
end
end
- end
- describe 'fork to namespace' do
- before do
- @group_owner = create(:user)
- @developer = create(:user)
- @project = create(
- :project, :repository,
- creator_id: @group_owner.id,
- star_count: 777,
- description: 'Wow, such a cool project!',
- ci_config_path: 'debian/salsa-ci.yml'
- )
- @group = create(:group)
- @group.add_member(@group_owner, GroupMember::OWNER)
- @group.add_member(@developer, GroupMember::DEVELOPER)
- @project.add_member(@developer, :developer)
- @project.add_member(@group_owner, :developer)
- @opts = { namespace: @group, using_service: true }
- end
+ context 'when forking to the group namespace' do
+ context 'when user owns a target group' do
+ let_it_be_with_reload(:namespace) { create(:group).tap { |group| group.add_owner(user) } }
+
+ it 'creates a fork in the group' do
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.first_owner).to eq(user)
+ expect(fork_of_project.namespace).to eq(namespace)
+ end
+
+ context 'when project already exists in group' do
+ it 'fails due to validation, not transaction failure' do
+ existing_project = create(:project, :repository, path: project.path, namespace: namespace)
+ expect(existing_project).to be_persisted
+
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:path]).to eq(['has already been taken'])
+ end
+ end
- context 'fork project for group' do
- it 'group owner successfully forks project into the group' do
- to_project = fork_project(@project, @group_owner, @opts)
+ context 'when the namespace has a lower visibility level than the project' do
+ let_it_be(:namespace) { create(:group, :private).tap { |group| group.add_owner(user) } }
+ let_it_be(:project) { create(:project, :public) }
- expect(to_project).to be_persisted
- expect(to_project.errors).to be_empty
- expect(to_project.first_owner).to eq(@group_owner)
- expect(to_project.namespace).to eq(@group)
- expect(to_project.name).to eq(@project.name)
- expect(to_project.path).to eq(@project.path)
- expect(to_project.description).to eq(@project.description)
- expect(to_project.ci_config_path).to eq(@project.ci_config_path)
- expect(to_project.star_count).to be_zero
+ it 'creates the project with the lower visibility level' do
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
end
- end
- context 'fork project for group when user not owner' do
- it 'group developer fails to fork project into the group' do
- to_project = fork_project(@project, @developer, @opts)
+ context 'when user is not a group owner' do
+ let_it_be(:namespace) { create(:group).tap { |group| group.add_developer(user) } }
- expect(to_project.errors[:namespace]).to eq(['is not valid'])
+ it 'does not create a fork' do
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:namespace]).to eq(['is not valid'])
+ end
end
end
- context 'project already exists in group' do
- it 'fails due to validation, not transaction failure' do
- existing_project = create(:project, :repository, path: @project.path, namespace: @group)
- to_project = fork_project(@project, @group_owner, @opts)
- expect(existing_project.persisted?).to be_truthy
- expect(to_project.errors[:path]).to eq(['has already been taken'])
+ context 'with optional attributes' do
+ let(:params) { super().merge(path: 'forked', name: 'My Fork', description: 'Description', visibility: 'private') }
+
+ it 'sets optional attributes to specified values' do
+ expect(fork_of_project).to be_persisted
+
+ expect(fork_of_project.path).to eq('forked')
+ expect(fork_of_project.name).to eq('My Fork')
+ expect(fork_of_project.description).to eq('Description')
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
- end
- context 'when the namespace has a lower visibility level than the project' do
- it 'creates the project with the lower visibility level' do
- public_project = create(:project, :public)
- private_group = create(:group, :private)
- group_owner = create(:user)
- private_group.add_owner(group_owner)
+ context 'when an unknown visibility is requested' do
+ let_it_be(:project) { create(:project, :public) }
+
+ let(:params) { super().merge(visibility: 'unknown') }
- forked_project = fork_project(public_project, group_owner, namespace: private_group, using_service: true)
+ it 'sets visibility level to private' do
+ expect(fork_of_project).to be_persisted
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
end
- end
- end
- describe 'fork with optional attributes' do
- let(:public_project) { create(:project, :public) }
-
- it 'sets optional attributes to specified values' do
- forked_project = fork_project(
- public_project,
- nil,
- namespace: public_project.namespace,
- path: 'forked',
- name: 'My Fork',
- description: 'Description',
- visibility: 'internal',
- using_service: true
- )
-
- expect(forked_project.path).to eq('forked')
- expect(forked_project.name).to eq('My Fork')
- expect(forked_project.description).to eq('Description')
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
+ context 'when requested visibility level is greater than allowed' do
+ let_it_be(:project) { create(:project, :internal) }
- it 'sets visibility level to private if an unknown visibility is requested' do
- forked_project = fork_project(public_project, nil, using_service: true, visibility: 'unknown')
+ let(:params) { super().merge(visibility: 'public') }
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
+ it 'sets visibility level to project visibility' do
+ expect(fork_of_project).to be_persisted
- it 'sets visibility level to project visibility level if requested visibility is greater' do
- private_project = create(:project, :private)
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
- forked_project = fork_project(private_project, nil, using_service: true, visibility: 'public')
+ context 'when target namespace has lower visibility than a project' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:namespace) { create(:group, :private).tap { |group| group.add_owner(user) } }
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
+ it 'sets visibility level to target namespace visibility level' do
+ expect(fork_of_project).to be_persisted
+
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
- it 'sets visibility level to target namespace visibility level if requested visibility is greater' do
- private_group = create(:group, :private)
+ context 'when project has custom visibility settings' do
+ let_it_be(:project) { create(:project, :public) }
- forked_project = fork_project(public_project, nil, namespace: private_group, using_service: true, visibility: 'public')
+ let(:attrs) do
+ ProjectFeature::FEATURES.to_h do |f|
+ ["#{f}_access_level", ProjectFeature::PRIVATE]
+ end
+ end
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ before do
+ project.project_feature.update!(attrs)
+ end
+
+ it 'copies project features visibility settings to the fork' do
+ expect(fork_of_project).to be_persisted
+
+ expect(fork_of_project.project_feature.slice(attrs.keys)).to eq(attrs)
+ end
+ end
end
- it 'copies project features visibility settings to the fork', :aggregate_failures do
- attrs = ProjectFeature::FEATURES.to_h do |f|
- ["#{f}_access_level", ProjectFeature::PRIVATE]
+ context 'when a project is already forked' do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:group) { create(:group).tap { |group| group.add_owner(user) } }
+
+ before do
+ # Stub everything required to move a project to a Gitaly shard that does not exist
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
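+        # Returning a random filesystem id for 'test_second_storage' makes it look like a separate filesystem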
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
+ stub_storage_settings('test_second_storage' => {})
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_repository)
+ .and_return(true)
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
+ .and_return(::Gitlab::Git::SHA1_BLANK_SHA)
+ allow_next_instance_of(Gitlab::Git::ObjectPool) do |object_pool|
+ allow(object_pool).to receive(:link)
+ end
end
- public_project.project_feature.update!(attrs)
+ it 'creates a new pool repository after the project is moved to a new shard' do
+ fork_before_move = subject
- user = create(:user, developer_projects: [public_project])
- forked_project = described_class.new(public_project, user).execute
+ storage_move = create(
+ :project_repository_storage_move,
+ :scheduled,
+ container: project,
+ destination_storage_name: 'test_second_storage'
+ )
+ Projects::UpdateRepositoryStorageService.new(storage_move).execute
- expect(forked_project.project_feature.slice(attrs.keys)).to eq(attrs)
- end
- end
- end
+ fork_after_move = described_class.new(project.reload, user, namespace: group).execute
- context 'when a project is already forked' do
- it 'creates a new pool repository after the project is moved to a new shard' do
- project = create(:project, :public, :repository)
- fork_before_move = fork_project(project, nil, using_service: true)
-
- # Stub everything required to move a project to a Gitaly shard that does not exist
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
- stub_storage_settings('test_second_storage' => {})
- allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_repository)
- .and_return(true)
- allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
- allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
- .and_return(::Gitlab::Git::BLANK_SHA)
- allow_next_instance_of(Gitlab::Git::ObjectPool) do |object_pool|
- allow(object_pool).to receive(:link)
+ pool_repository_before_move = PoolRepository.joins(:shard)
+ .find_by(source_project: project, shards: { name: 'default' })
+ pool_repository_after_move = PoolRepository.joins(:shard)
+ .find_by(source_project: project, shards: { name: 'test_second_storage' })
+
+ expect(fork_before_move.pool_repository).to eq(pool_repository_before_move)
+ expect(fork_after_move.pool_repository).to eq(pool_repository_after_move)
+ end
end
- storage_move = create(
- :project_repository_storage_move,
- :scheduled,
- container: project,
- destination_storage_name: 'test_second_storage'
- )
- Projects::UpdateRepositoryStorageService.new(storage_move).execute
- fork_after_move = fork_project(project.reload, nil, using_service: true)
- pool_repository_before_move = PoolRepository.joins(:shard)
- .find_by(source_project: project, shards: { name: 'default' })
- pool_repository_after_move = PoolRepository.joins(:shard)
- .find_by(source_project: project, shards: { name: 'test_second_storage' })
-
- expect(fork_before_move.pool_repository).to eq(pool_repository_before_move)
- expect(fork_after_move.pool_repository).to eq(pool_repository_after_move)
- end
- end
+ context 'when forking with object pools' do
+ let_it_be(:project) { create(:project, :public, :repository) }
- context 'when forking with object pools' do
- let(:fork_from_project) { create(:project, :repository, :public) }
- let(:forker) { create(:user) }
+ context 'when no pool exists' do
+ it 'creates a new object pool' do
+ expect { fork_of_project }.to change { PoolRepository.count }.by(1)
- context 'when no pool exists' do
- it 'creates a new object pool' do
- forked_project = fork_project(fork_from_project, forker, using_service: true)
+ expect(fork_of_project.pool_repository).to eq(project.pool_repository)
+ end
- expect(forked_project.pool_repository).to eq(fork_from_project.pool_repository)
- end
- end
+ context 'when project is private' do
+ let_it_be(:project) { create(:project, :private, :repository) }
+
+ it 'does not create an object pool' do
+ expect { fork_of_project }.not_to change { PoolRepository.count }
- context 'when a pool already exists' do
- let!(:pool_repository) { create(:pool_repository, source_project: fork_from_project) }
+ expect(fork_of_project.pool_repository).to be_nil
+ end
+ end
+ end
- it 'joins the object pool' do
- forked_project = fork_project(fork_from_project, forker, using_service: true)
+ context 'when a pool already exists' do
+ let!(:pool_repository) { create(:pool_repository, source_project: project) }
- expect(forked_project.pool_repository).to eq(fork_from_project.pool_repository)
+ it 'joins the object pool' do
+ expect { fork_of_project }.not_to change { PoolRepository.count }
+
+ expect(fork_of_project.pool_repository).to eq(pool_repository)
+ end
+ end
end
- end
- end
- context 'when linking fork to an existing project' do
- let(:fork_from_project) { create(:project, :public) }
- let(:fork_to_project) { create(:project, :public) }
- let(:user) do
- create(:user).tap { |u| fork_to_project.add_maintainer(u) }
- end
+ context 'when linking fork to an existing project' do
+ let_it_be_with_reload(:unlinked_fork) { create(:project, :public) }
- subject { described_class.new(fork_from_project, user) }
+ before_all do
+ unlinked_fork.add_developer(user)
+ end
- def forked_from_project(project)
- project.fork_network_member&.forked_from_project
- end
+ def forked_from_project(project)
+ project.fork_network_member&.forked_from_project
+ end
- context 'if project is already forked' do
- it 'does not create fork relation' do
- allow(fork_to_project).to receive(:forked?).and_return(true)
- expect(forked_from_project(fork_to_project)).to be_nil
- expect(subject.execute(fork_to_project)).to be_nil
- expect(forked_from_project(fork_to_project)).to be_nil
- end
- end
+ context 'if project is already forked' do
+ it 'does not create fork relation' do
+ allow(unlinked_fork).to receive(:forked?).and_return(true)
- context 'if project is not forked' do
- it 'creates fork relation' do
- expect(fork_to_project.forked?).to be_falsy
- expect(forked_from_project(fork_to_project)).to be_nil
+ expect(forked_from_project(unlinked_fork)).to be_nil
- subject.execute(fork_to_project)
+ expect(service.execute(unlinked_fork)).to be_nil
- fork_to_project.reload
+ expect(forked_from_project(unlinked_fork)).to be_nil
+ end
+ end
- expect(fork_to_project.forked?).to be true
- expect(forked_from_project(fork_to_project)).to eq fork_from_project
- expect(fork_to_project.forked_from_project).to eq fork_from_project
- end
+ context 'if project is not forked' do
+ it 'creates fork relation' do
+ expect(unlinked_fork.forked?).to be_falsy
+ expect(forked_from_project(unlinked_fork)).to be_nil
- it 'flushes the forks count cache of the source project' do
- expect(fork_from_project.forks_count).to be_zero
+ service.execute(unlinked_fork)
- subject.execute(fork_to_project)
- BatchLoader::Executor.clear_current
+ unlinked_fork.reload
- expect(fork_from_project.forks_count).to eq(1)
- end
+ expect(unlinked_fork.forked?).to be true
+ expect(forked_from_project(unlinked_fork)).to eq project
+ expect(unlinked_fork.forked_from_project).to eq project
+ end
+
+ it 'flushes the forks count cache of the source project' do
+ expect(project.forks_count).to be_zero
+
+ service.execute(unlinked_fork)
+ BatchLoader::Executor.clear_current
+
+ expect(project.forks_count).to eq(1)
+ end
- context 'if the fork is not allowed' do
- let(:fork_from_project) { create(:project, :private) }
+ context 'if the fork is not allowed' do
+ let_it_be(:project) { create(:project, :private) }
- it 'does not delete the LFS objects' do
- create(:lfs_objects_project, project: fork_to_project)
+ it 'does not delete the LFS objects' do
+ create(:lfs_objects_project, project: unlinked_fork)
- expect { subject.execute(fork_to_project) }
- .not_to change { fork_to_project.lfs_objects_projects.size }
+ expect { service.execute(unlinked_fork) }
+ .not_to change { unlinked_fork.lfs_objects_projects.size }
+ end
+ end
end
end
end
end
describe '#valid_fork_targets' do
+ subject { service.valid_fork_targets }
+
let(:finder_mock) { instance_double('ForkTargetsFinder', execute: ['finder_return_value']) }
- let(:current_user) { instance_double('User') }
- let(:project) { instance_double('Project') }
before do
- allow(ForkTargetsFinder).to receive(:new).with(project, current_user).and_return(finder_mock)
+ allow(ForkTargetsFinder).to receive(:new).with(project, user).and_return(finder_mock)
end
it 'returns whatever finder returns' do
- expect(described_class.new(project, current_user).valid_fork_targets).to eq ['finder_return_value']
+ is_expected.to eq ['finder_return_value']
end
end
describe '#valid_fork_branch?' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :small_repo, creator_id: user.id) }
- let_it_be(:branch) { nil }
-
- subject { described_class.new(project, user).valid_fork_branch?(branch) }
+ subject { service.valid_fork_branch?(branch) }
context 'when branch exists' do
let(:branch) { project.default_branch_or_main }
@@ -531,12 +515,11 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
end
describe '#valid_fork_target?' do
- let(:project) { Project.new }
+ subject { service.valid_fork_target? }
+
let(:params) { {} }
context 'when target is not passed' do
- subject { described_class.new(project, user, params).valid_fork_target? }
-
context 'when current user is an admin' do
let(:user) { build(:user, :admin) }
@@ -547,7 +530,6 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
let(:user) { create(:user) }
let(:finder_mock) { instance_double('ForkTargetsFinder', execute: [user.namespace]) }
- let(:project) { create(:project) }
before do
allow(ForkTargetsFinder).to receive(:new).with(project, user).and_return(finder_mock)
@@ -568,9 +550,9 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
end
context 'when target is passed' do
- let(:target) { create(:group) }
+ subject { service.valid_fork_target?(target) }
- subject { described_class.new(project, user, params).valid_fork_target?(target) }
+ let(:target) { create(:group) }
context 'when current user is an admin' do
let(:user) { build(:user, :admin) }
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index 692f43eb205..167df7996ca 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -8,14 +8,18 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
let_it_be(:project) { create(:project, :public) }
let_it_be(:noteable) { create(:issue, project: project) }
+ let(:params) { {} }
+
before_all do
project.add_developer(user)
+ end
+ before do
stub_feature_flags(disable_all_mention: false)
end
def run_service
- described_class.new(project, user).execute(noteable)
+ described_class.new(project, user, params).execute(noteable)
end
it 'returns results in correct order' do
@@ -39,27 +43,27 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
it 'avoids N+1 UserDetail queries' do
project.add_developer(create(:user))
- control_count = ActiveRecord::QueryRecorder.new { run_service.to_a }.count
+ control = ActiveRecord::QueryRecorder.new { run_service.to_a }
BatchLoader::Executor.clear_current
project.add_developer(create(:user, status: build(:user_status, availability: :busy)))
- expect { run_service.to_a }.not_to exceed_query_limit(control_count)
+ expect { run_service.to_a }.not_to exceed_query_limit(control)
end
it 'avoids N+1 groups queries' do
group_1 = create(:group)
group_1.add_owner(user)
- control_count = ActiveRecord::QueryRecorder.new { run_service }.count
+ control = ActiveRecord::QueryRecorder.new { run_service }
BatchLoader::Executor.clear_current
group_2 = create(:group)
group_2.add_owner(user)
- expect { run_service }.not_to exceed_query_limit(control_count)
+ expect { run_service }.not_to exceed_query_limit(control)
end
end
@@ -129,6 +133,16 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
group_1.full_path, subgroup.full_path, group_2.full_path
])
end
+
+ context 'when search param is given' do
+ let(:params) { { search: 'bb' } }
+
+ it 'only returns matching groups' do
+ expect(group_items.pluck(:username)).to eq([
+ group_1.full_path, subgroup.full_path
+ ])
+ end
+ end
end
end
@@ -229,5 +243,17 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
end
end
end
+
+ context 'when search param is given' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:member_1) { create(:user, name: 'John Doe').tap { |u| project.add_guest(u) } }
+ let_it_be(:member_2) { create(:user, name: 'Jane Doe ').tap { |u| project.add_guest(u) } }
+
+ let(:service) { described_class.new(project, create(:user), search: 'johnd') }
+
+ it 'only returns matching members' do
+ expect(usernames).to eq([member_1.username])
+ end
+ end
end
end
diff --git a/spec/services/projects/unlink_fork_service_spec.rb b/spec/services/projects/unlink_fork_service_spec.rb
index 2e1a6c03c90..3199614104f 100644
--- a/spec/services/projects/unlink_fork_service_spec.rb
+++ b/spec/services/projects/unlink_fork_service_spec.rb
@@ -70,14 +70,6 @@ RSpec.describe Projects::UnlinkForkService, :use_clean_rails_memory_store_cachin
subject.execute(refresh_statistics: false)
end
- it 'does not refresh project statistics when the feature flag is disabled' do
- stub_feature_flags(refresh_statistics_on_unlink_fork: false)
-
- expect(ProjectCacheWorker).not_to receive(:perform_async)
-
- subject.execute
- end
-
context 'when the original project was deleted' do
it 'does not fail when the original project is deleted' do
source = forked_project.forked_from_project
diff --git a/spec/services/projects/update_statistics_service_spec.rb b/spec/services/projects/update_statistics_service_spec.rb
index 5311b8daeb1..c90da48af8b 100644
--- a/spec/services/projects/update_statistics_service_spec.rb
+++ b/spec/services/projects/update_statistics_service_spec.rb
@@ -17,6 +17,12 @@ RSpec.describe Projects::UpdateStatisticsService, feature_category: :groups_and_
service.execute
end
+
+ it_behaves_like 'does not record an onboarding progress action' do
+ subject do
+ service.execute
+ end
+ end
end
context 'with an existing project' do
@@ -64,5 +70,33 @@ RSpec.describe Projects::UpdateStatisticsService, feature_category: :groups_and_
service.execute
end
end
+
+ context 'with an existing project with project repository' do
+ let_it_be(:project) { create(:project) }
+
+ subject { service.execute }
+
+ context 'when the repository is empty' do
+ it_behaves_like 'does not record an onboarding progress action'
+ end
+
+ context 'when the repository has more than one commit or more than one branch' do
+ where(:commit_count, :branch_count) do
+ 2 | 1
+ 1 | 2
+ 2 | 2
+ end
+
+ with_them do
+ before do
+ allow(project.repository).to receive_messages(commit_count: commit_count, branch_count: branch_count)
+ end
+
+ it_behaves_like 'records an onboarding progress action', :code_added do
+ let(:namespace) { project.namespace }
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/services/push_event_payload_service_spec.rb b/spec/services/push_event_payload_service_spec.rb
index 999b71ff754..ef722fb34e7 100644
--- a/spec/services/push_event_payload_service_spec.rb
+++ b/spec/services/push_event_payload_service_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#commit_from_id' do
it 'returns nil when creating a new ref' do
- service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.commit_from_id).to be_nil
end
@@ -84,7 +84,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#commit_to_id' do
it 'returns nil when removing an existing ref' do
- service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, after: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.commit_to_id).to be_nil
end
@@ -156,7 +156,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#create?' do
it 'returns true when creating a new ref' do
- service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.create?).to eq(true)
end
@@ -170,7 +170,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#remove?' do
it 'returns true when removing an existing ref' do
- service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, after: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.remove?).to eq(true)
end
@@ -184,13 +184,13 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#action' do
it 'returns :created when creating a ref' do
- service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.action).to eq(:created)
end
it 'returns :removed when removing an existing ref' do
- service = described_class.new(event, before: '123', after: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: '123', after: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.action).to eq(:removed)
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index dc93fd96aee..8de71f2ddf8 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -564,7 +564,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
it 'returns the reaction message' do
_, _, message = service.execute(content, issuable)
- expect(message).to eq('Toggled :100: emoji award.')
+ expect(message).to eq('Toggled :100: emoji reaction.')
end
end
@@ -1911,8 +1911,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:content) { "#{command} :100:" }
let(:issuable) { commit }
- # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/434446
- it_behaves_like 'failed command', "Could not apply award command."
+ it_behaves_like 'failed command', "Could not apply react command."
end
end
end
@@ -2325,7 +2324,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
- context 'invite_email command' do
+ describe 'invite_email command' do
let_it_be(:issuable) { issue }
it_behaves_like 'failed command', "No email participants were added. Either none were provided, or they already exist." do
@@ -2455,6 +2454,102 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
+ describe 'remove_email command' do
+ let_it_be_with_reload(:issuable) { issue }
+
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :remove_email))
+ end
+
+ context 'with existing email participant' do
+ let(:content) { '/remove_email user@example.com' }
+
+ subject(:remove_email) { service.execute(content, issuable) }
+
+ before do
+ issuable.issue_email_participants.create!(email: "user@example.com")
+ end
+
+ it 'returns message' do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to eq('Removed user@example.com.')
+ end
+
+ it 'removes 1 participant' do
+ expect { remove_email }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+
+ context 'with mixed case email' do
+ let(:content) { '/remove_email FirstLast@GitLab.com' }
+
+ before do
+ issuable.issue_email_participants.create!(email: "FirstLast@GitLab.com")
+ end
+
+ it 'returns correctly cased message' do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to eq('Removed FirstLast@GitLab.com.')
+ end
+
+ it 'removes 1 participant' do
+ expect { remove_email }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+ end
+
+ context 'with invalid email' do
+ let(:content) { '/remove_email user@example.com bad_email' }
+
+ it 'only removes valid emails' do
+ expect { remove_email }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+ end
+
+ context 'with non-existing email address' do
+ let(:content) { '/remove_email NonExistent@gitlab.com' }
+
+ it 'returns message' do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to eq("No email participants were removed. Either none were provided, or they don't exist.")
+ end
+ end
+
+ context 'with more than the max number of emails' do
+ let(:content) { '/remove_email user@example.com user1@example.com' }
+
+ before do
+ stub_const("IssueEmailParticipants::DestroyService::MAX_NUMBER_OF_EMAILS", 1)
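+            # With the limit stubbed to 1, only one of the two provided emails is removed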
+ # user@example.com has already been added above
+ issuable.issue_email_participants.create!(email: "user1@example.com")
+ end
+
+ it 'only removes the max allowed number of emails' do
+ expect { remove_email }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+ end
+ end
+
+ context 'with non-persisted issue' do
+ let(:issuable) { build(:issue) }
+
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :remove_email))
+ end
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(issue_email_participants: false)
+ end
+
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :remove_email))
+ end
+ end
+ end
+
context 'severity command' do
let_it_be_with_reload(:issuable) { create(:incident, project: project) }
@@ -2533,6 +2628,16 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
+ context 'when MR is already merged' do
+ before do
+ merge_request.mark_as_merged!
+ end
+
+ it_behaves_like 'approve command unavailable' do
+ let(:issuable) { merge_request }
+ end
+ end
+
it_behaves_like 'approve command unavailable' do
let(:issuable) { issue }
end
@@ -2574,11 +2679,21 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
expect(merge_request.approved_by_users).to eq([developer])
end
+ end
+
+ context 'when MR is already merged' do
+ before do
+ merge_request.mark_as_merged!
+ end
it_behaves_like 'unapprove command unavailable' do
- let(:issuable) { issue }
+ let(:issuable) { merge_request }
end
end
+
+ it_behaves_like 'unapprove command unavailable' do
+ let(:issuable) { issue }
+ end
end
context 'crm_contact commands' do
@@ -2877,7 +2992,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
it 'includes the emoji' do
_, explanations = service.explain(content, issue)
- expect(explanations).to eq(['Toggles :confetti_ball: emoji award.'])
+ expect(explanations).to eq(['Toggles :confetti_ball: emoji reaction.'])
end
end
@@ -3097,7 +3212,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
it 'keeps quick actions' do
content_result, explanations = service.explain(content, issue, keep_actions: true)
- expect(content_result).to eq("\n/close")
+ expect(content_result).to eq("<p>/close</p>")
expect(explanations).to eq(['Closes this issue.'])
end
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index 1b5300672e3..d77a68288a5 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -164,7 +164,7 @@ RSpec.describe Repositories::ChangelogService, feature_category: :source_code_ma
RequestStore.clear!
- expect { request.call(sha3) }.not_to exceed_query_limit(control.count)
+ expect { request.call(sha3) }.not_to exceed_query_limit(control)
end
context 'when one of commits does not exist' do
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
index 060697cd1df..aab22cb2815 100644
--- a/spec/services/resource_access_tokens/revoke_service_spec.rb
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe ResourceAccessTokens::RevokeService, feature_category: :system_ac
it 'removes membership of bot user' do
subject
- expect(resource.reload.users).not_to include(resource_bot)
+ expect(resource.reload).not_to have_user(resource_bot)
end
it 'initiates user removal' do
@@ -56,7 +56,7 @@ RSpec.describe ResourceAccessTokens::RevokeService, feature_category: :system_ac
it 'does not remove bot from member list' do
subject
- expect(resource.reload.users).to include(resource_bot)
+ expect(resource.reload).to have_user(resource_bot)
end
it 'does not transfer issuables of bot user to ghost user' do
diff --git a/spec/services/routes/rename_descendants_service_spec.rb b/spec/services/routes/rename_descendants_service_spec.rb
new file mode 100644
index 00000000000..72e43ddca26
--- /dev/null
+++ b/spec/services/routes/rename_descendants_service_spec.rb
@@ -0,0 +1,208 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Routes::RenameDescendantsService, feature_category: :groups_and_projects do
+ let_it_be(:parent_group) { create(:group, name: 'old-name', path: 'old-path') }
+ let_it_be(:parent_route) { parent_group.route }
+ let_it_be(:subgroups) { create_list(:group, 4, parent: parent_group) }
+ let_it_be(:subgroup_projects) { subgroups.map { |subgroup| create(:project, group: subgroup) } }
+
+ let(:subgroup_routes) { Route.for_routable(subgroups) }
+ let(:subgroup_projects_routes) { Route.for_routable(subgroup_projects) }
+
+ let(:subgroup_routes_with_old_path) { subgroup_routes.where('path LIKE ?', '%old-path%') }
+ let(:subgroup_projects_routes_with_old_path) { subgroup_projects_routes.where('path LIKE ?', '%old-path%') }
+ let(:subgroup_routes_with_new_path) { subgroup_routes.where('path LIKE ?', '%new-path%') }
+ let(:subgroup_projects_routes_with_new_path) { subgroup_projects_routes.where('path LIKE ?', '%new-path%') }
+
+ let(:subgroup_routes_with_old_name) { subgroup_routes.where('name LIKE ?', '%old-name%') }
+ let(:subgroup_projects_routes_with_old_name) { subgroup_projects_routes.where('name LIKE ?', '%old-name%') }
+ let(:subgroup_routes_with_new_name) { subgroup_routes.where('name LIKE ?', '%new-name%') }
+ let(:subgroup_projects_routes_with_new_name) { subgroup_projects_routes.where('name LIKE ?', '%new-name%') }
+
+ describe '#execute' do
+ shared_examples_for 'descendant paths are updated' do
+ it do
+ expect { execute }.to change {
+ subgroup_routes_with_old_path.size
+ }.from(4).to(0).and change {
+ subgroup_projects_routes_with_old_path.size
+ }.from(4).to(0).and change {
+ subgroup_routes_with_new_path.size
+ }.from(0).to(4).and change {
+ subgroup_projects_routes_with_new_path.size
+ }.from(0).to(4)
+ end
+ end
+
+ shared_examples_for 'descendant paths are not updated' do
+ it do
+ expect { execute }.to change {
+ subgroup_routes_with_old_path.size
+ }.by(0).and change {
+ subgroup_projects_routes_with_old_path.size
+ }.by(0).and change {
+ subgroup_routes_with_new_path.size
+ }.by(0).and change {
+ subgroup_projects_routes_with_new_path.size
+ }.by(0)
+ end
+ end
+
+ shared_examples_for 'descendant names are updated' do
+ it do
+ expect { execute }.to change {
+ subgroup_routes_with_old_name.size
+ }.from(4).to(0).and change {
+ subgroup_projects_routes_with_old_name.size
+ }.from(4).to(0).and change {
+ subgroup_routes_with_new_name.size
+ }.from(0).to(4).and change {
+ subgroup_projects_routes_with_new_name.size
+ }.from(0).to(4)
+ end
+ end
+
+ shared_examples_for 'descendant names are not updated' do
+ it do
+ expect { execute }.to change {
+ subgroup_routes_with_old_name.size
+ }.by(0).and change {
+ subgroup_projects_routes_with_old_name.size
+ }.by(0).and change {
+ subgroup_routes_with_new_name.size
+ }.by(0).and change {
+ subgroup_projects_routes_with_new_name.size
+ }.by(0)
+ end
+ end
+
+ shared_examples_for 'creates redirect_routes for all descendants' do
+ let(:subgroup_redirect_routes) { RedirectRoute.where(source: subgroups) }
+ let(:subgroup_projects_redirect_routes) { RedirectRoute.where(source: subgroup_projects) }
+
+ it do
+ expect { execute }.to change {
+ subgroup_redirect_routes.where('path LIKE ?', '%old-path%').size
+ }.from(0).to(4).and change {
+ subgroup_projects_redirect_routes.where('path LIKE ?', '%old-path%').size
+ }.from(0).to(4)
+ end
+ end
+
+ shared_examples_for 'does not create any redirect_routes' do
+ it do
+ expect { execute }.not_to change { RedirectRoute.count }
+ end
+ end
+
+ subject(:execute) do
+ described_class.new(parent_route).execute(changes)
+ end
+
+ before do
+ parent_route.name = 'new-name'
+ parent_route.path = 'new-path'
+ end
+
+ context 'on updating both name and path' do
+ let!(:changes) do
+ {
+ path: { saved: true, old_value: 'old-path' },
+ name: { saved: true, old_value: 'old-name' }
+ }
+ end
+
+ it_behaves_like 'descendant paths are updated'
+ it_behaves_like 'descendant names are updated'
+ it_behaves_like 'creates redirect_routes for all descendants'
+ end
+
+ context 'on updating only path' do
+ let!(:changes) do
+ {
+ path: { saved: true, old_value: 'old-path' },
+ name: { saved: false, old_value: 'old-name' }
+ }
+ end
+
+ it_behaves_like 'descendant paths are updated'
+ it_behaves_like 'descendant names are not updated'
+ it_behaves_like 'creates redirect_routes for all descendants'
+ end
+
+ context 'on updating only name' do
+ let!(:changes) do
+ {
+ path: { saved: false, old_value: 'old-path' },
+ name: { saved: true, old_value: 'old-name' }
+ }
+ end
+
+ it_behaves_like 'descendant paths are not updated'
+ it_behaves_like 'descendant names are updated'
+ it_behaves_like 'does not create any redirect_routes'
+ end
+
+    context 'on updating neither path nor name' do
+ let!(:changes) do
+ {
+ path: { saved: false, old_value: 'old-path' },
+ name: { saved: false, old_value: 'old-name' }
+ }
+ end
+
+ it_behaves_like 'descendant paths are not updated'
+ it_behaves_like 'descendant names are not updated'
+ it_behaves_like 'does not create any redirect_routes'
+ end
+
+ context 'when `changes` are not in the expected format' do
+ let!(:changes) do
+ {
+ not_path: { saved: false, old_value: 'old-path' },
+ name: { saved: true, old_value: 'old-name' }
+ }
+ end
+
+ it 'errors out' do
+ expect { execute }.to raise_error(KeyError)
+ end
+ end
+
+ context 'for batching' do
+ before do
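+        # Shrink the batch size so the 8 descendant routes are processed over multiple batches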
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ let!(:changes) do
+ {
+ path: { saved: true, old_value: 'old-path' },
+ name: { saved: true, old_value: 'old-name' }
+ }
+ end
+
+ it 'bulk updates and bulk inserts records in batches' do
+ query_recorder = ActiveRecord::QueryRecorder.new do
+ execute
+ end
+
+      # There are 8 descendants of this group:
+      # 4 subgroups, each containing 1 project == a total of 8.
+ # With a batch size of 2, that is
+ # 4 queries to update `routes` and 4 queries to insert `redirect_routes`
+ update_routes_queries = query_recorder.log.grep(
+ /INSERT INTO "routes" .* ON CONFLICT \("id"\) DO UPDATE SET/
+ )
+
+ insert_redirect_routes_queries = query_recorder.log.grep(
+ /INSERT INTO "redirect_routes" .* ON CONFLICT \(lower\(\(path\)::text\) varchar_pattern_ops\) DO NOTHING/
+ )
+
+ expect(update_routes_queries.count).to eq(4)
+ expect(insert_redirect_routes_queries.count).to eq(4)
+ end
+ end
+ end
+end
diff --git a/spec/services/security/merge_reports_service_spec.rb b/spec/services/security/merge_reports_service_spec.rb
index c141bbe5b5a..a65e73bd8ce 100644
--- a/spec/services/security/merge_reports_service_spec.rb
+++ b/spec/services/security/merge_reports_service_spec.rb
@@ -20,7 +20,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_1_primary, identifier_1_cve],
scanner: scanner_1,
- severity: :low
+ severity: :low,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94610'
)
end
@@ -29,7 +30,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_1_primary, identifier_1_cve],
scanner: scanner_1,
- severity: :low
+ severity: :low,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94611'
)
end
@@ -39,7 +41,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
scanner: scanner_2,
- severity: :medium
+ severity: :medium,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94614'
)
end
@@ -49,7 +52,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
scanner: scanner_2,
- severity: :medium
+ severity: :medium,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94613'
)
end
@@ -59,7 +63,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 42, end_line: 44),
scanner: scanner_2,
- severity: :medium
+ severity: :medium,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94612'
)
end
@@ -68,7 +73,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_cwe],
scanner: scanner_3,
- severity: :high
+ severity: :high,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94615'
)
end
@@ -77,7 +83,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_cwe],
scanner: scanner_1,
- severity: :critical
+ severity: :critical,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94616'
)
end
@@ -86,7 +93,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_wasc],
scanner: scanner_1,
- severity: :medium
+ severity: :medium,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94617'
)
end
@@ -95,7 +103,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_wasc],
scanner: scanner_2,
- severity: :critical
+ severity: :critical,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94618'
)
end
@@ -226,9 +235,32 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
let(:identifier_cve) { build(:ci_reports_security_identifier, external_id: 'CVE-2019-123', external_type: 'cve') }
let(:identifier_semgrep) { build(:ci_reports_security_identifier, external_id: 'rules.bandit.B105', external_type: 'semgrep_id') }
- let(:finding_id_1) { build(:ci_reports_security_finding, identifiers: [identifier_bandit, identifier_cve], scanner: bandit_scanner, report_type: :sast) }
- let(:finding_id_2) { build(:ci_reports_security_finding, identifiers: [identifier_cve], scanner: semgrep_scanner, report_type: :sast) }
- let(:finding_id_3) { build(:ci_reports_security_finding, identifiers: [identifier_semgrep], scanner: semgrep_scanner, report_type: :sast) }
+ let(:finding_id_1) do
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_bandit, identifier_cve],
+ scanner: bandit_scanner,
+ report_type: :sast,
+ uuid: '21ab978a-7052-5428-af0b-c7a4b3fe5020')
+ end
+
+ let(:finding_id_2) do
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_cve],
+ scanner: semgrep_scanner,
+ report_type: :sast,
+ uuid: '21ab978a-7052-5428-af0b-c7a4b3fe5021')
+ end
+
+ let(:finding_id_3) do
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_semgrep],
+ scanner: semgrep_scanner,
+ report_type: :sast,
+ uuid: '21ab978a-7052-5428-af0b-c7a4b3fe5022')
+ end
let(:bandit_report) do
build(:ci_reports_security_report,
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 361742699b0..8669bca90bd 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -263,11 +263,10 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
end
context 'if the endpoint is accessible' do
- let(:user_scores) { Abuse::UserTrustScore.new(user) }
-
before do
allow(service).to receive(:spamcheck_client).and_return(spam_client)
allow(spam_client).to receive(:spam?).and_return(spam_client_result)
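+        # Pin the correlation ID so the Abuse::TrustScoreWorker arguments can be asserted below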
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
end
context 'if the result is a NOOP verdict' do
@@ -275,8 +274,8 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
let(:verdict_value) { ::Spamcheck::SpamVerdict::Verdict::NOOP }
it 'returns the verdict' do
+ expect(Abuse::TrustScoreWorker).not_to receive(:perform_async)
is_expected.to eq(NOOP)
- expect(user_scores.spam_score).to eq(0.0)
end
end
@@ -286,8 +285,8 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
context 'the result was evaluated' do
it 'returns the verdict and updates the spam score' do
+ expect(Abuse::TrustScoreWorker).to receive(:perform_async).once.with(user.id, :spamcheck, instance_of(Float), 'cid')
is_expected.to eq(ALLOW)
- expect(user_scores.spam_score).to be_within(0.000001).of(verdict_score)
end
end
@@ -295,8 +294,8 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
let(:verdict_evaluated) { false }
it 'returns the verdict and does not update the spam score' do
+ expect(Abuse::TrustScoreWorker).not_to receive(:perform_async)
expect(subject).to eq(ALLOW)
- expect(user_scores.spam_score).to eq(0.0)
end
end
end
@@ -317,8 +316,8 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
with_them do
it "returns expected spam constant and updates the spam score" do
+ expect(Abuse::TrustScoreWorker).to receive(:perform_async).once.with(user.id, :spamcheck, instance_of(Float), 'cid')
is_expected.to eq(expected)
- expect(user_scores.spam_score).to be_within(0.000001).of(verdict_score)
end
end
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 1eb11c80264..2e5545b610a 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -77,6 +77,18 @@ RSpec.describe SystemNoteService, feature_category: :shared do
end
end
+ describe '.request_review' do
+ let(:reviewer) { double }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:request_review).with(reviewer)
+ end
+
+ described_class.request_review(noteable, project, author, reviewer)
+ end
+ end
+
describe '.change_issuable_contacts' do
let(:added_count) { 5 }
let(:removed_count) { 3 }
@@ -515,6 +527,18 @@ RSpec.describe SystemNoteService, feature_category: :shared do
end
end
+ describe '.email_participants' do
+ let(:body) { 'added user@example.com' }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:email_participants).with(body)
+ end
+
+ described_class.email_participants(noteable, project, author, body)
+ end
+ end
+
describe '.discussion_lock' do
let(:issuable) { double }
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 0ba20ee5be1..2b48b24b2b4 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -213,6 +213,21 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
end
end
+ describe '#request_review' do
+ subject(:request_review) { service.request_review(reviewer) }
+
+ let_it_be(:reviewer) { create(:user) }
+ let_it_be(:noteable) { create(:merge_request, :simple, source_project: project, reviewers: [reviewer]) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'reviewer' }
+ end
+
+    it 'builds the correct phrase when a review has been requested from a reviewer' do
+ expect(request_review.note).to eq "requested review from #{reviewer.to_reference}"
+ end
+ end
+
describe '#change_issuable_contacts' do
subject { service.change_issuable_contacts(1, 1) }
@@ -770,6 +785,14 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
end
end
+ describe '#email_participants' do
+ let(:body) { "added user@example.com" }
+
+ subject(:system_note) { service.email_participants(body) }
+
+ it { expect(system_note.note).to eq(body) }
+ end
+
describe '#discussion_lock' do
subject { service.discussion_lock }
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index cf994220946..6502aa5d2a2 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -198,11 +198,13 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
let(:action) { 'time_tracking' }
end
- context 'with a time estimate' do
- it 'sets the note text' do
+ context 'when adding a time estimate' do
+ before do
noteable.update_attribute(:time_estimate, 277200)
+ end
- expect(subject.note).to eq "changed time estimate to 1w 4d 5h"
+ it 'sets the note text' do
+ expect(subject.note).to eq "added time estimate of 1w 4d 5h"
end
context 'when time_tracking_limit_to_hours setting is true' do
@@ -211,16 +213,32 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
end
it 'sets the note text' do
- noteable.update_attribute(:time_estimate, 277200)
-
- expect(subject.note).to eq "changed time estimate to 77h"
+ expect(subject.note).to eq "added time estimate of 77h"
end
end
end
- context 'without a time estimate' do
+ context 'when removing a time estimate' do
+ before do
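+        # Set an estimate first so removing it produces a note with the previous value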
+ noteable.update_attribute(:time_estimate, 277200)
+ noteable.save!
+ noteable.update_attribute(:time_estimate, 0)
+ end
+
+ it 'sets the note text' do
+ expect(subject.note).to eq "removed time estimate of 1w 4d 5h"
+ end
+ end
+
+ context 'when changing a time estimate' do
+ before do
+ noteable.update_attribute(:time_estimate, 277200)
+ noteable.save!
+ noteable.update_attribute(:time_estimate, 3600)
+ end
+
it 'sets the note text' do
- expect(subject.note).to eq "removed time estimate"
+ expect(subject.note).to eq "changed time estimate to 1h from 1w 4d 5h"
end
end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 0b4cf9e53db..df00859fd52 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -675,6 +675,8 @@ RSpec.describe TodoService, feature_category: :team_planning do
service.mark_todo(unassigned_issue, author)
should_create_todo(user: author, target: unassigned_issue, action: Todo::MARKED)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter)
+ .not_to receive(:track_work_item_todo_marked_action)
end
context 'when issue belongs to a group' do
@@ -690,6 +692,8 @@ RSpec.describe TodoService, feature_category: :team_planning do
project: nil,
group: group
)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter)
+ .not_to receive(:track_work_item_todo_marked_action)
end
end
end
@@ -748,10 +752,13 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
describe 'Work Items' do
- let_it_be(:work_item) { create(:work_item, :task, project: project, author: author) }
+ let(:work_item) { create(:work_item, :objective, project: project, author: author) }
+ let(:activity_counter_class) { Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter }
describe '#mark_todo' do
it 'creates a todo from a work item' do
+ expect(activity_counter_class).to receive(:track_work_item_mark_todo_action).with(author: author)
+
service.mark_todo(work_item, author)
should_create_todo(user: author, target: work_item, action: Todo::MARKED)
@@ -760,6 +767,9 @@ RSpec.describe TodoService, feature_category: :team_planning do
context 'when work item belongs to a group' do
it 'creates a todo from a work item' do
group_work_item = create(:work_item, :group_level, namespace: group)
+
+ expect(activity_counter_class).to receive(:track_work_item_mark_todo_action).with(author: group_work_item.author)
+
service.mark_todo(group_work_item, group_work_item.author)
should_create_todo(
@@ -1120,6 +1130,8 @@ RSpec.describe TodoService, feature_category: :team_planning do
service.mark_todo(unassigned_mr, author)
should_create_todo(user: author, target: unassigned_mr, action: Todo::MARKED)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter)
+ .not_to receive(:track_work_item_todo_marked_action)
end
end
@@ -1264,9 +1276,9 @@ RSpec.describe TodoService, feature_category: :team_planning do
# Excluding queries for user permissions because those do execute N+1 queries
allow_any_instance_of(User).to receive(:can?).and_return(true)
- control_count = ActiveRecord::QueryRecorder.new { service.update_note(note_mentioning_1_user, author, skip_users) }.count
+ control = ActiveRecord::QueryRecorder.new { service.update_note(note_mentioning_1_user, author, skip_users) }
- expect { service.update_note(note_mentioning_3_users, author, skip_users) }.not_to exceed_query_limit(control_count)
+ expect { service.update_note(note_mentioning_3_users, author, skip_users) }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
index 63ff189ede5..cccf1a2cfa8 100644
--- a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
+++ b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Todos::Destroy::DestroyedIssuableService, feature_category: :team
let_it_be(:done_todo) { create(:todo, :done, project: target.project, target: target, user: user) }
it 'deletes todos for specified target ID and type' do
- control_count = ActiveRecord::QueryRecorder.new { subject }.count
+ control = ActiveRecord::QueryRecorder.new { subject }
# Create more todos for the target
create(:todo, :pending, project: target.project, target: target, user: user)
@@ -22,7 +22,7 @@ RSpec.describe Todos::Destroy::DestroyedIssuableService, feature_category: :team
create(:todo, :done, project: target.project, target: target, user: user)
create(:todo, :done, project: target.project, target: target, user: user)
- expect { subject }.not_to exceed_query_limit(control_count)
+ expect { subject }.not_to exceed_query_limit(control)
end
it 'invalidates todos cache counts of todo users', :use_clean_rails_redis_caching do
diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb
index a50bd3ee2f1..8236d892072 100644
--- a/spec/services/user_project_access_changed_service_spec.rb
+++ b/spec/services/user_project_access_changed_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe UserProjectAccessChangedService, feature_category: :system_access
it 'avoids N+1 cached queries', :use_sql_query_cache, :request_store do
# Run this once to establish a baseline
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
service.execute
end
@@ -87,7 +87,7 @@ RSpec.describe UserProjectAccessChangedService, feature_category: :system_access
.with([[1], [2], [3], [4], [5]])
.and_return(10)
- expect { service.execute }.not_to exceed_all_query_limit(control_count.count)
+ expect { service.execute }.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
index 57378c07dd7..522b793036b 100644
--- a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -150,6 +150,16 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
let(:created_record) { create(:user_achievement, awarded_by_user: user, revoked_by_user: user) }
end
end
+
+ context 'when user is a bot user and has associated access tokens' do
+ let_it_be(:user) { create(:user, :project_bot) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
+
+ it "deletes the access token" do
+ service.execute
+ expect(PersonalAccessToken.find_by(id: token.id)).to eq nil
+ end
+ end
end
context 'on post-migrate cleanups' do
diff --git a/spec/services/users/update_todo_count_cache_service_spec.rb b/spec/services/users/update_todo_count_cache_service_spec.rb
index eec637cf5b4..d69a4ba99b7 100644
--- a/spec/services/users/update_todo_count_cache_service_spec.rb
+++ b/spec/services/users/update_todo_count_cache_service_spec.rb
@@ -44,9 +44,9 @@ RSpec.describe Users::UpdateTodoCountCacheService, feature_category: :team_plann
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new { execute_single }.count
+ control = ActiveRecord::QueryRecorder.new { execute_single }
- expect { execute_all }.not_to exceed_query_limit(control_count)
+ expect { execute_all }.not_to exceed_query_limit(control)
end
it 'executes one query per batch of users' do
diff --git a/spec/services/work_items/widgets/assignees_service/update_service_spec.rb b/spec/services/work_items/callbacks/assignees_spec.rb
index 66e30e2f882..e6f57c54104 100644
--- a/spec/services/work_items/widgets/assignees_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/assignees_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time, feature_category: :portfolio_management do
+RSpec.describe WorkItems::Callbacks::Assignees, :freeze_time, feature_category: :portfolio_management do
let_it_be(:reporter) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:new_assignee) { create(:user) }
@@ -11,7 +11,6 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
create(:work_item, project: project, updated_at: 1.day.ago)
end
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Assignees) } }
let(:current_user) { reporter }
let(:params) { { assignee_ids: [new_assignee.id] } }
@@ -20,13 +19,13 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
project.add_guest(new_assignee)
end
- describe '#before_update_in_transaction' do
- let(:service) { described_class.new(widget: widget, current_user: current_user) }
+ describe '#before_update' do
+ let(:service) { described_class.new(issuable: work_item, current_user: current_user, params: params) }
- subject { service.before_update_in_transaction(params: params) }
+ subject(:before_update_callback) { service.before_update }
it 'updates the assignees and sets updated_at to the current time' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
expect(work_item.updated_at).to be_like_time(Time.current)
@@ -40,7 +39,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
end
it 'removes existing assignees' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to be_empty
expect(work_item.updated_at).to be_like_time(Time.current)
@@ -51,7 +50,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
let(:current_user) { create(:user) }
it 'does not update the assignees' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to be_empty
expect(work_item.updated_at).to be_like_time(1.day.ago)
@@ -67,7 +66,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
end
it 'sets all the given assignees' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to contain_exactly(new_assignee.id, reporter.id)
expect(work_item.updated_at).to be_like_time(Time.current)
@@ -80,7 +79,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
end
it 'only sets the first assignee' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
expect(work_item.updated_at).to be_like_time(Time.current)
@@ -92,7 +91,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
let(:params) { { assignee_ids: [create(:user).id] } }
it 'does not set the assignee' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to be_empty
expect(work_item.updated_at).to be_like_time(1.day.ago)
@@ -105,7 +104,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
end
it 'does not touch updated_at' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
expect(work_item.updated_at).to be_like_time(1.day.ago)
@@ -116,12 +115,12 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
let(:params) { {} }
before do
- allow(service).to receive(:new_type_excludes_widget?).and_return(true)
+ allow(service).to receive(:excluded_in_new_type?).and_return(true)
work_item.assignee_ids = [new_assignee.id]
end
it "resets the work item's assignees" do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to be_empty
end
diff --git a/spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb b/spec/services/work_items/callbacks/current_user_todos_spec.rb
index aa7257e9e62..0f16687e620 100644
--- a/spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/current_user_todos_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, feature_category: :team_planning do
+RSpec.describe WorkItems::Callbacks::CurrentUserTodos, feature_category: :team_planning do
let_it_be(:reporter) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:current_user) { reporter }
@@ -25,16 +25,16 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
create(:todo, state: :pending, target: work_item, target_type: work_item.class.name, user: create(:user))
end
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::CurrentUserTodos) } }
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Callbacks::CurrentUserTodos) } }
before_all do
project.add_reporter(reporter)
end
describe '#before_update_in_transaction' do
- subject do
- described_class.new(widget: widget, current_user: current_user)
- .before_update_in_transaction(params: params)
+ subject(:service) do
+ described_class.new(issuable: work_item, current_user: current_user, params: params)
+ .before_update
end
context 'when adding a todo' do
@@ -44,7 +44,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
let(:current_user) { create(:user) }
it 'does not add a todo' do
- expect { subject }.not_to change { Todo.count }
+ expect { service }.not_to change { Todo.count }
end
end
@@ -52,7 +52,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
let(:params) { { action: "add" } }
it 'creates a new todo for the user and the work item' do
- expect { subject }.to change { current_user.todos.count }.by(1)
+ expect { service }.to change { current_user.todos.count }.by(1)
todo = current_user.todos.last
@@ -69,7 +69,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
let(:current_user) { create(:user) }
it 'does not change todo status' do
- subject
+ service
expect(pending_todo1.reload).to be_pending
expect(pending_todo2.reload).to be_pending
@@ -80,7 +80,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
context 'when resolving all todos of the work item', :aggregate_failures do
it 'resolves todos of the user for the work item' do
- subject
+ service
expect(pending_todo1.reload).to be_done
expect(pending_todo2.reload).to be_done
@@ -93,7 +93,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
let(:params) { { action: "mark_as_done", todo_id: pending_todo1.id } }
it 'resolves todos of the user for the work item' do
- subject
+ service
expect(pending_todo1.reload).to be_done
expect(pending_todo2.reload).to be_pending
diff --git a/spec/services/work_items/widgets/description_service/update_service_spec.rb b/spec/services/work_items/callbacks/description_spec.rb
index 84704d3e002..27413c9ab14 100644
--- a/spec/services/work_items/widgets/description_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/description_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_category: :portfolio_management do
+RSpec.describe WorkItems::Callbacks::Description, feature_category: :portfolio_management do
let_it_be(:random_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:guest) { create(:user) }
@@ -22,12 +22,10 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
)
end
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Description) } }
-
describe '#update' do
- let(:service) { described_class.new(widget: widget, current_user: current_user) }
+ let(:service) { described_class.new(issuable: work_item, current_user: current_user, params: params) }
- subject(:before_update_callback) { service.before_update_callback(params: params) }
+ subject(:before_update_callback) { service.before_update }
shared_examples 'sets work item description' do
it 'correctly sets work item description value' do
@@ -59,7 +57,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
context 'when user is a project reporter' do
let(:current_user) { reporter }
- before do
+ before_all do
project.add_reporter(reporter)
end
@@ -91,7 +89,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
let(:params) { {} }
before do
- allow(service).to receive(:new_type_excludes_widget?).and_return(true)
+ allow(service).to receive(:excluded_in_new_type?).and_return(true)
work_item.update!(description: 'test')
end
@@ -108,7 +106,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
context 'when user is a project guest' do
let(:current_user) { guest }
- before do
+ before_all do
project.add_guest(guest)
end
diff --git a/spec/services/work_items/widgets/notifications_service/update_service_spec.rb b/spec/services/work_items/callbacks/notifications_spec.rb
index 9615020fe49..2d11dc46fcb 100644
--- a/spec/services/work_items/widgets/notifications_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/notifications_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::NotificationsService::UpdateService, feature_category: :team_planning do
+RSpec.describe WorkItems::Callbacks::Notifications, feature_category: :team_planning do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :private, group: group) }
let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
@@ -10,13 +10,13 @@ RSpec.describe WorkItems::Widgets::NotificationsService::UpdateService, feature_
let_it_be_with_reload(:work_item) { create(:work_item, project: project, author: author) }
let_it_be(:current_user) { guest }
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Notifications) } }
- let(:service) { described_class.new(widget: widget, current_user: current_user) }
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Callbacks::Notifications) } }
+ let(:service) { described_class.new(issuable: work_item, current_user: current_user, params: params) }
describe '#before_update_in_transaction' do
let(:expected) { params[:subscribed] }
- subject(:update_notifications) { service.before_update_in_transaction(params: params) }
+ subject(:update_notifications) { service.before_update }
shared_examples 'failing to update subscription' do
context 'when user is subscribed with a subscription record' do
diff --git a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb b/spec/services/work_items/callbacks/start_and_due_date_spec.rb
index f9708afd313..b26a33976fa 100644
--- a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/start_and_due_date_spec.rb
@@ -2,19 +2,19 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, feature_category: :portfolio_management do
+RSpec.describe WorkItems::Callbacks::StartAndDueDate, feature_category: :portfolio_management do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user).tap { |user| project.add_reporter(user) } }
let_it_be_with_reload(:work_item) { create(:work_item, project: project) }
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::StartAndDueDate) } }
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Callbacks::StartAndDueDate) } }
describe '#before_update_callback' do
let(:start_date) { Date.today }
let(:due_date) { 1.week.from_now.to_date }
- let(:service) { described_class.new(widget: widget, current_user: user) }
+ let(:service) { described_class.new(issuable: work_item, current_user: user, params: params) }
- subject(:update_params) { service.before_update_callback(params: params) }
+ subject(:update_params) { service.before_update }
context 'when start and due date params are present' do
let(:params) { { start_date: Date.today, due_date: 1.week.from_now.to_date } }
@@ -22,8 +22,8 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur
it 'correctly sets date values' do
expect do
update_params
- end.to change(work_item, :start_date).from(nil).to(start_date).and(
- change(work_item, :due_date).from(nil).to(due_date)
+ end.to change { work_item.start_date }.from(nil).to(start_date).and(
+ change { work_item.due_date }.from(nil).to(due_date)
)
end
@@ -59,7 +59,7 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur
it 'sets only one date to null' do
expect do
update_params
- end.to change(work_item, :start_date).from(start_date).to(nil).and(
+ end.to change { work_item.start_date }.from(start_date).to(nil).and(
not_change(work_item, :due_date).from(due_date)
)
end
@@ -70,15 +70,15 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur
let(:params) { {} }
before do
- allow(service).to receive(:new_type_excludes_widget?).and_return(true)
+ allow(service).to receive(:excluded_in_new_type?).and_return(true)
work_item.update!(start_date: start_date, due_date: due_date)
end
it 'sets both dates to null' do
expect do
update_params
- end.to change(work_item, :start_date).from(start_date).to(nil).and(
- change(work_item, :due_date).from(due_date).to(nil)
+ end.to change { work_item.start_date }.from(start_date).to(nil).and(
+ change { work_item.due_date }.from(due_date).to(nil)
)
end
end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 557617f61bb..591dc1c1034 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -191,14 +191,14 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
let(:supported_widgets) do
[
- { klass: WorkItems::Widgets::DescriptionService::UpdateService, callback: :before_update_callback, params: { description: 'foo' } },
+ { klass: WorkItems::Callbacks::Description, callback: :before_update },
{ klass: WorkItems::Widgets::HierarchyService::UpdateService, callback: :before_update_in_transaction, params: { parent: parent } }
]
end
end
context 'when updating widgets' do
- let(:widget_service_class) { WorkItems::Widgets::DescriptionService::UpdateService }
+ let(:widget_service_class) { WorkItems::Callbacks::Description }
let(:widget_params) { { description_widget: { description: 'changed' } } }
context 'when widget service is not present' do
@@ -215,8 +215,8 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
before do
allow_next_instance_of(widget_service_class) do |instance|
allow(instance)
- .to receive(:before_update_callback)
- .with(params: { description: 'changed' }).and_return(nil)
+ .to receive(:before_update)
+ .and_return(nil)
end
end
@@ -269,7 +269,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
expect(service).to receive(:update).and_call_original
expect(service).not_to receive(:execute_widgets).with(callback: :update, widget_params: widget_params)
- expect { update_work_item }.not_to change(work_item, :description)
+ expect do
+ update_work_item
+ work_item.reload
+ end.not_to change(work_item, :description)
end
end
end