gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-09-20 02:18:09 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-09-20 02:18:09 +0300
commit    6ed4ec3e0b1340f96b7c043ef51d1b33bbe85fde (patch)
tree      dc4d20fe6064752c0bd323187252c77e0a89144b /spec/services
parent    9868dae7fc0655bd7ce4a6887d4e6d487690eeed (diff)
Add latest changes from gitlab-org/gitlab@15-4-stable-ee (tag: v15.4.0-rc42)
Diffstat (limited to 'spec/services')
 spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb | 6
 spec/services/bulk_imports/file_download_service_spec.rb | 2
 spec/services/bulk_imports/tree_export_service_spec.rb | 4
 spec/services/ci/after_requeue_job_service_spec.rb | 97
 spec/services/ci/archive_trace_service_spec.rb | 9
 spec/services/ci/build_erase_service_spec.rb | 88
 spec/services/ci/compare_reports_base_service_spec.rb | 18
 spec/services/ci/create_downstream_pipeline_service_spec.rb | 67
 spec/services/ci/create_pipeline_service/artifacts_spec.rb | 2
 spec/services/ci/create_pipeline_service/cache_spec.rb | 2
 spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb | 2
 spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb | 2
 spec/services/ci/create_pipeline_service/custom_config_content_spec.rb | 2
 spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb | 2
 spec/services/ci/create_pipeline_service/dry_run_spec.rb | 2
 spec/services/ci/create_pipeline_service/environment_spec.rb | 48
 spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb | 2
 spec/services/ci/create_pipeline_service/include_spec.rb | 2
 spec/services/ci/create_pipeline_service/logger_spec.rb | 8
 spec/services/ci/create_pipeline_service/merge_requests_spec.rb | 2
 spec/services/ci/create_pipeline_service/needs_spec.rb | 2
 spec/services/ci/create_pipeline_service/parallel_spec.rb | 2
 spec/services/ci/create_pipeline_service/parameter_content_spec.rb | 2
 spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb | 4
 spec/services/ci/create_pipeline_service/partitioning_spec.rb | 146
 spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb | 2
 spec/services/ci/create_pipeline_service/rate_limit_spec.rb | 4
 spec/services/ci/create_pipeline_service/rules_spec.rb | 2
 spec/services/ci/create_pipeline_service/tags_spec.rb | 4
 spec/services/ci/create_pipeline_service_spec.rb | 4
 spec/services/ci/job_artifacts/create_service_spec.rb | 17
 spec/services/ci/job_artifacts/delete_service_spec.rb | 41
 spec/services/ci/job_artifacts/destroy_batch_service_spec.rb | 11
 spec/services/ci/job_artifacts/track_artifact_report_service_spec.rb | 122
 spec/services/ci/job_token_scope/add_project_service_spec.rb | 20
 spec/services/ci/job_token_scope/remove_project_service_spec.rb | 20
 spec/services/ci/list_config_variables_service_spec.rb | 4
 spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb | 24
 spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb | 24
 spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb | 27
 spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb | 2
 spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_fail_and_retry_1.yml | 55
 spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_fail_and_retry_2.yml | 63
 spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_subsequent_manual_jobs.yml | 65
 spec/services/ci/pipeline_schedule_service_spec.rb | 11
 spec/services/ci/pipelines/add_job_service_spec.rb | 8
 spec/services/ci/pipelines/hook_service_spec.rb | 2
 spec/services/ci/play_manual_stage_service_spec.rb | 2
 spec/services/ci/process_sync_events_service_spec.rb | 12
 spec/services/ci/queue/pending_builds_strategy_spec.rb | 24
 spec/services/ci/register_job_service_spec.rb | 8
 spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb | 15
 spec/services/ci/retry_job_service_spec.rb | 52
 spec/services/ci/retry_pipeline_service_spec.rb | 121
 spec/services/ci/runners/set_runner_associated_projects_service_spec.rb | 88
 spec/services/ci/runners/update_runner_service_spec.rb | 80
 spec/services/ci/unlock_artifacts_service_spec.rb | 34
 spec/services/container_expiration_policies/cleanup_service_spec.rb | 2
 spec/services/deployments/link_merge_requests_service_spec.rb | 4
 spec/services/deployments/update_environment_service_spec.rb | 27
 spec/services/design_management/delete_designs_service_spec.rb | 8
 spec/services/design_management/save_designs_service_spec.rb | 16
 spec/services/discussions/capture_diff_note_positions_service_spec.rb | 4
 spec/services/environments/stop_service_spec.rb | 36
 spec/services/event_create_service_spec.rb | 12
 spec/services/git/branch_hooks_service_spec.rb | 2
 spec/services/git/branch_push_service_spec.rb | 8
 spec/services/git/wiki_push_service_spec.rb | 32
 spec/services/google_cloud/enable_cloudsql_service_spec.rb | 22
 spec/services/google_cloud/fetch_google_ip_list_service_spec.rb | 73
 spec/services/groups/create_service_spec.rb | 4
 spec/services/groups/destroy_service_spec.rb | 39
 spec/services/groups/import_export/import_service_spec.rb | 28
 spec/services/import/github_service_spec.rb | 10
 spec/services/issuable/bulk_update_service_spec.rb | 2
 spec/services/issues/build_service_spec.rb | 2
 spec/services/issues/create_service_spec.rb | 60
 spec/services/issues/relative_position_rebalancing_service_spec.rb | 15
 spec/services/issues/update_service_spec.rb | 37
 spec/services/members/create_service_spec.rb | 26
 spec/services/merge_requests/approval_service_spec.rb | 73
 spec/services/merge_requests/build_service_spec.rb | 24
 spec/services/merge_requests/create_pipeline_service_spec.rb | 2
 spec/services/merge_requests/create_service_spec.rb | 2
 spec/services/merge_requests/ff_merge_service_spec.rb | 8
 spec/services/merge_requests/handle_assignees_change_service_spec.rb | 2
 spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb | 97
 spec/services/merge_requests/mergeability/logger_spec.rb | 121
 spec/services/merge_requests/mergeability/run_checks_service_spec.rb | 10
 spec/services/merge_requests/update_assignees_service_spec.rb | 43
 spec/services/merge_requests/update_service_spec.rb | 111
 spec/services/metrics/dashboard/clone_dashboard_service_spec.rb | 2
 spec/services/notes/create_service_spec.rb | 10
 spec/services/notes/destroy_service_spec.rb | 11
 spec/services/onboarding/progress_service_spec.rb (renamed from spec/services/onboarding_progress_service_spec.rb) | 16
 spec/services/packages/debian/parse_debian822_service_spec.rb | 2
 spec/services/packages/debian/process_changes_service_spec.rb | 6
 spec/services/packages/rpm/repository_metadata/base_builder_spec.rb | 22
 spec/services/packages/rpm/repository_metadata/build_filelist_xml_spec.rb | 21
 spec/services/packages/rpm/repository_metadata/build_other_xml_spec.rb | 21
 spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb | 21
 spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb | 66
 spec/services/post_receive_service_spec.rb | 3
 spec/services/projects/blame_service_spec.rb | 12
 spec/services/projects/container_repository/cleanup_tags_service_spec.rb | 456
 spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb | 183
 spec/services/projects/create_service_spec.rb | 26
 spec/services/projects/destroy_service_spec.rb | 43
 spec/services/projects/update_pages_service_spec.rb | 19
 spec/services/protected_branches/cache_service_spec.rb | 10
 spec/services/protected_branches/destroy_service_spec.rb | 2
 spec/services/protected_branches/update_service_spec.rb | 2
 spec/services/quick_actions/interpret_service_spec.rb | 57
 spec/services/releases/create_service_spec.rb | 22
 spec/services/resource_access_tokens/revoke_service_spec.rb | 33
 spec/services/resource_events/change_labels_service_spec.rb | 19
 spec/services/security/ci_configuration/sast_parser_service_spec.rb | 2
 spec/services/service_ping/submit_service_ping_service_spec.rb | 151
 spec/services/service_response_spec.rb | 15
 spec/services/snippets/bulk_destroy_service_spec.rb | 4
 spec/services/spam/spam_action_service_spec.rb | 78
 spec/services/spam/spam_verdict_service_spec.rb | 78
 spec/services/suggestions/apply_service_spec.rb | 53
 spec/services/system_notes/time_tracking_service_spec.rb | 91
 spec/services/topics/merge_service_spec.rb | 10
 spec/services/users/destroy_service_spec.rb | 586
 spec/services/users/email_verification/generate_token_service_spec.rb | 37
 spec/services/users/email_verification/validate_token_service_spec.rb | 97
 spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb | 31
 spec/services/users/migrate_records_to_ghost_user_service_spec.rb | 259
 spec/services/users/reject_service_spec.rb | 26
 spec/services/work_items/update_service_spec.rb | 12
 spec/services/work_items/widgets/description_service/update_service_spec.rb | 2
 133 files changed, 3655 insertions(+), 1415 deletions(-)
diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
index 6f28f892f00..73d185283b6 100644
--- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
@@ -217,18 +217,20 @@ RSpec.describe AutoMerge::MergeWhenPipelineSucceedsService do
let(:ref) { mr_merge_if_green_enabled.source_branch }
let(:sha) { project.commit(ref).id }
+ let(:build_stage) { create(:ci_stage, name: 'build', pipeline: pipeline) }
+
let(:pipeline) do
create(:ci_empty_pipeline, ref: ref, sha: sha, project: project)
end
let!(:build) do
create(:ci_build, :created, pipeline: pipeline, ref: ref,
- name: 'build', stage: 'build')
+ name: 'build', ci_stage: build_stage )
end
let!(:test) do
create(:ci_build, :created, pipeline: pipeline, ref: ref,
- name: 'test', stage: 'test')
+ name: 'test')
end
before do
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index 81229cc8431..ec9cc719e24 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -277,7 +277,7 @@ RSpec.describe BulkImports::FileDownloadService do
let_it_be(:content_disposition) { 'filename="../../xxx.b"' }
before do
- stub_const("#{described_class}::FILENAME_SIZE_LIMIT", 1)
+ stub_const('BulkImports::FileDownloads::FilenameFetch::FILENAME_SIZE_LIMIT', 1)
end
it 'raises an error when the filename is not provided in the request header' do
diff --git a/spec/services/bulk_imports/tree_export_service_spec.rb b/spec/services/bulk_imports/tree_export_service_spec.rb
index ffb81fe2b5f..6e26cb6dc2b 100644
--- a/spec/services/bulk_imports/tree_export_service_spec.rb
+++ b/spec/services/bulk_imports/tree_export_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe BulkImports::TreeExportService do
let(:relation) { 'issues' }
- subject(:service) { described_class.new(project, export_path, relation) }
+ subject(:service) { described_class.new(project, export_path, relation, project.owner) }
describe '#execute' do
it 'executes export service and archives exported data' do
@@ -21,7 +21,7 @@ RSpec.describe BulkImports::TreeExportService do
context 'when unsupported relation is passed' do
it 'raises an error' do
- service = described_class.new(project, export_path, 'unsupported')
+ service = described_class.new(project, export_path, 'unsupported', project.owner)
expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
end
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/after_requeue_job_service_spec.rb
index fb67ee18fb2..1f692bdb71a 100644
--- a/spec/services/ci/after_requeue_job_service_spec.rb
+++ b/spec/services/ci/after_requeue_job_service_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
check_jobs_statuses(
a1: 'pending',
a2: 'created',
- a3: 'skipped',
+ a3: 'created',
b1: 'success',
b2: 'created',
c1: 'created',
@@ -120,6 +120,26 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
)
end
+ context 'when the FF ci_requeue_with_dag_object_hierarchy is disabled' do
+ before do
+ stub_feature_flags(ci_requeue_with_dag_object_hierarchy: false)
+ end
+
+ it 'marks subsequent skipped jobs as processable but leaves a3 created' do
+ execute_after_requeue_service(a1)
+
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'created',
+ a3: 'skipped',
+ b1: 'success',
+ b2: 'created',
+ c1: 'created',
+ c2: 'created'
+ )
+ end
+ end
+
context 'when executed by a different user than the original owner' do
let(:retryer) { create(:user).tap { |u| project.add_maintainer(u) } }
let(:service) { described_class.new(project, retryer) }
@@ -140,7 +160,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
expect(jobs_name_status_owner_needs).to contain_exactly(
{ 'name' => 'a1', 'status' => 'pending', 'user_id' => user.id, 'needs' => [] },
{ 'name' => 'a2', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['a1'] },
- { 'name' => 'a3', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['a2'] },
+ { 'name' => 'a3', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['a2'] },
{ 'name' => 'b1', 'status' => 'success', 'user_id' => user.id, 'needs' => [] },
{ 'name' => 'b2', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['a2'] },
{ 'name' => 'c1', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['b2'] },
@@ -237,6 +257,79 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
end
end
+ context 'with same-stage needs' do
+ let(:config) do
+ <<-EOY
+ a:
+ script: exit $(($RANDOM % 2))
+
+ b:
+ script: exit 0
+ needs: [a]
+
+ c:
+ script: exit 0
+ needs: [b]
+ EOY
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ let(:a) { find_job('a') }
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ check_jobs_statuses(
+ a: 'pending',
+ b: 'created',
+ c: 'created'
+ )
+
+ a.drop!
+ check_jobs_statuses(
+ a: 'failed',
+ b: 'skipped',
+ c: 'skipped'
+ )
+
+ new_a = Ci::RetryJobService.new(project, user).clone!(a)
+ new_a.enqueue!
+ check_jobs_statuses(
+ a: 'pending',
+ b: 'skipped',
+ c: 'skipped'
+ )
+ end
+
+ it 'marks subsequent skipped jobs as processable' do
+ execute_after_requeue_service(a)
+
+ check_jobs_statuses(
+ a: 'pending',
+ b: 'created',
+ c: 'created'
+ )
+ end
+
+ context 'when the FF ci_requeue_with_dag_object_hierarchy is disabled' do
+ before do
+ stub_feature_flags(ci_requeue_with_dag_object_hierarchy: false)
+ end
+
+ it 'marks the next subsequent skipped job as processable but leaves c skipped' do
+ execute_after_requeue_service(a)
+
+ check_jobs_statuses(
+ a: 'pending',
+ b: 'created',
+ c: 'skipped'
+ )
+ end
+ end
+ end
+
private
def find_job(name)
diff --git a/spec/services/ci/archive_trace_service_spec.rb b/spec/services/ci/archive_trace_service_spec.rb
index bf2e5302d2e..359ea0699e4 100644
--- a/spec/services/ci/archive_trace_service_spec.rb
+++ b/spec/services/ci/archive_trace_service_spec.rb
@@ -17,21 +17,12 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
context 'integration hooks' do
it do
- stub_feature_flags(datadog_integration_logs_collection: [job.project])
-
expect(job.project).to receive(:execute_integrations) do |data, hook_type|
expect(data).to eq Gitlab::DataBuilder::ArchiveTrace.build(job)
expect(hook_type).to eq :archive_trace_hooks
end
expect { subject }.not_to raise_error
end
-
- it 'with feature flag disabled' do
- stub_feature_flags(datadog_integration_logs_collection: false)
-
- expect(job.project).not_to receive(:execute_integrations)
- expect { subject }.not_to raise_error
- end
end
context 'when trace is already archived' do
diff --git a/spec/services/ci/build_erase_service_spec.rb b/spec/services/ci/build_erase_service_spec.rb
new file mode 100644
index 00000000000..e750a163621
--- /dev/null
+++ b/spec/services/ci/build_erase_service_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BuildEraseService do
+ let_it_be(:user) { user }
+
+ let(:build) { create(:ci_build, :artifacts, :trace_artifact, artifacts_expire_at: 100.days.from_now) }
+
+ subject(:service) { described_class.new(build, user) }
+
+ describe '#execute' do
+ context 'when build is erasable' do
+ before do
+ allow(build).to receive(:erasable?).and_return(true)
+ end
+
+ it 'is successful' do
+ result = service.execute
+
+ expect(result).to be_success
+ end
+
+ it 'erases artifacts' do
+ service.execute
+
+ expect(build.artifacts_file).not_to be_present
+ expect(build.artifacts_metadata).not_to be_present
+ end
+
+ it 'erases trace' do
+ service.execute
+
+ expect(build.trace).not_to exist
+ end
+
+ it 'records erasure detail' do
+ freeze_time do
+ service.execute
+
+ expect(build.erased_by).to eq(user)
+ expect(build.erased_at).to eq(Time.current)
+ expect(build.artifacts_expire_at).to be_nil
+ end
+ end
+
+ context 'when project is undergoing statistics refresh' do
+ before do
+ allow(build.project).to receive(:refreshing_build_artifacts_size?).and_return(true)
+ end
+
+ it 'logs a warning' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger)
+ .to receive(:warn_artifact_deletion_during_stats_refresh)
+ .with(method: 'Ci::BuildEraseService#execute', project_id: build.project_id)
+
+ service.execute
+ end
+ end
+ end
+
+ context 'when build is not erasable' do
+ before do
+ allow(build).to receive(:erasable?).and_return(false)
+ end
+
+ it 'is not successful' do
+ result = service.execute
+
+ expect(result).to be_error
+ expect(result.http_status).to eq(:unprocessable_entity)
+ end
+
+ it 'does not erase artifacts' do
+ service.execute
+
+ expect(build.artifacts_file).to be_present
+ expect(build.artifacts_metadata).to be_present
+ end
+
+ it 'does not erase trace' do
+ service.execute
+
+ expect(build.trace).to exist
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/compare_reports_base_service_spec.rb b/spec/services/ci/compare_reports_base_service_spec.rb
index 9ce58c4972d..20d8cd37553 100644
--- a/spec/services/ci/compare_reports_base_service_spec.rb
+++ b/spec/services/ci/compare_reports_base_service_spec.rb
@@ -6,13 +6,13 @@ RSpec.describe Ci::CompareReportsBaseService do
let(:service) { described_class.new(project) }
let(:project) { create(:project, :repository) }
+ let!(:base_pipeline) { nil }
+ let!(:head_pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+ let!(:key) { service.send(:key, base_pipeline, head_pipeline) }
+
describe '#latest?' do
subject { service.latest?(base_pipeline, head_pipeline, data) }
- let!(:base_pipeline) { nil }
- let!(:head_pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
- let!(:key) { service.send(:key, base_pipeline, head_pipeline) }
-
context 'when cache key is latest' do
let(:data) { { key: key } }
@@ -35,4 +35,14 @@ RSpec.describe Ci::CompareReportsBaseService do
it { is_expected.to be_falsy }
end
end
+
+ describe '#execute' do
+ context 'when base_pipeline is running' do
+ let!(:base_pipeline) { create(:ci_pipeline, :running, project: project) }
+
+ subject { service.execute(base_pipeline, head_pipeline) }
+
+ it { is_expected.to eq(status: :parsing, key: key) }
+ end
+ end
end
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index 11fb564b843..9c02c5218f1 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -5,9 +5,12 @@ require 'spec_helper'
RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
include Ci::SourcePipelineHelpers
- let_it_be(:user) { create(:user) }
+ # Using let_it_be on user and projects for these specs can cause
+ # spec-ordering failures due to the project-based permissions
+ # associating them. They should be recreated every time.
+ let(:user) { create(:user) }
let(:upstream_project) { create(:project, :repository) }
- let_it_be(:downstream_project, refind: true) { create(:project, :repository) }
+ let(:downstream_project) { create(:project, :repository) }
let!(:upstream_pipeline) do
create(:ci_pipeline, :running, project: upstream_project)
@@ -440,10 +443,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
let!(:trigger_project_bridge) do
create(
- :ci_bridge, status: :pending,
- user: user,
- options: trigger_downstream_project,
- pipeline: child_pipeline
+ :ci_bridge, status: :pending, user: user, options: trigger_downstream_project, pipeline: child_pipeline
)
end
@@ -819,5 +819,60 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
end
end
+
+ context 'when a downstream pipeline has sibling pipelines' do
+ it_behaves_like 'logs downstream pipeline creation' do
+ let(:expected_root_pipeline) { upstream_pipeline }
+ let(:expected_downstream_relationship) { :multi_project }
+
+ # New downstream, plus upstream, plus two children of upstream created below
+ let(:expected_hierarchy_size) { 4 }
+
+ before do
+ create_list(:ci_pipeline, 2, child_of: upstream_pipeline)
+ end
+ end
+ end
+
+ context 'when the pipeline tree is too large' do
+ let_it_be(:parent) { create(:ci_pipeline) }
+ let_it_be(:child) { create(:ci_pipeline, child_of: parent) }
+ let_it_be(:sibling) { create(:ci_pipeline, child_of: parent) }
+
+ before do
+ stub_const("#{described_class}::MAX_HIERARCHY_SIZE", 3)
+ end
+
+ let(:bridge) do
+ create(:ci_bridge, status: :pending, user: user, options: trigger, pipeline: child)
+ end
+
+ it 'does not create a new pipeline' do
+ expect { subject }.not_to change { Ci::Pipeline.count }
+ end
+
+ it 'drops the trigger job with an explanatory reason' do
+ subject
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('reached_max_pipeline_hierarchy_size')
+ end
+
+ context 'with :ci_limit_complete_hierarchy_size disabled' do
+ before do
+ stub_feature_flags(ci_limit_complete_hierarchy_size: false)
+ end
+
+ it 'creates a new pipeline' do
+ expect { subject }.to change { Ci::Pipeline.count }.by(1)
+ end
+
+ it 'marks the bridge job as successful' do
+ subject
+
+ expect(bridge.reload).to be_success
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/artifacts_spec.rb b/spec/services/ci/create_pipeline_service/artifacts_spec.rb
index 1ec30d68666..e5e405492a0 100644
--- a/spec/services/ci/create_pipeline_service/artifacts_spec.rb
+++ b/spec/services/ci/create_pipeline_service/artifacts_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/cache_spec.rb b/spec/services/ci/create_pipeline_service/cache_spec.rb
index fe777bc50d9..82c3d374636 100644
--- a/spec/services/ci/create_pipeline_service/cache_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cache_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
context 'cache' do
let(:project) { create(:project, :custom_repo, files: files) }
let(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
index a920b90b97d..0ebcecdd6e6 100644
--- a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
+++ b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
describe 'creation errors and warnings' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
index e1d60ed57ef..74d3534eb45 100644
--- a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, '#execute' do
+RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_flag_corectness do
let_it_be(:group) { create(:group, name: 'my-organization') }
let(:upstream_project) { create(:project, :repository, name: 'upstream', group: group) }
diff --git a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
index a0cbf14d936..dafa227c4c8 100644
--- a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb b/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
index 716a929830e..3b042f05fc0 100644
--- a/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_yaml_tags_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
describe '!reference tags' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/dry_run_spec.rb b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
index 9a7e97fb12b..de1ed251c82 100644
--- a/spec/services/ci/create_pipeline_service/dry_run_spec.rb
+++ b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/environment_spec.rb b/spec/services/ci/create_pipeline_service/environment_spec.rb
index 43b5220334c..438cb6ac895 100644
--- a/spec/services/ci/create_pipeline_service/environment_spec.rb
+++ b/spec/services/ci/create_pipeline_service/environment_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:developer) { create(:user) }
@@ -45,5 +45,51 @@ RSpec.describe Ci::CreatePipelineService do
end
end
end
+
+ context 'when variables are dependent on stage name' do
+ let(:config) do
+ <<~YAML
+ deploy-review-app-1:
+ stage: deploy
+ environment: 'test/$CI_JOB_STAGE/1'
+ script:
+ - echo $SCOPED_VARIABLE
+ rules:
+ - if: $SCOPED_VARIABLE == 'my-value-1'
+
+ deploy-review-app-2:
+ stage: deploy
+ script:
+ - echo $SCOPED_VARIABLE
+ environment: 'test/$CI_JOB_STAGE/2'
+ rules:
+ - if: $SCOPED_VARIABLE == 'my-value-2'
+ YAML
+ end
+
+ before do
+ create(:ci_variable, key: 'SCOPED_VARIABLE', value: 'my-value-1', environment_scope: '*', project: project)
+ create(:ci_variable,
+ key: 'SCOPED_VARIABLE',
+ value: 'my-value-2',
+ environment_scope: 'test/deploy/*',
+ project: project
+ )
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates the pipeline successfully', :aggregate_failures do
+ pipeline = subject
+ build = pipeline.builds.first
+
+ expect(pipeline).to be_created_successfully
+ expect(Environment.find_by_name('test/deploy/2')).to be_persisted
+ expect(pipeline.builds.size).to eq(1)
+ expect(build.persisted_environment.name).to eq('test/deploy/2')
+ expect(build.name).to eq('deploy-review-app-2')
+ expect(build.environment).to eq('test/$CI_JOB_STAGE/2')
+ expect(build.variables.to_hash['SCOPED_VARIABLE']).to eq('my-value-2')
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb b/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
index 7c698242921..e84726d31f6 100644
--- a/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
let_it_be(:group) { create(:group, :private) }
let_it_be(:group_variable) { create(:ci_group_variable, group: group, key: 'RUNNER_TAG', value: 'group') }
let_it_be(:project) { create(:project, :repository, group: group) }
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
index 849eb5885f6..67d8530525a 100644
--- a/spec/services/ci/create_pipeline_service/include_spec.rb
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
context 'include:' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
index 53e5f0dd7f2..2be23802757 100644
--- a/spec/services/ci/create_pipeline_service/logger_spec.rb
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
context 'pipeline logger' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -19,9 +19,9 @@ RSpec.describe Ci::CreatePipelineService do
let(:counters) do
{
'count' => a_kind_of(Numeric),
- 'avg' => a_kind_of(Numeric),
- 'max' => a_kind_of(Numeric),
- 'min' => a_kind_of(Numeric)
+ 'avg' => a_kind_of(Numeric),
+ 'max' => a_kind_of(Numeric),
+ 'min' => a_kind_of(Numeric)
}
end
diff --git a/spec/services/ci/create_pipeline_service/merge_requests_spec.rb b/spec/services/ci/create_pipeline_service/merge_requests_spec.rb
index de19ef363fb..80f48451e5c 100644
--- a/spec/services/ci/create_pipeline_service/merge_requests_spec.rb
+++ b/spec/services/ci/create_pipeline_service/merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
context 'merge requests handling' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/needs_spec.rb b/spec/services/ci/create_pipeline_service/needs_spec.rb
index abd17ccdd6a..38e330316ea 100644
--- a/spec/services/ci/create_pipeline_service/needs_spec.rb
+++ b/spec/services/ci/create_pipeline_service/needs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
context 'needs' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/parallel_spec.rb b/spec/services/ci/create_pipeline_service/parallel_spec.rb
index ae28b74fef5..5ee378a9719 100644
--- a/spec/services/ci/create_pipeline_service/parallel_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parallel_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
index f593707f460..cae88bb67cf 100644
--- a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 4326fa5533f..513cbbed6cd 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, '#execute' do
+RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_flag_corectness do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -36,7 +36,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
expect(pipeline.statuses).to match_array [test, bridge]
expect(bridge.options).to eq(expected_bridge_options)
expect(bridge.yaml_variables)
- .to include(key: 'CROSS', value: 'downstream', public: true)
+ .to include(key: 'CROSS', value: 'downstream')
end
end
diff --git a/spec/services/ci/create_pipeline_service/partitioning_spec.rb b/spec/services/ci/create_pipeline_service/partitioning_spec.rb
new file mode 100644
index 00000000000..43fbb74ede4
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/partitioning_spec.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :aggregate_failures do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.first_owner }
+
+ let(:service) { described_class.new(project, user, { ref: 'master' }) }
+ let(:config) do
+ <<-YAML
+ stages:
+ - build
+ - test
+ - deploy
+
+ build:
+ stage: build
+ script: make build
+
+ test:
+ stage: test
+ trigger:
+ include: child.yml
+
+ deploy:
+ stage: deploy
+ script: make deploy
+ environment: review/$CI_JOB_NAME
+ YAML
+ end
+
+ let(:pipeline) { service.execute(:push).payload }
+ let(:current_partition_id) { 123 }
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ allow(Ci::Pipeline).to receive(:current_partition_value) { current_partition_id }
+ end
+
+ it 'assigns partition_id to pipeline' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.partition_id).to eq(current_partition_id)
+ end
+
+ it 'assigns partition_id to stages' do
+ stage_partition_ids = pipeline.stages.map(&:partition_id).uniq
+
+ expect(stage_partition_ids).to eq([current_partition_id])
+ end
+
+ it 'assigns partition_id to processables' do
+ processables_partition_ids = pipeline.processables.map(&:partition_id).uniq
+
+ expect(processables_partition_ids).to eq([current_partition_id])
+ end
+
+ it 'assigns partition_id to metadata' do
+ metadata_partition_ids = pipeline.processables.map { |job| job.metadata.partition_id }.uniq
+
+ expect(metadata_partition_ids).to eq([current_partition_id])
+ end
+
+ it 'correctly assigns partition and environment' do
+ metadata = find_metadata('deploy')
+
+ expect(metadata.partition_id).to eq(current_partition_id)
+ expect(metadata.expanded_environment_name).to eq('review/deploy')
+ end
+
+ context 'with pipeline variables' do
+ let(:variables_attributes) do
+ [
+ { key: 'SOME_VARIABLE', secret_value: 'SOME_VAL' },
+ { key: 'OTHER_VARIABLE', secret_value: 'OTHER_VAL' }
+ ]
+ end
+
+ let(:service) do
+ described_class.new(
+ project,
+ user,
+ { ref: 'master', variables_attributes: variables_attributes })
+ end
+
+ it 'assigns partition_id to pipeline' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.partition_id).to eq(current_partition_id)
+ end
+
+ it 'assigns partition_id to variables' do
+ variables_partition_ids = pipeline.variables.map(&:partition_id).uniq
+
+ expect(pipeline.variables.size).to eq(2)
+ expect(variables_partition_ids).to eq([current_partition_id])
+ end
+ end
+
+ context 'with parent child pipelines' do
+ before do
+ allow(Ci::Pipeline)
+ .to receive(:current_partition_value)
+ .and_return(current_partition_id, 301, 302)
+
+ allow_next_found_instance_of(Ci::Bridge) do |bridge|
+ allow(bridge).to receive(:yaml_for_downstream).and_return(child_config)
+ end
+ end
+
+ let(:config) do
+ <<-YAML
+ test:
+ trigger:
+ include: child.yml
+ YAML
+ end
+
+ let(:child_config) do
+ <<-YAML
+ test:
+ script: make test
+ YAML
+ end
+
+ it 'assigns partition values to child pipelines', :aggregate_failures, :sidekiq_inline do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.child_pipelines).to all be_created_successfully
+
+ child_partition_ids = pipeline.child_pipelines.map(&:partition_id).uniq
+ child_jobs = CommitStatus.where(commit_id: pipeline.child_pipelines)
+
+ expect(pipeline.partition_id).to eq(current_partition_id)
+ expect(child_partition_ids).to eq([current_partition_id])
+
+ expect(child_jobs).to all be_a(Ci::Build)
+ expect(child_jobs.pluck(:partition_id).uniq).to eq([current_partition_id])
+ end
+ end
+
+ def find_metadata(name)
+ pipeline
+ .processables
+ .find { |job| job.name == name }
+ .metadata
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb b/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb
index c6e69862422..db110bdc608 100644
--- a/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb
+++ b/spec/services/ci/create_pipeline_service/pre_post_stages_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
describe '.pre/.post stages' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/rate_limit_spec.rb b/spec/services/ci/create_pipeline_service/rate_limit_spec.rb
index 0000296230f..dfa74870341 100644
--- a/spec/services/ci/create_pipeline_service/rate_limit_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rate_limit_spec.rb
@@ -1,7 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, :freeze_time, :clean_gitlab_redis_rate_limiting do
+RSpec.describe Ci::CreatePipelineService, :freeze_time,
+ :clean_gitlab_redis_rate_limiting,
+ :yaml_processor_feature_flag_corectness do
describe 'rate limiting' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index 6e48141226d..fc57ca66d3a 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
let(:ref) { 'refs/heads/master' }
diff --git a/spec/services/ci/create_pipeline_service/tags_spec.rb b/spec/services/ci/create_pipeline_service/tags_spec.rb
index 0774f9fff2a..7450df11eac 100644
--- a/spec/services/ci/create_pipeline_service/tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/tags_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
describe 'tags:' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -37,7 +37,7 @@ RSpec.describe Ci::CreatePipelineService do
context 'tags persistence' do
let(:config) do
{
- build: {
+ build: {
script: 'ls',
stage: 'build',
tags: build_tag_list(label: 'build')
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index a9442b0dc68..c2e80316d26 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :clean_gitlab_redis_cache do
include ProjectForksHelper
let_it_be_with_refind(:project) { create(:project, :repository) }
@@ -463,7 +463,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'pull it from Auto-DevOps' do
pipeline = execute_service.payload
expect(pipeline).to be_auto_devops_source
- expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality container_scanning eslint-sast secret_detection semgrep-sast test])
+ expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality container_scanning secret_detection semgrep-sast test])
end
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 7b3f67b192f..a2259f9813b 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -151,9 +151,8 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect { subject }.not_to change { Ci::JobArtifact.count }
expect(subject).to match(
- a_hash_including(http_status: :bad_request,
- message: 'another artifact of the same type already exists',
- status: :error))
+ a_hash_including(
+ http_status: :bad_request, message: 'another artifact of the same type already exists', status: :error))
end
end
end
@@ -182,6 +181,18 @@ RSpec.describe Ci::JobArtifacts::CreateService do
end
end
+ context 'with job partitioning' do
+ let(:job) { create(:ci_build, project: project, partition_id: 123) }
+
+ it 'sets partition_id on artifacts' do
+ expect { subject }.to change { Ci::JobArtifact.count }
+
+ artifacts_partitions = job.job_artifacts.map(&:partition_id).uniq
+
+ expect(artifacts_partitions).to eq([123])
+ end
+ end
+
shared_examples 'rescues object storage error' do |klass, message, expected_message|
it "handles #{klass}" do
allow_next_instance_of(JobArtifactUploader) do |uploader|
diff --git a/spec/services/ci/job_artifacts/delete_service_spec.rb b/spec/services/ci/job_artifacts/delete_service_spec.rb
new file mode 100644
index 00000000000..62a755eb44a
--- /dev/null
+++ b/spec/services/ci/job_artifacts/delete_service_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobArtifacts::DeleteService do
+ let_it_be(:build, reload: true) do
+ create(:ci_build, :artifacts, :trace_artifact, artifacts_expire_at: 100.days.from_now)
+ end
+
+ subject(:service) { described_class.new(build) }
+
+ describe '#execute' do
+ it 'is successful' do
+ result = service.execute
+
+ expect(result).to be_success
+ end
+
+ it 'deletes erasable artifacts' do
+ expect { service.execute }.to change { build.job_artifacts.erasable.count }.from(2).to(0)
+ end
+
+ it 'does not delete trace' do
+ expect { service.execute }.not_to change { build.has_trace? }.from(true)
+ end
+
+ context 'when project is undergoing statistics refresh' do
+ before do
+ allow(build.project).to receive(:refreshing_build_artifacts_size?).and_return(true)
+ end
+
+ it 'logs a warning' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger)
+ .to receive(:warn_artifact_deletion_during_stats_refresh)
+ .with(method: 'Ci::JobArtifacts::DeleteService#execute', project_id: build.project_id)
+
+ service.execute
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 9ca39d4d32e..54d1cacc068 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -221,6 +221,15 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
end
context 'with update_stats: false' do
+ let_it_be(:extra_artifact_with_file) do
+ create(:ci_job_artifact, :zip, project: artifact_with_file.project)
+ end
+
+ let(:artifacts) do
+ Ci::JobArtifact.where(id: [artifact_with_file.id, extra_artifact_with_file.id,
+ artifact_without_file.id, trace_artifact.id])
+ end
+
it 'does not update project statistics' do
expect(ProjectStatistics).not_to receive(:increment_statistic)
@@ -230,7 +239,7 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
it 'returns size statistics' do
expected_updates = {
statistics_updates: {
- artifact_with_file.project => -artifact_with_file.file.size,
+ artifact_with_file.project => -(artifact_with_file.file.size + extra_artifact_with_file.file.size),
artifact_without_file.project => 0
}
}
diff --git a/spec/services/ci/job_artifacts/track_artifact_report_service_spec.rb b/spec/services/ci/job_artifacts/track_artifact_report_service_spec.rb
new file mode 100644
index 00000000000..6d9fc4c8e34
--- /dev/null
+++ b/spec/services/ci/job_artifacts/track_artifact_report_service_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobArtifacts::TrackArtifactReportService do
+ describe '#execute', :clean_gitlab_redis_shared_state do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let(:test_event_name) { 'i_testing_test_report_uploaded' }
+ let(:counter) { Gitlab::UsageDataCounters::HLLRedisCounter }
+ let(:start_time) { 1.week.ago }
+ let(:end_time) { 1.week.from_now }
+
+ subject(:track_artifact_report) { described_class.new.execute(pipeline) }
+
+ context 'when pipeline has test reports' do
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user1) }
+
+ before do
+ 2.times do
+ pipeline.builds << build(:ci_build, :test_reports, pipeline: pipeline, project: pipeline.project)
+ end
+ end
+
+ it 'tracks the event using HLLRedisCounter' do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with(test_event_name, values: user1.id)
+ .and_call_original
+
+ expect { track_artifact_report }
+ .to change {
+ counter.unique_events(event_names: test_event_name,
+ start_date: start_time,
+ end_date: end_time)
+ }
+ .by 1
+ end
+ end
+
+ context 'when pipeline does not have test reports' do
+ let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+
+ it 'does not track the event' do
+ track_artifact_report
+
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .not_to receive(:track_event)
+ .with(anything, test_event_name)
+ end
+ end
+
+ context 'when a single user started multiple pipelines with test reports' do
+ let_it_be(:pipeline1) { create(:ci_pipeline, :with_test_reports, project: project, user: user1) }
+ let_it_be(:pipeline2) { create(:ci_pipeline, :with_test_reports, project: project, user: user1) }
+
+ it 'tracks all pipelines using HLLRedisCounter by one user_id' do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with(test_event_name, values: user1.id)
+ .and_call_original
+
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with(test_event_name, values: user1.id)
+ .and_call_original
+
+ expect do
+ described_class.new.execute(pipeline1)
+ described_class.new.execute(pipeline2)
+ end
+ .to change {
+ counter.unique_events(event_names: test_event_name,
+ start_date: start_time,
+ end_date: end_time)
+ }
+ .by 1
+ end
+ end
+
+ context 'when multiple users started multiple pipelines with test reports' do
+ let_it_be(:pipeline1) { create(:ci_pipeline, :with_test_reports, project: project, user: user1) }
+ let_it_be(:pipeline2) { create(:ci_pipeline, :with_test_reports, project: project, user: user2) }
+
+ it 'tracks all pipelines using HLLRedisCounter by multiple users' do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with(test_event_name, values: user1.id)
+ .and_call_original
+
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with(test_event_name, values: user1.id)
+ .and_call_original
+
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with(test_event_name, values: user2.id)
+ .and_call_original
+
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with(test_event_name, values: user2.id)
+ .and_call_original
+
+ expect do
+ described_class.new.execute(pipeline1)
+ described_class.new.execute(pipeline2)
+ end
+ .to change {
+ counter.unique_events(event_names: test_event_name,
+ start_date: start_time,
+ end_date: end_time)
+ }
+ .by 2
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/job_token_scope/add_project_service_spec.rb b/spec/services/ci/job_token_scope/add_project_service_spec.rb
index ba889465fac..bb6df4268dd 100644
--- a/spec/services/ci/job_token_scope/add_project_service_spec.rb
+++ b/spec/services/ci/job_token_scope/add_project_service_spec.rb
@@ -8,6 +8,14 @@ RSpec.describe Ci::JobTokenScope::AddProjectService do
let_it_be(:target_project) { create(:project) }
let_it_be(:current_user) { create(:user) }
+ shared_examples 'adds project' do |context|
+ it 'adds the project to the scope' do
+ expect do
+ expect(result).to be_success
+ end.to change { Ci::JobToken::ProjectScopeLink.count }.by(1)
+ end
+ end
+
describe '#execute' do
subject(:result) { service.execute(target_project) }
@@ -18,10 +26,14 @@ RSpec.describe Ci::JobTokenScope::AddProjectService do
target_project.add_developer(current_user)
end
- it 'adds the project to the scope' do
- expect do
- expect(result).to be_success
- end.to change { Ci::JobToken::ProjectScopeLink.count }.by(1)
+ it_behaves_like 'adds project'
+
+ context 'when token scope is disabled' do
+ before do
+ project.ci_cd_settings.update!(job_token_scope_enabled: false)
+ end
+
+ it_behaves_like 'adds project'
end
end
diff --git a/spec/services/ci/job_token_scope/remove_project_service_spec.rb b/spec/services/ci/job_token_scope/remove_project_service_spec.rb
index 238fc879f54..155e60ac48e 100644
--- a/spec/services/ci/job_token_scope/remove_project_service_spec.rb
+++ b/spec/services/ci/job_token_scope/remove_project_service_spec.rb
@@ -14,6 +14,14 @@ RSpec.describe Ci::JobTokenScope::RemoveProjectService do
target_project: target_project)
end
+ shared_examples 'removes project' do |context|
+ it 'removes the project from the scope' do
+ expect do
+ expect(result).to be_success
+ end.to change { Ci::JobToken::ProjectScopeLink.count }.by(-1)
+ end
+ end
+
describe '#execute' do
subject(:result) { service.execute(target_project) }
@@ -24,10 +32,14 @@ RSpec.describe Ci::JobTokenScope::RemoveProjectService do
target_project.add_developer(current_user)
end
- it 'removes the project from the scope' do
- expect do
- expect(result).to be_success
- end.to change { Ci::JobToken::ProjectScopeLink.count }.by(-1)
+ it_behaves_like 'removes project'
+
+ context 'when token scope is disabled' do
+ before do
+ project.ci_cd_settings.update!(job_token_scope_enabled: false)
+ end
+
+ it_behaves_like 'removes project'
end
end
diff --git a/spec/services/ci/list_config_variables_service_spec.rb b/spec/services/ci/list_config_variables_service_spec.rb
index 1735f4cfc97..4953b18bfcc 100644
--- a/spec/services/ci/list_config_variables_service_spec.rb
+++ b/spec/services/ci/list_config_variables_service_spec.rb
@@ -40,8 +40,8 @@ RSpec.describe Ci::ListConfigVariablesService, :use_clean_rails_memory_store_cac
it 'returns variable list' do
expect(subject['KEY1']).to eq({ value: 'val 1', description: 'description 1' })
expect(subject['KEY2']).to eq({ value: 'val 2', description: '' })
- expect(subject['KEY3']).to eq({ value: 'val 3', description: nil })
- expect(subject['KEY4']).to eq({ value: 'val 4', description: nil })
+ expect(subject['KEY3']).to eq({ value: 'val 3' })
+ expect(subject['KEY4']).to eq({ value: 'val 4' })
end
end
diff --git a/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
index 31548793bac..6d4dcf28108 100644
--- a/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
@@ -51,6 +51,30 @@ RSpec.describe Ci::PipelineArtifacts::CoverageReportService do
let!(:pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project) }
it_behaves_like 'creating or updating a pipeline coverage report'
+
+ context 'when ci_update_unlocked_pipeline_artifacts feature flag is enabled' do
+ it "artifact has pipeline's locked status" do
+ subject
+
+ artifact = Ci::PipelineArtifact.first
+
+ expect(artifact.locked).to eq(pipeline.locked)
+ end
+ end
+
+ context 'when ci_update_unlocked_pipeline_artifacts is disabled' do
+ before do
+ stub_feature_flags(ci_update_unlocked_pipeline_artifacts: false)
+ end
+
+ it 'artifact has unknown locked status' do
+ subject
+
+ artifact = Ci::PipelineArtifact.first
+
+ expect(artifact.locked).to eq('unknown')
+ end
+ end
end
context 'when pipeline has coverage report from child pipeline' do
diff --git a/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
index 5568052e346..75233248113 100644
--- a/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
@@ -51,6 +51,30 @@ RSpec.describe ::Ci::PipelineArtifacts::CreateCodeQualityMrDiffReportService do
end
end
+ context 'when ci_update_unlocked_pipeline_artifacts feature flag is enabled' do
+ it "artifact has pipeline's locked status" do
+ subject
+
+ artifact = Ci::PipelineArtifact.first
+
+ expect(artifact.locked).to eq(head_pipeline.locked)
+ end
+ end
+
+ context 'when ci_update_unlocked_pipeline_artifacts is disabled' do
+ before do
+ stub_feature_flags(ci_update_unlocked_pipeline_artifacts: false)
+ end
+
+ it 'artifact has unknown locked status' do
+ subject
+
+ artifact = Ci::PipelineArtifact.first
+
+ expect(artifact.locked).to eq('unknown')
+ end
+ end
+
it 'does not persist the same artifact twice' do
2.times { described_class.new(head_pipeline).execute }
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
index 289e004fcce..7578afa7c50 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
@@ -6,11 +6,28 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection
using RSpec::Parameterized::TableSyntax
let_it_be(:pipeline) { create(:ci_pipeline) }
- let_it_be(:build_a) { create(:ci_build, :success, name: 'build-a', stage: 'build', stage_idx: 0, pipeline: pipeline) }
- let_it_be(:build_b) { create(:ci_build, :failed, name: 'build-b', stage: 'build', stage_idx: 0, pipeline: pipeline) }
- let_it_be(:test_a) { create(:ci_build, :running, name: 'test-a', stage: 'test', stage_idx: 1, pipeline: pipeline) }
- let_it_be(:test_b) { create(:ci_build, :pending, name: 'test-b', stage: 'test', stage_idx: 1, pipeline: pipeline) }
- let_it_be(:deploy) { create(:ci_build, :created, name: 'deploy', stage: 'deploy', stage_idx: 2, pipeline: pipeline) }
+ let_it_be(:build_stage) { create(:ci_stage, name: 'build', pipeline: pipeline) }
+ let_it_be(:test_stage) { create(:ci_stage, name: 'test', pipeline: pipeline) }
+ let_it_be(:deploy_stage) { create(:ci_stage, name: 'deploy', pipeline: pipeline) }
+ let_it_be(:build_a) do
+ create(:ci_build, :success, name: 'build-a', ci_stage: build_stage, stage_idx: 0, pipeline: pipeline)
+ end
+
+ let_it_be(:build_b) do
+ create(:ci_build, :failed, name: 'build-b', ci_stage: build_stage, stage_idx: 0, pipeline: pipeline)
+ end
+
+ let_it_be(:test_a) do
+ create(:ci_build, :running, name: 'test-a', ci_stage: test_stage, stage_idx: 1, pipeline: pipeline)
+ end
+
+ let_it_be(:test_b) do
+ create(:ci_build, :pending, name: 'test-b', ci_stage: test_stage, stage_idx: 1, pipeline: pipeline)
+ end
+
+ let_it_be(:deploy) do
+ create(:ci_build, :created, name: 'deploy', ci_stage: deploy_stage, stage_idx: 2, pipeline: pipeline)
+ end
let(:collection) { described_class.new(pipeline) }
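The rewrite above swaps the legacy stage/stage_idx string attributes for persisted Ci::Stage records, so every build in the collection now belongs to an explicit stage row via the ci_stage association. A condensed sketch of the new factory wiring, assuming the standard GitLab factories used in this file:

  pipeline    = create(:ci_pipeline)
  build_stage = create(:ci_stage, name: 'build', pipeline: pipeline)

  # The build references the stage record instead of a bare stage name string.
  build = create(:ci_build, :success, name: 'build-a', ci_stage: build_stage, stage_idx: 0, pipeline: pipeline)
  build.ci_stage == build_stage # => true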
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 5bc508447c1..06bb6d39fe5 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -55,6 +55,8 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
statuses.each do |status|
if event == 'play'
status.play(user)
+ elsif event == 'retry'
+ ::Ci::RetryJobService.new(project, user).execute(status)
else
status.public_send("#{event}!")
end
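The harness change above routes the new 'retry' transition through Ci::RetryJobService rather than a bang state-machine call, because retrying clones the job instead of transitioning the existing record. Annotated restatement of the dispatch:

  statuses.each do |status|
    if event == 'play'
      status.play(user)                                         # manual jobs are played on behalf of a user
    elsif event == 'retry'
      ::Ci::RetryJobService.new(project, user).execute(status)  # retry builds a fresh clone of the job
    else
      status.public_send("#{event}!")                           # e.g. success!, drop!, cancel!
    end
  end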
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_fail_and_retry_1.yml b/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_fail_and_retry_1.yml
new file mode 100644
index 00000000000..b9b8eb2f532
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_fail_and_retry_1.yml
@@ -0,0 +1,55 @@
+config:
+ build:
+ script: exit $(($RANDOM % 2))
+
+ test:
+ script: exit 0
+ needs: [build]
+
+ deploy:
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ test: failed
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
+
+ - event: retry
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ test: pending
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ jobs:
+ build: success
+ test: pending
+ deploy: created
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_fail_and_retry_2.yml b/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_fail_and_retry_2.yml
new file mode 100644
index 00000000000..c875ebab3c9
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_fail_and_retry_2.yml
@@ -0,0 +1,63 @@
+config:
+ build:
+ script: exit $(($RANDOM % 2))
+
+ test1:
+ script: exit 0
+ needs: [build]
+
+ test2:
+ script: exit 0
+ when: manual
+
+ deploy:
+ script: exit 0
+ needs: [test1, test2]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ jobs:
+ build: pending
+ test1: created
+ test2: manual
+ deploy: skipped
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ test: failed
+ jobs:
+ build: failed
+ test1: skipped
+ test2: manual
+ deploy: skipped
+
+ - event: retry
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ test: pending
+ jobs:
+ build: pending
+ test1: created
+ test2: manual
+ deploy: skipped
+
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ jobs:
+ build: success
+ test1: pending
+ test2: manual
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_subsequent_manual_jobs.yml b/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_subsequent_manual_jobs.yml
new file mode 100644
index 00000000000..03ffda1caab
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_same_stage_with_subsequent_manual_jobs.yml
@@ -0,0 +1,65 @@
+config:
+ job1:
+ script: exit 0
+ when: manual
+
+ job2:
+ script: exit 0
+ needs: [job1]
+
+ job3:
+ script: exit 0
+ when: manual
+ needs: [job2]
+
+ job4:
+ script: exit 0
+ needs: [job3]
+
+init:
+ expect:
+ pipeline: skipped
+ stages:
+ test: skipped
+ jobs:
+ job1: manual
+ job2: skipped
+ job3: skipped
+ job4: skipped
+
+transitions:
+ - event: play
+ jobs: [job1]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ jobs:
+ job1: pending
+ job2: created
+ job3: created
+ job4: created
+
+ - event: success
+ jobs: [job1]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ jobs:
+ job1: success
+ job2: pending
+ job3: created
+ job4: created
+
+ - event: success
+ jobs: [job2]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ jobs:
+ job1: success
+ job2: success
+ job3: manual
+ job4: skipped
diff --git a/spec/services/ci/pipeline_schedule_service_spec.rb b/spec/services/ci/pipeline_schedule_service_spec.rb
index b8e4fb19f5d..2f094583f1a 100644
--- a/spec/services/ci/pipeline_schedule_service_spec.rb
+++ b/spec/services/ci/pipeline_schedule_service_spec.rb
@@ -3,14 +3,15 @@
require 'spec_helper'
RSpec.describe Ci::PipelineScheduleService do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
let(:service) { described_class.new(project, user) }
describe '#execute' do
subject { service.execute(schedule) }
- let(:schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
+ let_it_be(:schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
it 'schedules next run' do
expect(schedule).to receive(:schedule_next_run!)
@@ -34,9 +35,7 @@ RSpec.describe Ci::PipelineScheduleService do
end
context 'when the project is missing' do
- before do
- project.delete
- end
+ let(:project) { create(:project).tap(&:delete) }
it 'does not raise an exception' do
expect { subject }.not_to raise_error
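The conversion above moves the shared records to let_it_be (created once per example group via the test-prof gem used throughout GitLab specs) and models the missing-project case with a throwaway record instead of deleting the shared fixture. A small sketch of that trade-off, with illustrative names and assuming test-prof's let_it_be is available:

  let_it_be(:project) { create(:project) }                  # built once, reused across examples
  let(:doomed_project) { create(:project).tap(&:delete) }   # built per example, safe to delete

  it 'keeps the shared fixture intact for other examples' do
    expect(project).to be_persisted
    expect(Project.exists?(doomed_project.id)).to be(false)
  end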
diff --git a/spec/services/ci/pipelines/add_job_service_spec.rb b/spec/services/ci/pipelines/add_job_service_spec.rb
index 560724a1c6a..e735b2752d9 100644
--- a/spec/services/ci/pipelines/add_job_service_spec.rb
+++ b/spec/services/ci/pipelines/add_job_service_spec.rb
@@ -34,6 +34,14 @@ RSpec.describe Ci::Pipelines::AddJobService do
).and change { job.metadata.project }.to(pipeline.project)
end
+ it 'assigns partition_id to job and metadata' do
+ pipeline.partition_id = 123
+
+ expect { execute }
+ .to change(job, :partition_id).to(pipeline.partition_id)
+ .and change { job.metadata.partition_id }.to(pipeline.partition_id)
+ end
+
it 'returns a service response with the job as payload' do
expect(execute).to be_success
expect(execute.payload[:job]).to eq(job)
diff --git a/spec/services/ci/pipelines/hook_service_spec.rb b/spec/services/ci/pipelines/hook_service_spec.rb
index 0e1ef6afd0d..8d138a3d957 100644
--- a/spec/services/ci/pipelines/hook_service_spec.rb
+++ b/spec/services/ci/pipelines/hook_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Ci::Pipelines::HookService do
describe '#execute_hooks' do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:project) { create(:project, :repository, namespace: namespace) }
- let_it_be(:pipeline) { create(:ci_empty_pipeline, :created, project: project) }
+ let_it_be(:pipeline, reload: true) { create(:ci_empty_pipeline, :created, project: project) }
let(:hook_enabled) { true }
let!(:hook) { create(:project_hook, project: project, pipeline_events: hook_enabled) }
diff --git a/spec/services/ci/play_manual_stage_service_spec.rb b/spec/services/ci/play_manual_stage_service_spec.rb
index b3ae92aa787..24f0a21f3dd 100644
--- a/spec/services/ci/play_manual_stage_service_spec.rb
+++ b/spec/services/ci/play_manual_stage_service_spec.rb
@@ -75,7 +75,6 @@ RSpec.describe Ci::PlayManualStageService, '#execute' do
options.merge!({
when: 'manual',
pipeline: pipeline,
- stage: stage.name,
stage_id: stage.id,
user: pipeline.user
})
@@ -87,7 +86,6 @@ RSpec.describe Ci::PlayManualStageService, '#execute' do
options.merge!({
when: 'manual',
pipeline: pipeline,
- stage: stage.name,
stage_id: stage.id,
user: pipeline.user,
downstream: downstream_project
diff --git a/spec/services/ci/process_sync_events_service_spec.rb b/spec/services/ci/process_sync_events_service_spec.rb
index 241ac4995ff..7ab7911e578 100644
--- a/spec/services/ci/process_sync_events_service_spec.rb
+++ b/spec/services/ci/process_sync_events_service_spec.rb
@@ -120,13 +120,15 @@ RSpec.describe Ci::ProcessSyncEventsService do
before do
Namespaces::SyncEvent.delete_all
+ # Creates a sync event for group, and the ProjectNamespace of project1 & project2: 3 in total
group.update!(parent: parent_group_2)
+ # Creates a sync event for parent_group2 and all the children: 4 in total
parent_group_2.update!(parent: parent_group_1)
end
shared_examples 'event consuming' do
it 'consumes events' do
- expect { execute }.to change(Namespaces::SyncEvent, :count).from(2).to(0)
+ expect { execute }.to change(Namespaces::SyncEvent, :count).from(7).to(0)
expect(group.reload.ci_namespace_mirror).to have_attributes(
traversal_ids: [parent_group_1.id, parent_group_2.id, group.id]
@@ -134,6 +136,12 @@ RSpec.describe Ci::ProcessSyncEventsService do
expect(parent_group_2.reload.ci_namespace_mirror).to have_attributes(
traversal_ids: [parent_group_1.id, parent_group_2.id]
)
+ expect(project1.reload.project_namespace).to have_attributes(
+ traversal_ids: [parent_group_1.id, parent_group_2.id, group.id, project1.project_namespace.id]
+ )
+ expect(project2.reload.project_namespace).to have_attributes(
+ traversal_ids: [parent_group_1.id, parent_group_2.id, group.id, project2.project_namespace.id]
+ )
end
end
@@ -157,7 +165,7 @@ RSpec.describe Ci::ProcessSyncEventsService do
end
it 'does not enqueue Namespaces::ProcessSyncEventsWorker if no left' do
- stub_const("#{described_class}::BATCH_SIZE", 2)
+ stub_const("#{described_class}::BATCH_SIZE", 7)
expect(Namespaces::ProcessSyncEventsWorker).not_to receive(:perform_async)
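The updated count follows from the comments added in the before block: re-parenting group records one sync event for the group plus one for each of its two project namespaces (3), and re-parenting parent_group_2 records one event for it plus one for each of its three child namespaces (4), so 7 events are consumed in total, and BATCH_SIZE is raised to 7 so they drain in a single pass. Rough accounting:

  events_from_group_move        = 1 + 2  # group itself + project1/project2 namespaces
  events_from_parent_group_move = 1 + 3  # parent_group_2 + its three descendant namespaces
  events_from_group_move + events_from_parent_group_move # => 7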
diff --git a/spec/services/ci/queue/pending_builds_strategy_spec.rb b/spec/services/ci/queue/pending_builds_strategy_spec.rb
new file mode 100644
index 00000000000..6f22c256c17
--- /dev/null
+++ b/spec/services/ci/queue/pending_builds_strategy_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Queue::PendingBuildsStrategy do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let!(:build_1) { create(:ci_build, :created, pipeline: pipeline) }
+ let!(:build_2) { create(:ci_build, :created, pipeline: pipeline) }
+ let!(:build_3) { create(:ci_build, :created, pipeline: pipeline) }
+ let!(:pending_build_1) { create(:ci_pending_build, build: build_2, project: project) }
+ let!(:pending_build_2) { create(:ci_pending_build, build: build_3, project: project) }
+ let!(:pending_build_3) { create(:ci_pending_build, build: build_1, project: project) }
+
+ describe 'builds_for_group_runner' do
+ it 'returns builds ordered by build ID' do
+ strategy = described_class.new(group_runner)
+ expect(strategy.builds_for_group_runner).to eq([pending_build_3, pending_build_1, pending_build_2])
+ end
+ end
+end
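The new spec pins the queue order to the underlying build ID rather than the Ci::PendingBuild insertion order: pending_build_3 wraps build_1 (the lowest build ID) even though it was enqueued last, so it is returned first. Restated, assuming the records above:

  strategy = described_class.new(group_runner)

  # Ordering follows ascending build_id, not pending-build creation order.
  strategy.builds_for_group_runner.map(&:build_id)
  # => [build_1.id, build_2.id, build_3.id]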
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index cabd60a22d1..e2e760b9812 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -571,10 +571,6 @@ module Ci
context 'when artifacts of depended job has been erased' do
let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
- before do
- pre_stage_job.erase
- end
-
it_behaves_like 'not pick'
end
@@ -612,10 +608,6 @@ module Ci
context 'when artifacts of depended job has been erased' do
let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
- before do
- pre_stage_job.erase
- end
-
it { expect(subject).to eq(pending_job) }
end
end
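The removed before blocks are redundant: the factory already sets erased_at on the dependency, which is enough for the service to treat its artifacts as erased, so the explicit erase call can go. A hedged sketch, assuming Ci::Build#erased? is derived from erased_at:

  pre_stage_job = create(:ci_build, :success, name: 'test', stage_idx: 0, erased_at: 1.minute.ago)

  pre_stage_job.erased? # => true, without invoking an erase call in the spec setup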
diff --git a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
index 194203a422c..3d1abe290bc 100644
--- a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
+++ b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService do
+ include ConcurrentHelpers
+
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -134,6 +136,19 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService do
end
end
end
+
+ context 'when parallel services are running' do
+ it 'can run the same command in parallel' do
+ parallel_num = 4
+
+ blocks = Array.new(parallel_num).map do
+ -> { subject }
+ end
+
+ run_parallel(blocks)
+ expect(build.reload).to be_pending
+ end
+ end
end
context 'when there are no available resources' do
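The new context exercises resource assignment under concurrency via the ConcurrentHelpers support module included at the top of the hunk: run_parallel takes an array of lambdas, runs them concurrently, and the example checks that racing executions still leave the build pending exactly as a single run would. Condensed form of the hunk:

  # `subject` is the spec's existing service call; each lambda runs it concurrently.
  blocks = Array.new(4) { -> { subject } }

  run_parallel(blocks)
  expect(build.reload).to be_pending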
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index b14e4187c7a..69f19c5acf2 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -7,14 +7,13 @@ RSpec.describe Ci::RetryJobService do
let_it_be(:developer) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) do
- create(:ci_pipeline, project: project,
- sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
+ create(:ci_pipeline, project: project, sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
end
let_it_be(:stage) do
create(:ci_stage, project: project,
- pipeline: pipeline,
- name: 'test')
+ pipeline: pipeline,
+ name: 'test')
end
let(:job_variables_attributes) { [{ key: 'MANUAL_VAR', value: 'manual test var' }] }
@@ -31,9 +30,8 @@ RSpec.describe Ci::RetryJobService do
let_it_be(:downstream_project) { create(:project, :repository) }
let_it_be_with_refind(:job) do
- create(
- :ci_bridge, :success, pipeline: pipeline, downstream: downstream_project,
- description: 'a trigger job', stage_id: stage.id
+ create(:ci_bridge, :success,
+ pipeline: pipeline, downstream: downstream_project, description: 'a trigger job', ci_stage: stage
)
end
@@ -45,13 +43,13 @@ RSpec.describe Ci::RetryJobService do
end
shared_context 'retryable build' do
- let_it_be_with_refind(:job) { create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id) }
+ let_it_be_with_refind(:job) { create(:ci_build, :success, pipeline: pipeline, ci_stage: stage) }
let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
let_it_be(:job_to_clone) do
create(:ci_build, :failed, :picked, :expired, :erased, :queued, :coverage, :tags,
:allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
- description: 'my-job', stage: 'test', stage_id: stage.id,
+ description: 'my-job', ci_stage: stage,
pipeline: pipeline, auto_canceled_by: another_pipeline,
scheduled_at: 10.seconds.since)
end
@@ -66,8 +64,7 @@ RSpec.describe Ci::RetryJobService do
let(:job) { job_to_clone }
before_all do
- # Make sure that job has both `stage_id` and `stage`
- job_to_clone.update!(stage: 'test', stage_id: stage.id)
+ job_to_clone.update!(ci_stage: stage)
create(:ci_build_need, build: job_to_clone)
end
@@ -126,16 +123,16 @@ RSpec.describe Ci::RetryJobService do
end
context 'when there are subsequent processables that are skipped' do
+ let_it_be(:stage) { create(:ci_stage, pipeline: pipeline, name: 'deploy') }
+
let!(:subsequent_build) do
create(:ci_build, :skipped, stage_idx: 2,
pipeline: pipeline,
- stage: 'deploy')
+ ci_stage: stage)
end
let!(:subsequent_bridge) do
- create(:ci_bridge, :skipped, stage_idx: 2,
- pipeline: pipeline,
- stage: 'deploy')
+ create(:ci_bridge, :skipped, stage_idx: 2, pipeline: pipeline, ci_stage: stage)
end
it 'resumes pipeline processing in the subsequent stage' do
@@ -156,8 +153,8 @@ RSpec.describe Ci::RetryJobService do
context 'when the pipeline has other jobs' do
let!(:stage2) { create(:ci_stage, project: project, pipeline: pipeline, name: 'deploy') }
- let!(:build2) { create(:ci_build, pipeline: pipeline, stage_id: stage.id ) }
- let!(:deploy) { create(:ci_build, pipeline: pipeline, stage_id: stage2.id) }
+ let!(:build2) { create(:ci_build, pipeline: pipeline, ci_stage: stage ) }
+ let!(:deploy) { create(:ci_build, pipeline: pipeline, ci_stage: stage2) }
let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) }
context 'when job has a nil scheduling_type' do
@@ -227,7 +224,7 @@ RSpec.describe Ci::RetryJobService do
context 'when a build with a deployment is retried' do
let!(:job) do
create(:ci_build, :with_deployment, :deploy_to_production,
- pipeline: pipeline, stage_id: stage.id, project: project)
+ pipeline: pipeline, ci_stage: stage, project: project)
end
it 'creates a new deployment' do
@@ -245,10 +242,13 @@ RSpec.describe Ci::RetryJobService do
let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
let!(:job) do
- create(:ci_build, :with_deployment, environment: environment_name,
- options: { environment: { name: environment_name } },
- pipeline: pipeline, stage_id: stage.id, project: project,
- user: other_developer)
+ create(:ci_build, :with_deployment,
+ environment: environment_name,
+ options: { environment: { name: environment_name } },
+ pipeline: pipeline,
+ ci_stage: stage,
+ project: project,
+ user: other_developer)
end
it 'creates a new deployment' do
@@ -307,22 +307,24 @@ RSpec.describe Ci::RetryJobService do
it_behaves_like 'retries the job'
context 'when there are subsequent jobs that are skipped' do
+ let_it_be(:stage) { create(:ci_stage, pipeline: pipeline, name: 'deploy') }
+
let!(:subsequent_build) do
create(:ci_build, :skipped, stage_idx: 2,
pipeline: pipeline,
- stage: 'deploy')
+ stage_id: stage.id)
end
let!(:subsequent_bridge) do
create(:ci_bridge, :skipped, stage_idx: 2,
pipeline: pipeline,
- stage: 'deploy')
+ stage_id: stage.id)
end
it 'does not cause an N+1 when updating the job ownership' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) }.count
- create_list(:ci_build, 2, :skipped, stage_idx: job.stage_idx + 1, pipeline: pipeline, stage: 'deploy')
+ create_list(:ci_build, 2, :skipped, stage_idx: job.stage_idx + 1, pipeline: pipeline, stage_id: stage.id)
expect { service.execute(job) }.not_to exceed_all_query_limit(control_count)
end
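The last example in this hunk guards against N+1 queries with ActiveRecord::QueryRecorder: it records a baseline query count for one retry, grows the data set, and asserts a second retry stays within that count. Annotated:

  control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) }.count

  # Adding more skipped builds in the next stage must not add more queries to the retry.
  create_list(:ci_build, 2, :skipped, stage_idx: job.stage_idx + 1, pipeline: pipeline, stage_id: stage.id)

  expect { service.execute(job) }.not_to exceed_all_query_limit(control_count)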
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 24272801480..0a1e767539d 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -9,6 +9,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
let(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:service) { described_class.new(project, user) }
+ let(:build_stage) { create(:ci_stage, name: 'build', position: 0, pipeline: pipeline) }
+ let(:test_stage) { create(:ci_stage, name: 'test', position: 1, pipeline: pipeline) }
+ let(:deploy_stage) { create(:ci_stage, name: 'deploy', position: 2, pipeline: pipeline) }
context 'when user has full ability to modify pipeline' do
before do
@@ -20,8 +23,8 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there are already retried jobs present' do
before do
- create_build('rspec', :canceled, 0, retried: true)
- create_build('rspec', :failed, 0)
+ create_build('rspec', :canceled, build_stage, retried: true)
+ create_build('rspec', :failed, build_stage)
end
it 'does not retry jobs that has already been retried' do
@@ -33,9 +36,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there are failed builds in the last stage' do
before do
- create_build('rspec 1', :success, 0)
- create_build('rspec 2', :failed, 1)
- create_build('rspec 3', :canceled, 1)
+ create_build('rspec 1', :success, build_stage)
+ create_build('rspec 2', :failed, test_stage)
+ create_build('rspec 3', :canceled, test_stage)
end
it 'enqueues all builds in the last stage' do
@@ -49,10 +52,10 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there are failed or canceled builds in the first stage' do
before do
- create_build('rspec 1', :failed, 0)
- create_build('rspec 2', :canceled, 0)
- create_build('rspec 3', :canceled, 1)
- create_build('spinach 1', :canceled, 2)
+ create_build('rspec 1', :failed, build_stage)
+ create_build('rspec 2', :canceled, build_stage)
+ create_build('rspec 3', :canceled, test_stage)
+ create_build('spinach 1', :canceled, deploy_stage)
end
it 'retries builds failed builds and marks subsequent for processing' do
@@ -80,10 +83,10 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there is failed build present which was run on failure' do
before do
- create_build('rspec 1', :failed, 0)
- create_build('rspec 2', :canceled, 0)
- create_build('rspec 3', :canceled, 1)
- create_build('report 1', :failed, 2)
+ create_build('rspec 1', :failed, build_stage)
+ create_build('rspec 2', :canceled, build_stage)
+ create_build('rspec 3', :canceled, test_stage)
+ create_build('report 1', :failed, deploy_stage)
end
it 'retries builds only in the first stage' do
@@ -105,9 +108,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there is a failed test in a DAG' do
before do
- create_build('build', :success, 0)
- create_build('build2', :success, 0)
- test_build = create_build('test', :failed, 1, scheduling_type: :dag)
+ create_build('build', :success, build_stage)
+ create_build('build2', :success, build_stage)
+ test_build = create_build('test', :failed, test_stage, scheduling_type: :dag)
create(:ci_build_need, build: test_build, name: 'build')
create(:ci_build_need, build: test_build, name: 'build2')
end
@@ -123,7 +126,7 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there is a failed DAG test without needs' do
before do
- create_build('deploy', :failed, 2, scheduling_type: :dag)
+ create_build('deploy', :failed, deploy_stage, scheduling_type: :dag)
end
it 'retries the test' do
@@ -139,10 +142,10 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when the last stage was skipped' do
before do
- create_build('build 1', :success, 0)
- create_build('test 2', :failed, 1)
- create_build('report 3', :skipped, 2)
- create_build('report 4', :skipped, 2)
+ create_build('build 1', :success, build_stage)
+ create_build('test 2', :failed, test_stage)
+ create_build('report 3', :skipped, deploy_stage)
+ create_build('report 4', :skipped, deploy_stage)
end
it 'retries builds only in the first stage' do
@@ -160,9 +163,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there are optional manual actions only' do
context 'when there is a canceled manual action in first stage' do
before do
- create_build('rspec 1', :failed, 0)
- create_build('staging', :canceled, 0, when: :manual, allow_failure: true)
- create_build('rspec 2', :canceled, 1)
+ create_build('rspec 1', :failed, build_stage)
+ create_build('staging', :canceled, build_stage, when: :manual, allow_failure: true)
+ create_build('rspec 2', :canceled, test_stage)
end
it 'retries failed builds and marks subsequent for processing' do
@@ -189,9 +192,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when pipeline has blocking manual actions defined' do
context 'when pipeline retry should enqueue builds' do
before do
- create_build('test', :failed, 0)
- create_build('deploy', :canceled, 0, when: :manual, allow_failure: false)
- create_build('verify', :canceled, 1)
+ create_build('test', :failed, build_stage)
+ create_build('deploy', :canceled, build_stage, when: :manual, allow_failure: false)
+ create_build('verify', :canceled, test_stage)
end
it 'retries failed builds' do
@@ -206,10 +209,10 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when pipeline retry should block pipeline immediately' do
before do
- create_build('test', :success, 0)
- create_build('deploy:1', :success, 1, when: :manual, allow_failure: false)
- create_build('deploy:2', :failed, 1, when: :manual, allow_failure: false)
- create_build('verify', :canceled, 2)
+ create_build('test', :success, build_stage)
+ create_build('deploy:1', :success, test_stage, when: :manual, allow_failure: false)
+ create_build('deploy:2', :failed, test_stage, when: :manual, allow_failure: false)
+ create_build('verify', :canceled, deploy_stage)
end
it 'reprocesses blocking manual action and blocks pipeline' do
@@ -225,9 +228,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there is a skipped manual action in last stage' do
before do
- create_build('rspec 1', :canceled, 0)
- create_build('rspec 2', :skipped, 0, when: :manual, allow_failure: true)
- create_build('staging', :skipped, 1, when: :manual, allow_failure: true)
+ create_build('rspec 1', :canceled, build_stage)
+ create_build('rspec 2', :skipped, build_stage, when: :manual, allow_failure: true)
+ create_build('staging', :skipped, test_stage, when: :manual, allow_failure: true)
end
it 'retries canceled job and reprocesses manual actions' do
@@ -242,8 +245,8 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there is a created manual action in the last stage' do
before do
- create_build('rspec 1', :canceled, 0)
- create_build('staging', :created, 1, when: :manual, allow_failure: true)
+ create_build('rspec 1', :canceled, build_stage)
+ create_build('staging', :created, test_stage, when: :manual, allow_failure: true)
end
it 'retries canceled job and does not update the manual action' do
@@ -257,8 +260,8 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there is a created manual action in the first stage' do
before do
- create_build('rspec 1', :canceled, 0)
- create_build('staging', :created, 0, when: :manual, allow_failure: true)
+ create_build('rspec 1', :canceled, build_stage)
+ create_build('staging', :created, build_stage, when: :manual, allow_failure: true)
end
it 'retries canceled job and processes the manual action' do
@@ -285,9 +288,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
end
context 'when pipeline has processables with nil scheduling_type' do
- let!(:build1) { create_build('build1', :success, 0) }
- let!(:build2) { create_build('build2', :failed, 0) }
- let!(:build3) { create_build('build3', :failed, 1) }
+ let!(:build1) { create_build('build1', :success, build_stage) }
+ let!(:build2) { create_build('build2', :failed, build_stage) }
+ let!(:build3) { create_build('build3', :failed, test_stage) }
let!(:build3_needs_build1) { create(:ci_build_need, build: build3, name: build1.name) }
before do
@@ -319,10 +322,10 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there are skipped jobs in later stages' do
before do
- create_build('build 1', :success, 0)
- create_build('test 2', :failed, 1)
- create_build('report 3', :skipped, 2)
- create_bridge('deploy 4', :skipped, 2)
+ create_build('build 1', :success, build_stage)
+ create_build('test 2', :failed, test_stage)
+ create_build('report 3', :skipped, deploy_stage)
+ create_bridge('deploy 4', :skipped, deploy_stage)
end
it 'retries failed jobs and processes skipped jobs' do
@@ -374,9 +377,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there is a failed manual action present' do
before do
- create_build('test', :failed, 0)
- create_build('deploy', :failed, 0, when: :manual)
- create_build('verify', :canceled, 1)
+ create_build('test', :failed, build_stage)
+ create_build('deploy', :failed, build_stage, when: :manual)
+ create_build('verify', :canceled, test_stage)
end
it 'returns an error' do
@@ -390,9 +393,9 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
context 'when there is a failed manual action in later stage' do
before do
- create_build('test', :failed, 0)
- create_build('deploy', :failed, 1, when: :manual)
- create_build('verify', :canceled, 2)
+ create_build('test', :failed, build_stage)
+ create_build('deploy', :failed, test_stage, when: :manual)
+ create_build('verify', :canceled, deploy_stage)
end
it 'returns an error' do
@@ -418,7 +421,7 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
target_project: project,
source_branch: 'fixes',
allow_collaboration: true)
- create_build('rspec 1', :failed, 1)
+ create_build('rspec 1', :failed, test_stage)
end
it 'allows to retry failed pipeline' do
@@ -441,19 +444,19 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
statuses.latest.find_by(name: name)
end
- def create_build(name, status, stage_num, **opts)
- create_processable(:ci_build, name, status, stage_num, **opts)
+ def create_build(name, status, stage, **opts)
+ create_processable(:ci_build, name, status, stage, **opts)
end
- def create_bridge(name, status, stage_num, **opts)
- create_processable(:ci_bridge, name, status, stage_num, **opts)
+ def create_bridge(name, status, stage, **opts)
+ create_processable(:ci_bridge, name, status, stage, **opts)
end
- def create_processable(type, name, status, stage_num, **opts)
+ def create_processable(type, name, status, stage, **opts)
create(type, name: name,
status: status,
- stage: "stage_#{stage_num}",
- stage_idx: stage_num,
+ ci_stage: stage,
+ stage_idx: stage.position,
pipeline: pipeline, **opts) do |_job|
::Ci::ProcessPipelineService.new(pipeline).execute
end
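The helper refactor at the bottom of this file is what drives all the call-site changes above: create_build and create_bridge now take a Ci::Stage record instead of a stage number, and create_processable derives stage_idx from the record's position while wiring the ci_stage association. Usage after the change, assuming the stages defined near the top of the spec:

  create_build('rspec 1', :failed, build_stage)   # stage_idx comes from build_stage.position
  create_build('staging', :canceled, build_stage, when: :manual, allow_failure: true)
  create_bridge('deploy 4', :skipped, deploy_stage)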
diff --git a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
new file mode 100644
index 00000000000..0d2e237c87b
--- /dev/null
+++ b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute' do
+ subject(:execute) { described_class.new(runner: runner, current_user: user, project_ids: project_ids).execute }
+
+ let_it_be(:owner_project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:original_projects) { [owner_project, project2] }
+
+ let(:runner) { create(:ci_runner, :project, projects: original_projects) }
+
+ context 'without user' do
+ let(:user) { nil }
+ let(:project_ids) { [project2.id] }
+
+ it 'does not call assign_to on runner and returns error response', :aggregate_failures do
+ expect(runner).not_to receive(:assign_to)
+
+ expect(execute).to be_error
+ expect(execute.message).to eq('user not allowed to assign runner')
+ end
+ end
+
+ context 'with unauthorized user' do
+ let(:user) { build(:user) }
+ let(:project_ids) { [project2.id] }
+
+ it 'does not call assign_to on runner and returns error message' do
+ expect(runner).not_to receive(:assign_to)
+
+ expect(execute).to be_error
+ expect(execute.message).to eq('user not allowed to assign runner')
+ end
+ end
+
+ context 'with admin user', :enable_admin_mode do
+ let_it_be(:user) { create(:user, :admin) }
+
+ let(:project3) { create(:project) }
+ let(:project4) { create(:project) }
+
+ context 'with successful requests' do
+ context 'when disassociating a project' do
+ let(:project_ids) { [project3.id, project4.id] }
+
+ it 'reassigns associated projects and returns success response' do
+ expect(execute).to be_success
+ expect(runner.reload.projects.ids).to eq([owner_project.id] + project_ids)
+ end
+ end
+
+ context 'when disassociating no projects' do
+ let(:project_ids) { [project2.id, project3.id] }
+
+ it 'reassigns associated projects and returns success response' do
+ expect(execute).to be_success
+ expect(runner.reload.projects.ids).to eq([owner_project.id] + project_ids)
+ end
+ end
+ end
+
+ context 'with failing assign_to requests' do
+ let(:project_ids) { [project3.id, project4.id] }
+
+ it 'returns error response and rolls back transaction' do
+ expect(runner).to receive(:assign_to).with(project4, user).once.and_return(false)
+
+ expect(execute).to be_error
+ expect(runner.reload.projects).to eq(original_projects)
+ end
+ end
+
+ context 'with failing destroy calls' do
+ let(:project_ids) { [project3.id, project4.id] }
+
+ it 'returns error response and rolls back transaction' do
+ allow_next_found_instance_of(Ci::RunnerProject) do |runner_project|
+ allow(runner_project).to receive(:destroy).and_return(false)
+ end
+
+ expect(execute).to be_error
+ expect(runner.reload.projects).to eq(original_projects)
+ end
+ end
+ end
+end
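This new spec documents the service's interface end to end: it is constructed with keyword arguments (runner:, current_user:, project_ids:), returns a response object answering be_success/be_error with a message, always keeps the owner project, and rolls the whole reassignment back if any assign_to or destroy call fails. A hedged usage sketch based only on what the spec shows (admin stands in for a user allowed to assign the runner):

  response = ::Ci::Runners::SetRunnerAssociatedProjectsService
    .new(runner: runner, current_user: admin, project_ids: [project3.id, project4.id])
    .execute

  response.success?           # => true when the user may assign the runner and all writes succeed
  runner.reload.projects.ids  # => [owner_project.id, project3.id, project4.id]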
diff --git a/spec/services/ci/runners/update_runner_service_spec.rb b/spec/services/ci/runners/update_runner_service_spec.rb
index e008fde9982..1f953ac4cbb 100644
--- a/spec/services/ci/runners/update_runner_service_spec.rb
+++ b/spec/services/ci/runners/update_runner_service_spec.rb
@@ -2,69 +2,65 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::UpdateRunnerService do
+RSpec.describe Ci::Runners::UpdateRunnerService, '#execute' do
+ subject(:execute) { described_class.new(runner).execute(params) }
+
let(:runner) { create(:ci_runner) }
- describe '#update' do
- before do
- allow(runner).to receive(:tick_runner_queue)
- end
+ before do
+ allow(runner).to receive(:tick_runner_queue)
+ end
- context 'with description params' do
- let(:params) { { description: 'new runner' } }
+ context 'with description params' do
+ let(:params) { { description: 'new runner' } }
- it 'updates the runner and ticking the queue' do
- expect(update).to be_truthy
+ it 'updates the runner and ticking the queue' do
+ expect(execute).to be_success
- runner.reload
+ runner.reload
- expect(runner).to have_received(:tick_runner_queue)
- expect(runner.description).to eq('new runner')
- end
+ expect(runner).to have_received(:tick_runner_queue)
+ expect(runner.description).to eq('new runner')
end
+ end
- context 'with paused param' do
- let(:params) { { paused: true } }
+ context 'with paused param' do
+ let(:params) { { paused: true } }
- it 'updates the runner and ticking the queue' do
- expect(runner.active).to be_truthy
- expect(update).to be_truthy
+ it 'updates the runner and ticking the queue' do
+ expect(runner.active).to be_truthy
+ expect(execute).to be_success
- runner.reload
+ runner.reload
- expect(runner).to have_received(:tick_runner_queue)
- expect(runner.active).to be_falsey
- end
+ expect(runner).to have_received(:tick_runner_queue)
+ expect(runner.active).to be_falsey
end
+ end
- context 'with cost factor params' do
- let(:params) { { public_projects_minutes_cost_factor: 1.1, private_projects_minutes_cost_factor: 2.2 } }
+ context 'with cost factor params' do
+ let(:params) { { public_projects_minutes_cost_factor: 1.1, private_projects_minutes_cost_factor: 2.2 } }
- it 'updates the runner cost factors' do
- expect(update).to be_truthy
+ it 'updates the runner cost factors' do
+ expect(execute).to be_success
- runner.reload
+ runner.reload
- expect(runner.public_projects_minutes_cost_factor).to eq(1.1)
- expect(runner.private_projects_minutes_cost_factor).to eq(2.2)
- end
+ expect(runner.public_projects_minutes_cost_factor).to eq(1.1)
+ expect(runner.private_projects_minutes_cost_factor).to eq(2.2)
end
+ end
- context 'when params are not valid' do
- let(:params) { { run_untagged: false } }
-
- it 'does not update and give false because it is not valid' do
- expect(update).to be_falsey
+ context 'when params are not valid' do
+ let(:params) { { run_untagged: false } }
- runner.reload
+ it 'does not update and returns error because it is not valid' do
+ expect(execute).to be_error
- expect(runner).not_to have_received(:tick_runner_queue)
- expect(runner.run_untagged).to be_truthy
- end
- end
+ runner.reload
- def update
- described_class.new(runner).update(params) # rubocop: disable Rails/SaveBang
+ expect(runner).not_to have_received(:tick_runner_queue)
+ expect(runner.run_untagged).to be_truthy
end
end
end
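The rewrite reflects an API change in the service itself: the old #update method returned a bare boolean, while #execute now returns a response object that answers be_success/be_error, so the shared before/after scaffolding is flattened accordingly. Sketch of the new call shape, using only what the spec asserts:

  response = Ci::Runners::UpdateRunnerService.new(runner).execute(description: 'new runner')

  response.success?          # => true; the old API returned a plain truthy value instead
  runner.reload.description  # => 'new runner'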
diff --git a/spec/services/ci/unlock_artifacts_service_spec.rb b/spec/services/ci/unlock_artifacts_service_spec.rb
index 94d39fc9f14..776019f03f8 100644
--- a/spec/services/ci/unlock_artifacts_service_spec.rb
+++ b/spec/services/ci/unlock_artifacts_service_spec.rb
@@ -5,11 +5,15 @@ require 'spec_helper'
RSpec.describe Ci::UnlockArtifactsService do
using RSpec::Parameterized::TableSyntax
- where(:tag, :ci_update_unlocked_job_artifacts) do
- false | false
- false | true
- true | false
- true | true
+ where(:tag, :ci_update_unlocked_job_artifacts, :ci_update_unlocked_pipeline_artifacts) do
+ false | false | false
+ false | true | false
+ true | false | false
+ true | true | false
+ false | false | true
+ false | true | true
+ true | false | true
+ true | true | true
end
with_them do
@@ -22,6 +26,7 @@ RSpec.describe Ci::UnlockArtifactsService do
let!(:old_unlocked_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :unlocked) }
let!(:older_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
let!(:older_ambiguous_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: !tag, project: project, locked: :artifacts_locked) }
+ let!(:code_coverage_pipeline) { create(:ci_pipeline, :with_coverage_report_artifact, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
let!(:pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
let!(:child_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
let!(:newer_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
@@ -30,7 +35,8 @@ RSpec.describe Ci::UnlockArtifactsService do
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
- stub_feature_flags(ci_update_unlocked_job_artifacts: ci_update_unlocked_job_artifacts)
+ stub_feature_flags(ci_update_unlocked_job_artifacts: ci_update_unlocked_job_artifacts,
+ ci_update_unlocked_pipeline_artifacts: ci_update_unlocked_pipeline_artifacts)
end
describe '#execute' do
@@ -72,6 +78,14 @@ RSpec.describe Ci::UnlockArtifactsService do
expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(2)
end
+
+ it 'unlocks pipeline artifact records' do
+ if ci_update_unlocked_job_artifacts && ci_update_unlocked_pipeline_artifacts
+ expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
+ else
+ expect { execute }.not_to change { ::Ci::PipelineArtifact.artifact_unlocked.count }
+ end
+ end
end
context 'when running on just the ref' do
@@ -106,6 +120,14 @@ RSpec.describe Ci::UnlockArtifactsService do
expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(8)
end
+
+ it 'unlocks pipeline artifact records' do
+ if ci_update_unlocked_job_artifacts && ci_update_unlocked_pipeline_artifacts
+ expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
+ else
+ expect { execute }.not_to change { ::Ci::PipelineArtifact.artifact_unlocked.count }
+ end
+ end
end
end
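The where table grows from two boolean flags to three, so the cartesian product doubles from 4 to 8 rows (2^3), and each generated context now also stubs ci_update_unlocked_pipeline_artifacts; pipeline artifacts are only expected to unlock when both the job-artifact and pipeline-artifact flags are on. The table-syntax shape, condensed:

  where(:tag, :ci_update_unlocked_job_artifacts, :ci_update_unlocked_pipeline_artifacts) do
    false | false | false
    # ... all 2^3 = 8 combinations ...
    true  | true  | true
  end

  with_them do
    # each row becomes its own example group, with the column values available as methods
  end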
diff --git a/spec/services/container_expiration_policies/cleanup_service_spec.rb b/spec/services/container_expiration_policies/cleanup_service_spec.rb
index c265ce74d14..6e1be7271e1 100644
--- a/spec/services/container_expiration_policies/cleanup_service_spec.rb
+++ b/spec/services/container_expiration_policies/cleanup_service_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
it 'completely clean up the repository' do
expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).with(repository, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
+ .to receive(:new).with(container_repository: repository, params: cleanup_tags_service_params).and_return(cleanup_tags_service)
expect(cleanup_tags_service).to receive(:execute).and_return(status: :success, deleted_size: 1)
response = subject
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
index 62adc834733..a653cd2b48b 100644
--- a/spec/services/deployments/link_merge_requests_service_spec.rb
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -159,10 +159,10 @@ RSpec.describe Deployments::LinkMergeRequestsService do
it "doesn't link the same merge_request twice" do
create(:merge_request, :merged, merge_commit_sha: mr1_merge_commit_sha,
- source_project: project)
+ source_project: project)
picked_mr = create(:merge_request, :merged, merge_commit_sha: '123abc',
- source_project: project)
+ source_project: project)
# the first MR includes c1c67abba which is a cherry-pick of the fake picked_mr merge request
create(:track_mr_picking_note, noteable: picked_mr, project: project, commit_id: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 4485ce585bb..c952bcddd9a 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -159,14 +159,37 @@ RSpec.describe Deployments::UpdateEnvironmentService do
{ name: 'production', auto_stop_in: '1 day' }
end
+ before do
+ environment.update_attribute(:auto_stop_at, nil)
+ end
+
it 'renews auto stop at' do
freeze_time do
- environment.update!(auto_stop_at: nil)
-
expect { subject.execute }
.to change { environment.reset.auto_stop_at&.round }.from(nil).to(1.day.since.round)
end
end
+
+ context 'when value is a variable' do
+ let(:options) { { name: 'production', auto_stop_in: '$TTL' } }
+
+ let(:yaml_variables) do
+ [
+ { key: "TTL", value: '2 days', public: true }
+ ]
+ end
+
+ before do
+ job.update_attribute(:yaml_variables, yaml_variables)
+ end
+
+ it 'renews auto stop at with expanded variable value' do
+ freeze_time do
+ expect { subject.execute }
+ .to change { environment.reset.auto_stop_at&.round }.from(nil).to(2.days.since.round)
+ end
+ end
+ end
end
context 'when deployment tier is specified' do
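The new context shows that auto_stop_in now goes through CI variable expansion: with auto_stop_in set to '$TTL' and a job-level TTL variable of '2 days', the environment's auto_stop_at lands two days out instead of treating the literal string as a duration. Reduced to the essential assertion, reusing the spec's setup:

  job.update_attribute(:yaml_variables, [{ key: 'TTL', value: '2 days', public: true }])

  freeze_time do
    expect { subject.execute }
      .to change { environment.reset.auto_stop_at&.round }.from(nil).to(2.days.since.round)
  end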
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index a0e049ea42a..48e53a92758 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -126,7 +126,8 @@ RSpec.describe DesignManagement::DeleteDesignsService do
end
it 'updates UsageData for removed designs' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_removed_action).with(author: user)
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_removed_action)
+ .with(author: user, project: project)
run_service
end
@@ -171,6 +172,11 @@ RSpec.describe DesignManagement::DeleteDesignsService do
run_service
end
+
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
+ subject(:service_action) { run_service }
+ end
end
context 'more than one design is passed' do
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index b76c91fbac9..c69df5f2eb9 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -106,7 +106,8 @@ RSpec.describe DesignManagement::SaveDesignsService do
it 'creates a commit, an event in the activity stream and updates the creation count', :aggregate_failures do
counter = Gitlab::UsageDataCounters::DesignsCounter
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_added_action).with(author: user)
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_added_action)
+ .with(author: user, project: project)
expect { run_service }
.to change { Event.count }.by(1)
@@ -119,6 +120,11 @@ RSpec.describe DesignManagement::SaveDesignsService do
)
end
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
+ subject(:service_action) { run_service }
+ end
+
it 'can run the same command in parallel' do
parellism = 4
@@ -206,11 +212,17 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
it 'updates UsageData for changed designs' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_modified_action).with(author: user)
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_modified_action)
+ .with(author: user, project: project)
run_service
end
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
+ subject(:service_action) { run_service }
+ end
+
it 'records the correct events' do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect { run_service }
diff --git a/spec/services/discussions/capture_diff_note_positions_service_spec.rb b/spec/services/discussions/capture_diff_note_positions_service_spec.rb
index 25e5f549bee..8ba54495d4c 100644
--- a/spec/services/discussions/capture_diff_note_positions_service_spec.rb
+++ b/spec/services/discussions/capture_diff_note_positions_service_spec.rb
@@ -18,8 +18,8 @@ RSpec.describe Discussions::CaptureDiffNotePositionsService do
def build_position(diff_refs, new_line: nil, old_line: nil)
path = 'files/markdown/ruby-style-guide.md'
- Gitlab::Diff::Position.new(old_path: path, new_path: path,
- new_line: new_line, old_line: old_line, diff_refs: diff_refs)
+ Gitlab::Diff::Position.new(
+ old_path: path, new_path: path, new_line: new_line, old_line: old_line, diff_refs: diff_refs)
end
def note_for(new_line: nil, old_line: nil)
diff --git a/spec/services/environments/stop_service_spec.rb b/spec/services/environments/stop_service_spec.rb
index 3ed8a0b1da0..4f766b73710 100644
--- a/spec/services/environments/stop_service_spec.rb
+++ b/spec/services/environments/stop_service_spec.rb
@@ -193,7 +193,7 @@ RSpec.describe Environments::StopService do
end
it 'has active environment at first' do
- expect(pipeline.environments_in_self_and_descendants.first).to be_available
+ expect(pipeline.environments_in_self_and_project_descendants.first).to be_available
end
context 'when user is a developer' do
@@ -201,21 +201,43 @@ RSpec.describe Environments::StopService do
project.add_developer(user)
end
+ context 'and merge request has associated created_environments' do
+ let!(:environment1) { create(:environment, project: project, merge_request: merge_request) }
+ let!(:environment2) { create(:environment, project: project, merge_request: merge_request) }
+
+ before do
+ subject
+ end
+
+ it 'stops the associated created_environments' do
+ expect(environment1.reload).to be_stopped
+ expect(environment2.reload).to be_stopped
+ end
+
+ it 'does not affect environments that are not associated to the merge request' do
+ expect(pipeline.environments_in_self_and_project_descendants.first.merge_request).to be_nil
+ expect(pipeline.environments_in_self_and_project_descendants.first).to be_available
+ end
+ end
+
it 'stops the active environment' do
subject
- expect(pipeline.environments_in_self_and_descendants.first).to be_stopping
+ expect(pipeline.environments_in_self_and_project_descendants.first).to be_stopping
end
context 'when pipeline is a branch pipeline for merge request' do
let(:pipeline) do
- create(:ci_pipeline, source: :push, project: project, sha: merge_request.diff_head_sha,
- merge_requests_as_head_pipeline: [merge_request])
+ create(:ci_pipeline,
+ source: :push,
+ project: project,
+ sha: merge_request.diff_head_sha,
+ merge_requests_as_head_pipeline: [merge_request])
end
it 'does not stop the active environment' do
subject
- expect(pipeline.environments_in_self_and_descendants.first).to be_available
+ expect(pipeline.environments_in_self_and_project_descendants.first).to be_available
end
end
@@ -241,7 +263,7 @@ RSpec.describe Environments::StopService do
it 'does not stop the active environment' do
subject
- expect(pipeline.environments_in_self_and_descendants.first).to be_available
+ expect(pipeline.environments_in_self_and_project_descendants.first).to be_available
end
end
@@ -265,7 +287,7 @@ RSpec.describe Environments::StopService do
it 'does not stop the active environment' do
subject
- expect(pipeline.environments_in_self_and_descendants.first).to be_available
+ expect(pipeline.environments_in_self_and_project_descendants.first).to be_available
end
end
end
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index e66b413a5c9..06f0eb1efbc 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -420,9 +420,9 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
service.save_designs(author, create: [design])
expect_snowplow_event(
- category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
+ category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
action: 'create',
- namespace: design.project.namespace,
+ namespace: design.project.namespace,
user: author,
project: design.project,
label: 'design_users'
@@ -433,9 +433,9 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
service.save_designs(author, update: [design])
expect_snowplow_event(
- category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
+ category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
action: 'update',
- namespace: design.project.namespace,
+ namespace: design.project.namespace,
user: author,
project: design.project,
label: 'design_users'
@@ -481,9 +481,9 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
service.destroy_designs([design], author)
expect_snowplow_event(
- category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
+ category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
action: 'destroy',
- namespace: design.project.namespace,
+ namespace: design.project.namespace,
user: author,
project: design.project,
label: 'design_users'
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index 5de1c0e27be..973ead28462 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -596,7 +596,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
end
end
- project.repository.multi_action(
+ project.repository.commit_files(
user, message: 'message', branch_name: branch, actions: actions
)
end
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 6280f1263c3..a9f5b07fef4 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Git::BranchPushService, services: true do
+RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services: true do
include RepoHelpers
let_it_be(:user) { create(:user) }
@@ -285,7 +285,7 @@ RSpec.describe Git::BranchPushService, services: true do
author_email: commit_author.email
)
- allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
+ allow(Commit).to receive(:build_from_sidekiq_hash)
.and_return(commit)
allow(project.repository).to receive(:commits_between).and_return([commit])
@@ -348,7 +348,7 @@ RSpec.describe Git::BranchPushService, services: true do
committed_date: commit_time
)
- allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
+ allow(Commit).to receive(:build_from_sidekiq_hash)
.and_return(commit)
allow(project.repository).to receive(:commits_between).and_return([commit])
@@ -387,7 +387,7 @@ RSpec.describe Git::BranchPushService, services: true do
allow(project.repository).to receive(:commits_between)
.and_return([closing_commit])
- allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit)
+ allow(Commit).to receive(:build_from_sidekiq_hash)
.and_return(closing_commit)
project.add_maintainer(commit_author)
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index 7e5d7066e89..878a5c4ccf0 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -9,9 +9,13 @@ RSpec.describe Git::WikiPushService, services: true do
let_it_be(:key_id) { create(:key, user: current_user).shell_id }
let(:wiki) { create(:project_wiki, user: current_user) }
- let(:git_wiki) { wiki.wiki }
+ let(:default_branch) { wiki.default_branch }
let(:repository) { wiki.repository }
+ before do
+ repository.create_if_not_exists(default_branch)
+ end
+
describe '#execute' do
it 'executes model-specific callbacks' do
expect(wiki).to receive(:after_post_receive)
@@ -351,7 +355,12 @@ RSpec.describe Git::WikiPushService, services: true do
# that have not gone through our services.
def write_new_page
- generate(:wiki_page_title).tap { |t| git_wiki.write_page(t, 'markdown', 'Hello', commit_details) }
+ generate(:wiki_page_title).tap do |t|
+ repository.create_file(
+ current_user, ::Wiki.sluggified_full_path(t, 'md'), 'Hello',
+ **commit_details
+ )
+ end
end
# We write something to the wiki-repo that is not a page - as, for example, an
@@ -368,15 +377,26 @@ RSpec.describe Git::WikiPushService, services: true do
def update_page(title, new_title = nil)
new_title = title unless new_title.present?
- page = git_wiki.page(title: title)
- git_wiki.update_page(page.path, new_title, 'markdown', 'Hey', commit_details)
+
+ old_path = ::Wiki.sluggified_full_path(title, 'md')
+ new_path = ::Wiki.sluggified_full_path(new_title, 'md')
+
+ repository.update_file(
+ current_user, new_path, 'Hey',
+ **commit_details.merge(previous_path: old_path)
+ )
end
def delete_page(page)
- wiki.delete_page(page, 'commit message')
+ repository.delete_file(current_user, page.path, **commit_details)
end
def commit_details
- create(:git_wiki_commit_details, author: current_user)
+ {
+ branch_name: default_branch,
+ message: "commit message",
+ author_email: current_user.email,
+ author_name: current_user.name
+ }
end
end
diff --git a/spec/services/google_cloud/enable_cloudsql_service_spec.rb b/spec/services/google_cloud/enable_cloudsql_service_spec.rb
index e54e5a8d446..f267f6d3bc2 100644
--- a/spec/services/google_cloud/enable_cloudsql_service_spec.rb
+++ b/spec/services/google_cloud/enable_cloudsql_service_spec.rb
@@ -23,6 +23,11 @@ RSpec.describe GoogleCloud::EnableCloudsqlService do
project.save!
end
+ after do
+ project.variables.destroy_all # rubocop:disable Cop/DestroyAll
+ project.save!
+ end
+
it 'enables cloudsql, compute and service networking Google APIs', :aggregate_failures do
expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
expect(instance).to receive(:enable_cloud_sql_admin).with('prj-prod')
@@ -35,5 +40,22 @@ RSpec.describe GoogleCloud::EnableCloudsqlService do
expect(result[:status]).to eq(:success)
end
+
+ context 'when Google APIs raise an error' do
+ it 'returns error result' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
+ allow(instance).to receive(:enable_cloud_sql_admin).with('prj-prod')
+ allow(instance).to receive(:enable_compute).with('prj-prod')
+ allow(instance).to receive(:enable_service_networking).with('prj-prod')
+ allow(instance).to receive(:enable_cloud_sql_admin).with('prj-staging')
+ allow(instance).to receive(:enable_compute).with('prj-staging')
+ allow(instance).to receive(:enable_service_networking).with('prj-staging')
+ .and_raise(Google::Apis::Error.new('error'))
+ end
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('error')
+ end
+ end
end
end
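The new failure context stubs the staging `enable_service_networking` call to raise `Google::Apis::Error`; a minimal sketch of the error path it implies, where the class name, the method names and the hash helpers are assumptions and only the rescued error class and the `result[:status]` / `result[:message]` keys come from the spec:

  require 'google/apis/errors' # google-apis-core, defines Google::Apis::Error

  class EnableCloudsqlSketch
    def execute
      enable_google_apis
      { status: :success }
    rescue Google::Apis::Error => e
      { status: :error, message: e.message }
    end

    private

    # stands in for the per-project enable_cloud_sql_admin / enable_compute /
    # enable_service_networking calls that the spec stubs out
    def enable_google_apis; end
  end

  EnableCloudsqlSketch.new.execute # => { status: :success } (no real API call is made here)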
diff --git a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb b/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
new file mode 100644
index 00000000000..b83037f80cd
--- /dev/null
+++ b/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::FetchGoogleIpListService,
+ :use_clean_rails_memory_store_caching, :clean_gitlab_redis_rate_limiting do
+ include StubRequests
+
+ let(:google_cloud_ips) { File.read(Rails.root.join('spec/fixtures/cdn/google_cloud.json')) }
+ let(:headers) { { 'Content-Type' => 'application/json' } }
+
+ subject { described_class.new.execute }
+
+ before do
+ WebMock.stub_request(:get, described_class::GOOGLE_IP_RANGES_URL)
+ .to_return(status: 200, body: google_cloud_ips, headers: headers)
+ end
+
+ describe '#execute' do
+ it 'returns a list of IPAddr subnets and caches the result' do
+ expect(::ObjectStorage::CDN::GoogleIpCache).to receive(:update!).and_call_original
+ expect(subject[:subnets]).to be_an(Array)
+ expect(subject[:subnets]).to all(be_an(IPAddr))
+ end
+
+ shared_examples 'IP range retrieval failure' do
+ it 'does not cache the result and logs an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
+ expect(::ObjectStorage::CDN::GoogleIpCache).not_to receive(:update!)
+ expect(subject[:subnets]).to be_nil
+ end
+ end
+
+ context 'with rate limit in effect' do
+ before do
+ 10.times { described_class.new.execute }
+ end
+
+ it 'returns rate limit error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq("#{described_class} was rate limited")
+ end
+ end
+
+ context 'when the URL returns a 404' do
+ before do
+ WebMock.stub_request(:get, described_class::GOOGLE_IP_RANGES_URL).to_return(status: 404)
+ end
+
+ it_behaves_like 'IP range retrieval failure'
+ end
+
+ context 'when the URL returns too large of a payload' do
+ before do
+ stub_const("#{described_class}::RESPONSE_BODY_LIMIT", 300)
+ end
+
+ it_behaves_like 'IP range retrieval failure'
+ end
+
+ context 'when the URL returns HTML' do
+ let(:headers) { { 'Content-Type' => 'text/html' } }
+
+ it_behaves_like 'IP range retrieval failure'
+ end
+
+ context 'when the URL returns empty results' do
+ let(:google_cloud_ips) { '{}' }
+
+ it_behaves_like 'IP range retrieval failure'
+ end
+ end
+end
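The core of the new service spec is turning Google's published IP ranges into `IPAddr` subnets, as asserted by `all(be_an(IPAddr))`; a small standalone sketch of that parsing step, where the `prefixes` / `ipv4Prefix` / `ipv6Prefix` keys are an assumption about the fixture layout:

  require 'json'
  require 'ipaddr'

  payload = '{"prefixes":[{"ipv4Prefix":"8.8.4.0/24"},{"ipv6Prefix":"2600:1900::/35"}]}'

  subnets = JSON.parse(payload)['prefixes'].filter_map do |entry|
    prefix = entry['ipv4Prefix'] || entry['ipv6Prefix']
    IPAddr.new(prefix) if prefix
  end

  subnets.all? { |subnet| subnet.is_a?(IPAddr) } # => true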
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index 0cfde9ef434..0a8164c9ca3 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe Groups::CreateService, '#execute' do
it { is_expected.to be_persisted }
it 'adds an onboarding progress record' do
- expect { subject }.to change(OnboardingProgress, :count).from(0).to(1)
+ expect { subject }.to change(Onboarding::Progress, :count).from(0).to(1)
end
context 'with before_commit callback' do
@@ -108,7 +108,7 @@ RSpec.describe Groups::CreateService, '#execute' do
it { is_expected.to be_persisted }
it 'does not add an onboarding progress record' do
- expect { subject }.not_to change(OnboardingProgress, :count).from(0)
+ expect { subject }.not_to change(Onboarding::Progress, :count).from(0)
end
it_behaves_like 'has sync-ed traversal_ids'
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index 0d699dd447b..9288793cc7a 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -35,17 +35,38 @@ RSpec.describe Groups::DestroyService do
it { expect(NotificationSetting.unscoped.all).not_to include(notification_setting) }
end
- context 'bot tokens', :sidekiq_might_not_need_inline do
- it 'removes group bot', :aggregate_failures do
- bot = create(:user, :project_bot)
- group.add_developer(bot)
- token = create(:personal_access_token, user: bot)
+ context 'bot tokens', :sidekiq_inline do
+ context 'when user_destroy_with_limited_execution_time_worker is enabled' do
+ it 'initiates group bot removal', :aggregate_failures do
+ bot = create(:user, :project_bot)
+ group.add_developer(bot)
+ create(:personal_access_token, user: bot)
+
+ destroy_group(group, user, async)
+
+ expect(
+ Users::GhostUserMigration.where(user: bot,
+ initiator_user: user)
+ ).to be_exists
+ end
+ end
- destroy_group(group, user, async)
+ context 'when user_destroy_with_limited_execution_time_worker is disabled' do
+ before do
+ stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
+ end
+
+ it 'removes group bot', :aggregate_failures do
+ bot = create(:user, :project_bot)
+ group.add_developer(bot)
+ token = create(:personal_access_token, user: bot)
- expect(PersonalAccessToken.find_by(id: token.id)).to be_nil
- expect(User.find_by(id: bot.id)).to be_nil
- expect(User.find_by(id: user.id)).not_to be_nil
+ destroy_group(group, user, async)
+
+ expect(PersonalAccessToken.find_by(id: token.id)).to be_nil
+ expect(User.find_by(id: bot.id)).to be_nil
+ expect(User.find_by(id: user.id)).not_to be_nil
+ end
end
end
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index 292f2e2b86b..a4dfec4723a 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -149,9 +149,9 @@ RSpec.describe Groups::ImportExport::ImportService do
it 'logs the import success' do
expect(import_logger).to receive(:info).with(
- group_id: group.id,
+ group_id: group.id,
group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
+ message: 'Group Import/Export: Import succeeded'
).once
subject
@@ -161,9 +161,9 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when user does not have correct permissions' do
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
- group_id: group.id,
+ group_id: group.id,
group_name: group.name,
- message: a_string_including('Errors occurred')
+ message: a_string_including('Errors occurred')
)
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
@@ -186,9 +186,9 @@ RSpec.describe Groups::ImportExport::ImportService do
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
- group_id: group.id,
+ group_id: group.id,
group_name: group.name,
- message: a_string_including('Errors occurred')
+ message: a_string_including('Errors occurred')
).once
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
@@ -267,9 +267,9 @@ RSpec.describe Groups::ImportExport::ImportService do
it 'logs the import success' do
expect(import_logger).to receive(:info).with(
- group_id: group.id,
+ group_id: group.id,
group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
+ message: 'Group Import/Export: Import succeeded'
).once
subject
@@ -279,9 +279,9 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when user does not have correct permissions' do
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
- group_id: group.id,
+ group_id: group.id,
group_name: group.name,
- message: a_string_including('Errors occurred')
+ message: a_string_including('Errors occurred')
)
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
@@ -304,9 +304,9 @@ RSpec.describe Groups::ImportExport::ImportService do
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
- group_id: group.id,
+ group_id: group.id,
group_name: group.name,
- message: a_string_including('Errors occurred')
+ message: a_string_including('Errors occurred')
).once
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
@@ -328,9 +328,9 @@ RSpec.describe Groups::ImportExport::ImportService do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
expect(import_logger).to receive(:info).with(
- group_id: group.id,
+ group_id: group.id,
group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
+ message: 'Group Import/Export: Import succeeded'
)
subject
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 1c26677cfa5..67a2c237e43 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe Import::GithubService do
end
context 'repository size validation' do
- let(:repository_double) { double(name: 'repository', size: 99) }
+ let(:repository_double) { { name: 'repository', size: 99 } }
before do
expect(client).to receive(:repository).and_return(repository_double)
@@ -84,7 +84,7 @@ RSpec.describe Import::GithubService do
end
it 'returns error when the repository is larger than the limit' do
- allow(repository_double).to receive(:size).and_return(101)
+ repository_double[:size] = 101
expect(subject.execute(access_params, :github)).to include(size_limit_error)
end
@@ -103,7 +103,7 @@ RSpec.describe Import::GithubService do
end
it 'returns error when the repository is larger than the limit' do
- allow(repository_double).to receive(:size).and_return(101)
+ repository_double[:size] = 101
expect(subject.execute(access_params, :github)).to include(size_limit_error)
end
@@ -113,14 +113,14 @@ RSpec.describe Import::GithubService do
context 'when import source is disabled' do
let(:repository_double) do
- double({
+ {
name: 'vim',
description: 'test',
full_name: 'test/vim',
clone_url: 'http://repo.com/repo/repo.git',
private: false,
has_wiki?: false
- })
+ }
end
before do
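Replacing the RSpec double with a plain Hash means later overrides become ordinary key assignments instead of re-stubbed reader methods, which is why the examples above now write `repository_double[:size] = 101`; a minimal illustration:

  repository = { name: 'repository', size: 99 }

  # previously: allow(repository_double).to receive(:size).and_return(101)
  repository[:size] = 101
  repository[:size] # => 101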
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index 55e0e799c19..dc72cf04776 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Issuable::BulkUpdateService do
let(:bulk_update_params) do
{
- add_label_ids: add_labels.map(&:id),
+ add_label_ids: add_labels.map(&:id),
remove_label_ids: remove_labels.map(&:id)
}
end
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index 304e4bb3ebb..838e0675372 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -63,12 +63,14 @@ RSpec.describe Issues::BuildService do
it 'wraps the note in a blockquote' do
note_text = "This is a string\n"\
+ "\n"\
">>>\n"\
"with a blockquote\n"\
"> That has a quote\n"\
">>>\n"
note_result = " > This is a string\n"\
" > \n"\
+ " > \n"\
" > > with a blockquote\n"\
" > > > That has a quote\n"\
" > \n"
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 80c455e72b0..4a84862b9d5 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -416,7 +416,7 @@ RSpec.describe Issues::CreateService do
context "when issuable feature is private" do
before do
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE,
- merge_requests_access_level: ProjectFeature::PRIVATE)
+ merge_requests_access_level: ProjectFeature::PRIVATE)
end
levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC]
@@ -555,24 +555,29 @@ RSpec.describe Issues::CreateService do
expect(reloaded_discussion.last_note.system).to eq(true)
end
- it 'assigns the title and description for the issue' do
- issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ it 'sets default title and description values if not provided' do
+ issue = described_class.new(
+ project: project, current_user: user,
+ params: opts,
+ spam_params: spam_params
+ ).execute
- expect(issue.title).not_to be_nil
- expect(issue.description).not_to be_nil
+ expect(issue).to be_persisted
+ expect(issue.title).to eq("Follow-up from \"#{merge_request.title}\"")
+ expect(issue.description).to include("The following discussion from #{merge_request.to_reference} should be addressed")
end
- it 'can set nil explicitly to the title and description' do
+ it 'takes params from the request over the default values' do
issue = described_class.new(project: project, current_user: user,
- params: {
- merge_request_to_resolve_discussions_of: merge_request,
- description: nil,
- title: nil
- },
+ params: opts.merge(
+ description: 'Custom issue description',
+ title: 'My new issue'
+ ),
spam_params: spam_params).execute
- expect(issue.description).to be_nil
- expect(issue.title).to be_nil
+ expect(issue).to be_persisted
+ expect(issue.description).to eq('Custom issue description')
+ expect(issue.title).to eq('My new issue')
end
end
@@ -594,24 +599,29 @@ RSpec.describe Issues::CreateService do
expect(reloaded_discussion.last_note.system).to eq(true)
end
- it 'assigns the title and description for the issue' do
- issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ it 'sets default title and description values if not provided' do
+ issue = described_class.new(
+ project: project, current_user: user,
+ params: opts,
+ spam_params: spam_params
+ ).execute
- expect(issue.title).not_to be_nil
- expect(issue.description).not_to be_nil
+ expect(issue).to be_persisted
+ expect(issue.title).to eq("Follow-up from \"#{merge_request.title}\"")
+ expect(issue.description).to include("The following discussion from #{merge_request.to_reference} should be addressed")
end
- it 'can set nil explicitly to the title and description' do
+ it 'takes params from the request over the default values' do
issue = described_class.new(project: project, current_user: user,
- params: {
- merge_request_to_resolve_discussions_of: merge_request,
- description: nil,
- title: nil
- },
+ params: opts.merge(
+ description: 'Custom issue description',
+ title: 'My new issue'
+ ),
spam_params: spam_params).execute
- expect(issue.description).to be_nil
- expect(issue.title).to be_nil
+ expect(issue).to be_persisted
+ expect(issue.description).to eq('Custom issue description')
+ expect(issue.title).to eq('My new issue')
end
end
end
diff --git a/spec/services/issues/relative_position_rebalancing_service_spec.rb b/spec/services/issues/relative_position_rebalancing_service_spec.rb
index 20064bd7e4b..37a94e1d6a2 100644
--- a/spec/services/issues/relative_position_rebalancing_service_spec.rb
+++ b/spec/services/issues/relative_position_rebalancing_service_spec.rb
@@ -72,22 +72,8 @@ RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_s
end.not_to change { issues_in_position_order.map(&:id) }
end
- it 'does nothing if the feature flag is disabled' do
- stub_feature_flags(rebalance_issues: false)
- issue = project.issues.first
- issue.project
- issue.project.group
- old_pos = issue.relative_position
-
- # fetching root namespace in the initializer triggers 2 queries:
- # for fetching a random project from collection and fetching the root namespace.
- expect { service.execute }.not_to exceed_query_limit(2)
- expect(old_pos).to eq(issue.reload.relative_position)
- end
-
it 'acts if the flag is enabled for the root namespace' do
issue = create(:issue, project: project, author: user, relative_position: max_pos)
- stub_feature_flags(rebalance_issues: project.root_namespace)
expect { service.execute }.to change { issue.reload.relative_position }
end
@@ -95,7 +81,6 @@ RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_s
it 'acts if the flag is enabled for the group' do
issue = create(:issue, project: project, author: user, relative_position: max_pos)
project.update!(group: create(:group))
- stub_feature_flags(rebalance_issues: issue.project.group)
expect { service.execute }.to change { issue.reload.relative_position }
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index aef3608831c..8a2e9ed74f7 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -48,6 +48,11 @@ RSpec.describe Issues::UpdateService, :mailer do
described_class.new(project: project, current_user: user, params: opts).execute(issue)
end
+ it_behaves_like 'issuable update service updating last_edited_at values' do
+ let(:issuable) { issue }
+ subject(:update_issuable) { update_issue(update_params) }
+ end
+
context 'valid params' do
let(:opts) do
{
@@ -299,38 +304,6 @@ RSpec.describe Issues::UpdateService, :mailer do
end
end
- it 'does not rebalance even if needed if the flag is disabled' do
- stub_feature_flags(rebalance_issues: false)
-
- range = described_class::NO_REBALANCING_NEEDED
- issue1 = create(:issue, project: project, relative_position: range.first - 100)
- issue2 = create(:issue, project: project, relative_position: range.first)
- issue.update!(relative_position: RelativePositioning::START_POSITION)
-
- opts[:move_between_ids] = [issue1.id, issue2.id]
-
- expect(Issues::RebalancingWorker).not_to receive(:perform_async)
-
- update_issue(opts)
- expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
- end
-
- it 'rebalances if needed if the flag is enabled for the project' do
- stub_feature_flags(rebalance_issues: project)
-
- range = described_class::NO_REBALANCING_NEEDED
- issue1 = create(:issue, project: project, relative_position: range.first - 100)
- issue2 = create(:issue, project: project, relative_position: range.first)
- issue.update!(relative_position: RelativePositioning::START_POSITION)
-
- opts[:move_between_ids] = [issue1.id, issue2.id]
-
- expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
-
- update_issue(opts)
- expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
- end
-
it 'rebalances if needed on the left' do
range = described_class::NO_REBALANCING_NEEDED
issue1 = create(:issue, project: project, relative_position: range.first - 100)
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index fe9f3ddc14d..25696ca209e 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -20,10 +20,10 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
case source
when Project
source.add_maintainer(user)
- OnboardingProgress.onboard(source.namespace)
+ Onboarding::Progress.onboard(source.namespace)
when Group
source.add_owner(user)
- OnboardingProgress.onboard(source)
+ Onboarding::Progress.onboard(source)
end
end
@@ -59,7 +59,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'adds a user to members' do
expect(execute_service[:status]).to eq(:success)
expect(source.users).to include member
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(true)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true)
end
context 'when user_id is passed as an integer' do
@@ -68,7 +68,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'successfully creates member' do
expect(execute_service[:status]).to eq(:success)
expect(source.users).to include member
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(true)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true)
end
end
@@ -78,7 +78,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'successfully creates members' do
expect(execute_service[:status]).to eq(:success)
expect(source.users).to include(member, user_invited_by_id)
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(true)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true)
end
end
@@ -88,7 +88,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'successfully creates members' do
expect(execute_service[:status]).to eq(:success)
expect(source.users).to include(member, user_invited_by_id)
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(true)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true)
end
end
@@ -98,7 +98,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'adds a user to members' do
expect(execute_service[:status]).to eq(:success)
expect(source.users).to include member
- expect(OnboardingProgress.completed?(source, :user_added)).to be(true)
+ expect(Onboarding::Progress.completed?(source, :user_added)).to be(true)
end
it 'triggers a members added event' do
@@ -119,7 +119,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to be_present
expect(source.users).not_to include member
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false)
end
end
@@ -130,7 +130,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to be_present
expect(source.users).not_to include member
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false)
end
end
@@ -141,7 +141,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to include("#{member.username}: Access level is not included in the list")
expect(source.users).not_to include member
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false)
end
end
@@ -153,7 +153,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'allows already invited members to be re-invited by email and updates the member access' do
expect(execute_service[:status]).to eq(:success)
expect(invited_member.reset.access_level).to eq ProjectMember::MAINTAINER
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(true)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true)
end
end
@@ -170,7 +170,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'does not update the member' do
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to eq("#{project_bot.username}: not authorized to update member")
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false)
end
end
@@ -178,7 +178,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'adds the member' do
expect(execute_service[:status]).to eq(:success)
expect(source.users).to include project_bot
- expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(true)
+ expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true)
end
end
end
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index ab98fad5d73..0846ec7f50e 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -36,29 +36,15 @@ RSpec.describe MergeRequests::ApprovalService do
it 'does not publish MergeRequests::ApprovedEvent' do
expect { service.execute(merge_request) }.not_to publish_event(MergeRequests::ApprovedEvent)
end
+ end
- context 'async_after_approval feature flag is disabled' do
- before do
- stub_feature_flags(async_after_approval: false)
- end
-
- it 'does not create approve MR event' do
- expect(EventCreateService).not_to receive(:new)
-
- service.execute(merge_request)
- end
-
- it 'does not create an approval note' do
- expect(SystemNoteService).not_to receive(:approve_mr)
-
- service.execute(merge_request)
- end
-
- it 'does not mark pending todos as done' do
- service.execute(merge_request)
+ context 'with an already approved MR' do
+ before do
+ merge_request.approvals.create!(user: user)
+ end
- expect(todo.reload).to be_pending
- end
+ it 'does not create an approval' do
+ expect { service.execute(merge_request) }.not_to change { merge_request.approvals.size }
end
end
@@ -81,51 +67,6 @@ RSpec.describe MergeRequests::ApprovalService do
.to publish_event(MergeRequests::ApprovedEvent)
.with(current_user_id: user.id, merge_request_id: merge_request.id)
end
-
- context 'async_after_approval feature flag is disabled' do
- let(:notification_service) { NotificationService.new }
-
- before do
- stub_feature_flags(async_after_approval: false)
- allow(service).to receive(:notification_service).and_return(notification_service)
- end
-
- it 'creates approve MR event' do
- expect_next_instance_of(EventCreateService) do |instance|
- expect(instance).to receive(:approve_mr)
- .with(merge_request, user)
- end
-
- service.execute(merge_request)
- end
-
- it 'creates an approval note' do
- expect(SystemNoteService).to receive(:approve_mr).with(merge_request, user)
-
- service.execute(merge_request)
- end
-
- it 'marks pending todos as done' do
- service.execute(merge_request)
-
- expect(todo.reload).to be_done
- end
-
- it 'sends a notification when approving' do
- expect(notification_service).to receive_message_chain(:async, :approve_mr)
- .with(merge_request, user)
-
- service.execute(merge_request)
- end
-
- context 'with remaining approvals' do
- it 'fires an approval webhook' do
- expect(service).to receive(:execute_hooks).with(merge_request, 'approved')
-
- service.execute(merge_request)
- end
- end
- end
end
context 'user cannot update the merge request' do
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 3c9d2271ddc..6a6f01e6a95 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -20,18 +20,30 @@ RSpec.describe MergeRequests::BuildService do
let(:merge_request) { service.execute }
let(:compare) { double(:compare, commits: commits) }
let(:commit_1) do
- double(:commit_1, sha: 'f00ba6', safe_message: 'Initial commit',
- gitaly_commit?: false, id: 'f00ba6', parent_ids: ['f00ba5'])
+ double(:commit_1,
+ sha: 'f00ba6',
+ safe_message: 'Initial commit',
+ gitaly_commit?: false,
+ id: 'f00ba6',
+ parent_ids: ['f00ba5'])
end
let(:commit_2) do
- double(:commit_2, sha: 'f00ba7', safe_message: "Closes #1234 Second commit\n\nCreate the app",
- gitaly_commit?: false, id: 'f00ba7', parent_ids: ['f00ba6'])
+ double(:commit_2,
+ sha: 'f00ba7',
+ safe_message: "Closes #1234 Second commit\n\nCreate the app",
+ gitaly_commit?: false,
+ id: 'f00ba7',
+ parent_ids: ['f00ba6'])
end
let(:commit_3) do
- double(:commit_3, sha: 'f00ba8', safe_message: 'This is a bad commit message!',
- gitaly_commit?: false, id: 'f00ba8', parent_ids: ['f00ba7'])
+ double(:commit_3,
+ sha: 'f00ba8',
+ safe_message: 'This is a bad commit message!',
+ gitaly_commit?: false,
+ id: 'f00ba8',
+ parent_ids: ['f00ba7'])
end
let(:commits) { nil }
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index c443d758a77..dc96b5c0e5e 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequests::CreatePipelineService do
+RSpec.describe MergeRequests::CreatePipelineService, :clean_gitlab_redis_cache do
include ProjectForksHelper
let_it_be(:project, reload: true) { create(:project, :repository) }
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 9c9bcb79990..4102cdc101e 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -434,7 +434,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
context "when issuable feature is private" do
before do
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE,
- merge_requests_access_level: ProjectFeature::PRIVATE)
+ merge_requests_access_level: ProjectFeature::PRIVATE)
end
levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC]
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index 24a1a8b3113..aa5d6dcd1fb 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -75,6 +75,7 @@ RSpec.describe MergeRequests::FfMergeService do
expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
+ expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
@@ -87,6 +88,7 @@ RSpec.describe MergeRequests::FfMergeService do
.to change { merge_request.squash_commit_sha }
.from(nil)
+ expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
end
@@ -106,7 +108,6 @@ RSpec.describe MergeRequests::FfMergeService do
service.execute(merge_request)
- expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
@@ -117,11 +118,6 @@ RSpec.describe MergeRequests::FfMergeService do
pre_receive_error = Gitlab::Git::PreReceiveError.new(raw_message, fallback_message: error_message)
allow(service).to receive(:repository).and_raise(pre_receive_error)
allow(service).to receive(:execute_hooks)
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- pre_receive_error,
- pre_receive_message: raw_message,
- merge_request_id: merge_request.id
- )
service.execute(merge_request)
diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
index c43f5db6059..3db3efedb84 100644
--- a/spec/services/merge_requests/handle_assignees_change_service_spec.rb
+++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
end
context 'when execute_hooks option is set to true' do
- let(:options) { { execute_hooks: true } }
+ let(:options) { { 'execute_hooks' => true } }
it 'executes hooks and integrations' do
expect(merge_request.project).to receive(:execute_hooks).with(anything, :merge_request_hooks)
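The option key changes from a Symbol to a String, presumably because the options hash reaches the service after a round trip that stringifies keys (for example a queued job's arguments); that route is an assumption, but the Ruby-level difference the spec now reflects is simply:

  options = { 'execute_hooks' => true }

  options['execute_hooks'] # => true
  options[:execute_hooks]  # => nil, since Hash access is not indifferent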
diff --git a/spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb b/spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb
new file mode 100644
index 00000000000..5722bb79cc5
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/detailed_merge_status_service_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::MergeRequests::Mergeability::DetailedMergeStatusService do
+ subject(:detailed_merge_status) { described_class.new(merge_request: merge_request).execute }
+
+ context 'when merge status is cannot_be_merged_rechecking' do
+ let(:merge_request) { create(:merge_request, merge_status: :cannot_be_merged_rechecking) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is preparing' do
+ let(:merge_request) { create(:merge_request, merge_status: :preparing) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is checking' do
+ let(:merge_request) { create(:merge_request, merge_status: :checking) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is unchecked' do
+ let(:merge_request) { create(:merge_request, merge_status: :unchecked) }
+
+ it 'returns :unchecked' do
+ expect(detailed_merge_status).to eq(:unchecked)
+ end
+ end
+
+ context 'when merge checks are a success' do
+ let(:merge_request) { create(:merge_request) }
+
+ it 'returns :mergeable' do
+ expect(detailed_merge_status).to eq(:mergeable)
+ end
+ end
+
+ context 'when merge status has a failure' do
+ let(:merge_request) { create(:merge_request) }
+
+ before do
+ merge_request.close!
+ end
+
+ it 'returns the failure reason' do
+ expect(detailed_merge_status).to eq(:not_open)
+ end
+ end
+
+ context 'when all but the ci check fails' do
+ let(:merge_request) { create(:merge_request) }
+
+ before do
+ merge_request.project.update!(only_allow_merge_if_pipeline_succeeds: true)
+ end
+
+ context 'when pipeline does not exist' do
+ it 'returns the failure reason' do
+ expect(detailed_merge_status).to eq(:ci_must_pass)
+ end
+ end
+
+ context 'when pipeline exists' do
+ before do
+ create(:ci_pipeline, ci_status, merge_request: merge_request,
+ project: merge_request.project, sha: merge_request.source_branch_sha,
+ head_pipeline_of: merge_request)
+ end
+
+ context 'when the pipeline is running' do
+ let(:ci_status) { :running }
+
+ it 'returns the failure reason' do
+ expect(detailed_merge_status).to eq(:ci_still_running)
+ end
+ end
+
+ context 'when the pipeline is not running' do
+ let(:ci_status) { :failed }
+
+ it 'returns the failure reason' do
+ expect(detailed_merge_status).to eq(:ci_must_pass)
+ end
+ end
+ end
+ end
+end
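For orientation, a usage sketch drawn only from the spec above; it assumes a loaded GitLab application environment and a merge_request record:

  # returns one of the detailed status symbols asserted in the examples, e.g.
  # :checking, :unchecked, :mergeable, :not_open, :ci_still_running or :ci_must_pass
  ::MergeRequests::Mergeability::DetailedMergeStatusService
    .new(merge_request: merge_request)
    .execute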
diff --git a/spec/services/merge_requests/mergeability/logger_spec.rb b/spec/services/merge_requests/mergeability/logger_spec.rb
new file mode 100644
index 00000000000..a4d544884b9
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/logger_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::Logger, :request_store do
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ subject(:logger) { described_class.new(merge_request: merge_request) }
+
+ let(:caller_id) { 'a' }
+
+ before do
+ allow(Gitlab::ApplicationContext).to receive(:current_context_attribute).with(:caller_id).and_return(caller_id)
+ end
+
+ def loggable_data(**extras)
+ {
+ 'mergeability.expensive_operation.duration_s.values' => a_kind_of(Array),
+ "mergeability_merge_request_id" => merge_request.id,
+ "correlation_id" => a_kind_of(String),
+ "mergeability_project_id" => merge_request.project.id
+ }.merge(extras)
+ end
+
+ describe '#instrument' do
+ let(:operation_count) { 1 }
+
+ context 'when enabled' do
+ it "returns the block's value" do
+ expect(logger.instrument(mergeability_name: :expensive_operation) { 123 }).to eq(123)
+ end
+
+ it 'records durations of instrumented operations' do
+ expect_next_instance_of(Gitlab::AppJsonLogger) do |app_logger|
+ expect(app_logger).to receive(:info).with(match(a_hash_including(loggable_data)))
+ end
+
+ expect(logger.instrument(mergeability_name: :expensive_operation) { 123 }).to eq(123)
+
+ logger.commit
+ end
+
+ context 'with multiple observations' do
+ let(:operation_count) { 2 }
+
+ it 'records durations of instrumented operations' do
+ expect_next_instance_of(Gitlab::AppJsonLogger) do |app_logger|
+ expect(app_logger).to receive(:info).with(match(a_hash_including(loggable_data)))
+ end
+
+ 2.times do
+ expect(logger.instrument(mergeability_name: :expensive_operation) { 123 }).to eq(123)
+ end
+
+ logger.commit
+ end
+ end
+
+ context 'when it is a query' do
+ let(:extra_data) do
+ {
+ 'mergeability.expensive_operation.db_count.values' => a_kind_of(Array),
+ 'mergeability.expensive_operation.db_main_count.values' => a_kind_of(Array),
+ 'mergeability.expensive_operation.db_main_duration_s.values' => a_kind_of(Array),
+ 'mergeability.expensive_operation.db_primary_count.values' => a_kind_of(Array),
+ 'mergeability.expensive_operation.db_primary_duration_s.values' => a_kind_of(Array)
+ }
+ end
+
+ context 'with a single query' do
+ it 'includes SQL metrics' do
+ expect_next_instance_of(Gitlab::AppJsonLogger) do |app_logger|
+ expect(app_logger).to receive(:info).with(match(a_hash_including(loggable_data(**extra_data))))
+ end
+
+ expect(logger.instrument(mergeability_name: :expensive_operation) { MergeRequest.count }).to eq(1)
+
+ logger.commit
+ end
+ end
+
+ context 'with multiple queries' do
+ it 'includes SQL metrics' do
+ expect_next_instance_of(Gitlab::AppJsonLogger) do |app_logger|
+ expect(app_logger).to receive(:info).with(match(a_hash_including(loggable_data(**extra_data))))
+ end
+
+ expect(logger.instrument(mergeability_name: :expensive_operation) { Project.count + MergeRequest.count })
+ .to eq(2)
+
+ logger.commit
+ end
+ end
+ end
+ end
+
+ context 'when disabled' do
+ before do
+ stub_feature_flags(mergeability_checks_logger: false)
+ end
+
+ it "returns the block's value" do
+ expect(logger.instrument(mergeability_name: :expensive_operation) { 123 }).to eq(123)
+ end
+
+ it 'does not call the logger' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:new)
+
+ expect(logger.instrument(mergeability_name: :expensive_operation) { Project.count + MergeRequest.count })
+ .to eq(2)
+
+ logger.commit
+ end
+ end
+
+ it 'raises an error when block is not provided' do
+ expect { logger.instrument(mergeability_name: :expensive_operation) }
+ .to raise_error(ArgumentError, 'block not given')
+ end
+ end
+end
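The new spec pins down the intended calling pattern for the mergeability logger; a usage sketch, assuming a loaded GitLab environment and with `run_check` standing in for any block of work:

  logger = MergeRequests::Mergeability::Logger.new(merge_request: merge_request)

  result = logger.instrument(mergeability_name: :expensive_operation) do
    run_check # the block's return value is passed straight through
  end

  logger.commit # flushes the accumulated timings and query counts to Gitlab::AppJsonLogger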
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
index afea3e952a1..cf34923795e 100644
--- a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -69,6 +69,11 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
expect(service).to receive(:read).with(merge_check: merge_check).and_return(success_result)
end
+ expect_next_instance_of(MergeRequests::Mergeability::Logger, merge_request: merge_request) do |logger|
+ expect(logger).to receive(:instrument).with(mergeability_name: 'check_ci_status_service').and_call_original
+ expect(logger).to receive(:commit)
+ end
+
expect(execute.success?).to eq(true)
end
end
@@ -80,6 +85,11 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
expect(service).to receive(:write).with(merge_check: merge_check, result_hash: success_result.to_hash).and_return(true)
end
+ expect_next_instance_of(MergeRequests::Mergeability::Logger, merge_request: merge_request) do |logger|
+ expect(logger).to receive(:instrument).with(mergeability_name: 'check_ci_status_service').and_call_original
+ expect(logger).to receive(:commit)
+ end
+
expect(execute.success?).to eq(true)
end
end
diff --git a/spec/services/merge_requests/update_assignees_service_spec.rb b/spec/services/merge_requests/update_assignees_service_spec.rb
index f5f6f0ca301..3a0b17c2768 100644
--- a/spec/services/merge_requests/update_assignees_service_spec.rb
+++ b/spec/services/merge_requests/update_assignees_service_spec.rb
@@ -113,49 +113,6 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
expect { service.execute(merge_request) }
.to issue_fewer_queries_than { update_service.execute(other_mr) }
end
-
- context 'setting state of assignees' do
- before do
- stub_feature_flags(mr_attention_requests: false)
- end
-
- it 'does not set state as attention_requested if feature flag is disabled' do
- update_merge_request
-
- expect(merge_request.merge_request_assignees[0].state).not_to eq('attention_requested')
- end
-
- context 'feature flag is enabled for current_user' do
- before do
- stub_feature_flags(mr_attention_requests: user)
- end
-
- it 'sets state as attention_requested' do
- update_merge_request
-
- expect(merge_request.merge_request_assignees[0].state).to eq('attention_requested')
- expect(merge_request.merge_request_assignees[0].updated_state_by).to eq(user)
- end
-
- it 'uses reviewers state if it is same user as new assignee' do
- merge_request.reviewers << user2
-
- update_merge_request
-
- expect(merge_request.merge_request_assignees[0].state).to eq('unreviewed')
- end
-
- context 'when assignee_ids matches existing assignee' do
- let(:opts) { { assignee_ids: [user3.id] } }
-
- it 'keeps original assignees state' do
- update_merge_request
-
- expect(merge_request.find_assignee(user3).state).to eq('unreviewed')
- end
- end
- end
- end
end
end
end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index b7fb48718d8..8ebabd64d8a 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -47,6 +47,11 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
@merge_request.reload
end
+ it_behaves_like 'issuable update service updating last_edited_at values' do
+ let(:issuable) { merge_request }
+ subject(:update_issuable) { update_merge_request(update_params) }
+ end
+
context 'valid params' do
let(:locked) { true }
@@ -215,14 +220,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
-
- it 'updates attention requested by of reviewer' do
- opts[:reviewers] = [user2]
-
- MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
-
- expect(merge_request.find_reviewer(user2).updated_state_by).to eq(user)
- end
end
context 'when reviewers did not change' do
@@ -328,49 +325,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
update_merge_request(reviewer_ids: [user.id])
end
-
- context 'setting state of reviewers' do
- before do
- stub_feature_flags(mr_attention_requests: false)
- end
-
- it 'does not set state as attention_requested if feature flag is disabled' do
- update_merge_request(reviewer_ids: [user.id])
-
- expect(merge_request.merge_request_reviewers[0].state).not_to eq('attention_requested')
- end
-
- context 'feature flag is enabled for current_user' do
- before do
- stub_feature_flags(mr_attention_requests: user)
- end
-
- it 'sets state as attention_requested' do
- update_merge_request(reviewer_ids: [user2.id])
-
- expect(merge_request.merge_request_reviewers[0].state).to eq('attention_requested')
- expect(merge_request.merge_request_reviewers[0].updated_state_by).to eq(user)
- end
-
- it 'keeps original reviewers state' do
- merge_request.find_reviewer(user2).update!(state: :unreviewed)
-
- update_merge_request({
- reviewer_ids: [user2.id]
- })
-
- expect(merge_request.find_reviewer(user2).state).to eq('unreviewed')
- end
-
- it 'uses reviewers state if it is same user as new assignee' do
- merge_request.assignees << user
-
- update_merge_request(reviewer_ids: [user.id])
-
- expect(merge_request.merge_request_reviewers[0].state).to eq('unreviewed')
- end
- end
- end
end
it 'creates a resource label event' do
@@ -561,9 +515,9 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
before do
create(:ci_pipeline,
project: project,
- ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha,
- status: :success)
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha,
+ status: :success)
perform_enqueued_jobs do
@merge_request = service.execute(merge_request)
@@ -895,7 +849,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
@merge_request = described_class.new(project: project, current_user: user, params: opts).execute(merge_request)
end
- should_not_email(subscriber)
+ should_email(subscriber)
should_not_email(non_subscriber)
end
@@ -1133,53 +1087,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
end
end
-
- context 'setting state of assignees' do
- before do
- stub_feature_flags(mr_attention_requests: false)
- end
-
- it 'does not set state as attention_requested if feature flag is disabled' do
- update_merge_request({
- assignee_ids: [user2.id]
- })
-
- expect(merge_request.merge_request_assignees[0].state).not_to eq('attention_requested')
- end
-
- context 'feature flag is enabled for current_user' do
- before do
- stub_feature_flags(mr_attention_requests: user)
- end
-
- it 'sets state as attention_requested' do
- update_merge_request({
- assignee_ids: [user2.id]
- })
-
- expect(merge_request.merge_request_assignees[0].state).to eq('attention_requested')
- expect(merge_request.merge_request_assignees[0].updated_state_by).to eq(user)
- end
-
- it 'keeps original assignees state' do
- update_merge_request({
- assignee_ids: [user3.id]
- })
-
- expect(merge_request.find_assignee(user3).state).to eq('unreviewed')
- end
-
- it 'uses reviewers state if it is same user as new assignee' do
- merge_request.reviewers << user2
-
- update_merge_request({
- assignee_ids: [user2.id]
- })
-
- expect(merge_request.merge_request_assignees[0].state).to eq('unreviewed')
- end
- end
- end
end
context 'when adding time spent' do
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
index b326fc1726d..47e5557105b 100644
--- a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memor
start_branch: project.default_branch,
encoding: 'text',
file_path: ".gitlab/dashboards/custom_dashboard.yml",
- file_content: file_content_hash.to_yaml
+ file_content: file_content_hash.to_yaml
}
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 37318d76586..4922e72b7a4 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -134,8 +134,7 @@ RSpec.describe Notes::CreateService do
context 'in a merge request' do
let_it_be(:project_with_repo) { create(:project, :repository) }
let_it_be(:merge_request) do
- create(:merge_request, source_project: project_with_repo,
- target_project: project_with_repo)
+ create(:merge_request, source_project: project_with_repo, target_project: project_with_repo)
end
context 'noteable highlight cache clearing' do
@@ -181,8 +180,7 @@ RSpec.describe Notes::CreateService do
it 'does not clear cache when note is not the first of the discussion' do
prev_note =
- create(:diff_note_on_merge_request, noteable: merge_request,
- project: project_with_repo)
+ create(:diff_note_on_merge_request, noteable: merge_request, project: project_with_repo)
reply_opts =
opts.merge(in_reply_to_discussion_id: prev_note.discussion_id,
type: 'DiffNote',
@@ -408,9 +406,9 @@ RSpec.describe Notes::CreateService do
expect(issuable.draft?).to eq(can_use_quick_action)
}
),
- # Remove draft status
+ # Remove draft (set ready) status
QuickAction.new(
- action_text: "/draft",
+ action_text: "/ready",
before_action: -> {
issuable.reload.update!(title: "Draft: title")
},
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index be95a4bb181..82caec52aee 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -57,13 +57,11 @@ RSpec.describe Notes::DestroyService do
context 'in a merge request' do
let_it_be(:repo_project) { create(:project, :repository) }
let_it_be(:merge_request) do
- create(:merge_request, source_project: repo_project,
- target_project: repo_project)
+ create(:merge_request, source_project: repo_project, target_project: repo_project)
end
let_it_be(:note) do
- create(:diff_note_on_merge_request, project: repo_project,
- noteable: merge_request)
+ create(:diff_note_on_merge_request, project: repo_project, noteable: merge_request)
end
it 'does not track issue comment removal usage data' do
@@ -84,9 +82,8 @@ RSpec.describe Notes::DestroyService do
end
it 'does not clear cache when note is not the first of the discussion' do
- reply_note = create(:diff_note_on_merge_request, in_reply_to: note,
- project: repo_project,
- noteable: merge_request)
+ reply_note = create(:diff_note_on_merge_request,
+ in_reply_to: note, project: repo_project, noteable: merge_request)
expect(merge_request).not_to receive(:diffs)
diff --git a/spec/services/onboarding_progress_service_spec.rb b/spec/services/onboarding/progress_service_spec.rb
index ef4f4f0d822..e9b8ea2e859 100644
--- a/spec/services/onboarding_progress_service_spec.rb
+++ b/spec/services/onboarding/progress_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe OnboardingProgressService do
+RSpec.describe Onboarding::ProgressService do
describe '.async' do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:action) { :git_pull }
@@ -19,12 +19,12 @@ RSpec.describe OnboardingProgressService do
context 'when onboarded' do
before do
- OnboardingProgress.onboard(namespace)
+ Onboarding::Progress.onboard(namespace)
end
context 'when action is already completed' do
before do
- OnboardingProgress.register(namespace, action)
+ Onboarding::Progress.register(namespace, action)
end
it 'does not schedule a worker' do
@@ -52,13 +52,13 @@ RSpec.describe OnboardingProgressService do
context 'when the namespace is a root' do
before do
- OnboardingProgress.onboard(namespace)
+ Onboarding::Progress.onboard(namespace)
end
it 'registers a namespace onboarding progress action for the given namespace' do
execute_service
- expect(OnboardingProgress.completed?(namespace, :subscription_created)).to eq(true)
+ expect(Onboarding::Progress.completed?(namespace, :subscription_created)).to eq(true)
end
end
@@ -66,13 +66,13 @@ RSpec.describe OnboardingProgressService do
let(:group) { create(:group, :nested) }
before do
- OnboardingProgress.onboard(group)
+ Onboarding::Progress.onboard(group)
end
it 'does not register a namespace onboarding progress action' do
execute_service
- expect(OnboardingProgress.completed?(group, :subscription_created)).to be(nil)
+ expect(Onboarding::Progress.completed?(group, :subscription_created)).to be(nil)
end
end
@@ -82,7 +82,7 @@ RSpec.describe OnboardingProgressService do
it 'does not register a namespace onboarding progress action' do
execute_service
- expect(OnboardingProgress.completed?(namespace, :subscription_created)).to be(nil)
+ expect(Onboarding::Progress.completed?(namespace, :subscription_created)).to be(nil)
end
end
end
diff --git a/spec/services/packages/debian/parse_debian822_service_spec.rb b/spec/services/packages/debian/parse_debian822_service_spec.rb
index ff146fda250..a2731816459 100644
--- a/spec/services/packages/debian/parse_debian822_service_spec.rb
+++ b/spec/services/packages/debian/parse_debian822_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe Packages::Debian::ParseDebian822Service do
'Depends' => '${shlibs:Depends}, ${misc:Depends}',
'Description' => "Some mostly empty lib\nUsed in GitLab tests.\n\nTesting another paragraph."
},
- 'Package: sample-udeb' => {
+ 'Package: sample-udeb' => {
'Package' => 'sample-udeb',
'Package-Type' => 'udeb',
'Architecture' => 'any',
diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb
index 52bcddb6f5e..a45dd68cd6e 100644
--- a/spec/services/packages/debian/process_changes_service_spec.rb
+++ b/spec/services/packages/debian/process_changes_service_spec.rb
@@ -5,7 +5,8 @@ RSpec.describe Packages::Debian::ProcessChangesService do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file, codename: 'unstable') }
- let_it_be(:incoming) { create(:debian_incoming, project: distribution.project) }
+
+ let!(:incoming) { create(:debian_incoming, project: distribution.project) }
let(:package_file) { incoming.package_files.last }
@@ -33,10 +34,11 @@ RSpec.describe Packages::Debian::ProcessChangesService do
existing_package.update!(debian_distribution: distribution)
end
- it 'does not create a package' do
+ it 'does not create a package and assigns the package_file to the existing package' do
expect { subject.execute }
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
+ .and change(package_file, :package).to(existing_package)
end
context 'marked as pending_destruction' do
diff --git a/spec/services/packages/rpm/repository_metadata/base_builder_spec.rb b/spec/services/packages/rpm/repository_metadata/base_builder_spec.rb
new file mode 100644
index 00000000000..0fb58cc27d5
--- /dev/null
+++ b/spec/services/packages/rpm/repository_metadata/base_builder_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::RepositoryMetadata::BaseBuilder do
+ describe '#execute' do
+ subject { described_class.new.execute }
+
+ before do
+ stub_const("#{described_class}::ROOT_TAG", 'test')
+ stub_const("#{described_class}::ROOT_ATTRIBUTES", { foo1: 'bar1', foo2: 'bar2' })
+ end
+
+ it 'generates valid xml' do
+ result = Nokogiri::XML::Document.parse(subject)
+
+ expect(result.children.count).to eq(1)
+ expect(result.children.first.attributes.count).to eq(2)
+ expect(result.children.first.attributes['foo1'].value).to eq('bar1')
+ expect(result.children.first.attributes['foo2'].value).to eq('bar2')
+ end
+ end
+end
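The stubbed ROOT_TAG / ROOT_ATTRIBUTES constants imply a single self-closing root element; a small standalone check of that assumed output shape with Nokogiri, where the XML literal is inferred from the expectations rather than taken from the builder itself:

  require 'nokogiri'

  xml = <<~XML
    <?xml version="1.0" encoding="UTF-8"?>
    <test foo1="bar1" foo2="bar2"/>
  XML

  doc = Nokogiri::XML::Document.parse(xml)
  doc.children.count                          # => 1
  doc.children.first.attributes['foo1'].value # => "bar1"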
diff --git a/spec/services/packages/rpm/repository_metadata/build_filelist_xml_spec.rb b/spec/services/packages/rpm/repository_metadata/build_filelist_xml_spec.rb
new file mode 100644
index 00000000000..2feb44c7c1b
--- /dev/null
+++ b/spec/services/packages/rpm/repository_metadata/build_filelist_xml_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::RepositoryMetadata::BuildFilelistXml do
+ describe '#execute' do
+ subject { described_class.new.execute }
+
+ context "when generate empty xml" do
+ let(:expected_xml) do
+ <<~XML
+ <?xml version="1.0" encoding="UTF-8"?>
+ <filelists xmlns="http://linux.duke.edu/metadata/filelists" packages="0"/>
+ XML
+ end
+
+ it 'generates expected xml' do
+ expect(subject).to eq(expected_xml)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/rpm/repository_metadata/build_other_xml_spec.rb b/spec/services/packages/rpm/repository_metadata/build_other_xml_spec.rb
new file mode 100644
index 00000000000..823aa18808a
--- /dev/null
+++ b/spec/services/packages/rpm/repository_metadata/build_other_xml_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::RepositoryMetadata::BuildOtherXml do
+ describe '#execute' do
+ subject { described_class.new.execute }
+
+ context "when generate empty xml" do
+ let(:expected_xml) do
+ <<~XML
+ <?xml version="1.0" encoding="UTF-8"?>
+ <otherdata xmlns="http://linux.duke.edu/metadata/other" packages="0"/>
+ XML
+ end
+
+ it 'generates expected xml' do
+ expect(subject).to eq(expected_xml)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb b/spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb
new file mode 100644
index 00000000000..f5294d6f7f7
--- /dev/null
+++ b/spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::RepositoryMetadata::BuildPrimaryXml do
+ describe '#execute' do
+ subject { described_class.new.execute }
+
+ context "when generate empty xml" do
+ let(:expected_xml) do
+ <<~XML
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata xmlns="http://linux.duke.edu/metadata/common" xmlns:rpm="http://linux.duke.edu/metadata/rpm" packages="0"/>
+ XML
+ end
+
+ it 'generates expected xml' do
+ expect(subject).to eq(expected_xml)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb b/spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb
new file mode 100644
index 00000000000..29b0f73e3c1
--- /dev/null
+++ b/spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::RepositoryMetadata::BuildRepomdXml do
+ describe '#execute' do
+ subject { described_class.new(data).execute }
+
+ let(:data) do
+ {
+ filelists: {
+ checksum: { type: "sha256", value: "123" },
+ 'open-checksum': { type: "sha256", value: "123" },
+ location: { href: "repodata/123-filelists.xml.gz" },
+ timestamp: { value: 1644602784 },
+ size: { value: 11111 },
+ 'open-size': { value: 11111 }
+ },
+ primary: {
+ checksum: { type: "sha256", value: "234" },
+ 'open-checksum': { type: "sha256", value: "234" },
+ location: { href: "repodata/234-primary.xml.gz" },
+ timestamp: { value: 1644602784 },
+ size: { value: 22222 },
+ 'open-size': { value: 22222 }
+ },
+ other: {
+ checksum: { type: "sha256", value: "345" },
+ 'open-checksum': { type: "sha256", value: "345" },
+ location: { href: "repodata/345-other.xml.gz" },
+ timestamp: { value: 1644602784 },
+ size: { value: 33333 },
+ 'open-size': { value: 33333 }
+ }
+ }
+ end
+
+ let(:creation_timestamp) { 111111 }
+
+ before do
+ allow(Time).to receive(:now).and_return(creation_timestamp)
+ end
+
+ it 'generates valid xml' do
+ # Has exactly one root element
+ result = Nokogiri::XML::Document.parse(subject)
+ expect(result.children.count).to eq(1)
+
+ # Root element name is 'repomd'
+ root = result.children.first
+ expect(root.name).to eq('repomd')
+
+ # Has the same count of 'data' tags as keys in 'data'
+ expect(result.css('data').count).to eq(data.count)
+ end
+
+ it 'has all data info' do
+ result = Nokogiri::XML::Document.parse(subject).remove_namespaces!
+
+ data.each do |tag_name, tag_attributes|
+ tag_attributes.each_key do |key|
+ expect(result.at("//repomd/data[@type=\"#{tag_name}\"]/#{key}")).not_to be_nil
+ end
+ end
+ end
+ end
+end
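The BuildRepomdXml spec above checks two properties of the output: a single <repomd> root and one <data type="..."> node per key of the input hash, with a child element per attribute key. A rough sketch of a method with that shape, again using Nokogiri's builder; the method name, namespace and the way :value maps to element text are assumptions rather than the actual service code:

require 'nokogiri'

# Rough sketch, not the GitLab service: one <data type="..."> node per entry,
# one child element per attribute key; :value becomes the element text and the
# remaining keys become element attributes.
def build_repomd_xml(data)
  Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|
    xml.repomd(xmlns: 'http://linux.duke.edu/metadata/repo') do
      data.each do |type, attributes|
        # trailing underscore avoids clashing with builder methods; Nokogiri strips it
        xml.data_(type: type) do
          attributes.each do |name, attrs|
            text = attrs[:value]
            element_attrs = attrs.reject { |key, _| key == :value }
            if text
              xml.public_send(name, text.to_s, element_attrs)
            else
              xml.public_send(name, element_attrs)
            end
          end
        end
      end
    end
  end.to_xml
end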
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 3f4d37e5ddc..aa955b3445b 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe PostReceiveService do
+ include GitlabShellHelpers
include Gitlab::Routing
let_it_be(:user) { create(:user) }
@@ -13,7 +14,6 @@ RSpec.describe PostReceiveService do
let(:identifier) { 'key-123' }
let(:gl_repository) { "project-#{project.id}" }
let(:branch_name) { 'feature' }
- let(:secret_token) { Gitlab::Shell.secret_token }
let(:reference_counter) { double('ReferenceCounter') }
let(:push_options) { ['ci.skip', 'another push option'] }
let(:repository) { project.repository }
@@ -25,7 +25,6 @@ RSpec.describe PostReceiveService do
let(:params) do
{
gl_repository: gl_repository,
- secret_token: secret_token,
identifier: identifier,
changes: changes,
push_options: push_options
diff --git a/spec/services/projects/blame_service_spec.rb b/spec/services/projects/blame_service_spec.rb
index 54c4315d242..52b0ed3412d 100644
--- a/spec/services/projects/blame_service_spec.rb
+++ b/spec/services/projects/blame_service_spec.rb
@@ -54,6 +54,12 @@ RSpec.describe Projects::BlameService, :aggregate_failures do
it { is_expected.to eq(1..2) }
end
+ context 'when the user disabled pagination' do
+ let(:params) { super().merge(no_pagination: 1) }
+
+ it { is_expected.to be_nil }
+ end
+
context 'when feature flag disabled' do
before do
stub_feature_flags(blame_page_pagination: false)
@@ -75,6 +81,12 @@ RSpec.describe Projects::BlameService, :aggregate_failures do
expect(subject.total_count).to eq(4)
end
+ context 'when the user disabled pagination' do
+ let(:params) { super().merge(no_pagination: 1) }
+
+ it { is_expected.to be_nil }
+ end
+
context 'when feature flag disabled' do
before do
stub_feature_flags(blame_page_pagination: false)
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 79904e2bf72..2008de195ab 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -5,11 +5,13 @@ require 'spec_helper'
RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_redis_cache do
using RSpec::Parameterized::TableSyntax
+ include_context 'for a cleanup tags service'
+
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :private) }
let(:repository) { create(:container_repository, :root, project: project) }
- let(:service) { described_class.new(repository, user, params) }
+ let(:service) { described_class.new(container_repository: repository, current_user: user, params: params) }
let(:tags) { %w[latest A Ba Bb C D E] }
before do
@@ -39,268 +41,141 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
describe '#execute' do
subject { service.execute }
- context 'when no params are specified' do
- let(:params) { {} }
-
- it 'does not remove anything' do
- expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:execute)
- expect_no_caching
-
- is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
- end
- end
-
- context 'when regex matching everything is specified' do
- shared_examples 'removes all matches' do
- it 'does remove all tags except latest' do
- expect_no_caching
-
- expect_delete(%w(A Ba Bb C D E))
-
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
- end
- end
-
- let(:params) do
- { 'name_regex_delete' => '.*' }
- end
-
- it_behaves_like 'removes all matches'
-
- context 'with deprecated name_regex param' do
- let(:params) do
- { 'name_regex' => '.*' }
- end
-
- it_behaves_like 'removes all matches'
- end
- end
-
- context 'with invalid regular expressions' do
- shared_examples 'handling an invalid regex' do
- it 'keeps all tags' do
- expect_no_caching
-
- expect(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:new)
-
- subject
- end
-
- it { is_expected.to eq(status: :error, message: 'invalid regex') }
-
- it 'calls error tracking service' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
-
- subject
- end
- end
-
- context 'when name_regex_delete is invalid' do
- let(:params) { { 'name_regex_delete' => '*test*' } }
-
- it_behaves_like 'handling an invalid regex'
- end
-
- context 'when name_regex is invalid' do
- let(:params) { { 'name_regex' => '*test*' } }
-
- it_behaves_like 'handling an invalid regex'
- end
-
- context 'when name_regex_keep is invalid' do
- let(:params) { { 'name_regex_keep' => '*test*' } }
-
- it_behaves_like 'handling an invalid regex'
- end
- end
-
- context 'when delete regex matching specific tags is used' do
- let(:params) do
- { 'name_regex_delete' => 'C|D' }
- end
-
- it 'does remove C and D' do
- expect_delete(%w(C D))
-
- expect_no_caching
-
- is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
- end
-
- context 'with overriding allow regex' do
- let(:params) do
- { 'name_regex_delete' => 'C|D',
- 'name_regex_keep' => 'C' }
- end
-
- it 'does not remove C' do
- expect_delete(%w(D))
-
- expect_no_caching
-
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
- end
- end
-
- context 'with name_regex_delete overriding deprecated name_regex' do
- let(:params) do
- { 'name_regex' => 'C|D',
- 'name_regex_delete' => 'D' }
- end
-
- it 'does not remove C' do
- expect_delete(%w(D))
-
- expect_no_caching
-
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
- end
- end
- end
-
- context 'with allow regex value' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'name_regex_keep' => 'B.*' }
- end
-
- it 'does not remove B*' do
- expect_delete(%w(A C D E))
-
- expect_no_caching
-
- is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
- end
-
- context 'when keeping only N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C',
- 'keep_n' => 1 }
- end
-
- it 'sorts tags by date' do
- expect_delete(%w(Bb Ba C))
-
- expect_no_caching
-
- expect(service).to receive(:order_by_date).and_call_original
-
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
- end
- end
-
- context 'when not keeping N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C' }
- end
-
- it 'does not sort tags by date' do
- expect_delete(%w(A Ba Bb C))
-
- expect_no_caching
-
- expect(service).not_to receive(:order_by_date)
-
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
- end
-
- context 'when removing keeping only 3' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 3 }
- end
-
- it 'does remove B* and C as they are the oldest' do
- expect_delete(%w(Bb Ba C))
-
- expect_no_caching
-
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
- end
-
- context 'when removing older than 1 day' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'older_than' => '1 day' }
- end
-
- it 'does remove B* and C as they are older than 1 day' do
- expect_delete(%w(Ba Bb C))
-
- expect_no_caching
-
- is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
- end
- end
-
- context 'when combining all parameters' do
+ it_behaves_like 'handling invalid params',
+ service_response_extra: {
+ before_truncate_size: 0,
+ after_truncate_size: 0,
+ before_delete_size: 0,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when regex matching everything is specified',
+ delete_expectations: [%w(A Ba Bb C D E)],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 6,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when delete regex matching specific tags is used',
+ service_response_extra: {
+ before_truncate_size: 2,
+ after_truncate_size: 2,
+ before_delete_size: 2,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex',
+ service_response_extra: {
+ before_truncate_size: 1,
+ after_truncate_size: 1,
+ before_delete_size: 1,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'with allow regex value',
+ delete_expectations: [%w(A C D E)],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 4,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when keeping only N tags',
+ delete_expectations: [%w(Bb Ba C)],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when not keeping N tags',
+ delete_expectations: [%w(A Ba Bb C)],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 4,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when removing keeping only 3',
+ delete_expectations: [%w(Bb Ba C)],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when removing older than 1 day',
+ delete_expectations: [%w(Ba Bb C)],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when combining all parameters',
+ delete_expectations: [%w(Bb Ba C)],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when running a container_expiration_policy',
+ delete_expectations: [%w(Bb Ba C)],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ context 'when running a container_expiration_policy with caching' do
+ let(:user) { nil }
let(:params) do
- { 'name_regex_delete' => '.*',
+ {
+ 'name_regex_delete' => '.*',
'keep_n' => 1,
- 'older_than' => '1 day' }
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true
+ }
end
- it 'does remove B* and C' do
- expect_delete(%w(Bb Ba C))
-
- expect_no_caching
+ it 'expects caching to be used' do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ expect_caching
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ subject
end
- end
-
- context 'when running a container_expiration_policy' do
- let(:user) { nil }
-
- context 'with valid container_expiration_policy param' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true }
- end
+ context 'when the setting is set to false' do
before do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- end
-
- it { is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3)) }
-
- context 'caching' do
- it 'expects caching to be used' do
- expect_caching
-
- subject
- end
-
- context 'when setting set to false' do
- before do
- stub_application_setting(container_registry_expiration_policies_caching: false)
- end
-
- it 'does not use caching' do
- expect_no_caching
-
- subject
- end
- end
+ stub_application_setting(container_registry_expiration_policies_caching: false)
end
- end
- context 'without container_expiration_policy param' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day' }
- end
+ it 'does not use caching' do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ expect_no_caching
- it 'fails' do
- is_expected.to eq(status: :error, message: 'access denied')
+ subject
end
end
end
@@ -322,10 +197,12 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
service_response = expected_service_response(
status: status,
original_size: original_size,
+ deleted: nil
+ ).merge(
before_truncate_size: before_truncate_size,
after_truncate_size: after_truncate_size,
before_delete_size: before_delete_size,
- deleted: nil
+ cached_tags_count: 0
)
expect(result).to eq(service_response)
@@ -395,11 +272,8 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
end
it 'caches the created_at values' do
- ::Gitlab::Redis::Cache.with do |redis|
- expect_mget(redis, tags_and_created_ats.keys)
-
- expect_set(redis, cacheable_tags)
- end
+ expect_mget(tags_and_created_ats.keys)
+ expect_set(cacheable_tags)
expect(subject).to include(cached_tags_count: 0)
end
@@ -412,12 +286,10 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
end
it 'uses them' do
- ::Gitlab::Redis::Cache.with do |redis|
- expect_mget(redis, tags_and_created_ats.keys)
+ expect_mget(tags_and_created_ats.keys)
- # because C is already in cache, it should not be cached again
- expect_set(redis, cacheable_tags.except('C'))
- end
+ # because C is already in cache, it should not be cached again
+ expect_set(cacheable_tags.except('C'))
# We will ping the container registry for all tags *except* for C because it's cached
expect(ContainerRegistry::Blob).to receive(:new).with(repository, { "digest" => "sha256:configA" }).and_call_original
@@ -429,15 +301,27 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
end
end
- def expect_mget(redis, keys)
- expect(redis).to receive(:mget).with(keys.map(&method(:cache_key))).and_call_original
+ def expect_mget(keys)
+ Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:mget).with(keys.map(&method(:cache_key))).and_call_original
+ end
end
- def expect_set(redis, tags)
- tags.each do |tag_name, created_at|
+ def expect_set(tags)
+ selected_tags = tags.map do |tag_name, created_at|
ex = 1.day.seconds - (Time.zone.now - created_at).seconds
- if ex > 0
- expect(redis).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex.to_i)
+ [tag_name, created_at, ex.to_i] if ex.positive?
+ end.compact
+
+ return if selected_tags.count.zero?
+
+ Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:pipelined).and_call_original
+
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ selected_tags.each do |tag_name, created_at, ex|
+ expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex)
+ end
end
end
end
@@ -476,38 +360,14 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
end
end
- def expect_delete(tags, container_expiration_policy: nil)
- expect(Projects::ContainerRepository::DeleteTagsService)
- .to receive(:new)
- .with(repository.project, user, tags: tags, container_expiration_policy: container_expiration_policy)
- .and_call_original
-
- expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
- .to receive(:execute)
- .with(repository) { { status: :success, deleted: tags } }
- end
-
- # all those -1 because the default tags on L13 have a "latest" that will be filtered out
- def expected_service_response(status: :success, deleted: [], original_size: tags.size, before_truncate_size: tags.size - 1, after_truncate_size: tags.size - 1, before_delete_size: tags.size - 1)
- {
- status: status,
- deleted: deleted,
- original_size: original_size,
- before_truncate_size: before_truncate_size,
- after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size,
- cached_tags_count: 0
- }.compact.merge(deleted_size: deleted&.size)
- end
-
- def expect_no_caching
- expect(::Gitlab::Redis::Cache).not_to receive(:with)
- end
-
def expect_caching
::Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:mget).and_call_original
- expect(redis).to receive(:set).and_call_original
+ expect(redis).to receive(:pipelined).and_call_original
+
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ expect(pipeline).to receive(:set).and_call_original
+ end
end
end
end
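The reworked expect_set and expect_caching helpers above reflect that the cleanup service now writes cached created_at values through a single Redis pipeline instead of one SET call per tag. A minimal sketch of that write pattern under the same assumptions the spec makes (redis-rb's pipelined block, a cache_key helper for key naming, RFC 3339 timestamps); it is not the service's actual code:

# Minimal sketch of the pipelined write pattern exercised by the spec.
# cache_key(tag_name) and the RFC 3339 formatting mirror the spec's helpers.
def cache_tag_created_at(tags)
  ::Gitlab::Redis::Cache.with do |redis|
    redis.pipelined do |pipeline|
      tags.each do |tag_name, created_at|
        # expire the entry when the tag would become "older than 1 day" anyway
        ttl = (1.day.seconds - (Time.zone.now - created_at).seconds).to_i
        next unless ttl.positive?

        pipeline.set(cache_key(tag_name), created_at.to_datetime.rfc3339, ex: ttl)
      end
    end
  end
end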
diff --git a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
new file mode 100644
index 00000000000..d2cdb667659
--- /dev/null
+++ b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
@@ -0,0 +1,183 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
+ using RSpec::Parameterized::TableSyntax
+
+ include_context 'for a cleanup tags service'
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :private) }
+
+ let(:repository) { create(:container_repository, :root, :import_done, project: project) }
+ let(:service) { described_class.new(container_repository: repository, current_user: user, params: params) }
+ let(:tags) { %w[latest A Ba Bb C D E] }
+
+ before do
+ project.add_maintainer(user) if user
+
+ stub_container_registry_config(enabled: true)
+
+ stub_const("#{described_class}::TAGS_PAGE_SIZE", tags_page_size)
+
+ one_hour_ago = 1.hour.ago
+ five_days_ago = 5.days.ago
+ six_days_ago = 6.days.ago
+ one_month_ago = 1.month.ago
+
+ stub_tags(
+ {
+ 'latest' => one_hour_ago,
+ 'A' => one_hour_ago,
+ 'Ba' => five_days_ago,
+ 'Bb' => six_days_ago,
+ 'C' => one_month_ago,
+ 'D' => nil,
+ 'E' => nil
+ }
+ )
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'with several tags pages' do
+ let(:tags_page_size) { 2 }
+
+ it_behaves_like 'handling invalid params'
+
+ it_behaves_like 'when regex matching everything is specified',
+ delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]]
+
+ it_behaves_like 'when delete regex matching specific tags is used'
+
+ it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex'
+
+ it_behaves_like 'with allow regex value',
+ delete_expectations: [%w[A], %w[C D], %w[E]]
+
+ it_behaves_like 'when keeping only N tags',
+ delete_expectations: [%w[Bb]]
+
+ it_behaves_like 'when not keeping N tags',
+ delete_expectations: [%w[A], %w[Ba Bb], %w[C]]
+
+ context 'when removing keeping only 3' do
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 3
+ }
+ end
+
+ it_behaves_like 'not removing anything'
+ end
+
+ it_behaves_like 'when removing older than 1 day',
+ delete_expectations: [%w[Ba Bb], %w[C]]
+
+ it_behaves_like 'when combining all parameters',
+ delete_expectations: [%w[Bb], %w[C]]
+
+ it_behaves_like 'when running a container_expiration_policy',
+ delete_expectations: [%w[Bb], %w[C]]
+
+ context 'with a timeout' do
+ let(:params) do
+ { 'name_regex_delete' => '.*' }
+ end
+
+ it 'removes the first few pages' do
+ expect(service).to receive(:timeout?).and_return(false, true)
+
+ expect_delete(%w[A])
+ expect_delete(%w[Ba Bb])
+
+ response = expected_service_response(status: :error, deleted: %w[A Ba Bb], original_size: 4)
+
+ is_expected.to eq(response)
+ end
+ end
+ end
+
+ context 'with a single tags page' do
+ let(:tags_page_size) { 1000 }
+
+ it_behaves_like 'handling invalid params'
+
+ it_behaves_like 'when regex matching everything is specified',
+ delete_expectations: [%w[A Ba Bb C D E]]
+
+ it_behaves_like 'when delete regex matching specific tags is used'
+
+ it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex'
+
+ it_behaves_like 'with allow regex value',
+ delete_expectations: [%w[A C D E]]
+
+ it_behaves_like 'when keeping only N tags',
+ delete_expectations: [%w[Ba Bb C]]
+
+ it_behaves_like 'when not keeping N tags',
+ delete_expectations: [%w[A Ba Bb C]]
+
+ it_behaves_like 'when removing keeping only 3',
+ delete_expectations: [%w[Ba Bb C]]
+
+ it_behaves_like 'when removing older than 1 day',
+ delete_expectations: [%w[Ba Bb C]]
+
+ it_behaves_like 'when combining all parameters',
+ delete_expectations: [%w[Ba Bb C]]
+
+ it_behaves_like 'when running a container_expiration_policy',
+ delete_expectations: [%w[Ba Bb C]]
+ end
+ end
+
+ private
+
+ def stub_tags(tags)
+ chunked = tags_page_size < tags.size
+ previous_last = nil
+ max_chunk_index = tags.size / tags_page_size
+
+ tags.keys.in_groups_of(tags_page_size, false).each_with_index do |chunked_tag_names, index|
+ last = index == max_chunk_index
+ pagination_needed = chunked && !last
+
+ response = {
+ pagination: pagination_needed ? pagination_with(last: chunked_tag_names.last) : {},
+ response_body: chunked_tag_names.map do |name|
+ tag_raw_response(name, tags[name])
+ end
+ }
+
+ allow(repository.gitlab_api_client)
+ .to receive(:tags)
+ .with(repository.path, page_size: described_class::TAGS_PAGE_SIZE, last: previous_last)
+ .and_return(response)
+ previous_last = chunked_tag_names.last
+ end
+ end
+
+ def pagination_with(last:)
+ {
+ next: {
+ uri: URI("http://test.org?last=#{last}")
+ }
+ }
+ end
+
+ def tag_raw_response(name, timestamp)
+ timestamp_field = name.start_with?('B') ? 'updated_at' : 'created_at'
+ {
+ 'name' => name,
+ 'digest' => 'sha256:1234567890',
+ 'media_type' => 'application/vnd.oci.image.manifest.v1+json',
+ timestamp_field => timestamp&.iso8601
+ }
+ end
+end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index e112c1e2497..9c8aeb5cf7b 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -125,6 +125,26 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.namespace).to eq(user.namespace)
expect(project.project_namespace).to be_in_sync_with_project(project)
end
+
+ context 'project_authorizations record creation' do
+ context 'when the project_authorizations records are not created via the callback' do
+ it 'still creates a project_authorizations record for the user' do
+ # stub out the callback that creates project_authorizations records on the `ProjectMember` model.
+ expect_next_instance_of(ProjectMember) do |member|
+ expect(member).to receive(:refresh_member_authorized_projects).and_return(nil)
+ end
+
+ project = create_project(user, opts)
+
+ expected_record = project.project_authorizations.where(
+ user: user,
+ access_level: ProjectMember::OWNER
+ )
+
+ expect(expected_record).to exist
+ end
+ end
+ end
end
describe 'after create actions' do
@@ -417,10 +437,10 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(imported_project.import_url).to eq('http://import-url')
end
- it 'tracks for the combined_registration experiment', :experiment do
- expect(experiment(:combined_registration)).to track(:import_project).on_next_instance
-
+ it 'tracks for imported project' do
imported_project
+
+ expect_snowplow_event(category: described_class.name, action: 'import_project', user: user)
end
describe 'import scheduling' do
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 955384e518c..8269dbebccb 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -135,6 +135,33 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
end
end
+ context 'deleting a project with merge request diffs' do
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+ let!(:another_project_mr) { create(:merge_request, source_project: create(:project)) }
+
+ it 'deletes merge request diffs' do
+ merge_request_diffs = merge_request.merge_request_diffs
+ expect(merge_request_diffs.size).to eq(1)
+
+ expect { destroy_project(project, user, {}) }.to change(MergeRequestDiff, :count).by(-1)
+ expect { another_project_mr.reload }.not_to raise_error
+ end
+
+ context 'when extract_mr_diff_deletions feature flag is disabled' do
+ before do
+ stub_feature_flags(extract_mr_diff_deletions: false)
+ end
+
+ it 'also deletes merge request diffs' do
+ merge_request_diffs = merge_request.merge_request_diffs
+ expect(merge_request_diffs.size).to eq(1)
+
+ expect { destroy_project(project, user, {}) }.to change(MergeRequestDiff, :count).by(-1)
+ expect { another_project_mr.reload }.not_to raise_error
+ end
+ end
+ end
+
it_behaves_like 'deleting the project'
it 'invalidates personal_project_count cache' do
@@ -312,7 +339,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
before do
stub_container_registry_tags(repository: project.full_path + '/image',
- tags: ['tag'])
+ tags: ['tag'])
project.container_repositories << container_repository
end
@@ -350,7 +377,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
context 'when there are tags for legacy root repository' do
before do
stub_container_registry_tags(repository: project.full_path,
- tags: ['tag'])
+ tags: ['tag'])
end
context 'when image repository tags deletion succeeds' do
@@ -423,11 +450,11 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
destroy_project(project, user)
end
- it 'calls the bulk snippet destroy service with the hard_delete param set to true' do
+ it 'calls the bulk snippet destroy service with the skip_authorization param set to true' do
expect(project.snippets.count).to eq 2
expect_next_instance_of(Snippets::BulkDestroyService, user, project.snippets) do |instance|
- expect(instance).to receive(:execute).with(hard_delete: true).and_call_original
+ expect(instance).to receive(:execute).with(skip_authorization: true).and_call_original
end
expect do
@@ -485,9 +512,11 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
let!(:project_bot) { create(:user, :project_bot).tap { |user| project.add_maintainer(user) } }
it 'deletes bot user as well' do
- expect do
- destroy_project(project, user)
- end.to change { User.find_by(id: project_bot.id) }.to(nil)
+ expect_next_instance_of(Users::DestroyService, user) do |instance|
+ expect(instance).to receive(:execute).with(project_bot, skip_authorization: true).and_call_original
+ end
+
+ destroy_project(project, user)
end
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 24b5e35e422..eea2ea3271f 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -19,6 +19,25 @@ RSpec.describe Projects::UpdatePagesService do
subject { described_class.new(project, build) }
+ context 'when a deploy stage already exists' do
+ let!(:stage) { create(:ci_stage, name: 'deploy', pipeline: pipeline) }
+
+ it 'assigns the deploy stage' do
+ subject.execute
+
+ expect(GenericCommitStatus.last.ci_stage).to eq(stage)
+ expect(GenericCommitStatus.last.ci_stage.name).to eq('deploy')
+ end
+ end
+
+ context 'when a deploy stage does not exist' do
+ it 'assigns the deploy stage' do
+ subject.execute
+
+ expect(GenericCommitStatus.last.ci_stage.name).to eq('deploy')
+ end
+ end
+
context 'for new artifacts' do
context "for a valid job" do
let!(:artifacts_archive) { create(:ci_job_artifact, :correct_checksum, file: file, job: build) }
diff --git a/spec/services/protected_branches/cache_service_spec.rb b/spec/services/protected_branches/cache_service_spec.rb
index 4fa7553c23d..00d1e8b5457 100644
--- a/spec/services/protected_branches/cache_service_spec.rb
+++ b/spec/services/protected_branches/cache_service_spec.rb
@@ -75,10 +75,12 @@ RSpec.describe ProtectedBranches::CacheService, :clean_gitlab_redis_cache do
expect(service.fetch('main', dry_run: true) { true }).to eq(true)
expect(Gitlab::AppLogger).to receive(:error).with(
- 'class' => described_class.name,
- 'message' => /Cache mismatch/,
- 'project_id' => project.id,
- 'project_path' => project.full_path
+ {
+ 'class' => described_class.name,
+ 'message' => /Cache mismatch/,
+ 'project_id' => project.id,
+ 'project_path' => project.full_path
+ }
)
expect(service.fetch('main', dry_run: true) { false }).to eq(false)
diff --git a/spec/services/protected_branches/destroy_service_spec.rb b/spec/services/protected_branches/destroy_service_spec.rb
index 9fa07820148..123deeea005 100644
--- a/spec/services/protected_branches/destroy_service_spec.rb
+++ b/spec/services/protected_branches/destroy_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ProtectedBranches::DestroyService do
let_it_be_with_reload(:project) { create(:project) }
- let(:protected_branch) { create(:protected_branch, project: project) }
+ let!(:protected_branch) { create(:protected_branch, project: project) }
let(:user) { project.first_owner }
subject(:service) { described_class.new(project, user) }
diff --git a/spec/services/protected_branches/update_service_spec.rb b/spec/services/protected_branches/update_service_spec.rb
index c4fe4d78070..2ff6c3c489a 100644
--- a/spec/services/protected_branches/update_service_spec.rb
+++ b/spec/services/protected_branches/update_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ProtectedBranches::UpdateService do
let_it_be_with_reload(:project) { create(:project) }
- let(:protected_branch) { create(:protected_branch, project: project) }
+ let!(:protected_branch) { create(:protected_branch, project: project) }
let(:user) { project.first_owner }
let(:params) { { name: new_name } }
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 2d38d968ce4..a43f3bc55bf 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -1393,14 +1393,41 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end
+ # /draft is a toggle (ff disabled)
it_behaves_like 'draft command' do
let(:content) { '/draft' }
let(:issuable) { merge_request }
+
+ before do
+ stub_feature_flags(draft_quick_action_non_toggle: false)
+ end
end
+ # /draft is a toggle (ff disabled)
it_behaves_like 'ready command' do
let(:content) { '/draft' }
let(:issuable) { merge_request }
+
+ before do
+ stub_feature_flags(draft_quick_action_non_toggle: false)
+ issuable.update!(title: issuable.draft_title)
+ end
+ end
+
+ # /draft is one way (ff enabled)
+ it_behaves_like 'draft command' do
+ let(:content) { '/draft' }
+ let(:issuable) { merge_request }
+ end
+
+ # /draft is one way (ff enabled)
+ it_behaves_like 'draft/ready command no action' do
+ let(:content) { '/draft' }
+ let(:issuable) { merge_request }
+
+ before do
+ issuable.update!(title: issuable.draft_title)
+ end
end
it_behaves_like 'draft/ready command no action' do
@@ -2646,7 +2673,28 @@ RSpec.describe QuickActions::InterpretService do
end
end
- describe 'draft command' do
+ describe 'draft command toggle (deprecated)' do
+ let(:content) { '/draft' }
+
+ before do
+ stub_feature_flags(draft_quick_action_non_toggle: false)
+ end
+
+ it 'includes the new status' do
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to match_array(['Marks this merge request as a draft.'])
+ end
+
+ it 'sets the ready status on a draft' do
+ merge_request.update!(title: merge_request.draft_title)
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to match_array(["Marks this merge request as ready."])
+ end
+ end
+
+ describe 'draft command set' do
let(:content) { '/draft' }
it 'includes the new status' do
@@ -2654,6 +2702,13 @@ RSpec.describe QuickActions::InterpretService do
expect(explanations).to match_array(['Marks this merge request as a draft.'])
end
+
+ it 'includes the no change message when status unchanged' do
+ merge_request.update!(title: merge_request.draft_title)
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to match_array(["No change to this merge request's draft status."])
+ end
end
describe 'ready command' do
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index 2421fab0eec..5f49eed3e77 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -70,6 +70,28 @@ RSpec.describe Releases::CreateService do
expect(result[:release]).not_to be_nil
end
+ context 'and the tag would be protected' do
+ let!(:protected_tag) { create(:protected_tag, project: project, name: tag_name) }
+
+ context 'and the user does not have permissions' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns an error' do
+ result = service.execute
+
+ expect(result[:status]).to eq(:error)
+ end
+ end
+
+ context 'and the user has permissions' do
+ it_behaves_like 'a successful release creation'
+ end
+ end
+
context 'and tag_message is provided' do
let(:ref) { 'master' }
let(:tag_name) { 'foobar' }
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
index 3d724a79fef..8f89696cc55 100644
--- a/spec/services/resource_access_tokens/revoke_service_spec.rb
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -29,18 +29,35 @@ RSpec.describe ResourceAccessTokens::RevokeService do
expect(resource.reload.users).not_to include(resource_bot)
end
- it 'transfer issuables of bot user to ghost user' do
- issue = create(:issue, author: resource_bot)
+ context 'when user_destroy_with_limited_execution_time_worker is enabled' do
+ it 'initiates user removal' do
+ subject
+
+ expect(
+ Users::GhostUserMigration.where(user: resource_bot,
+ initiator_user: user)
+ ).to be_exists
+ end
+ end
- subject
+ context 'when user_destroy_with_limited_execution_time_worker is disabled' do
+ before do
+ stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
+ end
- expect(issue.reload.author.ghost?).to be true
- end
+ it 'transfers issuables of bot user to ghost user' do
+ issue = create(:issue, author: resource_bot)
- it 'deletes project bot user' do
- subject
+ subject
+
+ expect(issue.reload.author.ghost?).to be true
+ end
+
+ it 'deletes project bot user' do
+ subject
- expect(User.exists?(resource_bot.id)).to be_falsy
+ expect(User.exists?(resource_bot.id)).to be_falsy
+ end
end
it 'logs the event' do
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 8dc7b07e397..9b0ca54a394 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -98,20 +98,33 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:added) { [labels[0]] }
let(:removed) { [labels[1]] }
+ subject(:counter_class) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter }
+
context 'when resource is an issue' do
it 'tracks changed labels' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_label_changed_action)
+ expect(counter_class).to receive(:track_issue_label_changed_action)
change_labels
end
+
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
+ let(:user) { author }
+ subject(:service_action) { change_labels }
+ end
end
context 'when resource is a merge request' do
let(:resource) { create(:merge_request, source_project: project) }
it 'does not track changed labels' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter)
- .not_to receive(:track_issue_label_changed_action)
+ expect(counter_class).not_to receive(:track_issue_label_changed_action)
+
+ change_labels
+ end
+
+ it 'does not emit snowplow event', :snowplow do
+ expect_no_snowplow_event
change_labels
end
diff --git a/spec/services/security/ci_configuration/sast_parser_service_spec.rb b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
index 1fd196cdcee..7a004e2915c 100644
--- a/spec/services/security/ci_configuration/sast_parser_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Security::CiConfiguration::SastParserService do
- include Ci::TemplateHelpers
-
describe '#configuration' do
include_context 'read ci configuration for sast enabled project'
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index b863b2a46b0..5dbf5edb776 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -54,11 +54,13 @@ RSpec.describe ServicePing::SubmitService do
let(:service_ping_payload_url) { File.join(described_class::STAGING_BASE_URL, described_class::USAGE_DATA_PATH) }
let(:service_ping_errors_url) { File.join(described_class::STAGING_BASE_URL, described_class::ERROR_PATH) }
let(:service_ping_metadata_url) { File.join(described_class::STAGING_BASE_URL, described_class::METADATA_PATH) }
+ let!(:usage_data) { { uuid: 'uuid', recorded_at: Time.current } }
+
+ let(:subject) { described_class.new(payload: usage_data) }
shared_examples 'does not run' do
it do
expect(Gitlab::HTTP).not_to receive(:post)
- expect(Gitlab::Usage::ServicePingReport).not_to receive(:for)
subject.execute
end
@@ -69,7 +71,7 @@ RSpec.describe ServicePing::SubmitService do
expect(Gitlab::HTTP).not_to receive(:post).with(service_ping_payload_url, any_args)
expect { subject.execute }.to raise_error(described_class::SubmissionError) do |error|
- expect(error.message).to include('Usage data is blank')
+ expect(error.message).to include('Usage data payload is blank')
end
end
end
@@ -118,13 +120,18 @@ RSpec.describe ServicePing::SubmitService do
allow(ServicePing::ServicePingSettings).to receive(:product_intelligence_enabled?).and_return(true)
end
- it 'generates service ping' do
- stub_response(body: with_dev_ops_score_params)
- stub_response(body: nil, url: service_ping_metadata_url, status: 201)
+ it 'submits a service ping payload without errors', :aggregate_failures do
+ response = stub_response(body: with_dev_ops_score_params)
+ error_response = stub_response(body: nil, url: service_ping_errors_url, status: 201)
+ metadata_response = stub_response(body: nil, url: service_ping_metadata_url, status: 201)
- expect(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values).and_call_original
+ expect(Gitlab::HTTP).to receive(:post).twice.and_call_original
subject.execute
+
+ expect(response).to have_been_requested
+ expect(error_response).not_to have_been_requested
+ expect(metadata_response).to have_been_requested
end
end
@@ -155,15 +162,9 @@ RSpec.describe ServicePing::SubmitService do
expect(response).to have_been_requested
end
- it 'forces a refresh of usage data statistics before submitting' do
- stub_response(body: with_dev_ops_score_params)
-
- expect(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values).and_call_original
-
- subject.execute
- end
-
context 'when conv_index data is passed' do
+ let(:usage_data) { { uuid: 'uuid', recorded_at: Time.current } }
+
before do
stub_response(body: with_conv_index_params)
end
@@ -171,21 +172,17 @@ RSpec.describe ServicePing::SubmitService do
it_behaves_like 'saves DevOps report data from the response'
it 'saves usage_data_id to version_usage_data_id_value' do
- recorded_at = Time.current
- usage_data = { uuid: 'uuid', recorded_at: recorded_at }
-
- expect(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values)
- .and_return(usage_data)
-
subject.execute
- raw_usage_data = RawUsageData.find_by(recorded_at: recorded_at)
+ raw_usage_data = RawUsageData.find_by(recorded_at: usage_data[:recorded_at])
expect(raw_usage_data.version_usage_data_id_value).to eq(31643)
end
end
context 'when only usage_data_id is passed in response' do
+ let(:usage_data) { { uuid: 'uuid', recorded_at: Time.current } }
+
before do
stub_response(body: with_usage_data_id_params)
end
@@ -195,15 +192,9 @@ RSpec.describe ServicePing::SubmitService do
end
it 'saves usage_data_id to version_usage_data_id_value' do
- recorded_at = Time.current
- usage_data = { uuid: 'uuid', recorded_at: recorded_at }
-
- expect(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values)
- .and_return(usage_data)
-
subject.execute
- raw_usage_data = RawUsageData.find_by(recorded_at: recorded_at)
+ raw_usage_data = RawUsageData.find_by(recorded_at: usage_data[:recorded_at])
expect(raw_usage_data.version_usage_data_id_value).to eq(31643)
end
@@ -232,6 +223,8 @@ RSpec.describe ServicePing::SubmitService do
end
context 'with saving raw_usage_data' do
+ let(:usage_data) { { uuid: 'uuid', recorded_at: Time.current } }
+
before do
stub_response(body: with_dev_ops_score_params)
end
@@ -241,17 +234,10 @@ RSpec.describe ServicePing::SubmitService do
end
it 'saves the correct payload' do
- recorded_at = Time.current
- usage_data = { uuid: 'uuid', recorded_at: recorded_at }
-
- expect(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values)
- .and_return(usage_data)
-
subject.execute
- raw_usage_data = RawUsageData.find_by(recorded_at: recorded_at)
+ raw_usage_data = RawUsageData.find_by(recorded_at: usage_data[:recorded_at])
- expect(raw_usage_data.recorded_at).to be_like_time(recorded_at)
expect(raw_usage_data.payload.to_json).to eq(usage_data.to_json)
end
end
@@ -269,90 +255,30 @@ RSpec.describe ServicePing::SubmitService do
end
context 'and usage data is empty string' do
- before do
- allow(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values).and_return({})
- end
+ let(:usage_data) { {} }
it_behaves_like 'does not send a blank usage ping payload'
end
context 'and usage data is nil' do
- before do
- allow(ServicePing::BuildPayload).to receive(:execute).and_return(nil)
- allow(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values).and_return(nil)
- end
+ let(:usage_data) { nil }
it_behaves_like 'does not send a blank usage ping payload'
end
- context 'if payload service fails' do
- before do
- stub_response(body: with_dev_ops_score_params)
-
- allow(ServicePing::BuildPayload).to receive_message_chain(:new, :execute)
- .and_raise(described_class::SubmissionError, 'SubmissionError')
- end
-
- it 'calls Gitlab::Usage::ServicePingReport .for method' do
- usage_data = build_usage_data
-
- expect(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values)
- .and_return(usage_data)
-
- subject.execute
- end
-
- it 'submits error' do
- expect(Gitlab::HTTP).to receive(:post).with(URI.join(service_ping_payload_url), any_args)
- .and_call_original
- expect(Gitlab::HTTP).to receive(:post).with(URI.join(service_ping_errors_url), any_args)
- .and_call_original
- expect(Gitlab::HTTP).to receive(:post).with(URI.join(service_ping_metadata_url), any_args)
- .and_call_original
-
- subject.execute
- end
- end
-
- context 'calls BuildPayload first' do
- before do
- stub_response(body: with_dev_ops_score_params)
- end
-
- it 'returns usage data' do
- usage_data = build_usage_data
-
- expect_next_instance_of(ServicePing::BuildPayload) do |service|
- expect(service).to receive(:execute).and_return(usage_data)
- end
-
- subject.execute
- end
- end
-
context 'if version app response fails' do
before do
stub_response(body: with_dev_ops_score_params, status: 404)
-
- usage_data = build_usage_data
- allow_next_instance_of(ServicePing::BuildPayload) do |service|
- allow(service).to receive(:execute).and_return(usage_data)
- end
end
- it 'calls Gitlab::Usage::ServicePingReport .for method' do
- usage_data = build_usage_data
-
- expect(Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values)
- .and_return(usage_data)
-
+ it 'raises SubmissionError' do
# SubmissionError is raised as a result of 404 in response from HTTP Request
expect { subject.execute }.to raise_error(described_class::SubmissionError)
end
end
context 'when skip_db_write passed to service' do
- let(:subject) { ServicePing::SubmitService.new(skip_db_write: true) }
+ let(:subject) { described_class.new(payload: usage_data, skip_db_write: true) }
before do
stub_response(body: with_dev_ops_score_params)
@@ -377,21 +303,18 @@ RSpec.describe ServicePing::SubmitService do
stub_database_flavor_check
stub_application_setting(usage_ping_enabled: true)
stub_response(body: with_conv_index_params)
- allow_next_instance_of(ServicePing::BuildPayload) do |service|
- allow(service).to receive(:execute).and_return(payload)
- end
end
let(:metric_double) { instance_double(Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator, duration: 123) }
- let(:payload) do
+ let(:usage_data) do
{
uuid: 'uuid',
- metric_a: metric_double,
- metric_group: {
+ metric_a: metric_double,
+ metric_group: {
metric_b: metric_double
},
- metric_without_timing: "value",
- recorded_at: Time.current
+ metric_without_timing: "value",
+ recorded_at: Time.current
}
end
@@ -399,10 +322,10 @@ RSpec.describe ServicePing::SubmitService do
{
metadata: {
uuid: 'uuid',
- metrics: [
- { name: 'metric_a', time_elapsed: 123 },
- { name: 'metric_group.metric_b', time_elapsed: 123 }
- ]
+ metrics: [
+ { name: 'metric_a', time_elapsed: 123 },
+ { name: 'metric_group.metric_b', time_elapsed: 123 }
+ ]
}
}
end
@@ -425,8 +348,4 @@ RSpec.describe ServicePing::SubmitService do
status: status
)
end
-
- def build_usage_data
- { uuid: 'uuid', recorded_at: Time.current }
- end
end
diff --git a/spec/services/service_response_spec.rb b/spec/services/service_response_spec.rb
index 3ede90fbc44..2d70979dd3a 100644
--- a/spec/services/service_response_spec.rb
+++ b/spec/services/service_response_spec.rb
@@ -43,14 +43,14 @@ RSpec.describe ServiceResponse do
end
describe '.error' do
- it 'creates a failed response without HTTP status' do
+ it 'creates an error response without HTTP status' do
response = described_class.error(message: 'Bad apple')
expect(response).to be_error
expect(response.message).to eq('Bad apple')
end
- it 'creates a failed response with HTTP status' do
+ it 'creates an error response with HTTP status' do
response = described_class.error(message: 'Bad apple', http_status: 400)
expect(response).to be_error
@@ -58,7 +58,7 @@ RSpec.describe ServiceResponse do
expect(response.http_status).to eq(400)
end
- it 'creates a failed response with payload' do
+ it 'creates an error response with payload' do
response = described_class.error(message: 'Bad apple',
payload: { bad: 'apple' })
@@ -66,6 +66,15 @@ RSpec.describe ServiceResponse do
expect(response.message).to eq('Bad apple')
expect(response.payload).to eq(bad: 'apple')
end
+
+ it 'creates an error response with a reason' do
+ response = described_class.error(message: 'Bad apple',
+ reason: :permission_denied)
+
+ expect(response).to be_error
+ expect(response.message).to eq('Bad apple')
+ expect(response.reason).to eq(:permission_denied)
+ end
end
describe '#success?' do
diff --git a/spec/services/snippets/bulk_destroy_service_spec.rb b/spec/services/snippets/bulk_destroy_service_spec.rb
index 2d2bdd116d1..4142aa349e4 100644
--- a/spec/services/snippets/bulk_destroy_service_spec.rb
+++ b/spec/services/snippets/bulk_destroy_service_spec.rb
@@ -71,8 +71,8 @@ RSpec.describe Snippets::BulkDestroyService do
let(:error_message) { "You don't have access to delete these snippets." }
end
- context 'when hard_delete option is passed' do
- subject { described_class.new(service_user, snippets).execute(hard_delete: true) }
+ context 'when skip_authorization option is passed' do
+ subject { described_class.new(service_user, snippets).execute(skip_authorization: true) }
it 'returns a ServiceResponse success response' do
expect(subject).to be_success
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index bd8418d7092..4dfec9735ba 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe Spam::SpamActionService do
include_context 'includes Spam constants'
let(:issue) { create(:issue, project: project, author: author) }
+ let(:personal_snippet) { create(:personal_snippet, :public, author: author) }
+ let(:project_snippet) { create(:project_snippet, :public, author: author) }
let(:fake_ip) { '1.2.3.4' }
let(:fake_user_agent) { 'fake-user-agent' }
let(:fake_referer) { 'fake-http-referer' }
@@ -27,6 +29,7 @@ RSpec.describe Spam::SpamActionService do
before do
issue.spam = false
+ personal_snippet.spam = false
end
describe 'constructor argument validation' do
@@ -50,24 +53,24 @@ RSpec.describe Spam::SpamActionService do
end
end
- shared_examples 'creates a spam log' do
+ shared_examples 'creates a spam log' do |target_type|
it do
expect { subject }
- .to log_spam(title: issue.title, description: issue.description, noteable_type: 'Issue')
+ .to log_spam(title: target.title, description: target.description, noteable_type: target_type)
# TODO: These checks should be incorporated into the `log_spam` RSpec matcher above
new_spam_log = SpamLog.last
expect(new_spam_log.user_id).to eq(user.id)
- expect(new_spam_log.title).to eq(issue.title)
- expect(new_spam_log.description).to eq(issue.description)
+ expect(new_spam_log.title).to eq(target.title)
+ expect(new_spam_log.description).to eq(target.spam_description)
expect(new_spam_log.source_ip).to eq(fake_ip)
expect(new_spam_log.user_agent).to eq(fake_user_agent)
- expect(new_spam_log.noteable_type).to eq('Issue')
+ expect(new_spam_log.noteable_type).to eq(target_type)
expect(new_spam_log.via_api).to eq(true)
end
end
- describe '#execute' do
+ shared_examples 'execute spam action service' do |target_type|
let(:fake_captcha_verification_service) { double(:captcha_verification_service) }
let(:fake_verdict_service) { double(:spam_verdict_service) }
let(:allowlisted) { false }
@@ -82,20 +85,22 @@ RSpec.describe Spam::SpamActionService do
let(:verdict_service_args) do
{
- target: issue,
+ target: target,
user: user,
options: verdict_service_opts,
context: {
action: :create,
- target_type: 'Issue'
- }
+ target_type: target_type
+ },
+ extra_features: extra_features
}
end
let_it_be(:existing_spam_log) { create(:spam_log, user: user, recaptcha_verified: false) }
subject do
- described_service = described_class.new(spammable: issue, spam_params: spam_params, user: user, action: :create)
+ described_service = described_class.new(spammable: target, spam_params: spam_params, extra_features:
+ extra_features, user: user, action: :create)
allow(described_service).to receive(:allowlisted?).and_return(allowlisted)
described_service.execute
end
@@ -136,7 +141,7 @@ RSpec.describe Spam::SpamActionService do
context 'when spammable attributes have not changed' do
before do
- issue.closed_at = Time.zone.now
+ allow(target).to receive(:has_changes_to_save?).and_return(true)
end
it 'does not create a spam log' do
@@ -146,11 +151,11 @@ RSpec.describe Spam::SpamActionService do
context 'when spammable attributes have changed' do
let(:expected_service_check_response_message) do
- /Check Issue spammable model for any errors or CAPTCHA requirement/
+ /Check #{target_type} spammable model for any errors or CAPTCHA requirement/
end
before do
- issue.description = 'Lovely Spam! Wonderful Spam!'
+ target.description = 'Lovely Spam! Wonderful Spam!'
end
context 'when allowlisted' do
@@ -170,13 +175,13 @@ RSpec.describe Spam::SpamActionService do
allow(fake_verdict_service).to receive(:execute).and_return(DISALLOW)
end
- it_behaves_like 'creates a spam log'
+ it_behaves_like 'creates a spam log', target_type
it 'marks as spam' do
response = subject
expect(response.message).to match(expected_service_check_response_message)
- expect(issue).to be_spam
+ expect(target).to be_spam
end
end
@@ -185,13 +190,13 @@ RSpec.describe Spam::SpamActionService do
allow(fake_verdict_service).to receive(:execute).and_return(BLOCK_USER)
end
- it_behaves_like 'creates a spam log'
+ it_behaves_like 'creates a spam log', target_type
it 'marks as spam' do
response = subject
expect(response.message).to match(expected_service_check_response_message)
- expect(issue).to be_spam
+ expect(target).to be_spam
end
end
@@ -200,20 +205,20 @@ RSpec.describe Spam::SpamActionService do
allow(fake_verdict_service).to receive(:execute).and_return(CONDITIONAL_ALLOW)
end
- it_behaves_like 'creates a spam log'
+ it_behaves_like 'creates a spam log', target_type
it 'does not mark as spam' do
response = subject
expect(response.message).to match(expected_service_check_response_message)
- expect(issue).not_to be_spam
+ expect(target).not_to be_spam
end
it 'marks as needing reCAPTCHA' do
response = subject
expect(response.message).to match(expected_service_check_response_message)
- expect(issue).to be_needs_recaptcha
+ expect(target).to be_needs_recaptcha
end
end
@@ -222,20 +227,20 @@ RSpec.describe Spam::SpamActionService do
allow(fake_verdict_service).to receive(:execute).and_return(OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM)
end
- it_behaves_like 'creates a spam log'
+ it_behaves_like 'creates a spam log', target_type
it 'does not mark as spam' do
response = subject
expect(response.message).to match(expected_service_check_response_message)
- expect(issue).not_to be_spam
+ expect(target).not_to be_spam
end
it 'does not mark as needing CAPTCHA' do
response = subject
expect(response.message).to match(expected_service_check_response_message)
- expect(issue).not_to be_needs_recaptcha
+ expect(target).not_to be_needs_recaptcha
end
end
@@ -249,7 +254,7 @@ RSpec.describe Spam::SpamActionService do
end
it 'clears spam flags' do
- expect(issue).to receive(:clear_spam_flags!)
+ expect(target).to receive(:clear_spam_flags!)
subject
end
@@ -265,7 +270,7 @@ RSpec.describe Spam::SpamActionService do
end
it 'clears spam flags' do
- expect(issue).to receive(:clear_spam_flags!)
+ expect(target).to receive(:clear_spam_flags!)
subject
end
@@ -285,4 +290,27 @@ RSpec.describe Spam::SpamActionService do
end
end
end
+
+ describe '#execute' do
+ describe 'issue' do
+ let(:target) { issue }
+ let(:extra_features) { {} }
+
+ it_behaves_like 'execute spam action service', 'Issue'
+ end
+
+ describe 'project snippet' do
+ let(:target) { project_snippet }
+ let(:extra_features) { { files: [{ path: 'project.rb' }] } }
+
+ it_behaves_like 'execute spam action service', 'ProjectSnippet'
+ end
+
+ describe 'personal snippet' do
+ let(:target) { personal_snippet }
+ let(:extra_features) { { files: [{ path: 'personal.rb' }] } }
+
+ it_behaves_like 'execute spam action service', 'PersonalSnippet'
+ end
+ end
end
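
The hunks above convert the issue-only specs into a shared example group that is re-run against Issue, ProjectSnippet and PersonalSnippet targets. A minimal, self-contained sketch of that RSpec pattern follows; it uses only the rspec gem, and Target with its spam flag are stand-ins rather than GitLab models or factories.

# Stand-alone sketch of the parameterised shared-example pattern above.
require 'rspec/autorun'

Target = Struct.new(:type) do
  attr_accessor :spam
  def spam? = !!spam
end

RSpec.describe 'spam action handling (sketch)' do
  shared_examples 'marks the target as spam' do |target_type|
    it "flags the #{target_type}" do
      target.spam = true

      expect(target).to be_spam
      expect(target.type).to eq(target_type)
    end
  end

  describe 'issue' do
    let(:target) { Target.new('Issue') }

    it_behaves_like 'marks the target as spam', 'Issue'
  end

  describe 'personal snippet' do
    let(:target) { Target.new('PersonalSnippet') }

    it_behaves_like 'marks the target as spam', 'PersonalSnippet'
  end
end
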
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 082b8f909f9..b89c96129c2 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -17,9 +17,10 @@ RSpec.describe Spam::SpamVerdictService do
let(:check_for_spam) { true }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, author: user) }
+ let_it_be(:snippet) { create(:personal_snippet, :public, author: user) }
let(:service) do
- described_class.new(user: user, target: issue, options: {})
+ described_class.new(user: user, target: target, options: {})
end
let(:attribs) do
@@ -31,7 +32,7 @@ RSpec.describe Spam::SpamVerdictService do
stub_feature_flags(allow_possible_spam: false)
end
- describe '#execute' do
+ shared_examples 'execute spam verdict service' do
subject { service.execute }
before do
@@ -172,7 +173,8 @@ RSpec.describe Spam::SpamVerdictService do
end
end
- describe '#akismet_verdict' do
+ shared_examples 'akismet verdict' do
+ let(:target) { issue }
subject { service.send(:akismet_verdict) }
context 'if Akismet is enabled' do
@@ -227,7 +229,7 @@ RSpec.describe Spam::SpamVerdictService do
end
end
- describe '#spamcheck_verdict' do
+ shared_examples 'spamcheck verdict' do
subject { service.send(:spamcheck_verdict) }
context 'if a Spam Check endpoint enabled and set to a URL' do
@@ -254,7 +256,7 @@ RSpec.describe Spam::SpamVerdictService do
before do
allow(service).to receive(:spamcheck_client).and_return(spam_client)
- allow(spam_client).to receive(:issue_spam?).and_return([verdict, attribs, error])
+ allow(spam_client).to receive(:spam?).and_return([verdict, attribs, error])
end
context 'if the result is a NOOP verdict' do
@@ -365,10 +367,13 @@ RSpec.describe Spam::SpamVerdictService do
let(:attribs) { nil }
before do
- allow(spam_client).to receive(:issue_spam?).and_raise(GRPC::Aborted)
+ allow(spam_client).to receive(:spam?).and_raise(GRPC::Aborted)
end
it 'returns nil' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(GRPC::Aborted), error: ::Spam::SpamConstants::ERROR_TYPE
+ )
expect(subject).to eq([ALLOW, attribs, true])
end
end
@@ -381,17 +386,20 @@ RSpec.describe Spam::SpamVerdictService do
expect(subject).to eq [DISALLOW, attribs]
end
end
- end
- context 'if the endpoint times out' do
- let(:attribs) { nil }
+ context 'if the endpoint times out' do
+ let(:attribs) { nil }
- before do
- allow(spam_client).to receive(:issue_spam?).and_raise(GRPC::DeadlineExceeded)
- end
+ before do
+ allow(spam_client).to receive(:spam?).and_raise(GRPC::DeadlineExceeded)
+ end
- it 'returns nil' do
- expect(subject).to eq([ALLOW, attribs, true])
+ it 'returns nil' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(GRPC::DeadlineExceeded), error: ::Spam::SpamConstants::ERROR_TYPE
+ )
+ expect(subject).to eq([ALLOW, attribs, true])
+ end
end
end
end
@@ -416,4 +424,46 @@ RSpec.describe Spam::SpamVerdictService do
end
end
end
+
+ describe '#execute' do
+ describe 'issue' do
+ let(:target) { issue }
+
+ it_behaves_like 'execute spam verdict service'
+ end
+
+ describe 'snippet' do
+ let(:target) { snippet }
+
+ it_behaves_like 'execute spam verdict service'
+ end
+ end
+
+ describe '#akismet_verdict' do
+ describe 'issue' do
+ let(:target) { issue }
+
+ it_behaves_like 'akismet verdict'
+ end
+
+ describe 'snippet' do
+ let(:target) { snippet }
+
+ it_behaves_like 'akismet verdict'
+ end
+ end
+
+ describe '#spamcheck_verdict' do
+ describe 'issue' do
+ let(:target) { issue }
+
+ it_behaves_like 'spamcheck verdict'
+ end
+
+ describe 'snippet' do
+ let(:target) { snippet }
+
+ it_behaves_like 'spamcheck verdict'
+ end
+ end
end
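
The expectations added above (stub the client's spam? to raise, then assert that the exception is logged and that [ALLOW, attribs, true] is returned) describe a fail-open pattern. A rough, self-contained sketch of that behaviour follows; VerdictService, the client and the error tracker are stand-ins, not GitLab's real classes.

# Sketch: if the spam-check client raises, log the error and fail open.
require 'rspec/autorun'

ALLOW = :allow

class VerdictService
  def initialize(client:, error_tracker:)
    @client = client
    @error_tracker = error_tracker
  end

  def spamcheck_verdict
    verdict, attribs = @client.spam?
    [verdict, attribs, false]
  rescue StandardError => e
    @error_tracker.log_exception(e)
    [ALLOW, nil, true] # fail open and flag that an error occurred
  end
end

RSpec.describe VerdictService do
  it 'logs and allows when the client raises' do
    client = double('SpamClient')
    allow(client).to receive(:spam?).and_raise(StandardError)

    tracker = double('ErrorTracker')
    expect(tracker).to receive(:log_exception).with(an_instance_of(StandardError))

    service = described_class.new(client: client, error_tracker: tracker)
    expect(service.spamcheck_verdict).to eq([ALLOW, nil, true])
  end
end
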
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index e34324d5fe2..41ccd8523fa 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Suggestions::ApplyService do
def apply(suggestions, custom_message = nil)
result = apply_service.new(user, *suggestions, message: custom_message).execute
- suggestions.map { |suggestion| suggestion.reload }
+ suggestions.map(&:reload)
expect(result[:status]).to eq(:success)
end
@@ -136,21 +136,20 @@ RSpec.describe Suggestions::ApplyService do
end
let(:merge_request) do
- create(:merge_request, source_project: project,
- target_project: project,
- source_branch: 'master')
+ create(:merge_request,
+ source_project: project, target_project: project, source_branch: 'master')
end
let(:position) { build_position }
let(:diff_note) do
- create(:diff_note_on_merge_request, noteable: merge_request,
- position: position, project: project)
+ create(:diff_note_on_merge_request,
+ noteable: merge_request, position: position, project: project)
end
let(:suggestion) do
- create(:suggestion, :content_from_repo, note: diff_note,
- to_content: " raise RuntimeError, 'Explosion'\n # explosion?\n")
+ create(:suggestion, :content_from_repo,
+ note: diff_note, to_content: " raise RuntimeError, 'Explosion'\n # explosion?\n")
end
let(:suggestion2) do
@@ -311,9 +310,9 @@ RSpec.describe Suggestions::ApplyService do
context 'when HEAD from position is different from source branch HEAD on repo' do
it 'returns error message' do
- allow(suggestion).to receive(:appliable?) { true }
- allow(suggestion.position).to receive(:head_sha) { 'old-sha' }
- allow(suggestion.noteable).to receive(:source_branch_sha) { 'new-sha' }
+ allow(suggestion).to receive(:appliable?).and_return(true)
+ allow(suggestion.position).to receive(:head_sha).and_return('old-sha')
+ allow(suggestion.noteable).to receive(:source_branch_sha).and_return('new-sha')
result = apply_service.new(user, suggestion).execute
@@ -430,7 +429,6 @@ RSpec.describe Suggestions::ApplyService do
suggestion1_diff = fetch_raw_diff(suggestion1)
suggestion2_diff = fetch_raw_diff(suggestion2)
- # rubocop: disable Layout/TrailingWhitespace
expected_suggestion1_diff = <<-CONTENT.strip_heredoc
@@ -10,7 +10,7 @@ module Popen
end
@@ -442,9 +440,6 @@ RSpec.describe Suggestions::ApplyService do
"PWD" => path
}
CONTENT
- # rubocop: enable Layout/TrailingWhitespace
-
- # rubocop: disable Layout/TrailingWhitespace
expected_suggestion2_diff = <<-CONTENT.strip_heredoc
@@ -28,7 +28,7 @@ module Popen
@@ -455,8 +450,6 @@ RSpec.describe Suggestions::ApplyService do
@cmd_status = wait_thr.value.exitstatus
end
CONTENT
- # rubocop: enable Layout/TrailingWhitespace
-
expect(suggestion1_diff.strip).to eq(expected_suggestion1_diff.strip)
expect(suggestion2_diff.strip).to eq(expected_suggestion2_diff.strip)
end
@@ -508,10 +501,8 @@ RSpec.describe Suggestions::ApplyService do
end
let(:suggestion) do
- create(:suggestion, :content_from_repo, note: diff_note,
- lines_above: 2,
- lines_below: 3,
- to_content: "# multi\n# line\n")
+ create(:suggestion, :content_from_repo,
+ note: diff_note, lines_above: 2, lines_below: 3, to_content: "# multi\n# line\n")
end
let(:suggestions) { [suggestion] }
@@ -568,7 +559,7 @@ RSpec.describe Suggestions::ApplyService do
end
let(:suggestion) do
- create_suggestion( to_content: "", new_line: 13)
+ create_suggestion(to_content: "", new_line: 13)
end
let(:suggestions) { [suggestion] }
@@ -616,14 +607,12 @@ RSpec.describe Suggestions::ApplyService do
context 'no permission' do
let(:merge_request) do
- create(:merge_request, source_project: project,
- target_project: project)
+ create(:merge_request, source_project: project, target_project: project)
end
let(:diff_note) do
- create(:diff_note_on_merge_request, noteable: merge_request,
- position: position,
- project: project)
+ create(:diff_note_on_merge_request,
+ noteable: merge_request, position: position, project: project)
end
context 'user cannot write in project repo' do
@@ -642,14 +631,12 @@ RSpec.describe Suggestions::ApplyService do
context 'patch is not appliable' do
let(:merge_request) do
- create(:merge_request, source_project: project,
- target_project: project)
+ create(:merge_request, source_project: project, target_project: project)
end
let(:diff_note) do
- create(:diff_note_on_merge_request, noteable: merge_request,
- position: position,
- project: project)
+ create(:diff_note_on_merge_request,
+ noteable: merge_request, position: position, project: project)
end
before do
@@ -669,7 +656,7 @@ RSpec.describe Suggestions::ApplyService do
let(:result) { apply_service.new(user, suggestion).execute }
before do
- expect(suggestion.note).to receive(:latest_diff_file) { nil }
+ expect(suggestion.note).to receive(:latest_diff_file).and_return(nil)
end
it 'returns error message' do
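
Several hunks above swap block-style stub returns (receive(:head_sha) { 'old-sha' }) for the explicit .and_return form. The two are equivalent; a tiny runnable illustration with a plain double:

# Sketch of the stubbing-style change: block return vs. and_return.
require 'rspec/autorun'

RSpec.describe 'stub return styles' do
  it 'behaves the same either way' do
    position = double('Position')

    # block form (being phased out above)
    allow(position).to receive(:head_sha) { 'old-sha' }
    expect(position.head_sha).to eq('old-sha')

    # explicit form (preferred above)
    allow(position).to receive(:head_sha).and_return('new-sha')
    expect(position.head_sha).to eq('new-sha')
  end
end
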
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index 33608deaa64..c856caa3f3e 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -48,12 +48,6 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
expect(note.note).to eq("changed due date to #{due_date.to_s(:long)}")
end
- it 'tracks the issue event in usage ping' do
- expect(activity_counter_class).to receive(activity_counter_method).with(author: author)
-
- subject
- end
-
context 'and start date removed' do
let(:changed_dates) { { 'due_date' => [nil, due_date], 'start_date' => [start_date, nil] } }
@@ -66,12 +60,18 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
context 'when start_date is added' do
let(:changed_dates) { { 'start_date' => [nil, start_date] } }
- it 'does not track the issue event in usage ping' do
+ it 'does not track the issue event' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_due_date_changed_action)
subject
end
+ it 'does not emit snowplow event', :snowplow do
+ expect_no_snowplow_event
+
+ subject
+ end
+
it 'sets the correct note message' do
expect(note.note).to eq("changed start date to #{start_date.to_s(:long)}")
end
@@ -111,12 +111,19 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
subject
end
- it 'tracks the issue event in usage ping' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_due_date_changed_action).with(author: author)
+ it 'tracks the issue event' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_due_date_changed_action)
+ .with(author: author, project: project)
subject
end
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
+ let(:user) { author }
+ subject(:service_action) { note }
+ end
+
context 'when only start_date is added' do
let(:changed_dates) { { 'start_date' => [nil, start_date] } }
@@ -135,12 +142,18 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
it_behaves_like 'issuable getting date change notes'
- it 'does not track the issue event in usage ping' do
+ it 'does not track the issue event' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_due_date_changed_action)
subject
end
+ it 'does not emit snowplow event', :snowplow do
+ expect_no_snowplow_event
+
+ subject
+ end
+
context 'when only start_date is added' do
let(:changed_dates) { { 'start_date' => [nil, start_date] } }
@@ -155,12 +168,23 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
context 'when noteable is a merge request' do
let(:noteable) { create(:merge_request, source_project: project) }
- it 'does not track the issue event in usage ping' do
+ it 'does not track the issue event' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_due_date_changed_action)
+
+ subject
+ end
+
+ it 'does not track the work item event in usage ping' do
expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_date_changed_action)
subject
end
+
+ it 'does not emit snowplow event', :snowplow do
+ expect_no_snowplow_event
+
+ subject
+ end
end
end
@@ -201,17 +225,31 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
end
it 'tracks the issue event in usage ping' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_time_estimate_changed_action).with(author: author)
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_time_estimate_changed_action)
+ .with(author: author, project: project)
subject
end
+
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
+ let(:user) { author }
+ let(:service_action) { subject }
+ end
end
context 'when noteable is a merge request' do
let_it_be(:noteable) { create(:merge_request, source_project: project) }
- it 'does not track the issue event in usage ping' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_time_estimate_changed_action).with(author: author)
+ it 'does not track the issue event' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_time_estimate_changed_action)
+ .with(author: author, project: project)
+
+ subject
+ end
+
+ it 'does not emit snowplow event', :snowplow do
+ expect_no_snowplow_event
subject
end
@@ -316,25 +354,42 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
end
end
- it 'tracks the issue event in usage ping' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_time_spent_changed_action).with(author: author)
+ it 'tracks the issue event' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_time_spent_changed_action)
+ .with(author: author, project: project)
spend_time!(277200)
subject
end
+
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
+ let(:user) { author }
+ let(:service_action) do
+ spend_time!(277200)
+ subject
+ end
+ end
end
context 'when noteable is a merge request' do
let_it_be(:noteable) { create(:merge_request, source_project: project) }
- it 'does not track the issue event in usage ping' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_time_estimate_changed_action).with(author: author)
+ it 'does not track the issue event' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_time_estimate_changed_action)
+ .with(author: author, project: project)
spend_time!(277200)
subject
end
+
+ it 'does not emit snowplow event', :snowplow do
+ expect_no_snowplow_event
+
+ subject
+ end
end
def spend_time!(seconds)
diff --git a/spec/services/topics/merge_service_spec.rb b/spec/services/topics/merge_service_spec.rb
index 971917eb8e9..eef31817aa8 100644
--- a/spec/services/topics/merge_service_spec.rb
+++ b/spec/services/topics/merge_service_spec.rb
@@ -30,7 +30,9 @@ RSpec.describe Topics::MergeService do
it 'reverts previous changes' do
allow(source_topic.reload).to receive(:destroy!).and_raise(ActiveRecord::RecordNotDestroyed)
- expect { subject }.to raise_error(ActiveRecord::RecordNotDestroyed)
+ response = subject
+ expect(response).to be_error
+ expect(response.message).to eq('Topics could not be merged!')
expect(source_topic.projects).to contain_exactly(project_1, project_2, project_4)
expect(target_topic.projects).to contain_exactly(project_3, project_4)
@@ -50,9 +52,9 @@ RSpec.describe Topics::MergeService do
with_them do
it 'raises correct error' do
- expect { subject }.to raise_error(ArgumentError) do |error|
- expect(error.message).to eq(expected_message)
- end
+ response = subject
+ expect(response).to be_error
+ expect(response.message).to eq(expected_message)
end
end
end
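
The merge-service hunks above replace raise_error assertions with a response object checked via be_error and a message, i.e. the service now reports failures instead of raising. A self-contained sketch of that shape follows; Result stands in for GitLab's ServiceResponse and the error message is invented for illustration.

# Sketch: return an error result rather than letting the exception propagate.
require 'rspec/autorun'

Result = Struct.new(:status, :message) do
  def error? = status == :error
end

class MergeService
  def initialize(source, target)
    @source = source
    @target = target
  end

  def execute
    raise ArgumentError, 'The source topic is not a topic.' unless @source.is_a?(Symbol)
    raise ArgumentError, 'The target topic is not a topic.' unless @target.is_a?(Symbol)

    Result.new(:success, nil)
  rescue ArgumentError => e
    Result.new(:error, e.message)
  end
end

RSpec.describe MergeService do
  it 'returns an error response instead of raising' do
    response = described_class.new('not a topic', :target).execute

    expect(response).to be_error
    expect(response.message).to eq('The source topic is not a topic.')
  end
end
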
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index 90c4f70d749..b32599d4af8 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -10,371 +10,475 @@ RSpec.describe Users::DestroyService do
let(:service) { described_class.new(admin) }
let(:gitlab_shell) { Gitlab::Shell.new }
- describe "Deletes a user and all their personal projects", :enable_admin_mode do
- context 'no options are given' do
- it 'deletes the user' do
- user_data = service.execute(user)
-
- expect(user_data['email']).to eq(user.email)
- expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
- expect { Namespace.find(namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ shared_examples 'pre-migrate clean-up' do
+ describe "Deletes a user and all their personal projects", :enable_admin_mode do
+ context 'no options are given' do
+ it 'will delete the personal project' do
+ expect_next_instance_of(Projects::DestroyService) do |destroy_service|
+ expect(destroy_service).to receive(:execute).once.and_return(true)
+ end
+
+ service.execute(user)
+ end
end
- it 'deletes user associations in batches' do
- expect(user).to receive(:destroy_dependent_associations_in_batches)
+ context 'personal projects in pending_delete' do
+ before do
+ project.pending_delete = true
+ project.save!
+ end
+
+ it 'destroys a personal project in pending_delete' do
+ expect_next_instance_of(Projects::DestroyService) do |destroy_service|
+ expect(destroy_service).to receive(:execute).once.and_return(true)
+ end
- service.execute(user)
+ service.execute(user)
+ end
end
- it 'does not include snippets when deleting in batches' do
- expect(user).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:snippets] })
+ context "solo owned groups present" do
+ let(:solo_owned) { create(:group) }
+ let(:member) { create(:group_member) }
+ let(:user) { member.user }
- service.execute(user)
- end
+ before do
+ solo_owned.group_members = [member]
+ end
- it 'will delete the project' do
- expect_next_instance_of(Projects::DestroyService) do |destroy_service|
- expect(destroy_service).to receive(:execute).once.and_return(true)
+ it 'returns the user with attached errors' do
+ expect(service.execute(user)).to be(user)
+ expect(user.errors.full_messages).to(
+ contain_exactly('You must transfer ownership or delete groups before you can remove user'))
end
- service.execute(user)
+ it 'does not delete the user, nor the group' do
+ service.execute(user)
+
+ expect(User.find(user.id)).to eq user
+ expect(Group.find(solo_owned.id)).to eq solo_owned
+ end
end
- it 'calls the bulk snippet destroy service for the user personal snippets' do
- repo1 = create(:personal_snippet, :repository, author: user).snippet_repository
- repo2 = create(:project_snippet, :repository, project: project, author: user).snippet_repository
+ context "deletions with solo owned groups" do
+ let(:solo_owned) { create(:group) }
+ let(:member) { create(:group_member) }
+ let(:user) { member.user }
- aggregate_failures do
- expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_truthy
- expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_truthy
+ before do
+ solo_owned.group_members = [member]
+ service.execute(user, delete_solo_owned_groups: true)
end
- # Call made when destroying user personal projects
- expect(Snippets::BulkDestroyService).to receive(:new)
- .with(admin, project.snippets).and_call_original
+ it 'deletes solo owned groups' do
+ expect { Group.find(solo_owned.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
- # Call to remove user personal snippets and for
- # project snippets where projects are not user personal
- # ones
- expect(Snippets::BulkDestroyService).to receive(:new)
- .with(admin, user.snippets.only_personal_snippets).and_call_original
+ context 'deletions with inherited group owners' do
+ let(:group) { create(:group, :nested) }
+ let(:user) { create(:user) }
+ let(:inherited_owner) { create(:user) }
+
+ before do
+ group.parent.add_owner(inherited_owner)
+ group.add_owner(user)
- service.execute(user)
+ service.execute(user, delete_solo_owned_groups: true)
+ end
- aggregate_failures do
- expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_falsey
- expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_falsey
+ it 'does not delete the group' do
+ expect(Group.exists?(id: group)).to be_truthy
end
end
- it 'calls the bulk snippet destroy service with hard delete option if it is present' do
- # this avoids getting into Projects::DestroyService as it would
- # call Snippets::BulkDestroyService first!
- allow(user).to receive(:personal_projects).and_return([])
+ describe "user personal's repository removal" do
+ context 'storages' do
+ before do
+ perform_enqueued_jobs { service.execute(user) }
+ end
- expect_next_instance_of(Snippets::BulkDestroyService) do |bulk_destroy_service|
- expect(bulk_destroy_service).to receive(:execute).with({ hard_delete: true }).and_call_original
- end
+ context 'legacy storage' do
+ let!(:project) { create(:project, :empty_repo, :legacy_storage, namespace: user.namespace) }
- service.execute(user, { hard_delete: true })
- end
+ it 'removes repository' do
+ expect(
+ gitlab_shell.repository_exists?(project.repository_storage,
+ "#{project.disk_path}.git")
+ ).to be_falsey
+ end
+ end
- it 'does not delete project snippets that the user is the author of' do
- repo = create(:project_snippet, :repository, author: user).snippet_repository
- service.execute(user)
- expect(gitlab_shell.repository_exists?(repo.shard_name, repo.disk_path + '.git')).to be_truthy
- expect(User.ghost.snippets).to include(repo.snippet)
- end
+ context 'hashed storage' do
+ let!(:project) { create(:project, :empty_repo, namespace: user.namespace) }
- context 'when an error is raised deleting snippets' do
- it 'does not delete user' do
- snippet = create(:personal_snippet, :repository, author: user)
+ it 'removes repository' do
+ expect(
+ gitlab_shell.repository_exists?(project.repository_storage,
+ "#{project.disk_path}.git")
+ ).to be_falsey
+ end
+ end
+ end
- bulk_service = double
- allow(Snippets::BulkDestroyService).to receive(:new).and_call_original
- allow(Snippets::BulkDestroyService).to receive(:new).with(admin, user.snippets).and_return(bulk_service)
- allow(bulk_service).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
+ context 'repository removal status is taken into account' do
+ it 'raises exception' do
+ expect_next_instance_of(::Projects::DestroyService) do |destroy_service|
+ expect(destroy_service).to receive(:execute).and_return(false)
+ end
- aggregate_failures do
expect { service.execute(user) }
- .to raise_error(Users::DestroyService::DestroyError, 'foo' )
- expect(snippet.reload).not_to be_nil
- expect(gitlab_shell.repository_exists?(snippet.repository_storage, snippet.disk_path + '.git')).to be_truthy
+ .to raise_error(Users::DestroyService::DestroyError,
+ "Project #{project.id} can't be deleted" )
end
end
end
- end
- context 'projects in pending_delete' do
- before do
- project.pending_delete = true
- project.save!
- end
+ describe "calls the before/after callbacks" do
+ it 'of project_members' do
+ expect_any_instance_of(ProjectMember).to receive(:run_callbacks).with(:find).once
+ expect_any_instance_of(ProjectMember).to receive(:run_callbacks).with(:initialize).once
+ expect_any_instance_of(ProjectMember).to receive(:run_callbacks).with(:destroy).once
- it 'destroys a project in pending_delete' do
- expect_next_instance_of(Projects::DestroyService) do |destroy_service|
- expect(destroy_service).to receive(:execute).once.and_return(true)
+ service.execute(user)
end
- service.execute(user)
+ it 'of group_members' do
+ group_member = create(:group_member)
+ group_member.group.group_members.create!(user: user, access_level: 40)
- expect { Project.find(project.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ expect_any_instance_of(GroupMember).to receive(:run_callbacks).with(:find).once
+ expect_any_instance_of(GroupMember).to receive(:run_callbacks).with(:initialize).once
+ expect_any_instance_of(GroupMember).to receive(:run_callbacks).with(:destroy).once
+
+ service.execute(user)
+ end
end
end
+ end
- context "a deleted user's issues" do
- let(:project) { create(:project) }
+ context 'when user_destroy_with_limited_execution_time_worker is disabled' do
+ before do
+ stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
+ end
- before do
- project.add_developer(user)
- end
+ include_examples 'pre-migrate clean-up'
- context "for an issue the user was assigned to" do
- let!(:issue) { create(:issue, project: project, assignees: [user]) }
+ describe "Deletes a user and all their personal projects", :enable_admin_mode do
+ context 'no options are given' do
+ it 'deletes the user' do
+ user_data = service.execute(user)
- before do
- service.execute(user)
+ expect(user_data['email']).to eq(user.email)
+ expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { Namespace.find(namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
- it 'does not delete issues the user is assigned to' do
- expect(Issue.find_by_id(issue.id)).to be_present
+ it 'deletes user associations in batches' do
+ expect(user).to receive(:destroy_dependent_associations_in_batches)
+
+ service.execute(user)
end
- it 'migrates the issue so that it is "Unassigned"' do
- migrated_issue = Issue.find_by_id(issue.id)
+ it 'does not include snippets when deleting in batches' do
+ expect(user).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:snippets] })
- expect(migrated_issue.assignees).to be_empty
+ service.execute(user)
end
- end
- end
- context "a deleted user's merge_requests" do
- let(:project) { create(:project, :repository) }
+ it 'calls the bulk snippet destroy service for the user personal snippets' do
+ repo1 = create(:personal_snippet, :repository, author: user).snippet_repository
+ repo2 = create(:project_snippet, :repository, project: project, author: user).snippet_repository
- before do
- project.add_developer(user)
- end
+ aggregate_failures do
+ expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_truthy
+ expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_truthy
+ end
- context "for an merge request the user was assigned to" do
- let!(:merge_request) { create(:merge_request, source_project: project, assignees: [user]) }
+ # Call made when destroying user personal projects
+ expect(Snippets::BulkDestroyService).to receive(:new)
+ .with(admin, project.snippets).and_call_original
+
+ # Call to remove user personal snippets and for
+ # project snippets where projects are not user personal
+ # ones
+ expect(Snippets::BulkDestroyService).to receive(:new)
+ .with(admin, user.snippets.only_personal_snippets).and_call_original
- before do
service.execute(user)
+
+ aggregate_failures do
+ expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_falsey
+ expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_falsey
+ end
end
- it 'does not delete merge requests the user is assigned to' do
- expect(MergeRequest.find_by_id(merge_request.id)).to be_present
+ it 'calls the bulk snippet destroy service with hard delete option if it is present' do
+ # this avoids getting into Projects::DestroyService as it would
+ # call Snippets::BulkDestroyService first!
+ allow(user).to receive(:personal_projects).and_return([])
+
+ expect_next_instance_of(Snippets::BulkDestroyService) do |bulk_destroy_service|
+ expect(bulk_destroy_service).to receive(:execute).with({ skip_authorization: true }).and_call_original
+ end
+
+ service.execute(user, { hard_delete: true })
end
- it 'migrates the merge request so that it is "Unassigned"' do
- migrated_merge_request = MergeRequest.find_by_id(merge_request.id)
+ it 'does not delete project snippets that the user is the author of' do
+ repo = create(:project_snippet, :repository, author: user).snippet_repository
+ service.execute(user)
+ expect(gitlab_shell.repository_exists?(repo.shard_name, repo.disk_path + '.git')).to be_truthy
+ expect(User.ghost.snippets).to include(repo.snippet)
+ end
- expect(migrated_merge_request.assignees).to be_empty
+ context 'when an error is raised deleting snippets' do
+ it 'does not delete user' do
+ snippet = create(:personal_snippet, :repository, author: user)
+
+ bulk_service = double
+ allow(Snippets::BulkDestroyService).to receive(:new).and_call_original
+ allow(Snippets::BulkDestroyService).to receive(:new).with(admin, user.snippets).and_return(bulk_service)
+ allow(bulk_service).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
+
+ aggregate_failures do
+ expect { service.execute(user) }
+ .to raise_error(Users::DestroyService::DestroyError, 'foo' )
+ expect(snippet.reload).not_to be_nil
+ expect(
+ gitlab_shell.repository_exists?(snippet.repository_storage,
+ snippet.disk_path + '.git')
+ ).to be_truthy
+ end
+ end
end
end
- end
- context "solo owned groups present" do
- let(:solo_owned) { create(:group) }
- let(:member) { create(:group_member) }
- let(:user) { member.user }
+ context 'projects in pending_delete' do
+ before do
+ project.pending_delete = true
+ project.save!
+ end
- before do
- solo_owned.group_members = [member]
- end
+ it 'destroys a project in pending_delete' do
+ expect_next_instance_of(Projects::DestroyService) do |destroy_service|
+ expect(destroy_service).to receive(:execute).once.and_return(true)
+ end
- it 'returns the user with attached errors' do
- expect(service.execute(user)).to be(user)
- expect(user.errors.full_messages).to eq([
- 'You must transfer ownership or delete groups before you can remove user'
- ])
- end
+ service.execute(user)
- it 'does not delete the user' do
- service.execute(user)
- expect(User.find(user.id)).to eq user
+ expect { Project.find(project.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
end
- end
- context "deletions with solo owned groups" do
- let(:solo_owned) { create(:group) }
- let(:member) { create(:group_member) }
- let(:user) { member.user }
+ context "a deleted user's issues" do
+ let(:project) { create(:project) }
- before do
- solo_owned.group_members = [member]
- service.execute(user, delete_solo_owned_groups: true)
- end
+ before do
+ project.add_developer(user)
+ end
- it 'deletes solo owned groups' do
- expect { Project.find(solo_owned.id) }.to raise_error(ActiveRecord::RecordNotFound)
- end
+ context "for an issue the user was assigned to" do
+ let!(:issue) { create(:issue, project: project, assignees: [user]) }
- it 'deletes the user' do
- expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
- end
- end
+ before do
+ service.execute(user)
+ end
- context 'deletions with inherited group owners' do
- let(:group) { create(:group, :nested) }
- let(:user) { create(:user) }
- let(:inherited_owner) { create(:user) }
+ it 'does not delete issues the user is assigned to' do
+ expect(Issue.find_by_id(issue.id)).to be_present
+ end
- before do
- group.parent.add_owner(inherited_owner)
- group.add_owner(user)
+ it 'migrates the issue so that it is "Unassigned"' do
+ migrated_issue = Issue.find_by_id(issue.id)
- service.execute(user, delete_solo_owned_groups: true)
+ expect(migrated_issue.assignees).to be_empty
+ end
+ end
end
- it 'does not delete the group' do
- expect(Group.exists?(id: group)).to be_truthy
- end
+ context "a deleted user's merge_requests" do
+ let(:project) { create(:project, :repository) }
- it 'deletes the user' do
- expect(User.exists?(id: user)).to be_falsey
- end
- end
+ before do
+ project.add_developer(user)
+ end
- context 'migrating associated records' do
- let!(:issue) { create(:issue, author: user) }
+ context "for an merge request the user was assigned to" do
+ let!(:merge_request) { create(:merge_request, source_project: project, assignees: [user]) }
+
+ before do
+ service.execute(user)
+ end
- it 'delegates to the `MigrateToGhostUser` service to move associated records to the ghost user' do
- expect_any_instance_of(Users::MigrateToGhostUserService).to receive(:execute).once.and_call_original
+ it 'does not delete merge requests the user is assigned to' do
+ expect(MergeRequest.find_by_id(merge_request.id)).to be_present
+ end
- service.execute(user)
+ it 'migrates the merge request so that it is "Unassigned"' do
+ migrated_merge_request = MergeRequest.find_by_id(merge_request.id)
- expect(issue.reload.author).to be_ghost
+ expect(migrated_merge_request.assignees).to be_empty
+ end
+ end
end
- context 'when hard_delete option is given' do
- it 'will not ghost certain records' do
+ context 'migrating associated records' do
+ let!(:issue) { create(:issue, author: user) }
+
+ it 'delegates to the `MigrateToGhostUser` service to move associated records to the ghost user' do
expect_any_instance_of(Users::MigrateToGhostUserService).to receive(:execute).once.and_call_original
- service.execute(user, hard_delete: true)
+ service.execute(user)
- expect(Issue.exists?(issue.id)).to be_falsy
+ expect(issue.reload.author).to be_ghost
end
- end
- end
- describe "user personal's repository removal" do
- context 'storages' do
- before do
- perform_enqueued_jobs { service.execute(user) }
- end
+ context 'when hard_delete option is given' do
+ it 'will not ghost certain records' do
+ expect_any_instance_of(Users::MigrateToGhostUserService).to receive(:execute).once.and_call_original
- context 'legacy storage' do
- let!(:project) { create(:project, :empty_repo, :legacy_storage, namespace: user.namespace) }
+ service.execute(user, hard_delete: true)
- it 'removes repository' do
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
+ expect(Issue.exists?(issue.id)).to be_falsy
end
end
+ end
+ end
- context 'hashed storage' do
- let!(:project) { create(:project, :empty_repo, namespace: user.namespace) }
+ describe "Deletion permission checks" do
+ it 'does not delete the user when user is not an admin' do
+ other_user = create(:user)
- it 'removes repository' do
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
- end
+ expect { described_class.new(other_user).execute(user) }.to raise_error(Gitlab::Access::AccessDeniedError)
+ expect(User.exists?(user.id)).to be(true)
+ end
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'allows admins to delete anyone' do
+ described_class.new(admin).execute(user)
+
+ expect(User.exists?(user.id)).to be(false)
end
end
- context 'repository removal status is taken into account' do
- it 'raises exception' do
- expect_next_instance_of(::Projects::DestroyService) do |destroy_service|
- expect(destroy_service).to receive(:execute).and_return(false)
- end
+ context 'when admin mode is disabled' do
+ it 'disallows admins to delete anyone' do
+ expect { described_class.new(admin).execute(user) }.to raise_error(Gitlab::Access::AccessDeniedError)
- expect { service.execute(user) }
- .to raise_error(Users::DestroyService::DestroyError, "Project #{project.id} can't be deleted" )
+ expect(User.exists?(user.id)).to be(true)
end
end
- end
- describe "calls the before/after callbacks" do
- it 'of project_members' do
- expect_any_instance_of(ProjectMember).to receive(:run_callbacks).with(:find).once
- expect_any_instance_of(ProjectMember).to receive(:run_callbacks).with(:initialize).once
- expect_any_instance_of(ProjectMember).to receive(:run_callbacks).with(:destroy).once
+ it 'allows users to delete their own account' do
+ described_class.new(user).execute(user)
- service.execute(user)
+ expect(User.exists?(user.id)).to be(false)
end
- it 'of group_members' do
- group_member = create(:group_member)
- group_member.group.group_members.create!(user: user, access_level: 40)
+ it 'allows user to be deleted if skip_authorization: true' do
+ other_user = create(:user)
- expect_any_instance_of(GroupMember).to receive(:run_callbacks).with(:find).once
- expect_any_instance_of(GroupMember).to receive(:run_callbacks).with(:initialize).once
- expect_any_instance_of(GroupMember).to receive(:run_callbacks).with(:destroy).once
+ described_class.new(user).execute(other_user, skip_authorization: true)
- service.execute(user)
+ expect(User.exists?(other_user.id)).to be(false)
end
end
- end
-
- describe "Deletion permission checks" do
- it 'does not delete the user when user is not an admin' do
- other_user = create(:user)
- expect { described_class.new(other_user).execute(user) }.to raise_error(Gitlab::Access::AccessDeniedError)
- expect(User.exists?(user.id)).to be(true)
- end
+ context 'batched nullify' do
+ let(:other_user) { create(:user) }
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'allows admins to delete anyone' do
- described_class.new(admin).execute(user)
+ it 'nullifies related associations in batches' do
+ expect(other_user).to receive(:nullify_dependent_associations_in_batches).and_call_original
- expect(User.exists?(user.id)).to be(false)
+ described_class.new(user).execute(other_user, skip_authorization: true)
end
- end
- context 'when admin mode is disabled' do
- it 'disallows admins to delete anyone' do
- expect { described_class.new(admin).execute(user) }.to raise_error(Gitlab::Access::AccessDeniedError)
+ it 'nullifies last_updated_issues, closed_issues, resource_label_events' do
+ issue = create(:issue, closed_by: other_user, updated_by: other_user)
+ resource_label_event = create(:resource_label_event, user: other_user)
- expect(User.exists?(user.id)).to be(true)
- end
- end
+ described_class.new(user).execute(other_user, skip_authorization: true)
- it 'allows users to delete their own account' do
- described_class.new(user).execute(user)
+ issue.reload
+ resource_label_event.reload
- expect(User.exists?(user.id)).to be(false)
+ expect(issue.closed_by).to be_nil
+ expect(issue.updated_by).to be_nil
+ expect(resource_label_event.user).to be_nil
+ end
end
+ end
- it 'allows user to be deleted if skip_authorization: true' do
- other_user = create(:user)
-
- described_class.new(user).execute(other_user, skip_authorization: true)
+ context 'when user_destroy_with_limited_execution_time_worker is enabled' do
+ include_examples 'pre-migrate clean-up'
- expect(User.exists?(other_user.id)).to be(false)
+ describe "Deletes a user and all their personal projects", :enable_admin_mode do
+ context 'no options are given' do
+ it 'creates GhostUserMigration record to handle migration in a worker' do
+ expect { service.execute(user) }
+ .to(
+ change do
+ Users::GhostUserMigration.where(user: user,
+ initiator_user: admin)
+ .exists?
+ end.from(false).to(true))
+ end
+ end
end
- end
- context 'batched nullify' do
- let(:other_user) { create(:user) }
+ describe "Deletion permission checks" do
+ it 'does not delete the user when user is not an admin' do
+ other_user = create(:user)
- it 'nullifies related associations in batches' do
- expect(other_user).to receive(:nullify_dependent_associations_in_batches).and_call_original
+ expect { described_class.new(other_user).execute(user) }.to raise_error(Gitlab::Access::AccessDeniedError)
- described_class.new(user).execute(other_user, skip_authorization: true)
- end
+ expect(Users::GhostUserMigration).not_to be_exists
+ end
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'allows admins to delete anyone' do
+ expect { described_class.new(admin).execute(user) }
+ .to(
+ change do
+ Users::GhostUserMigration.where(user: user,
+ initiator_user: admin)
+ .exists?
+ end.from(false).to(true))
+ end
+ end
- it 'nullifies last_updated_issues, closed_issues, resource_label_events' do
- issue = create(:issue, closed_by: other_user, updated_by: other_user)
- resource_label_event = create(:resource_label_event, user: other_user)
+ context 'when admin mode is disabled' do
+ it 'disallows admins to delete anyone' do
+ expect { described_class.new(admin).execute(user) }.to raise_error(Gitlab::Access::AccessDeniedError)
- described_class.new(user).execute(other_user, skip_authorization: true)
+ expect(Users::GhostUserMigration).not_to be_exists
+ end
+ end
- issue.reload
- resource_label_event.reload
+ it 'allows users to delete their own account' do
+ expect { described_class.new(user).execute(user) }
+ .to(
+ change do
+ Users::GhostUserMigration.where(user: user,
+ initiator_user: user)
+ .exists?
+ end.from(false).to(true))
+ end
- expect(issue.closed_by).to be_nil
- expect(issue.updated_by).to be_nil
- expect(resource_label_event.user).to be_nil
+ it 'allows user to be deleted if skip_authorization: true' do
+ other_user = create(:user)
+
+ expect do
+ described_class.new(user)
+ .execute(other_user, skip_authorization: true)
+ end.to(
+ change do
+ Users::GhostUserMigration.where(user: other_user,
+ initiator_user: user )
+ .exists?
+ end.from(false).to(true))
+ end
end
end
end
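
The restructuring above splits the destroy specs on the user_destroy_with_limited_execution_time_worker feature flag: with the flag on, the service only records a Users::GhostUserMigration and a worker finishes the deletion later; with it off, the user is destroyed synchronously. A much-simplified sketch of that flag-gated split follows; the flags object, arrays and service are stand-ins, not GitLab code.

# Sketch: defer deletion behind a feature flag, otherwise delete immediately.
require 'rspec/autorun'

class DestroyService
  def initialize(flags:, migrations:, users:)
    @flags = flags
    @migrations = migrations
    @users = users
  end

  def execute(user)
    if @flags.enabled?(:user_destroy_with_limited_execution_time_worker)
      @migrations << user   # defer: a background worker picks this up later
    else
      @users.delete(user)   # destroy synchronously
    end
  end
end

RSpec.describe DestroyService do
  let(:users) { ['alice'] }
  let(:migrations) { [] }

  it 'defers deletion when the flag is enabled' do
    flags = double(enabled?: true)

    described_class.new(flags: flags, migrations: migrations, users: users).execute('alice')

    expect(migrations).to contain_exactly('alice')
    expect(users).to contain_exactly('alice')
  end

  it 'deletes immediately when the flag is disabled' do
    flags = double(enabled?: false)

    described_class.new(flags: flags, migrations: migrations, users: users).execute('alice')

    expect(migrations).to be_empty
    expect(users).to be_empty
  end
end
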
diff --git a/spec/services/users/email_verification/generate_token_service_spec.rb b/spec/services/users/email_verification/generate_token_service_spec.rb
new file mode 100644
index 00000000000..e7aa1bf8306
--- /dev/null
+++ b/spec/services/users/email_verification/generate_token_service_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::EmailVerification::GenerateTokenService do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:service) { described_class.new(attr: attr) }
+ let(:token) { 'token' }
+ let(:digest) { Devise.token_generator.digest(User, attr, token) }
+
+ describe '#execute' do
+ context 'with a valid attribute' do
+ where(:attr) { [:unlock_token, :confirmation_token] }
+
+ with_them do
+ before do
+ allow_next_instance_of(described_class) do |service|
+ allow(service).to receive(:generate_token).and_return(token)
+ end
+ end
+
+ it "returns a token and it's digest" do
+ expect(service.execute).to eq([token, digest])
+ end
+ end
+ end
+
+ context 'with an invalid attribute' do
+ let(:attr) { :xxx }
+
+ it 'raises an error' do
+ expect { service.execute }.to raise_error(ArgumentError, 'Invalid attribute')
+ end
+ end
+ end
+end
diff --git a/spec/services/users/email_verification/validate_token_service_spec.rb b/spec/services/users/email_verification/validate_token_service_spec.rb
new file mode 100644
index 00000000000..44af4a4d36f
--- /dev/null
+++ b/spec/services/users/email_verification/validate_token_service_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::EmailVerification::ValidateTokenService, :clean_gitlab_redis_rate_limiting do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:service) { described_class.new(attr: attr, user: user, token: token) }
+ let(:token) { 'token' }
+ let(:encrypted_token) { Devise.token_generator.digest(User, attr, token) }
+ let(:generated_at_attr) { attr == :unlock_token ? :locked_at : :confirmation_sent_at }
+ let(:token_generated_at) { 1.minute.ago }
+ let(:user) { build(:user, attr => encrypted_token, generated_at_attr => token_generated_at) }
+
+ describe '#execute' do
+ context 'with a valid attribute' do
+ where(:attr) { [:unlock_token, :confirmation_token] }
+
+ with_them do
+ context 'when successful' do
+ it 'returns a success status' do
+ expect(service.execute).to eq(status: :success)
+ end
+ end
+
+ context 'when rate limited' do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?)
+ .with(:email_verification, scope: encrypted_token).and_return(true)
+ end
+
+ it 'returns a failure status' do
+ expect(service.execute).to eq(
+ {
+ status: :failure,
+ reason: :rate_limited,
+ message: format(s_("IdentityVerification|You've reached the maximum amount of tries. "\
+ 'Wait %{interval} or send a new code and try again.'), interval: '10 minutes')
+ }
+ )
+ end
+ end
+
+ context 'when expired' do
+ let(:token_generated_at) { 2.hours.ago }
+
+ it 'returns a failure status' do
+ expect(service.execute).to eq(
+ {
+ status: :failure,
+ reason: :expired,
+ message: s_('IdentityVerification|The code has expired. Send a new code and try again.')
+ }
+ )
+ end
+ end
+
+ context 'when invalid' do
+ let(:encrypted_token) { 'xxx' }
+
+ it 'returns a failure status' do
+ expect(service.execute).to eq(
+ {
+ status: :failure,
+ reason: :invalid,
+ message: s_('IdentityVerification|The code is incorrect. Enter it again, or send a new code.')
+ }
+ )
+ end
+ end
+
+ context 'when encrypted token was not set and a blank token is provided' do
+ let(:encrypted_token) { nil }
+ let(:token) { '' }
+
+ it 'returns a failure status' do
+ expect(service.execute).to eq(
+ {
+ status: :failure,
+ reason: :invalid,
+ message: s_('IdentityVerification|The code is incorrect. Enter it again, or send a new code.')
+ }
+ )
+ end
+ end
+ end
+ end
+
+ context 'with an invalid attribute' do
+ let(:attr) { :username }
+
+ it 'raises an error' do
+ expect { service.execute }.to raise_error(ArgumentError, 'Invalid attribute')
+ end
+ end
+ end
+end
diff --git a/spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb
new file mode 100644
index 00000000000..7366b1646b9
--- /dev/null
+++ b/spec/services/users/migrate_records_to_ghost_user_in_batches_service_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::MigrateRecordsToGhostUserInBatchesService do
+ let(:service) { described_class.new }
+
+ let_it_be(:ghost_user_migration) { create(:ghost_user_migration) }
+
+ describe '#execute' do
+ it 'stops when execution time limit reached' do
+ expect_next_instance_of(::Gitlab::Utils::ExecutionTracker) do |tracker|
+ expect(tracker).to receive(:over_limit?).and_return(true)
+ end
+
+ expect(Users::MigrateRecordsToGhostUserService).not_to receive(:new)
+
+ service.execute
+ end
+
+ it 'calls Users::MigrateRecordsToGhostUserService' do
+ expect_next_instance_of(Users::MigrateRecordsToGhostUserService) do |service|
+ expect(service).to(
+ receive(:execute)
+ .with(hard_delete: ghost_user_migration.hard_delete))
+ end
+
+ service.execute
+ end
+ end
+end
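
The spec above checks that batch migration stops once ExecutionTracker#over_limit? flips to true. A compact sketch of that time-boxed loop, with the queue, tracker and runner as stand-ins for the GitLab classes:

# Sketch: process queued items until the execution budget is spent.
require 'rspec/autorun'

class BatchRunner
  def initialize(queue, tracker)
    @queue = queue
    @tracker = tracker
  end

  def execute
    until @queue.empty?
      break if @tracker.over_limit?

      yield @queue.shift
    end
  end
end

RSpec.describe BatchRunner do
  it 'stops once the execution budget is exhausted' do
    tracker = double('ExecutionTracker')
    allow(tracker).to receive(:over_limit?).and_return(false, true)
    processed = []

    described_class.new([1, 2, 3], tracker).execute { |item| processed << item }

    expect(processed).to eq([1])
  end
end
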
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
new file mode 100644
index 00000000000..766be51ae13
--- /dev/null
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -0,0 +1,259 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::MigrateRecordsToGhostUserService do
+ let!(:user) { create(:user) }
+ let(:service) { described_class.new(user, admin, execution_tracker) }
+ let(:execution_tracker) { instance_double(::Gitlab::Utils::ExecutionTracker, over_limit?: false) }
+
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ context "when migrating a user's associated records to the ghost user" do
+ context 'for issues' do
+ context 'when deleted user is present as both author and edited_user' do
+ include_examples 'migrating records to the ghost user', Issue, [:author, :last_edited_by] do
+ let(:created_record) do
+ create(:issue, project: project, author: user, last_edited_by: user)
+ end
+ end
+ end
+
+ context 'when deleted user is present only as edited_user' do
+ include_examples 'migrating records to the ghost user', Issue, [:last_edited_by] do
+ let(:created_record) { create(:issue, project: project, author: create(:user), last_edited_by: user) }
+ end
+ end
+
+ context "when deleted user is the assignee" do
+ let!(:issue) { create(:issue, project: project, assignees: [user]) }
+
+ it 'migrates the issue so that it is "Unassigned"' do
+ service.execute
+
+ migrated_issue = Issue.find_by_id(issue.id)
+ expect(migrated_issue).to be_present
+ expect(migrated_issue.assignees).to be_empty
+ end
+ end
+ end
+
+ context 'for merge requests' do
+ context 'when deleted user is present as both author and merge_user' do
+ include_examples 'migrating records to the ghost user', MergeRequest, [:author, :merge_user] do
+ let(:created_record) do
+ create(:merge_request, source_project: project,
+ author: user,
+ merge_user: user,
+ target_branch: "first")
+ end
+ end
+ end
+
+ context 'when deleted user is present only as both merge_user' do
+ include_examples 'migrating records to the ghost user', MergeRequest, [:merge_user] do
+ let(:created_record) do
+ create(:merge_request, source_project: project,
+ merge_user: user,
+ target_branch: "first")
+ end
+ end
+ end
+
+ context "when deleted user is the assignee" do
+ let!(:merge_request) { create(:merge_request, source_project: project, assignees: [user]) }
+
+ it 'migrates the merge request so that it is "Unassigned"' do
+ service.execute
+
+ migrated_merge_request = MergeRequest.find_by_id(merge_request.id)
+ expect(migrated_merge_request).to be_present
+ expect(migrated_merge_request.assignees).to be_empty
+ end
+ end
+ end
+
+ context 'for notes' do
+ include_examples 'migrating records to the ghost user', Note do
+ let(:created_record) { create(:note, project: project, author: user) }
+ end
+ end
+
+ context 'for abuse reports' do
+ include_examples 'migrating records to the ghost user', AbuseReport do
+ let(:created_record) { create(:abuse_report, reporter: user, user: create(:user)) }
+ end
+ end
+
+ context 'for award emoji' do
+ include_examples 'migrating records to the ghost user', AwardEmoji, [:user] do
+ let(:created_record) { create(:award_emoji, user: user) }
+
+ context "when the awardable already has an award emoji of the same name assigned to the ghost user" do
+ let(:awardable) { create(:issue) }
+ let!(:existing_award_emoji) { create(:award_emoji, user: User.ghost, name: "thumbsup", awardable: awardable) }
+ let!(:award_emoji) { create(:award_emoji, user: user, name: "thumbsup", awardable: awardable) }
+
+ it "migrates the award emoji regardless" do
+ service.execute
+
+ migrated_record = AwardEmoji.find_by_id(award_emoji.id)
+
+ expect(migrated_record.user).to eq(User.ghost)
+ end
+
+ it "does not leave the migrated award emoji in an invalid state" do
+ service.execute
+
+ migrated_record = AwardEmoji.find_by_id(award_emoji.id)
+
+ expect(migrated_record).to be_valid
+ end
+ end
+ end
+ end
+
+ context 'for snippets' do
+ include_examples 'migrating records to the ghost user', Snippet do
+ let(:created_record) { create(:snippet, project: project, author: user) }
+ end
+ end
+
+ context 'for reviews' do
+ include_examples 'migrating records to the ghost user', Review, [:author] do
+ let(:created_record) { create(:review, author: user) }
+ end
+ end
+ end
+
+ context 'on post-migrate cleanups' do
+ it 'destroys the user and personal namespace' do
+ namespace = user.namespace
+
+ allow(user).to receive(:destroy).and_call_original
+
+ service.execute
+
+ expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { Namespace.find(namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'deletes user associations in batches' do
+ expect(user).to receive(:destroy_dependent_associations_in_batches)
+
+ service.execute
+ end
+
+ context 'for batched nullify' do
+ it 'nullifies related associations in batches' do
+ expect(user).to receive(:nullify_dependent_associations_in_batches).and_call_original
+
+ service.execute
+ end
+
+ it 'nullifies last_updated_issues, closed_issues, resource_label_events' do
+ issue = create(:issue, closed_by: user, updated_by: user)
+ resource_label_event = create(:resource_label_event, user: user)
+
+ service.execute
+
+ issue.reload
+ resource_label_event.reload
+
+ expect(issue.closed_by).to be_nil
+ expect(issue.updated_by).to be_nil
+ expect(resource_label_event.user).to be_nil
+ end
+ end
+
+ context 'for snippets' do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+
+ it 'does not include snippets when deleting in batches' do
+ expect(user).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:snippets] })
+
+ service.execute
+ end
+
+ it 'calls the bulk snippet destroy service for the user personal snippets' do
+ repo1 = create(:personal_snippet, :repository, author: user).snippet_repository
+ repo2 = create(:project_snippet, :repository, project: project, author: user).snippet_repository
+
+ aggregate_failures do
+ expect(gitlab_shell.repository_exists?(repo1.shard_name, "#{repo1.disk_path}.git")).to be(true)
+ expect(gitlab_shell.repository_exists?(repo2.shard_name, "#{repo2.disk_path}.git")).to be(true)
+ end
+
+ # Unlike Users::DestroyService, this service does not bulk-destroy project snippets
+ expect(Snippets::BulkDestroyService).not_to(
+ receive(:new).with(admin, project.snippets).and_call_original)
+
+ # Call to remove user personal snippets and for
+ # project snippets where projects are not user personal
+ # ones
+ expect(Snippets::BulkDestroyService).to(
+ receive(:new).with(admin, user.snippets.only_personal_snippets).and_call_original)
+
+ service.execute
+
+ aggregate_failures do
+ expect(gitlab_shell.repository_exists?(repo1.shard_name, "#{repo1.disk_path}.git")).to be(false)
+ expect(gitlab_shell.repository_exists?(repo2.shard_name, "#{repo2.disk_path}.git")).to be(true)
+ end
+ end
+
+ it 'calls the bulk snippet destroy service with hard delete option if it is present' do
+ # this avoids getting into Projects::DestroyService as it would
+ # call Snippets::BulkDestroyService first!
+ allow(user).to receive(:personal_projects).and_return([])
+
+ expect_next_instance_of(Snippets::BulkDestroyService) do |bulk_destroy_service|
+ expect(bulk_destroy_service).to receive(:execute).with({ skip_authorization: true }).and_call_original
+ end
+
+ service.execute(hard_delete: true)
+ end
+
+ it 'does not delete project snippets that the user is the author of' do
+ repo = create(:project_snippet, :repository, author: user).snippet_repository
+
+ service.execute
+
+ expect(gitlab_shell.repository_exists?(repo.shard_name, "#{repo.disk_path}.git")).to be(true)
+ expect(User.ghost.snippets).to include(repo.snippet)
+ end
+
+ context 'when an error is raised deleting snippets' do
+ it 'does not delete user' do
+ snippet = create(:personal_snippet, :repository, author: user)
+
+ bulk_service = double
+ allow(Snippets::BulkDestroyService).to receive(:new).and_call_original
+ allow(Snippets::BulkDestroyService).to receive(:new).with(admin, user.snippets).and_return(bulk_service)
+ allow(bulk_service).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
+
+ aggregate_failures do
+ expect { service.execute }.to(
+ raise_error(Users::MigrateRecordsToGhostUserService::DestroyError, 'foo' ))
+ expect(snippet.reload).not_to be_nil
+ expect(
+ gitlab_shell.repository_exists?(snippet.repository_storage,
+ "#{snippet.disk_path}.git")
+ ).to be(true)
+ end
+ end
+ end
+ end
+
+ context 'when hard_delete option is given' do
+ it 'will not ghost certain records' do
+ issue = create(:issue, author: user)
+
+ service.execute(hard_delete: true)
+
+ expect(Issue).not_to exist(issue.id)
+ end
+ end
+ end
+end
diff --git a/spec/services/users/reject_service_spec.rb b/spec/services/users/reject_service_spec.rb
index 5a243e876ac..abff6b1e023 100644
--- a/spec/services/users/reject_service_spec.rb
+++ b/spec/services/users/reject_service_spec.rb
@@ -35,11 +35,29 @@ RSpec.describe Users::RejectService do
context 'success' do
context 'when the executor user is an admin in admin mode', :enable_admin_mode do
- it 'deletes the user', :sidekiq_inline do
- subject
+ context 'when user_destroy_with_limited_execution_time_worker is enabled' do
+ it 'initiates user removal', :sidekiq_inline do
+ subject
+
+ expect(subject[:status]).to eq(:success)
+ expect(
+ Users::GhostUserMigration.where(user: user,
+ initiator_user: current_user)
+ ).to be_exists
+ end
+ end
- expect(subject[:status]).to eq(:success)
- expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ context 'when user_destroy_with_limited_execution_time_worker is disabled' do
+ before do
+ stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
+ end
+
+ it 'deletes the user', :sidekiq_inline do
+ subject
+
+ expect(subject[:status]).to eq(:success)
+ expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
end
it 'emails the user on rejection' do
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 2e0b0051495..e8b82b0b4f2 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -54,6 +54,12 @@ RSpec.describe WorkItems::UpdateService do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_title_changed_action)
expect(update_work_item[:status]).to eq(:success)
end
+
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
+ let(:user) { current_user }
+ subject(:service_action) { update_work_item[:status] }
+ end
end
context 'when title is not changed' do
@@ -64,6 +70,12 @@ RSpec.describe WorkItems::UpdateService do
expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_title_changed_action)
expect(update_work_item[:status]).to eq(:success)
end
+
+ it 'does not emit Snowplow event', :snowplow do
+ expect_no_snowplow_event
+
+ update_work_item
+ end
end
context 'when dates are changed' do
diff --git a/spec/services/work_items/widgets/description_service/update_service_spec.rb b/spec/services/work_items/widgets/description_service/update_service_spec.rb
index 582d9dc85f7..4275950e720 100644
--- a/spec/services/work_items/widgets/description_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/description_service/update_service_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService do
let(:current_user) { author }
let(:work_item) do
create(:work_item, author: author, project: project, description: 'old description',
- last_edited_at: Date.yesterday, last_edited_by: random_user
+ last_edited_at: Date.yesterday, last_edited_by: random_user
)
end