gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/admin/propagate_integration_service_spec.rb | 4
-rw-r--r--  spec/services/authorized_project_update/project_access_changed_service_spec.rb | 21
-rw-r--r--  spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb | 4
-rw-r--r--  spec/services/award_emojis/base_service_spec.rb | 25
-rw-r--r--  spec/services/bulk_create_integration_service_spec.rb | 21
-rw-r--r--  spec/services/bulk_update_integration_service_spec.rb | 41
-rw-r--r--  spec/services/ci/create_pipeline_service/include_spec.rb | 89
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 4
-rw-r--r--  spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb | 22
-rw-r--r--  spec/services/ci/generate_kubeconfig_service_spec.rb | 50
-rw-r--r--  spec/services/ci/job_artifacts/create_service_spec.rb | 14
-rw-r--r--  spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb | 63
-rw-r--r--  spec/services/ci/job_artifacts/destroy_batch_service_spec.rb | 3
-rw-r--r--  spec/services/ci/parse_dotenv_artifact_service_spec.rb | 6
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 31
-rw-r--r--  spec/services/ci/unlock_artifacts_service_spec.rb | 280
-rw-r--r--  spec/services/ci/update_build_state_service_spec.rb | 22
-rw-r--r--  spec/services/clusters/agents/refresh_authorization_service_spec.rb | 10
-rw-r--r--  spec/services/clusters/cleanup/project_namespace_service_spec.rb | 13
-rw-r--r--  spec/services/clusters/cleanup/service_account_service_spec.rb | 8
-rw-r--r--  spec/services/clusters/integrations/prometheus_health_check_service_spec.rb (renamed from spec/services/clusters/applications/prometheus_health_check_service_spec.rb) | 32
-rw-r--r--  spec/services/dependency_proxy/find_or_create_blob_service_spec.rb | 2
-rw-r--r--  spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb | 66
-rw-r--r--  spec/services/dependency_proxy/head_manifest_service_spec.rb | 2
-rw-r--r--  spec/services/dependency_proxy/pull_manifest_service_spec.rb | 2
-rw-r--r--  spec/services/deployments/archive_in_project_service_spec.rb | 80
-rw-r--r--  spec/services/deployments/link_merge_requests_service_spec.rb | 13
-rw-r--r--  spec/services/emails/create_service_spec.rb | 5
-rw-r--r--  spec/services/emails/destroy_service_spec.rb | 10
-rw-r--r--  spec/services/error_tracking/collect_error_service_spec.rb | 31
-rw-r--r--  spec/services/google_cloud/service_accounts_service_spec.rb | 58
-rw-r--r--  spec/services/groups/create_service_spec.rb | 41
-rw-r--r--  spec/services/groups/import_export/import_service_spec.rb | 6
-rw-r--r--  spec/services/groups/transfer_service_spec.rb | 104
-rw-r--r--  spec/services/import/github/notes/create_service_spec.rb | 24
-rw-r--r--  spec/services/issues/build_service_spec.rb | 96
-rw-r--r--  spec/services/issues/close_service_spec.rb | 44
-rw-r--r--  spec/services/issues/create_service_spec.rb | 79
-rw-r--r--  spec/services/issues/set_crm_contacts_service_spec.rb | 162
-rw-r--r--  spec/services/issues/update_service_spec.rb | 42
-rw-r--r--  spec/services/labels/transfer_service_spec.rb | 156
-rw-r--r--  spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb | 119
-rw-r--r--  spec/services/loose_foreign_keys/cleaner_service_spec.rb | 147
-rw-r--r--  spec/services/members/create_service_spec.rb | 104
-rw-r--r--  spec/services/members/invite_service_spec.rb | 5
-rw-r--r--  spec/services/merge_requests/mergeability/run_checks_service_spec.rb | 4
-rw-r--r--  spec/services/merge_requests/retarget_chain_service_spec.rb | 8
-rw-r--r--  spec/services/merge_requests/toggle_attention_requested_service_spec.rb | 128
-rw-r--r--  spec/services/namespaces/in_product_marketing_email_records_spec.rb | 55
-rw-r--r--  spec/services/namespaces/invite_team_email_service_spec.rb | 128
-rw-r--r--  spec/services/notification_service_spec.rb | 24
-rw-r--r--  spec/services/packages/create_dependency_service_spec.rb | 6
-rw-r--r--  spec/services/packages/npm/create_package_service_spec.rb | 46
-rw-r--r--  spec/services/packages/update_tags_service_spec.rb | 2
-rw-r--r--  spec/services/projects/all_issues_count_service_spec.rb | 24
-rw-r--r--  spec/services/projects/all_merge_requests_count_service_spec.rb | 30
-rw-r--r--  spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb | 133
-rw-r--r--  spec/services/projects/container_repository/cleanup_tags_service_spec.rb | 457
-rw-r--r--  spec/services/projects/create_service_spec.rb | 35
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 8
-rw-r--r--  spec/services/projects/import_export/export_service_spec.rb | 12
-rw-r--r--  spec/services/projects/lfs_pointers/lfs_download_service_spec.rb | 1
-rw-r--r--  spec/services/projects/participants_service_spec.rb | 8
-rw-r--r--  spec/services/projects/prometheus/alerts/notify_service_spec.rb | 3
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 8
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 53
-rw-r--r--  spec/services/resource_events/change_labels_service_spec.rb | 2
-rw-r--r--  spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb | 12
-rw-r--r--  spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb | 2
-rw-r--r--  spec/services/resource_events/synthetic_state_notes_builder_service_spec.rb | 11
-rw-r--r--  spec/services/security/ci_configuration/sast_iac_create_service_spec.rb | 19
-rw-r--r--  spec/services/spam/spam_verdict_service_spec.rb | 4
-rw-r--r--  spec/services/system_note_service_spec.rb | 199
-rw-r--r--  spec/services/system_notes/incident_service_spec.rb | 10
-rw-r--r--  spec/services/system_notes/issuables_service_spec.rb | 17
-rw-r--r--  spec/services/tasks_to_be_done/base_service_spec.rb | 69
-rw-r--r--  spec/services/todo_service_spec.rb | 11
-rw-r--r--  spec/services/users/update_service_spec.rb | 2
-rw-r--r--  spec/services/users/upsert_credit_card_validation_service_spec.rb | 13
79 files changed, 2666 insertions, 1039 deletions
diff --git a/spec/services/admin/propagate_integration_service_spec.rb b/spec/services/admin/propagate_integration_service_spec.rb
index 151658fe429..b379286ba4f 100644
--- a/spec/services/admin/propagate_integration_service_spec.rb
+++ b/spec/services/admin/propagate_integration_service_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe Admin::PropagateIntegrationService do
end
context 'for a group-level integration' do
- let(:group_integration) { create(:jira_integration, group: group, project: nil) }
+ let(:group_integration) { create(:jira_integration, :group, group: group) }
context 'with a project without integration' do
let(:another_project) { create(:project, group: group) }
@@ -81,7 +81,7 @@ RSpec.describe Admin::PropagateIntegrationService do
context 'with a subgroup with integration' do
let(:subgroup) { create(:group, parent: group) }
- let(:subgroup_integration) { create(:jira_integration, group: subgroup, project: nil, inherit_from_id: group_integration.id) }
+ let(:subgroup_integration) { create(:jira_integration, :group, group: subgroup, inherit_from_id: group_integration.id) }
it 'calls to PropagateIntegrationInheritDescendantWorker' do
expect(PropagateIntegrationInheritDescendantWorker).to receive(:perform_async)
diff --git a/spec/services/authorized_project_update/project_access_changed_service_spec.rb b/spec/services/authorized_project_update/project_access_changed_service_spec.rb
new file mode 100644
index 00000000000..11621055a47
--- /dev/null
+++ b/spec/services/authorized_project_update/project_access_changed_service_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuthorizedProjectUpdate::ProjectAccessChangedService do
+ describe '#execute' do
+ it 'schedules the project IDs' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_and_wait)
+ .with([[1], [2]])
+
+ described_class.new([1, 2]).execute
+ end
+
+ it 'permits non-blocking operation' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)
+ .with([[1], [2]])
+
+ described_class.new([1, 2]).execute(blocking: false)
+ end
+ end
+end
diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
index eaa5f723bec..6f28f892f00 100644
--- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
@@ -24,6 +24,10 @@ RSpec.describe AutoMerge::MergeWhenPipelineSucceedsService do
project.add_maintainer(user)
end
+ before do
+ allow(MergeWorker).to receive(:with_status).and_return(MergeWorker)
+ end
+
describe "#available_for?" do
subject { service.available_for?(mr_merge_if_green_enabled) }
diff --git a/spec/services/award_emojis/base_service_spec.rb b/spec/services/award_emojis/base_service_spec.rb
new file mode 100644
index 00000000000..e0c8fd39ad9
--- /dev/null
+++ b/spec/services/award_emojis/base_service_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AwardEmojis::BaseService do
+ let(:awardable) { build(:note) }
+ let(:current_user) { build(:user) }
+
+ describe '.initialize' do
+ subject { described_class }
+
+ it 'uses same emoji name if not an alias' do
+ emoji_name = 'horse'
+
+ expect(subject.new(awardable, emoji_name, current_user).name).to eq(emoji_name)
+ end
+
+ it 'uses emoji original name if its an alias' do
+ emoji_alias = 'small_airplane'
+ emoji_name = 'airplane_small'
+
+ expect(subject.new(awardable, emoji_alias, current_user).name).to eq(emoji_name)
+ end
+ end
+end
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
index 517222c0e69..63bdc39857c 100644
--- a/spec/services/bulk_create_integration_service_spec.rb
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe BulkCreateIntegrationService do
end
context 'integration with data fields' do
- let(:excluded_attributes) { %w[id service_id created_at updated_at] }
+ let(:excluded_attributes) { %w[id service_id integration_id created_at updated_at] }
it 'updates the data fields from inherited integrations' do
described_class.new(integration, batch, association).execute
@@ -74,7 +74,7 @@ RSpec.describe BulkCreateIntegrationService do
context 'with a project association' do
let!(:project) { create(:project, group: group) }
- let(:integration) { create(:jira_integration, group: group, project: nil) }
+ let(:integration) { create(:jira_integration, :group, group: group) }
let(:created_integration) { project.jira_integration }
let(:batch) { Project.where(id: Project.minimum(:id)..Project.maximum(:id)).without_integration(integration).in_namespace(integration.group.self_and_descendants) }
let(:association) { 'project' }
@@ -82,11 +82,19 @@ RSpec.describe BulkCreateIntegrationService do
it_behaves_like 'creates integration from batch ids'
it_behaves_like 'updates inherit_from_id'
+
+ context 'with different foreign key of data_fields' do
+ let(:integration) { create(:zentao_integration, :group, group: group) }
+ let(:created_integration) { project.zentao_integration }
+
+ it_behaves_like 'creates integration from batch ids'
+ it_behaves_like 'updates inherit_from_id'
+ end
end
context 'with a group association' do
let!(:subgroup) { create(:group, parent: group) }
- let(:integration) { create(:jira_integration, group: group, project: nil, inherit_from_id: instance_integration.id) }
+ let(:integration) { create(:jira_integration, :group, group: group, inherit_from_id: instance_integration.id) }
let(:created_integration) { Integration.find_by(group: subgroup) }
let(:batch) { Group.where(id: subgroup.id) }
let(:association) { 'group' }
@@ -94,6 +102,13 @@ RSpec.describe BulkCreateIntegrationService do
it_behaves_like 'creates integration from batch ids'
it_behaves_like 'updates inherit_from_id'
+
+ context 'with different foreign key of data_fields' do
+ let(:integration) { create(:zentao_integration, :group, group: group, inherit_from_id: instance_integration.id) }
+
+ it_behaves_like 'creates integration from batch ids'
+ it_behaves_like 'updates inherit_from_id'
+ end
end
end
end
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index c10a9b75648..5e521b98482 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -16,32 +16,19 @@ RSpec.describe BulkUpdateIntegrationService do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
- let_it_be(:group_integration) do
- Integrations::Jira.create!(
- group: group,
- url: 'http://group.jira.com'
- )
- end
-
+ let_it_be(:group_integration) { create(:jira_integration, :group, group: group, url: 'http://group.jira.com') }
+ let_it_be(:excluded_integration) { create(:jira_integration, :group, group: create(:group), url: 'http://another.jira.com', push_events: false) }
let_it_be(:subgroup_integration) do
- Integrations::Jira.create!(
- inherit_from_id: group_integration.id,
+ create(:jira_integration, :group,
group: subgroup,
+ inherit_from_id: group_integration.id,
url: 'http://subgroup.jira.com',
push_events: true
)
end
- let_it_be(:excluded_integration) do
- Integrations::Jira.create!(
- group: create(:group),
- url: 'http://another.jira.com',
- push_events: false
- )
- end
-
let_it_be(:integration) do
- Integrations::Jira.create!(
+ create(:jira_integration,
project: create(:project, group: subgroup),
inherit_from_id: subgroup_integration.id,
url: 'http://project.jira.com',
@@ -88,4 +75,22 @@ RSpec.describe BulkUpdateIntegrationService do
described_class.new(group_integration, [integration]).execute
end.to change { integration.reload.url }.to(group_integration.url)
end
+
+ context 'with different foreign key of data_fields' do
+ let(:integration) { create(:zentao_integration, project: create(:project, group: group)) }
+ let(:group_integration) do
+ create(:zentao_integration, :group,
+ group: group,
+ url: 'https://group.zentao.net',
+ api_token: 'GROUP_TOKEN',
+ zentao_product_xid: '1'
+ )
+ end
+
+ it 'works with batch as an array of ActiveRecord objects' do
+ expect do
+ described_class.new(group_integration, [integration]).execute
+ end.to change { integration.reload.url }.to(group_integration.url)
+ end
+ end
end
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
index 5e7dace8e15..aa01977272a 100644
--- a/spec/services/ci/create_pipeline_service/include_spec.rb
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -7,9 +7,11 @@ RSpec.describe Ci::CreatePipelineService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
- let(:ref) { 'refs/heads/master' }
- let(:source) { :push }
- let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:ref) { 'refs/heads/master' }
+ let(:variables_attributes) { [{ key: 'MYVAR', secret_value: 'hello' }] }
+ let(:source) { :push }
+
+ let(:service) { described_class.new(project, user, { ref: ref, variables_attributes: variables_attributes }) }
let(:pipeline) { service.execute(source).payload }
let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
@@ -24,6 +26,20 @@ RSpec.describe Ci::CreatePipelineService do
.and_return(File.read(Rails.root.join(file_location)))
end
+ shared_examples 'not including the file' do
+ it 'does not include the job in the file' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.processables.pluck(:name)).to contain_exactly('job')
+ end
+ end
+
+ shared_examples 'including the file' do
+ it 'includes the job in the file' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
+ end
+ end
+
context 'with a local file' do
let(:config) do
<<~EOY
@@ -33,13 +49,10 @@ RSpec.describe Ci::CreatePipelineService do
EOY
end
- it 'includes the job in the file' do
- expect(pipeline).to be_created_successfully
- expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
- end
+ it_behaves_like 'including the file'
end
- context 'with a local file with rules' do
+ context 'with a local file with rules with a project variable' do
let(:config) do
<<~EOY
include:
@@ -54,19 +67,63 @@ RSpec.describe Ci::CreatePipelineService do
context 'when the rules matches' do
let(:project_id) { project.id }
- it 'includes the job in the file' do
- expect(pipeline).to be_created_successfully
- expect(pipeline.processables.pluck(:name)).to contain_exactly('job', 'rspec')
- end
+ it_behaves_like 'including the file'
end
context 'when the rules does not match' do
let(:project_id) { non_existing_record_id }
- it 'does not include the job in the file' do
- expect(pipeline).to be_created_successfully
- expect(pipeline.processables.pluck(:name)).to contain_exactly('job')
- end
+ it_behaves_like 'not including the file'
+ end
+ end
+
+ context 'with a local file with rules with a predefined pipeline variable' do
+ let(:config) do
+ <<~EOY
+ include:
+ - local: #{file_location}
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "#{pipeline_source}"
+ job:
+ script: exit 0
+ EOY
+ end
+
+ context 'when the rules matches' do
+ let(:pipeline_source) { 'push' }
+
+ it_behaves_like 'including the file'
+ end
+
+ context 'when the rules does not match' do
+ let(:pipeline_source) { 'web' }
+
+ it_behaves_like 'not including the file'
+ end
+ end
+
+ context 'with a local file with rules with a run pipeline variable' do
+ let(:config) do
+ <<~EOY
+ include:
+ - local: #{file_location}
+ rules:
+ - if: $MYVAR == "#{my_var}"
+ job:
+ script: exit 0
+ EOY
+ end
+
+ context 'when the rules matches' do
+ let(:my_var) { 'hello' }
+
+ it_behaves_like 'including the file'
+ end
+
+ context 'when the rules does not match' do
+ let(:my_var) { 'mello' }
+
+ it_behaves_like 'not including the file'
end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 78646665539..c78e19ea62d 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Ci::CreatePipelineService do
include ProjectForksHelper
- let_it_be(:project, reload: true) { create(:project, :repository) }
- let_it_be(:user, reload: true) { project.owner }
+ let_it_be_with_refind(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:user) { project.owner }
let(:ref_name) { 'refs/heads/master' }
diff --git a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
index 04d75630295..d5881d3b204 100644
--- a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
+++ b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
@@ -26,28 +26,6 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
pull_request.update!(source_branch: source_branch.name, source_sha: source_branch.target)
end
- context 'when the FF ci_create_external_pr_pipeline_async is disabled' do
- before do
- stub_feature_flags(ci_create_external_pr_pipeline_async: false)
- end
-
- it 'creates a pipeline for external pull request', :aggregate_failures do
- pipeline = execute.payload
-
- expect(execute).to be_success
- expect(pipeline).to be_valid
- expect(pipeline).to be_persisted
- expect(pipeline).to be_external_pull_request_event
- expect(pipeline).to eq(project.ci_pipelines.last)
- expect(pipeline.external_pull_request).to eq(pull_request)
- expect(pipeline.user).to eq(user)
- expect(pipeline.status).to eq('created')
- expect(pipeline.ref).to eq(pull_request.source_branch)
- expect(pipeline.sha).to eq(pull_request.source_sha)
- expect(pipeline.source_sha).to eq(pull_request.source_sha)
- end
- end
-
it 'enqueues Ci::ExternalPullRequests::CreatePipelineWorker' do
expect { execute }
.to change { ::Ci::ExternalPullRequests::CreatePipelineWorker.jobs.count }
diff --git a/spec/services/ci/generate_kubeconfig_service_spec.rb b/spec/services/ci/generate_kubeconfig_service_spec.rb
new file mode 100644
index 00000000000..b0673d16158
--- /dev/null
+++ b/spec/services/ci/generate_kubeconfig_service_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::GenerateKubeconfigService do
+ describe '#execute' do
+ let(:project) { create(:project) }
+ let(:build) { create(:ci_build, project: project) }
+ let(:agent1) { create(:cluster_agent, project: project) }
+ let(:agent2) { create(:cluster_agent) }
+
+ let(:template) { instance_double(Gitlab::Kubernetes::Kubeconfig::Template) }
+
+ subject { described_class.new(build).execute }
+
+ before do
+ expect(Gitlab::Kubernetes::Kubeconfig::Template).to receive(:new).and_return(template)
+ expect(build.pipeline).to receive(:authorized_cluster_agents).and_return([agent1, agent2])
+ end
+
+ it 'adds a cluster, and a user and context for each available agent' do
+ expect(template).to receive(:add_cluster).with(
+ name: 'gitlab',
+ url: Gitlab::Kas.tunnel_url
+ ).once
+
+ expect(template).to receive(:add_user).with(
+ name: "agent:#{agent1.id}",
+ token: "ci:#{agent1.id}:#{build.token}"
+ )
+ expect(template).to receive(:add_user).with(
+ name: "agent:#{agent2.id}",
+ token: "ci:#{agent2.id}:#{build.token}"
+ )
+
+ expect(template).to receive(:add_context).with(
+ name: "#{project.full_path}:#{agent1.name}",
+ cluster: 'gitlab',
+ user: "agent:#{agent1.id}"
+ )
+ expect(template).to receive(:add_context).with(
+ name: "#{agent2.project.full_path}:#{agent2.name}",
+ cluster: 'gitlab',
+ user: "agent:#{agent2.id}"
+ )
+
+ expect(subject).to eq(template)
+ end
+ end
+end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index e6d9f208096..6ad3e9ceb54 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -49,6 +49,7 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.file_type).to eq(params['artifact_type'])
expect(new_artifact.file_format).to eq(params['artifact_format'])
expect(new_artifact.file_sha256).to eq(artifacts_sha256)
+ expect(new_artifact.locked).to eq(job.pipeline.locked)
end
it 'does not track the job user_id' do
@@ -75,6 +76,7 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.file_type).to eq('metadata')
expect(new_artifact.file_format).to eq('gzip')
expect(new_artifact.file_sha256).to eq(artifacts_sha256)
+ expect(new_artifact.locked).to eq(job.pipeline.locked)
end
it 'sets expiration date according to application settings' do
@@ -175,18 +177,6 @@ RSpec.describe Ci::JobArtifacts::CreateService do
hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'),
hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv'))
end
-
- context 'when ci_synchronous_artifact_parsing feature flag is disabled' do
- before do
- stub_feature_flags(ci_synchronous_artifact_parsing: false)
- end
-
- it 'does not call parse service' do
- expect(Ci::ParseDotenvArtifactService).not_to receive(:new)
-
- expect(subject[:status]).to eq(:success)
- end
- end
end
context 'when artifact_type is metrics' do
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index 7a91ad9dcc1..6761f052e18 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -16,26 +16,43 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
let_it_be(:job) { create(:ci_build, :success, pipeline: pipeline) }
context 'when artifact is expired' do
- let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
context 'with preloaded relationships' do
before do
stub_const("#{described_class}::LOOP_LIMIT", 1)
end
- it 'performs the smallest number of queries for job_artifacts' do
- log = ActiveRecord::QueryRecorder.new { subject }
+ context 'with ci_destroy_unlocked_job_artifacts feature flag disabled' do
+ before do
+ stub_feature_flags(ci_destroy_unlocked_job_artifacts: false)
+ end
+
+ it 'performs the smallest number of queries for job_artifacts' do
+ log = ActiveRecord::QueryRecorder.new { subject }
+
+ # SELECT expired ci_job_artifacts - 3 queries from each_batch
+ # PRELOAD projects, routes, project_statistics
+ # BEGIN
+ # INSERT into ci_deleted_objects
+ # DELETE loaded ci_job_artifacts
+ # DELETE security_findings -- for EE
+ # COMMIT
+ # SELECT next expired ci_job_artifacts
+
+ expect(log.count).to be_within(1).of(10)
+ end
+ end
- # SELECT expired ci_job_artifacts - 3 queries from each_batch
- # PRELOAD projects, routes, project_statistics
- # BEGIN
- # INSERT into ci_deleted_objects
- # DELETE loaded ci_job_artifacts
- # DELETE security_findings -- for EE
- # COMMIT
- # SELECT next expired ci_job_artifacts
+ context 'with ci_destroy_unlocked_job_artifacts feature flag enabled' do
+ before do
+ stub_feature_flags(ci_destroy_unlocked_job_artifacts: true)
+ end
- expect(log.count).to be_within(1).of(10)
+ it 'performs the smallest number of queries for job_artifacts' do
+ log = ActiveRecord::QueryRecorder.new { subject }
+ expect(log.count).to be_within(1).of(8)
+ end
end
end
@@ -53,7 +70,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when the artifact has a file attached to it' do
- let!(:artifact) { create(:ci_job_artifact, :expired, :zip, job: job) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, :zip, job: job, locked: job.pipeline.locked) }
it 'creates a deleted object' do
expect { subject }.to change { Ci::DeletedObject.count }.by(1)
@@ -74,7 +91,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is locked' do
- let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job, locked: locked_job.pipeline.locked) }
it 'does not destroy job artifact' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@@ -83,7 +100,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is not expired' do
- let!(:artifact) { create(:ci_job_artifact, job: job) }
+ let!(:artifact) { create(:ci_job_artifact, job: job, locked: job.pipeline.locked) }
it 'does not destroy expired job artifacts' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@@ -91,7 +108,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is permanent' do
- let!(:artifact) { create(:ci_job_artifact, expire_at: nil, job: job) }
+ let!(:artifact) { create(:ci_job_artifact, expire_at: nil, job: job, locked: job.pipeline.locked) }
it 'does not destroy expired job artifacts' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@@ -99,7 +116,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when failed to destroy artifact' do
- let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
before do
stub_const("#{described_class}::LOOP_LIMIT", 10)
@@ -135,7 +152,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when exclusive lease has already been taken by the other instance' do
- let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
before do
stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY, timeout: described_class::LOCK_TIMEOUT)
@@ -149,8 +166,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
context 'with a second artifact and batch size of 1' do
let(:second_job) { create(:ci_build, :success, pipeline: pipeline) }
- let!(:second_artifact) { create(:ci_job_artifact, :archive, expire_at: 1.day.ago, job: second_job) }
- let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
+ let!(:second_artifact) { create(:ci_job_artifact, :archive, expire_at: 1.day.ago, job: second_job, locked: job.pipeline.locked) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
@@ -206,8 +223,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when some artifacts are locked' do
- let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
- let!(:locked_artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
+ let!(:locked_artifact) { create(:ci_job_artifact, :expired, job: locked_job, locked: locked_job.pipeline.locked) }
it 'destroys only unlocked artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
@@ -216,7 +233,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when all artifacts are locked' do
- let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job, locked: locked_job.pipeline.locked) }
it 'destroys no artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(0)
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 2cedbf93d74..1cc856734fc 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -29,7 +29,8 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
it 'reports metrics for destroyed artifacts' do
expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics|
- expect(metrics).to receive(:increment_destroyed_artifacts).with(1).and_call_original
+ expect(metrics).to receive(:increment_destroyed_artifacts_count).with(1).and_call_original
+ expect(metrics).to receive(:increment_destroyed_artifacts_bytes).with(107464).and_call_original
end
execute
diff --git a/spec/services/ci/parse_dotenv_artifact_service_spec.rb b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
index 7536e04f2de..c4040a426f2 100644
--- a/spec/services/ci/parse_dotenv_artifact_service_spec.rb
+++ b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'returns error' do
expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq("Dotenv Artifact Too Big. Maximum Allowable Size: #{described_class::MAX_ACCEPTABLE_DOTENV_SIZE}")
+ expect(subject[:message]).to eq("Dotenv Artifact Too Big. Maximum Allowable Size: #{service.send(:dotenv_size_limit)}")
expect(subject[:http_status]).to eq(:bad_request)
end
end
@@ -186,7 +186,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
context 'when more than limitated variables are specified in dotenv' do
let(:blob) do
StringIO.new.tap do |s|
- (described_class::MAX_ACCEPTABLE_VARIABLES_COUNT + 1).times do |i|
+ (service.send(:dotenv_variable_limit) + 1).times do |i|
s << "KEY#{i}=VAR#{i}\n"
end
end.string
@@ -194,7 +194,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'returns error' do
expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq("Dotenv files cannot have more than #{described_class::MAX_ACCEPTABLE_VARIABLES_COUNT} variables")
+ expect(subject[:message]).to eq("Dotenv files cannot have more than #{service.send(:dotenv_variable_limit)} variables")
expect(subject[:http_status]).to eq(:bad_request)
end
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 15c88c9f657..16635c64434 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -323,6 +323,37 @@ RSpec.describe Ci::RetryBuildService do
it 'persists expanded environment name' do
expect(new_build.metadata.expanded_environment_name).to eq('production')
end
+
+ it 'does not create a new environment' do
+ expect { new_build }.not_to change { Environment.count }
+ end
+ end
+
+ context 'when build with dynamic environment is retried' do
+ let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(u) } }
+
+ let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
+
+ let!(:build) do
+ create(:ci_build, :with_deployment, environment: environment_name,
+ options: { environment: { name: environment_name } },
+ pipeline: pipeline, stage_id: stage.id, project: project,
+ user: other_developer)
+ end
+
+ it 're-uses the previous persisted environment' do
+ expect(build.persisted_environment.name).to eq("review/#{build.ref}-#{other_developer.id}")
+
+ expect(new_build.persisted_environment.name).to eq("review/#{build.ref}-#{other_developer.id}")
+ end
+
+ it 'creates a new deployment' do
+ expect { new_build }.to change { Deployment.count }.by(1)
+ end
+
+ it 'does not create a new environment' do
+ expect { new_build }.not_to change { Environment.count }
+ end
end
context 'when build has needs' do
diff --git a/spec/services/ci/unlock_artifacts_service_spec.rb b/spec/services/ci/unlock_artifacts_service_spec.rb
index 8d289a867ba..8ee07fc44c8 100644
--- a/spec/services/ci/unlock_artifacts_service_spec.rb
+++ b/spec/services/ci/unlock_artifacts_service_spec.rb
@@ -3,93 +3,247 @@
require 'spec_helper'
RSpec.describe Ci::UnlockArtifactsService do
- describe '#execute' do
- subject(:execute) { described_class.new(pipeline.project, pipeline.user).execute(ci_ref, before_pipeline) }
+ using RSpec::Parameterized::TableSyntax
+
+ where(:tag, :ci_update_unlocked_job_artifacts) do
+ false | false
+ false | true
+ true | false
+ true | true
+ end
+
+ with_them do
+ let(:ref) { 'master' }
+ let(:ref_path) { tag ? "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" : "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
+ let(:ci_ref) { create(:ci_ref, ref_path: ref_path) }
+ let(:project) { ci_ref.project }
+ let(:source_job) { create(:ci_build, pipeline: pipeline) }
+
+ let!(:old_unlocked_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :unlocked) }
+ let!(:older_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
+ let!(:older_ambiguous_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: !tag, project: project, locked: :artifacts_locked) }
+ let!(:pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
+ let!(:child_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
+ let!(:newer_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
+ let!(:other_ref_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: 'other_ref', tag: tag, project: project, locked: :artifacts_locked) }
+ let!(:sources_pipeline) { create(:ci_sources_pipeline, source_job: source_job, source_project: project, pipeline: child_pipeline, project: project) }
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
+ stub_feature_flags(ci_update_unlocked_job_artifacts: ci_update_unlocked_job_artifacts)
end
- [true, false].each do |tag|
- context "when tag is #{tag}" do
- let(:ref) { 'master' }
- let(:ref_path) { tag ? "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" : "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
- let(:ci_ref) { create(:ci_ref, ref_path: ref_path) }
+ describe '#execute' do
+ subject(:execute) { described_class.new(pipeline.project, pipeline.user).execute(ci_ref, before_pipeline) }
+
+ context 'when running on a ref before a pipeline' do
+ let(:before_pipeline) { pipeline }
+
+ it 'unlocks artifacts from older pipelines' do
+ expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ end
+
+ it 'does not unlock artifacts for tag or branch with same name as ref' do
+ expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
+ end
+
+ it 'does not unlock artifacts from newer pipelines' do
+ expect { execute }.not_to change { newer_pipeline.reload.locked }.from('artifacts_locked')
+ end
+
+ it 'does not lock artifacts from old unlocked pipelines' do
+ expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
+ end
+
+ it 'does not unlock artifacts from the same pipeline' do
+ expect { execute }.not_to change { pipeline.reload.locked }.from('artifacts_locked')
+ end
- let!(:old_unlocked_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :unlocked) }
- let!(:older_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
- let!(:older_ambiguous_pipeline) { create(:ci_pipeline, ref: ref, tag: !tag, project: ci_ref.project, locked: :artifacts_locked) }
- let!(:pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
- let!(:child_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
- let!(:newer_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
- let!(:other_ref_pipeline) { create(:ci_pipeline, ref: 'other_ref', tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
+ it 'does not unlock artifacts for other refs' do
+ expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
+ end
- before do
- create(:ci_sources_pipeline,
- source_job: create(:ci_build, pipeline: pipeline),
- source_project: ci_ref.project,
- pipeline: child_pipeline,
- project: ci_ref.project)
+ it 'does not unlock artifacts for child pipeline' do
+ expect { execute }.not_to change { child_pipeline.reload.locked }.from('artifacts_locked')
end
- context 'when running on a ref before a pipeline' do
- let(:before_pipeline) { pipeline }
+ it 'unlocks job artifact records' do
+ pending unless ci_update_unlocked_job_artifacts
- it 'unlocks artifacts from older pipelines' do
- expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
- end
+ expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(2)
+ end
+ end
- it 'does not unlock artifacts for tag or branch with same name as ref' do
- expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
- end
+ context 'when running on just the ref' do
+ let(:before_pipeline) { nil }
- it 'does not unlock artifacts from newer pipelines' do
- expect { execute }.not_to change { newer_pipeline.reload.locked }.from('artifacts_locked')
- end
+ it 'unlocks artifacts from older pipelines' do
+ expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ end
- it 'does not lock artifacts from old unlocked pipelines' do
- expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
- end
+ it 'unlocks artifacts from newer pipelines' do
+ expect { execute }.to change { newer_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ end
- it 'does not unlock artifacts from the same pipeline' do
- expect { execute }.not_to change { pipeline.reload.locked }.from('artifacts_locked')
- end
+ it 'unlocks artifacts from the same pipeline' do
+ expect { execute }.to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ end
- it 'does not unlock artifacts for other refs' do
- expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
- end
+ it 'does not unlock artifacts for tag or branch with same name as ref' do
+ expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
+ end
- it 'does not unlock artifacts for child pipeline' do
- expect { execute }.not_to change { child_pipeline.reload.locked }.from('artifacts_locked')
- end
+ it 'does not lock artifacts from old unlocked pipelines' do
+ expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
end
- context 'when running on just the ref' do
- let(:before_pipeline) { nil }
+ it 'does not unlock artifacts for other refs' do
+ expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
+ end
- it 'unlocks artifacts from older pipelines' do
- expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
- end
+ it 'unlocks job artifact records' do
+ pending unless ci_update_unlocked_job_artifacts
- it 'unlocks artifacts from newer pipelines' do
- expect { execute }.to change { newer_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
- end
+ expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(8)
+ end
+ end
+ end
- it 'unlocks artifacts from the same pipeline' do
- expect { execute }.to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
- end
+ describe '#unlock_pipelines_query' do
+ subject { described_class.new(pipeline.project, pipeline.user).unlock_pipelines_query(ci_ref, before_pipeline) }
+
+ context 'when running on a ref before a pipeline' do
+ let(:before_pipeline) { pipeline }
+
+ it 'produces the expected SQL string' do
+ expect(subject.squish).to eq <<~SQL.squish
+ UPDATE
+ "ci_pipelines"
+ SET
+ "locked" = 0
+ WHERE
+ "ci_pipelines"."id" IN
+ (SELECT
+ "ci_pipelines"."id"
+ FROM
+ "ci_pipelines"
+ WHERE
+ "ci_pipelines"."ci_ref_id" = #{ci_ref.id}
+ AND "ci_pipelines"."locked" = 1
+ AND (ci_pipelines.id < #{before_pipeline.id})
+ AND "ci_pipelines"."id" NOT IN
+ (WITH RECURSIVE
+ "base_and_descendants"
+ AS
+ ((SELECT
+ "ci_pipelines".*
+ FROM
+ "ci_pipelines"
+ WHERE
+ "ci_pipelines"."id" = #{before_pipeline.id})
+ UNION
+ (SELECT
+ "ci_pipelines".*
+ FROM
+ "ci_pipelines",
+ "base_and_descendants",
+ "ci_sources_pipelines"
+ WHERE
+ "ci_sources_pipelines"."pipeline_id" = "ci_pipelines"."id"
+ AND "ci_sources_pipelines"."source_pipeline_id" = "base_and_descendants"."id"
+ AND "ci_sources_pipelines"."source_project_id" = "ci_sources_pipelines"."project_id"))
+ SELECT
+ "id"
+ FROM
+ "base_and_descendants"
+ AS
+ "ci_pipelines")
+ LIMIT 1
+ FOR UPDATE
+ SKIP LOCKED)
+ RETURNING ("ci_pipelines"."id")
+ SQL
+ end
+ end
- it 'does not unlock artifacts for tag or branch with same name as ref' do
- expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
- end
+ context 'when running on just the ref' do
+ let(:before_pipeline) { nil }
+
+ it 'produces the expected SQL string' do
+ expect(subject.squish).to eq <<~SQL.squish
+ UPDATE
+ "ci_pipelines"
+ SET
+ "locked" = 0
+ WHERE
+ "ci_pipelines"."id" IN
+ (SELECT
+ "ci_pipelines"."id"
+ FROM
+ "ci_pipelines"
+ WHERE
+ "ci_pipelines"."ci_ref_id" = #{ci_ref.id}
+ AND "ci_pipelines"."locked" = 1
+ LIMIT 1
+ FOR UPDATE
+ SKIP LOCKED)
+ RETURNING
+ ("ci_pipelines"."id")
+ SQL
+ end
+ end
+ end
- it 'does not lock artifacts from old unlocked pipelines' do
- expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
- end
+ describe '#unlock_job_artifacts_query' do
+ subject { described_class.new(pipeline.project, pipeline.user).unlock_job_artifacts_query(pipeline_ids) }
+
+ context 'when running on a ref before a pipeline' do
+ let(:before_pipeline) { pipeline }
+ let(:pipeline_ids) { [older_pipeline.id] }
+
+ it 'produces the expected SQL string' do
+ expect(subject.squish).to eq <<~SQL.squish
+ UPDATE
+ "ci_job_artifacts"
+ SET
+ "locked" = 0
+ WHERE
+ "ci_job_artifacts"."job_id" IN
+ (SELECT
+ "ci_builds"."id"
+ FROM
+ "ci_builds"
+ WHERE
+ "ci_builds"."type" = 'Ci::Build'
+ AND "ci_builds"."commit_id" = #{older_pipeline.id})
+ RETURNING
+ ("ci_job_artifacts"."id")
+ SQL
+ end
+ end
- it 'does not unlock artifacts for other refs' do
- expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
- end
+ context 'when running on just the ref' do
+ let(:before_pipeline) { nil }
+ let(:pipeline_ids) { [older_pipeline.id, newer_pipeline.id, pipeline.id] }
+
+ it 'produces the expected SQL string' do
+ expect(subject.squish).to eq <<~SQL.squish
+ UPDATE
+ "ci_job_artifacts"
+ SET
+ "locked" = 0
+ WHERE
+ "ci_job_artifacts"."job_id" IN
+ (SELECT
+ "ci_builds"."id"
+ FROM
+ "ci_builds"
+ WHERE
+ "ci_builds"."type" = 'Ci::Build'
+ AND "ci_builds"."commit_id" IN (#{pipeline_ids.join(', ')}))
+ RETURNING
+ ("ci_job_artifacts"."id")
+ SQL
end
end
end
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
index e4dd3d0500f..937b19beff5 100644
--- a/spec/services/ci/update_build_state_service_spec.rb
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_checksum)
+ .with(error_reason: :chunks_invalid_checksum)
end
end
@@ -188,7 +188,7 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_checksum)
+ .with(error_reason: :chunks_invalid_checksum)
end
end
@@ -210,11 +210,11 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_checksum)
+ .with(error_reason: :chunks_invalid_checksum)
expect(metrics)
.not_to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_size)
+ .with(error_reason: :chunks_invalid_size)
end
context 'when using deprecated parameters' do
@@ -235,11 +235,11 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_checksum)
+ .with(error_reason: :chunks_invalid_checksum)
expect(metrics)
.not_to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_size)
+ .with(error_reason: :chunks_invalid_size)
end
end
end
@@ -262,11 +262,11 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_checksum)
+ .with(error_reason: :chunks_invalid_checksum)
expect(metrics)
.to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_size)
+ .with(error_reason: :chunks_invalid_size)
end
end
@@ -284,7 +284,7 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_checksum)
+ .with(error_reason: :chunks_invalid_checksum)
expect(metrics)
.not_to have_received(:increment_trace_operation)
@@ -292,7 +292,7 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_size)
+ .with(error_reason: :chunks_invalid_size)
end
end
@@ -376,7 +376,7 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(metrics)
.not_to have_received(:increment_error_counter)
- .with(type: :chunks_invalid_checksum)
+ .with(error_reason: :chunks_invalid_checksum)
end
context 'when build pending state is outdated' do
diff --git a/spec/services/clusters/agents/refresh_authorization_service_spec.rb b/spec/services/clusters/agents/refresh_authorization_service_spec.rb
index 77ba81ea9c0..09bec7ae0e8 100644
--- a/spec/services/clusters/agents/refresh_authorization_service_spec.rb
+++ b/spec/services/clusters/agents/refresh_authorization_service_spec.rb
@@ -113,6 +113,16 @@ RSpec.describe Clusters::Agents::RefreshAuthorizationService do
expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' })
end
+ context 'project does not belong to a group, and is authorizing itself' do
+ let(:root_ancestor) { create(:namespace) }
+ let(:added_project) { project }
+
+ it 'creates an authorization record for the project' do
+ expect(subject).to be_truthy
+ expect(agent.authorized_projects).to contain_exactly(added_project)
+ end
+ end
+
context 'config contains too many projects' do
before do
stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1)
diff --git a/spec/services/clusters/cleanup/project_namespace_service_spec.rb b/spec/services/clusters/cleanup/project_namespace_service_spec.rb
index 605aaea17e4..ec510b2e3c5 100644
--- a/spec/services/clusters/cleanup/project_namespace_service_spec.rb
+++ b/spec/services/clusters/cleanup/project_namespace_service_spec.rb
@@ -58,6 +58,19 @@ RSpec.describe Clusters::Cleanup::ProjectNamespaceService do
subject
end
+
+ context 'when cluster.kubeclient is nil' do
+ let(:kubeclient_instance_double) { nil }
+
+ it 'schedules ::ServiceAccountWorker' do
+ expect(Clusters::Cleanup::ServiceAccountWorker).to receive(:perform_async).with(cluster.id)
+ subject
+ end
+
+ it 'deletes namespaces from database' do
+ expect { subject }.to change { cluster.kubernetes_namespaces.exists? }.from(true).to(false)
+ end
+ end
end
context 'when cluster has no namespaces' do
diff --git a/spec/services/clusters/cleanup/service_account_service_spec.rb b/spec/services/clusters/cleanup/service_account_service_spec.rb
index f256df1b2fc..adcdbd84da0 100644
--- a/spec/services/clusters/cleanup/service_account_service_spec.rb
+++ b/spec/services/clusters/cleanup/service_account_service_spec.rb
@@ -44,5 +44,13 @@ RSpec.describe Clusters::Cleanup::ServiceAccountService do
it 'deletes cluster' do
expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false)
end
+
+ context 'when cluster.kubeclient is nil' do
+ let(:kubeclient_instance_double) { nil }
+
+ it 'deletes cluster' do
+ expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false)
+ end
+ end
end
end
diff --git a/spec/services/clusters/applications/prometheus_health_check_service_spec.rb b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb
index e6c7b147ab7..9db3b9d2417 100644
--- a/spec/services/clusters/applications/prometheus_health_check_service_spec.rb
+++ b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute' do
+RSpec.describe Clusters::Integrations::PrometheusHealthCheckService, '#execute' do
let(:service) { described_class.new(cluster) }
subject { service.execute }
@@ -26,10 +26,10 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
end
RSpec.shared_examples 'correct health stored' do
- it 'stores the correct health of prometheus app' do
+ it 'stores the correct health of prometheus' do
subject
- expect(prometheus.healthy).to eq(client_healthy)
+ expect(prometheus.healthy?).to eq(client_healthy)
end
end
@@ -43,19 +43,19 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
let_it_be(:project) { create(:project) }
let_it_be(:integration) { create(:alert_management_http_integration, project: project) }
- let(:applications_prometheus_healthy) { true }
- let(:prometheus) { create(:clusters_applications_prometheus, status: prometheus_status_value, healthy: applications_prometheus_healthy) }
- let(:cluster) { create(:cluster, :project, application_prometheus: prometheus, projects: [project]) }
+ let(:previous_health_status) { :healthy }
+ let(:prometheus) { create(:clusters_integrations_prometheus, enabled: prometheus_enabled, health_status: previous_health_status) }
+ let(:cluster) { create(:cluster, :project, integration_prometheus: prometheus, projects: [project]) }
- context 'when prometheus not installed' do
- let(:prometheus_status_value) { Clusters::Applications::Prometheus.state_machine.states[:installing].value }
+ context 'when prometheus not enabled' do
+ let(:prometheus_enabled) { false }
it { expect(subject).to eq(nil) }
include_examples 'no alert'
end
- context 'when prometheus installed' do
- let(:prometheus_status_value) { Clusters::Applications::Prometheus.state_machine.states[:installed].value }
+ context 'when prometheus enabled' do
+ let(:prometheus_enabled) { true }
before do
client = instance_double('PrometheusClient', healthy?: client_healthy)
@@ -63,7 +63,7 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
end
context 'when newly unhealthy' do
- let(:applications_prometheus_healthy) { true }
+ let(:previous_health_status) { :healthy }
let(:client_healthy) { false }
include_examples 'sends alert'
@@ -71,7 +71,7 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
end
context 'when newly healthy' do
- let(:applications_prometheus_healthy) { false }
+ let(:previous_health_status) { :unhealthy }
let(:client_healthy) { true }
include_examples 'no alert'
@@ -79,7 +79,7 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
end
context 'when continuously unhealthy' do
- let(:applications_prometheus_healthy) { false }
+ let(:previous_health_status) { :unhealthy }
let(:client_healthy) { false }
include_examples 'no alert'
@@ -87,7 +87,7 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
end
context 'when continuously healthy' do
- let(:applications_prometheus_healthy) { true }
+ let(:previous_health_status) { :healthy }
let(:client_healthy) { true }
include_examples 'no alert'
@@ -95,7 +95,7 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
end
context 'when first health check and healthy' do
- let(:applications_prometheus_healthy) { nil }
+ let(:previous_health_status) { :unknown }
let(:client_healthy) { true }
include_examples 'no alert'
@@ -103,7 +103,7 @@ RSpec.describe Clusters::Applications::PrometheusHealthCheckService, '#execute'
end
context 'when first health check and not healthy' do
- let(:applications_prometheus_healthy) { nil }
+ let(:previous_health_status) { :unknown }
let(:client_healthy) { false }
include_examples 'sends alert'
diff --git a/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb b/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
index 20b0546effa..5f7afdf699a 100644
--- a/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_blob_service_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe DependencyProxy::FindOrCreateBlobService do
let(:blob_sha) { blob.file_name.sub('.gz', '') }
it 'uses cached blob instead of downloading one' do
- expect { subject }.to change { blob.reload.updated_at }
+ expect { subject }.to change { blob.reload.read_at }
expect(subject[:status]).to eq(:success)
expect(subject[:blob]).to be_a(DependencyProxy::Blob)
diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
index b3f88f91289..ef608c9b113 100644
--- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
let(:token) { Digest::SHA256.hexdigest('123') }
let(:headers) do
{
- 'docker-content-digest' => dependency_proxy_manifest.digest,
+ DependencyProxy::Manifest::DIGEST_HEADER => dependency_proxy_manifest.digest,
'content-type' => dependency_proxy_manifest.content_type
}
end
@@ -31,6 +31,14 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
end
end
+ shared_examples 'returning no manifest' do
+ it 'returns a nil manifest' do
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:from_cache]).to eq false
+ expect(subject[:manifest]).to be_nil
+ end
+ end
+
context 'when no manifest exists' do
let_it_be(:image) { 'new-image' }
@@ -40,7 +48,15 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
stub_manifest_download(image, tag, headers: headers)
end
- it_behaves_like 'downloading the manifest'
+ it_behaves_like 'returning no manifest'
+
+ context 'with dependency_proxy_manifest_workhorse feature disabled' do
+ before do
+ stub_feature_flags(dependency_proxy_manifest_workhorse: false)
+ end
+
+ it_behaves_like 'downloading the manifest'
+ end
end
context 'failed head request' do
@@ -49,7 +65,15 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
stub_manifest_download(image, tag, headers: headers)
end
- it_behaves_like 'downloading the manifest'
+ it_behaves_like 'returning no manifest'
+
+ context 'with dependency_proxy_manifest_workhorse feature disabled' do
+ before do
+ stub_feature_flags(dependency_proxy_manifest_workhorse: false)
+ end
+
+ it_behaves_like 'downloading the manifest'
+ end
end
end
@@ -60,7 +84,7 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
shared_examples 'using the cached manifest' do
it 'uses cached manifest instead of downloading one', :aggregate_failures do
- expect { subject }.to change { dependency_proxy_manifest.reload.updated_at }
+ expect { subject }.to change { dependency_proxy_manifest.reload.read_at }
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
@@ -76,16 +100,24 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
let(:content_type) { 'new-content-type' }
before do
- stub_manifest_head(image, tag, headers: { 'docker-content-digest' => digest, 'content-type' => content_type })
- stub_manifest_download(image, tag, headers: { 'docker-content-digest' => digest, 'content-type' => content_type })
+ stub_manifest_head(image, tag, headers: { DependencyProxy::Manifest::DIGEST_HEADER => digest, 'content-type' => content_type })
+ stub_manifest_download(image, tag, headers: { DependencyProxy::Manifest::DIGEST_HEADER => digest, 'content-type' => content_type })
end
- it 'downloads the new manifest and updates the existing record', :aggregate_failures do
- expect(subject[:status]).to eq(:success)
- expect(subject[:manifest]).to eq(dependency_proxy_manifest)
- expect(subject[:manifest].content_type).to eq(content_type)
- expect(subject[:manifest].digest).to eq(digest)
- expect(subject[:from_cache]).to eq false
+ it_behaves_like 'returning no manifest'
+
+ context 'with dependency_proxy_manifest_workhorse feature disabled' do
+ before do
+ stub_feature_flags(dependency_proxy_manifest_workhorse: false)
+ end
+
+ it 'downloads the new manifest and updates the existing record', :aggregate_failures do
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:manifest]).to eq(dependency_proxy_manifest)
+ expect(subject[:manifest].content_type).to eq(content_type)
+ expect(subject[:manifest].digest).to eq(digest)
+ expect(subject[:from_cache]).to eq false
+ end
end
end
@@ -96,7 +128,15 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
stub_manifest_download(image, tag, headers: headers)
end
- it_behaves_like 'downloading the manifest'
+ it_behaves_like 'returning no manifest'
+
+ context 'with dependency_proxy_manifest_workhorse feature disabled' do
+ before do
+ stub_feature_flags(dependency_proxy_manifest_workhorse: false)
+ end
+
+ it_behaves_like 'downloading the manifest'
+ end
end
context 'failed connection' do
diff --git a/spec/services/dependency_proxy/head_manifest_service_spec.rb b/spec/services/dependency_proxy/head_manifest_service_spec.rb
index 9c1e4d650f8..949a8eb3bee 100644
--- a/spec/services/dependency_proxy/head_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/head_manifest_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe DependencyProxy::HeadManifestService do
let(:content_type) { 'foo' }
let(:headers) do
{
- 'docker-content-digest' => digest,
+ DependencyProxy::Manifest::DIGEST_HEADER => digest,
'content-type' => content_type
}
end
diff --git a/spec/services/dependency_proxy/pull_manifest_service_spec.rb b/spec/services/dependency_proxy/pull_manifest_service_spec.rb
index b3053174cc0..6018a3229fb 100644
--- a/spec/services/dependency_proxy/pull_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/pull_manifest_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe DependencyProxy::PullManifestService do
let(:digest) { '12345' }
let(:content_type) { 'foo' }
let(:headers) do
- { 'docker-content-digest' => digest, 'content-type' => content_type }
+ { DependencyProxy::Manifest::DIGEST_HEADER => digest, 'content-type' => content_type }
end
subject { described_class.new(image, tag, token).execute_with_manifest(&method(:check_response)) }
diff --git a/spec/services/deployments/archive_in_project_service_spec.rb b/spec/services/deployments/archive_in_project_service_spec.rb
new file mode 100644
index 00000000000..d4039ee7b4a
--- /dev/null
+++ b/spec/services/deployments/archive_in_project_service_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deployments::ArchiveInProjectService do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:service) { described_class.new(project, nil) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'when there are archivable deployments' do
+ let!(:deployments) { create_list(:deployment, 3, project: project) }
+ let!(:deployment_refs) { deployments.map(&:ref_path) }
+
+ before do
+ deployments.each(&:create_ref)
+ allow(Deployment).to receive(:archivables_in) { deployments }
+ end
+
+ it 'returns result code' do
+ expect(subject[:result]).to eq(:archived)
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:count]).to eq(3)
+ end
+
+ it 'archives the deployment' do
+ expect(deployments.map(&:archived?)).to be_all(false)
+ expect(deployment_refs_exist?).to be_all(true)
+
+ subject
+
+ deployments.each(&:reload)
+ expect(deployments.map(&:archived?)).to be_all(true)
+ expect(deployment_refs_exist?).to be_all(false)
+ end
+
+ context 'when ref does not exist for some reason' do
+ before do
+ project.repository.delete_refs(*deployment_refs)
+ end
+
+ it 'does not raise an error' do
+ expect(deployment_refs_exist?).to be_all(false)
+
+ expect { subject }.not_to raise_error
+
+ expect(deployment_refs_exist?).to be_all(false)
+ end
+ end
+
+ context 'when deployments_archive feature flag is disabled' do
+ before do
+ stub_feature_flags(deployments_archive: false)
+ end
+
+ it 'does not do anything' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Feature flag is not enabled')
+ end
+ end
+
+ def deployment_refs_exist?
+ deployment_refs.map { |path| project.repository.ref_exists?(path) }
+ end
+ end
+
+ context 'when there are no archivable deployments' do
+ before do
+ allow(Deployment).to receive(:archivables_in) { Deployment.none }
+ end
+
+ it 'returns result code' do
+ expect(subject[:result]).to eq(:empty)
+ expect(subject[:status]).to eq(:success)
+ end
+ end
+ end
+end
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
index a5a13230d6f..62adc834733 100644
--- a/spec/services/deployments/link_merge_requests_service_spec.rb
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -32,6 +32,19 @@ RSpec.describe Deployments::LinkMergeRequestsService do
end
end
+ context 'when the deployment is for one of the production environments' do
+ it 'links merge requests' do
+ environment =
+ create(:environment, environment_type: 'production', name: 'production/gcp')
+
+ deploy = create(:deployment, :success, environment: environment)
+
+ expect(deploy).to receive(:link_merge_requests).once
+
+ described_class.new(deploy).execute
+ end
+ end
+
context 'when the deployment failed' do
it 'does nothing' do
environment = create(:environment, name: 'foo')
diff --git a/spec/services/emails/create_service_spec.rb b/spec/services/emails/create_service_spec.rb
index 1396a1fce30..2fabf4ae66a 100644
--- a/spec/services/emails/create_service_spec.rb
+++ b/spec/services/emails/create_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Emails::CreateService do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
let(:opts) { { email: 'new@email.com', user: user } }
subject(:service) { described_class.new(user, opts) }
@@ -22,7 +23,7 @@ RSpec.describe Emails::CreateService do
it 'has the right user association' do
service.execute
- expect(user.emails).to eq(Email.where(opts))
+ expect(user.emails).to include(Email.find_by(opts))
end
end
end
diff --git a/spec/services/emails/destroy_service_spec.rb b/spec/services/emails/destroy_service_spec.rb
index f8407be41e7..7dcf367016e 100644
--- a/spec/services/emails/destroy_service_spec.rb
+++ b/spec/services/emails/destroy_service_spec.rb
@@ -15,5 +15,15 @@ RSpec.describe Emails::DestroyService do
expect(user.emails).not_to include(email)
expect(response).to be true
end
+
+ context "when it corresponds to the user's primary email" do
+ let(:email) { user.emails.find_by!(email: user.email) }
+
+ it 'does not remove the email and raises an exception' do
+ expect { service.execute(email) }.to raise_error(StandardError, 'Cannot delete primary email')
+
+ expect(user.emails).to include(email)
+ end
+ end
end
end
diff --git a/spec/services/error_tracking/collect_error_service_spec.rb b/spec/services/error_tracking/collect_error_service_spec.rb
index ee9d0813e64..52d095148c8 100644
--- a/spec/services/error_tracking/collect_error_service_spec.rb
+++ b/spec/services/error_tracking/collect_error_service_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe ErrorTracking::CollectErrorService do
let_it_be(:project) { create(:project) }
- let_it_be(:parsed_event) { Gitlab::Json.parse(fixture_file('error_tracking/parsed_event.json')) }
+ let_it_be(:parsed_event_file) { 'error_tracking/parsed_event.json' }
+ let_it_be(:parsed_event) { Gitlab::Json.parse(fixture_file(parsed_event_file)) }
subject { described_class.new(project, nil, event: parsed_event) }
@@ -41,6 +42,14 @@ RSpec.describe ErrorTracking::CollectErrorService do
expect(event.payload).to eq parsed_event
end
+ context 'python sdk event' do
+ let(:parsed_event) { Gitlab::Json.parse(fixture_file('error_tracking/python_event.json')) }
+
+ it 'creates a valid event' do
+ expect { subject.execute }.to change { ErrorTracking::ErrorEvent.count }.by(1)
+ end
+ end
+
context 'unusual payload' do
let(:modified_event) { parsed_event }
@@ -64,5 +73,25 @@ RSpec.describe ErrorTracking::CollectErrorService do
end
end
end
+
+ context 'go payload' do
+ let(:parsed_event) { Gitlab::Json.parse(fixture_file('error_tracking/go_parsed_event.json')) }
+
+ it 'has correct values set' do
+ subject.execute
+
+ event = ErrorTracking::ErrorEvent.last
+ error = event.error
+
+ expect(error.name).to eq '*errors.errorString'
+ expect(error.description).to start_with 'Hello world'
+ expect(error.platform).to eq 'go'
+
+ expect(event.description).to start_with 'Hello world'
+ expect(event.level).to eq 'error'
+ expect(event.environment).to eq 'Accumulate'
+ expect(event.payload).to eq parsed_event
+ end
+ end
end
end
diff --git a/spec/services/google_cloud/service_accounts_service_spec.rb b/spec/services/google_cloud/service_accounts_service_spec.rb
new file mode 100644
index 00000000000..a0d09affa72
--- /dev/null
+++ b/spec/services/google_cloud/service_accounts_service_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::ServiceAccountsService do
+ let_it_be(:project) { create(:project) }
+
+ let(:service) { described_class.new(project) }
+
+ describe 'find_for_project' do
+ context 'when a project does not have GCP service account vars' do
+ before do
+ project.variables.build(key: 'blah', value: 'foo', environment_scope: 'world')
+ project.save!
+ end
+
+ it 'returns an empty list' do
+ expect(service.find_for_project.length).to eq(0)
+ end
+ end
+
+ context 'when a project has GCP service account ci vars' do
+ before do
+ project.variables.build(environment_scope: '*', key: 'GCP_PROJECT_ID', value: 'prj1')
+ project.variables.build(environment_scope: '*', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock')
+ project.variables.build(environment_scope: 'staging', key: 'GCP_PROJECT_ID', value: 'prj2')
+ project.variables.build(environment_scope: 'staging', key: 'GCP_SERVICE_ACCOUNT', value: 'mock')
+ project.variables.build(environment_scope: 'production', key: 'GCP_PROJECT_ID', value: 'prj3')
+ project.variables.build(environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT', value: 'mock')
+ project.variables.build(environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock')
+ project.save!
+ end
+
+ it 'returns a list of service accounts' do
+ list = service.find_for_project
+
+ aggregate_failures 'testing list of service accounts' do
+ expect(list.length).to eq(3)
+
+ expect(list.first[:environment]).to eq('*')
+ expect(list.first[:gcp_project]).to eq('prj1')
+ expect(list.first[:service_account_exists]).to eq(false)
+ expect(list.first[:service_account_key_exists]).to eq(true)
+
+ expect(list.second[:environment]).to eq('staging')
+ expect(list.second[:gcp_project]).to eq('prj2')
+ expect(list.second[:service_account_exists]).to eq(true)
+ expect(list.second[:service_account_key_exists]).to eq(false)
+
+ expect(list.third[:environment]).to eq('production')
+ expect(list.third[:gcp_project]).to eq('prj3')
+ expect(list.third[:service_account_exists]).to eq(true)
+ expect(list.third[:service_account_key_exists]).to eq(true)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index bcba39b0eb4..7ea08131419 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -171,7 +171,7 @@ RSpec.describe Groups::CreateService, '#execute' do
context 'with an active group-level integration' do
let(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
- let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let!(:group_integration) { create(:prometheus_integration, :group, group: group, api_url: 'https://prometheus.group.com/') }
let(:group) do
create(:group).tap do |group|
group.add_owner(user)
@@ -186,7 +186,7 @@ RSpec.describe Groups::CreateService, '#execute' do
context 'with an active subgroup' do
let(:service) { described_class.new(user, group_params.merge(parent_id: subgroup.id)) }
- let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup_integration) { create(:prometheus_integration, :group, group: subgroup, api_url: 'https://prometheus.subgroup.com/') }
let(:subgroup) do
create(:group, parent: group).tap do |subgroup|
subgroup.add_owner(user)
@@ -242,4 +242,41 @@ RSpec.describe Groups::CreateService, '#execute' do
end
end
end
+
+ describe 'invite team email' do
+ let(:service) { described_class.new(user, group_params) }
+
+ before do
+ allow(Namespaces::InviteTeamEmailWorker).to receive(:perform_in)
+ end
+
+ it 'is sent' do
+ group = service.execute
+ delay = Namespaces::InviteTeamEmailService::DELIVERY_DELAY_IN_MINUTES
+ expect(Namespaces::InviteTeamEmailWorker).to have_received(:perform_in).with(delay, group.id, user.id)
+ end
+
+ context 'when group has not been persisted' do
+ let(:service) { described_class.new(user, group_params.merge(name: '<script>alert("Attack!")</script>')) }
+
+ it 'is not sent' do
+ expect(Namespaces::InviteTeamEmailWorker).not_to receive(:perform_in)
+ service.execute
+ end
+ end
+
+ context 'when group is not root' do
+ let(:parent_group) { create :group }
+ let(:service) { described_class.new(user, group_params.merge(parent_id: parent_group.id)) }
+
+ before do
+ parent_group.add_owner(user)
+ end
+
+ it 'is not sent' do
+ expect(Namespaces::InviteTeamEmailWorker).not_to receive(:perform_in)
+ service.execute
+ end
+ end
+ end
end
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index ad5c4364deb..292f2e2b86b 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe Groups::ImportExport::ImportService do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
+ before do
+ allow(GroupImportWorker).to receive(:with_status).and_return(GroupImportWorker)
+ end
+
context 'when the job can be successfully scheduled' do
subject(:import_service) { described_class.new(group: group, user: user) }
@@ -20,6 +24,8 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'enqueues an import job' do
+ allow(GroupImportWorker).to receive(:with_status).and_return(GroupImportWorker)
+
expect(GroupImportWorker).to receive(:perform_async).with(user.id, group.id)
import_service.async_execute
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 8b506d2bc2c..35d46884f4d 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -153,7 +153,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
it 'adds an error on group' do
transfer_service.execute(nil)
- expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup with the same path.')
+ expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup or a project with the same path.')
end
end
@@ -185,9 +185,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
context 'when projects have project namespaces' do
let_it_be(:project1) { create(:project, :private, namespace: group) }
- let_it_be(:project_namespace1) { create(:project_namespace, project: project1) }
let_it_be(:project2) { create(:project, :private, namespace: group) }
- let_it_be(:project_namespace2) { create(:project_namespace, project: project2) }
it_behaves_like 'project namespace path is in sync with project path' do
let(:group_full_path) { "#{group.path}" }
@@ -241,7 +239,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
it 'adds an error on group' do
transfer_service.execute(new_parent_group)
- expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup with the same path.')
+ expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup or a project with the same path.')
end
end
@@ -250,36 +248,45 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
let_it_be(:membership) { create(:group_member, :owner, group: new_parent_group, user: user) }
let_it_be(:project) { create(:project, path: 'foo', namespace: new_parent_group) }
- before do
- group.update_attribute(:path, 'foo')
- end
-
- it 'returns false' do
- expect(transfer_service.execute(new_parent_group)).to be_falsy
- end
-
it 'adds an error on group' do
- transfer_service.execute(new_parent_group)
- expect(transfer_service.error).to eq('Transfer failed: Validation failed: Group URL has already been taken')
+ expect(transfer_service.execute(new_parent_group)).to be_falsy
+ expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup or a project with the same path.')
end
- context 'when projects have project namespaces' do
- let!(:project_namespace) { create(:project_namespace, project: project) }
-
+ # currently when a project is created it gets a corresponding project namespace
+ # so we test the case where a project without a project namespace is transferred
+ # for backward compatibility
+ context 'without project namespace' do
before do
- transfer_service.execute(new_parent_group)
+ project_namespace = project.project_namespace
+ project.update_column(:project_namespace_id, nil)
+ project_namespace.delete
end
- it_behaves_like 'project namespace path is in sync with project path' do
- let(:group_full_path) { "#{new_parent_group.full_path}" }
- let(:projects_with_project_namespace) { [project] }
+ it 'adds an error on group' do
+ expect(project.reload.project_namespace).to be_nil
+ expect(transfer_service.execute(new_parent_group)).to be_falsy
+ expect(transfer_service.error).to eq('Transfer failed: Validation failed: Group URL has already been taken')
end
end
end
+ context 'when projects have project namespaces' do
+ let_it_be(:project) { create(:project, path: 'foo', namespace: new_parent_group) }
+
+ before do
+ transfer_service.execute(new_parent_group)
+ end
+
+ it_behaves_like 'project namespace path is in sync with project path' do
+ let(:group_full_path) { "#{new_parent_group.full_path}" }
+ let(:projects_with_project_namespace) { [project] }
+ end
+ end
+
context 'when the group is allowed to be transferred' do
let_it_be(:new_parent_group, reload: true) { create(:group, :public) }
- let_it_be(:new_parent_group_integration) { create(:integrations_slack, group: new_parent_group, project: nil, webhook: 'http://new-group.slack.com') }
+ let_it_be(:new_parent_group_integration) { create(:integrations_slack, :group, group: new_parent_group, webhook: 'http://new-group.slack.com') }
before do
allow(PropagateIntegrationWorker).to receive(:perform_async)
@@ -316,7 +323,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
context 'with an inherited integration' do
let_it_be(:instance_integration) { create(:integrations_slack, :instance, webhook: 'http://project.slack.com') }
- let_it_be(:group_integration) { create(:integrations_slack, group: group, project: nil, webhook: 'http://group.slack.com', inherit_from_id: instance_integration.id) }
+ let_it_be(:group_integration) { create(:integrations_slack, :group, group: group, webhook: 'http://group.slack.com', inherit_from_id: instance_integration.id) }
it 'replaces inherited integrations', :aggregate_failures do
expect(new_created_integration.webhook).to eq(new_parent_group_integration.webhook)
@@ -326,7 +333,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
end
context 'with a custom integration' do
- let_it_be(:group_integration) { create(:integrations_slack, group: group, project: nil, webhook: 'http://group.slack.com') }
+ let_it_be(:group_integration) { create(:integrations_slack, :group, group: group, webhook: 'http://group.slack.com') }
it 'does not update the integrations', :aggregate_failures do
expect { transfer_service.execute(new_parent_group) }.not_to change { group_integration.webhook }
@@ -445,8 +452,6 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
context 'when transferring a group with project descendants' do
let!(:project1) { create(:project, :repository, :private, namespace: group) }
let!(:project2) { create(:project, :repository, :internal, namespace: group) }
- let!(:project_namespace1) { create(:project_namespace, project: project1) }
- let!(:project_namespace2) { create(:project_namespace, project: project2) }
before do
TestEnv.clean_test_path
@@ -483,8 +488,6 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
let!(:project1) { create(:project, :repository, :public, namespace: group) }
let!(:project2) { create(:project, :repository, :public, namespace: group) }
let!(:new_parent_group) { create(:group, :private) }
- let!(:project_namespace1) { create(:project_namespace, project: project1) }
- let!(:project_namespace2) { create(:project_namespace, project: project2) }
it 'updates projects visibility to match the new parent' do
group.projects.each do |project|
@@ -504,8 +507,6 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
let!(:project2) { create(:project, :repository, :internal, namespace: group) }
let!(:subgroup1) { create(:group, :private, parent: group) }
let!(:subgroup2) { create(:group, :internal, parent: group) }
- let!(:project_namespace1) { create(:project_namespace, project: project1) }
- let!(:project_namespace2) { create(:project_namespace, project: project2) }
before do
TestEnv.clean_test_path
@@ -593,11 +594,16 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
let_it_be_with_reload(:group) { create(:group, :private, parent: old_parent_group) }
let_it_be(:new_group_member) { create(:user) }
let_it_be(:old_group_member) { create(:user) }
+ let_it_be(:unique_subgroup_member) { create(:user) }
+ let_it_be(:direct_project_member) { create(:user) }
before do
new_parent_group.add_maintainer(new_group_member)
old_parent_group.add_maintainer(old_group_member)
+ subgroup1.add_developer(unique_subgroup_member)
+ nested_project.add_developer(direct_project_member)
group.refresh_members_authorized_projects
+ subgroup1.refresh_members_authorized_projects
end
it 'removes old project authorizations' do
@@ -613,7 +619,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
end
it 'performs authorizations job immediately' do
- expect(AuthorizedProjectsWorker).to receive(:bulk_perform_inline)
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_inline)
transfer_service.execute(new_parent_group)
end
@@ -630,14 +636,24 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
ProjectAuthorization.where(project_id: nested_project.id, user_id: new_group_member.id).size
}.from(0).to(1)
end
+
+ it 'preserves existing project authorizations for direct project members' do
+ expect { transfer_service.execute(new_parent_group) }.not_to change {
+ ProjectAuthorization.where(project_id: nested_project.id, user_id: direct_project_member.id).count
+ }
+ end
end
- context 'for groups with many members' do
- before do
- 11.times do
- new_parent_group.add_maintainer(create(:user))
- end
+ context 'for nested groups with unique members' do
+ it 'preserves existing project authorizations' do
+ expect { transfer_service.execute(new_parent_group) }.not_to change {
+ ProjectAuthorization.where(project_id: nested_project.id, user_id: unique_subgroup_member.id).count
+ }
end
+ end
+
+ context 'for groups with many projects' do
+ let_it_be(:project_list) { create_list(:project, 11, :repository, :private, namespace: group) }
it 'adds new project authorizations for the user which makes a transfer' do
transfer_service.execute(new_parent_group)
@@ -646,9 +662,21 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
expect(ProjectAuthorization.where(project_id: nested_project.id, user_id: user.id).size).to eq(1)
end
+ it 'adds project authorizations for users in the new hierarchy' do
+ expect { transfer_service.execute(new_parent_group) }.to change {
+ ProjectAuthorization.where(project_id: project_list.map { |project| project.id }, user_id: new_group_member.id).size
+ }.from(0).to(project_list.count)
+ end
+
+ it 'removes project authorizations for users in the old hierarchy' do
+ expect { transfer_service.execute(new_parent_group) }.to change {
+ ProjectAuthorization.where(project_id: project_list.map { |project| project.id }, user_id: old_group_member.id).size
+ }.from(project_list.count).to(0)
+ end
+
it 'schedules authorizations job' do
- expect(AuthorizedProjectsWorker).to receive(:bulk_perform_async)
- .with(array_including(new_parent_group.members_with_parents.pluck(:user_id).map {|id| [id, anything] }))
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)
+ .with(array_including(group.all_projects.ids.map { |id| [id, anything] }))
transfer_service.execute(new_parent_group)
end
diff --git a/spec/services/import/github/notes/create_service_spec.rb b/spec/services/import/github/notes/create_service_spec.rb
new file mode 100644
index 00000000000..57699def848
--- /dev/null
+++ b/spec/services/import/github/notes/create_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::Github::Notes::CreateService do
+ it 'does not support quick actions' do
+ project = create(:project, :repository)
+ user = create(:user)
+ merge_request = create(:merge_request, source_project: project)
+
+ project.add_maintainer(user)
+
+ note = described_class.new(
+ project,
+ user,
+ note: '/close',
+ noteable_type: 'MergeRequest',
+ noteable_id: merge_request.id
+ ).execute
+
+ expect(note.note).to eq('/close')
+ expect(note.noteable.closed?).to be(false)
+ end
+end
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index b96dd981e0f..cf75efb5c57 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -7,12 +7,14 @@ RSpec.describe Issues::BuildService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
let(:user) { developer }
before_all do
project.add_developer(developer)
+ project.add_reporter(reporter)
project.add_guest(guest)
end
@@ -140,76 +142,64 @@ RSpec.describe Issues::BuildService do
end
describe '#execute' do
- context 'as developer' do
- it 'builds a new issue with given params' do
- milestone = create(:milestone, project: project)
- issue = build_issue(milestone_id: milestone.id)
-
- expect(issue.milestone).to eq(milestone)
- expect(issue.issue_type).to eq('issue')
- expect(issue.work_item_type.base_type).to eq('issue')
- end
+ describe 'setting milestone' do
+ context 'when developer' do
+ it 'builds a new issue with given params' do
+ milestone = create(:milestone, project: project)
+ issue = build_issue(milestone_id: milestone.id)
+
+ expect(issue.milestone).to eq(milestone)
+ end
- it 'sets milestone to nil if it is not available for the project' do
- milestone = create(:milestone, project: create(:project))
- issue = build_issue(milestone_id: milestone.id)
+ it 'sets milestone to nil if it is not available for the project' do
+ milestone = create(:milestone, project: create(:project))
+ issue = build_issue(milestone_id: milestone.id)
- expect(issue.milestone).to be_nil
+ expect(issue.milestone).to be_nil
+ end
end
- context 'when issue_type is incident' do
- it 'sets the correct issue type' do
- issue = build_issue(issue_type: 'incident')
+ context 'when guest' do
+ let(:user) { guest }
- expect(issue.issue_type).to eq('incident')
- expect(issue.work_item_type.base_type).to eq('incident')
+ it 'cannot set milestone' do
+ milestone = create(:milestone, project: project)
+ issue = build_issue(milestone_id: milestone.id)
+
+ expect(issue.milestone).to be_nil
end
end
end
- context 'as guest' do
- let(:user) { guest }
-
- it 'cannot set milestone' do
- milestone = create(:milestone, project: project)
- issue = build_issue(milestone_id: milestone.id)
+ describe 'setting issue type' do
+ context 'with a corresponding WorkItem::Type' do
+ let_it_be(:type_issue_id) { WorkItem::Type.default_issue_type.id }
+ let_it_be(:type_incident_id) { WorkItem::Type.default_by_type(:incident).id }
+
+ where(:issue_type, :current_user, :work_item_type_id, :resulting_issue_type) do
+ nil | ref(:guest) | ref(:type_issue_id) | 'issue'
+ 'issue' | ref(:guest) | ref(:type_issue_id) | 'issue'
+ 'incident' | ref(:guest) | ref(:type_issue_id) | 'issue'
+ 'incident' | ref(:reporter) | ref(:type_incident_id) | 'incident'
+ # update once support for test_case is enabled
+ 'test_case' | ref(:guest) | ref(:type_issue_id) | 'issue'
+ # update once support for requirement is enabled
+ 'requirement' | ref(:guest) | ref(:type_issue_id) | 'issue'
+ 'invalid' | ref(:guest) | ref(:type_issue_id) | 'issue'
+ # ensure that we don't set a value which has a permission check but is an invalid issue type
+ 'project' | ref(:guest) | ref(:type_issue_id) | 'issue'
+ end
- expect(issue.milestone).to be_nil
- end
+ with_them do
+ let(:user) { current_user }
- context 'setting issue type' do
- shared_examples 'builds an issue' do
- specify do
+ it 'builds an issue' do
issue = build_issue(issue_type: issue_type)
expect(issue.issue_type).to eq(resulting_issue_type)
expect(issue.work_item_type_id).to eq(work_item_type_id)
end
end
-
- it 'cannot set invalid issue type' do
- issue = build_issue(issue_type: 'project')
-
- expect(issue).to be_issue
- end
-
- context 'with a corresponding WorkItem::Type' do
- let_it_be(:type_issue_id) { WorkItem::Type.default_issue_type.id }
- let_it_be(:type_incident_id) { WorkItem::Type.default_by_type(:incident).id }
-
- where(:issue_type, :work_item_type_id, :resulting_issue_type) do
- nil | ref(:type_issue_id) | 'issue'
- 'issue' | ref(:type_issue_id) | 'issue'
- 'incident' | ref(:type_incident_id) | 'incident'
- 'test_case' | ref(:type_issue_id) | 'issue' # update once support for test_case is enabled
- 'requirement' | ref(:type_issue_id) | 'issue' # update once support for requirement is enabled
- 'invalid' | ref(:type_issue_id) | 'issue'
- end
-
- with_them do
- it_behaves_like 'builds an issue'
- end
- end
end
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 93ef046a632..158f9dec83e 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -83,6 +83,14 @@ RSpec.describe Issues::CloseService do
service.execute(issue)
end
+ it 'does not change escalation status' do
+ resolved = IncidentManagement::Escalatable::STATUSES[:resolved]
+
+ expect { service.execute(issue) }
+ .to not_change { IncidentManagement::IssuableEscalationStatus.where(issue: issue).count }
+ .and not_change { IncidentManagement::IssuableEscalationStatus.where(status: resolved).count }
+ end
+
context 'issue is incident type' do
let(:issue) { create(:incident, project: project) }
let(:current_user) { user }
@@ -90,6 +98,40 @@ RSpec.describe Issues::CloseService do
subject { service.execute(issue) }
it_behaves_like 'an incident management tracked event', :incident_management_incident_closed
+
+ it 'creates a new resolved escalation status', :aggregate_failures do
+ expect { service.execute(issue) }.to change { IncidentManagement::IssuableEscalationStatus.where(issue: issue).count }.by(1)
+
+ expect(issue.incident_management_issuable_escalation_status).to be_resolved
+ end
+
+ context 'when there is an escalation status' do
+ before do
+ create(:incident_management_issuable_escalation_status, issue: issue)
+ end
+
+ it 'changes the escalation status to resolved' do
+ expect { service.execute(issue) }.to change { issue.incident_management_issuable_escalation_status.reload.resolved? }.to(true)
+ end
+
+ it 'adds a system note', :aggregate_failures do
+ expect { service.execute(issue) }.to change { issue.notes.count }.by(1)
+
+ new_note = issue.notes.last
+ expect(new_note.note).to eq('changed the status to **Resolved** by closing the incident')
+ expect(new_note.author).to eq(user)
+ end
+
+ context 'when the escalation status did not change to resolved' do
+ let(:escalation_status) { instance_double('IncidentManagement::IssuableEscalationStatus', resolve: false) }
+
+ it 'does not create a system note' do
+ allow(issue).to receive(:incident_management_issuable_escalation_status).and_return(escalation_status)
+
+ expect { service.execute(issue) }.not_to change { issue.notes.count }
+ end
+ end
+ end
end
end
@@ -237,7 +279,7 @@ RSpec.describe Issues::CloseService do
it 'verifies the number of queries' do
recorded = ActiveRecord::QueryRecorder.new { close_issue }
- expected_queries = 27
+ expected_queries = 32
expect(recorded.count).to be <= expected_queries
expect(recorded.cached_count).to eq(0)
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 1887be4896e..18e03db11dc 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -15,8 +15,7 @@ RSpec.describe Issues::CreateService do
expect(described_class.rate_limiter_scoped_and_keyed).to be_a(RateLimitedService::RateLimiterScopedAndKeyed)
expect(described_class.rate_limiter_scoped_and_keyed.key).to eq(:issues_create)
- expect(described_class.rate_limiter_scoped_and_keyed.opts[:scope]).to eq(%i[project current_user])
- expect(described_class.rate_limiter_scoped_and_keyed.opts[:users_allowlist].call).to eq(%w[support-bot])
+ expect(described_class.rate_limiter_scoped_and_keyed.opts[:scope]).to eq(%i[project current_user external_author])
expect(described_class.rate_limiter_scoped_and_keyed.rate_limiter_klass).to eq(Gitlab::ApplicationRateLimiter)
end
end
@@ -81,7 +80,7 @@ RSpec.describe Issues::CreateService do
it_behaves_like 'not an incident issue'
- context 'issue is incident type' do
+ context 'when issue is incident type' do
before do
opts.merge!(issue_type: 'incident')
end
@@ -91,23 +90,37 @@ RSpec.describe Issues::CreateService do
subject { issue }
- it_behaves_like 'incident issue'
- it_behaves_like 'has incident label'
+ context 'as reporter' do
+ let_it_be(:reporter) { create(:user) }
- it 'does create an incident label' do
- expect { subject }
- .to change { Label.where(incident_label_attributes).count }.by(1)
- end
+ let(:user) { reporter }
- context 'when invalid' do
- before do
- opts.merge!(title: '')
+ before_all do
+ project.add_reporter(reporter)
end
- it 'does not apply an incident label prematurely' do
- expect { subject }.to not_change(LabelLink, :count).and not_change(Issue, :count)
+ it_behaves_like 'incident issue'
+ it_behaves_like 'has incident label'
+
+ it 'does create an incident label' do
+ expect { subject }
+ .to change { Label.where(incident_label_attributes).count }.by(1)
+ end
+
+ context 'when invalid' do
+ before do
+ opts.merge!(title: '')
+ end
+
+ it 'does not apply an incident label prematurely' do
+ expect { subject }.to not_change(LabelLink, :count).and not_change(Issue, :count)
+ end
end
end
+
+ context 'as guest' do
+ it_behaves_like 'not an incident issue'
+ end
end
it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
@@ -289,6 +302,44 @@ RSpec.describe Issues::CreateService do
described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
end
+ context 'when rate limiting is in effect', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ let(:user) { create(:user) }
+
+ before do
+ stub_feature_flags(rate_limited_service_issues_create: true)
+ stub_application_setting(issues_create_limit: 1)
+ end
+
+ subject do
+ 2.times { described_class.new(project: project, current_user: user, params: opts, spam_params: double).execute }
+ end
+
+ context 'when too many requests are sent by one user' do
+ it 'raises an error' do
+ expect do
+ subject
+ end.to raise_error(RateLimitedService::RateLimitedError)
+ end
+
+ it 'creates 1 issue' do
+ expect do
+ subject
+ rescue RateLimitedService::RateLimitedError
+ end.to change { Issue.count }.by(1)
+ end
+ end
+
+ context 'when limit is higher than count of issues being created' do
+ before do
+ stub_application_setting(issues_create_limit: 2)
+ end
+
+ it 'creates 2 issues' do
+ expect { subject }.to change { Issue.count }.by(2)
+ end
+ end
+ end
+
context 'after_save callback to store_mentions' do
context 'when mentionable attributes change' do
let(:opts) { { title: 'Title', description: "Description with #{user.to_reference}" } }
diff --git a/spec/services/issues/set_crm_contacts_service_spec.rb b/spec/services/issues/set_crm_contacts_service_spec.rb
new file mode 100644
index 00000000000..65b22fe3b35
--- /dev/null
+++ b/spec/services/issues/set_crm_contacts_service_spec.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issues::SetCrmContactsService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:contacts) { create_list(:contact, 4, group: group) }
+
+ let(:issue) { create(:issue, project: project) }
+ let(:does_not_exist_or_no_permission) { "The resource that you are attempting to access does not exist or you don't have permission to perform this action" }
+
+ before do
+ create(:issue_customer_relations_contact, issue: issue, contact: contacts[0])
+ create(:issue_customer_relations_contact, issue: issue, contact: contacts[1])
+ end
+
+ subject(:set_crm_contacts) do
+ described_class.new(project: project, current_user: user, params: params).execute(issue)
+ end
+
+ describe '#execute' do
+ context 'when the user has no permission' do
+ let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } }
+
+ it 'returns expected error response' do
+ response = set_crm_contacts
+
+ expect(response).to be_error
+ expect(response.message).to match_array(['You have insufficient permissions to set customer relations contacts for this issue'])
+ end
+ end
+
+ context 'when user has permission' do
+ before do
+ group.add_reporter(user)
+ end
+
+ context 'when the contact does not exist' do
+ let(:params) { { crm_contact_ids: [non_existing_record_id] } }
+
+ it 'returns expected error response' do
+ response = set_crm_contacts
+
+ expect(response).to be_error
+ expect(response.message).to match_array(["Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}"])
+ end
+ end
+
+ context 'when the contact belongs to a different group' do
+ let(:group2) { create(:group) }
+ let(:contact) { create(:contact, group: group2) }
+ let(:params) { { crm_contact_ids: [contact.id] } }
+
+ before do
+ group2.add_reporter(user)
+ end
+
+ it 'returns expected error response' do
+ response = set_crm_contacts
+
+ expect(response).to be_error
+ expect(response.message).to match_array(["Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}"])
+ end
+ end
+
+ context 'replace' do
+ let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } }
+
+ it 'updates the issue with correct contacts' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(issue.customer_relations_contacts).to match_array([contacts[1], contacts[2]])
+ end
+ end
+
+ context 'add' do
+ let(:params) { { add_crm_contact_ids: [contacts[3].id] } }
+
+ it 'updates the issue with correct contacts' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[1], contacts[3]])
+ end
+ end
+
+ context 'remove' do
+ let(:params) { { remove_crm_contact_ids: [contacts[0].id] } }
+
+ it 'updates the issue with correct contacts' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(issue.customer_relations_contacts).to match_array([contacts[1]])
+ end
+ end
+
+ context 'when attempting to add more than 6' do
+ let(:id) { contacts[0].id }
+ let(:params) { { add_crm_contact_ids: [id, id, id, id, id, id, id] } }
+
+ it 'returns expected error message' do
+ response = set_crm_contacts
+
+ expect(response).to be_error
+ expect(response.message).to match_array(['You can only add up to 6 contacts at one time'])
+ end
+ end
+
+ context 'when trying to remove non-existent contact' do
+ let(:params) { { remove_crm_contact_ids: [non_existing_record_id] } }
+
+ it 'returns success without an error message' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(response.message).to be_nil
+ end
+ end
+
+ context 'when combining params' do
+ let(:error_invalid_params) { 'You cannot combine crm_contact_ids with add_crm_contact_ids or remove_crm_contact_ids' }
+
+ context 'add and remove' do
+ let(:params) { { remove_crm_contact_ids: [contacts[1].id], add_crm_contact_ids: [contacts[3].id] } }
+
+ it 'updates the issue with correct contacts' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[3]])
+ end
+ end
+
+ context 'replace and remove' do
+ let(:params) { { crm_contact_ids: [contacts[3].id], remove_crm_contact_ids: [contacts[0].id] } }
+
+ it 'returns expected error response' do
+ response = set_crm_contacts
+
+ expect(response).to be_error
+ expect(response.message).to match_array([error_invalid_params])
+ end
+ end
+
+ context 'replace and add' do
+ let(:params) { { crm_contact_ids: [contacts[3].id], add_crm_contact_ids: [contacts[1].id] } }
+
+ it 'returns expected error response' do
+ response = set_crm_contacts
+
+ expect(response).to be_error
+ expect(response.message).to match_array([error_invalid_params])
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 83c17f051eb..85b8fef685e 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -1256,28 +1256,38 @@ RSpec.describe Issues::UpdateService, :mailer do
let(:closed_issuable) { create(:closed_issue, project: project) }
end
- context 'real-time updates' do
- using RSpec::Parameterized::TableSyntax
-
+ context 'broadcasting issue assignee updates' do
let(:update_params) { { assignee_ids: [user2.id] } }
- where(:action_cable_in_app_enabled, :feature_flag_enabled, :should_broadcast) do
- true | true | true
- true | false | true
- false | true | true
- false | false | false
- end
+ context 'when feature flag is enabled' do
+ before do
+ stub_feature_flags(broadcast_issue_updates: true)
+ end
+
+ it 'triggers the GraphQL subscription' do
+ expect(GraphqlTriggers).to receive(:issuable_assignees_updated).with(issue)
+
+ update_issue(update_params)
+ end
- with_them do
- it 'broadcasts to the issues channel based on ActionCable and feature flag values' do
- allow(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(action_cable_in_app_enabled)
- stub_feature_flags(broadcast_issue_updates: feature_flag_enabled)
+ context 'when assignee is not updated' do
+ let(:update_params) { { title: 'Some other title' } }
- if should_broadcast
- expect(GraphqlTriggers).to receive(:issuable_assignees_updated).with(issue)
- else
+ it 'does not trigger the GraphQL subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_assignees_updated).with(issue)
+
+ update_issue(update_params)
end
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(broadcast_issue_updates: false)
+ end
+
+ it 'does not trigger the GraphQL subscription' do
+ expect(GraphqlTriggers).not_to receive(:issuable_assignees_updated).with(issue)
update_issue(update_params)
end
diff --git a/spec/services/labels/transfer_service_spec.rb b/spec/services/labels/transfer_service_spec.rb
index 18fd401f383..05190accb33 100644
--- a/spec/services/labels/transfer_service_spec.rb
+++ b/spec/services/labels/transfer_service_spec.rb
@@ -3,107 +3,121 @@
require 'spec_helper'
RSpec.describe Labels::TransferService do
- describe '#execute' do
- let_it_be(:user) { create(:user) }
+ shared_examples 'transfer labels' do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
- let_it_be(:old_group_ancestor) { create(:group) }
- let_it_be(:old_group) { create(:group, parent: old_group_ancestor) }
+ let_it_be(:old_group_ancestor) { create(:group) }
+ let_it_be(:old_group) { create(:group, parent: old_group_ancestor) }
- let_it_be(:new_group) { create(:group) }
+ let_it_be(:new_group) { create(:group) }
- let_it_be(:project) { create(:project, :repository, group: new_group) }
+ let_it_be(:project) { create(:project, :repository, group: new_group) }
- subject(:service) { described_class.new(user, old_group, project) }
+ subject(:service) { described_class.new(user, old_group, project) }
- before do
- old_group_ancestor.add_developer(user)
- new_group.add_developer(user)
- end
+ before do
+ old_group_ancestor.add_developer(user)
+ new_group.add_developer(user)
+ end
- it 'recreates missing group labels at project level and assigns them to the issuables' do
- old_group_label_1 = create(:group_label, group: old_group)
- old_group_label_2 = create(:group_label, group: old_group)
+ it 'recreates missing group labels at project level and assigns them to the issuables' do
+ old_group_label_1 = create(:group_label, group: old_group)
+ old_group_label_2 = create(:group_label, group: old_group)
- labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label_1])
- labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_label_2])
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label_1])
+ labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_label_2])
- expect { service.execute }.to change(project.labels, :count).by(2)
- expect(labeled_issue.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_label_1.title))
- expect(labeled_merge_request.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_label_2.title))
- end
+ expect { service.execute }.to change(project.labels, :count).by(2)
+ expect(labeled_issue.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_label_1.title))
+ expect(labeled_merge_request.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_label_2.title))
+ end
- it 'recreates missing ancestor group labels at project level and assigns them to the issuables' do
- old_group_ancestor_label_1 = create(:group_label, group: old_group_ancestor)
- old_group_ancestor_label_2 = create(:group_label, group: old_group_ancestor)
+ it 'recreates missing ancestor group labels at project level and assigns them to the issuables' do
+ old_group_ancestor_label_1 = create(:group_label, group: old_group_ancestor)
+ old_group_ancestor_label_2 = create(:group_label, group: old_group_ancestor)
- labeled_issue = create(:labeled_issue, project: project, labels: [old_group_ancestor_label_1])
- labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_ancestor_label_2])
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_ancestor_label_1])
+ labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_ancestor_label_2])
- expect { service.execute }.to change(project.labels, :count).by(2)
- expect(labeled_issue.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_ancestor_label_1.title))
- expect(labeled_merge_request.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_ancestor_label_2.title))
- end
+ expect { service.execute }.to change(project.labels, :count).by(2)
+ expect(labeled_issue.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_ancestor_label_1.title))
+ expect(labeled_merge_request.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_ancestor_label_2.title))
+ end
- it 'recreates label priorities related to the missing group labels' do
- old_group_label = create(:group_label, group: old_group)
- create(:labeled_issue, project: project, labels: [old_group_label])
- create(:label_priority, project: project, label: old_group_label, priority: 1)
+ it 'recreates label priorities related to the missing group labels' do
+ old_group_label = create(:group_label, group: old_group)
+ create(:labeled_issue, project: project, labels: [old_group_label])
+ create(:label_priority, project: project, label: old_group_label, priority: 1)
- service.execute
+ service.execute
- new_project_label = project.labels.find_by(title: old_group_label.title)
- expect(new_project_label.id).not_to eq old_group_label.id
- expect(new_project_label.priorities).not_to be_empty
- end
+ new_project_label = project.labels.find_by(title: old_group_label.title)
+ expect(new_project_label.id).not_to eq old_group_label.id
+ expect(new_project_label.priorities).not_to be_empty
+ end
- it 'does not recreate missing group labels that are not applied to issues or merge requests' do
- old_group_label = create(:group_label, group: old_group)
+ it 'does not recreate missing group labels that are not applied to issues or merge requests' do
+ old_group_label = create(:group_label, group: old_group)
- service.execute
+ service.execute
- expect(project.labels.where(title: old_group_label.title)).to be_empty
- end
+ expect(project.labels.where(title: old_group_label.title)).to be_empty
+ end
- it 'does not recreate missing group labels that already exist in the project group' do
- old_group_label = create(:group_label, group: old_group)
- labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label])
+ it 'does not recreate missing group labels that already exist in the project group' do
+ old_group_label = create(:group_label, group: old_group)
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label])
- new_group_label = create(:group_label, group: new_group, title: old_group_label.title)
+ new_group_label = create(:group_label, group: new_group, title: old_group_label.title)
- service.execute
+ service.execute
- expect(project.labels.where(title: old_group_label.title)).to be_empty
- expect(labeled_issue.reload.labels).to contain_exactly(new_group_label)
- end
+ expect(project.labels.where(title: old_group_label.title)).to be_empty
+ expect(labeled_issue.reload.labels).to contain_exactly(new_group_label)
+ end
- it 'updates only label links in the given project' do
- old_group_label = create(:group_label, group: old_group)
- other_project = create(:project, group: old_group)
+ it 'updates only label links in the given project' do
+ old_group_label = create(:group_label, group: old_group)
+ other_project = create(:project, group: old_group)
- labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label])
- other_project_labeled_issue = create(:labeled_issue, project: other_project, labels: [old_group_label])
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label])
+ other_project_labeled_issue = create(:labeled_issue, project: other_project, labels: [old_group_label])
- service.execute
+ service.execute
- expect(labeled_issue.reload.labels).not_to include(old_group_label)
- expect(other_project_labeled_issue.reload.labels).to contain_exactly(old_group_label)
- end
+ expect(labeled_issue.reload.labels).not_to include(old_group_label)
+ expect(other_project_labeled_issue.reload.labels).to contain_exactly(old_group_label)
+ end
- context 'when moving within the same ancestor group' do
- let(:other_subgroup) { create(:group, parent: old_group_ancestor) }
- let(:project) { create(:project, :repository, group: other_subgroup) }
+ context 'when moving within the same ancestor group' do
+ let(:other_subgroup) { create(:group, parent: old_group_ancestor) }
+ let(:project) { create(:project, :repository, group: other_subgroup) }
- it 'does not recreate ancestor group labels' do
- old_group_ancestor_label_1 = create(:group_label, group: old_group_ancestor)
- old_group_ancestor_label_2 = create(:group_label, group: old_group_ancestor)
+ it 'does not recreate ancestor group labels' do
+ old_group_ancestor_label_1 = create(:group_label, group: old_group_ancestor)
+ old_group_ancestor_label_2 = create(:group_label, group: old_group_ancestor)
- labeled_issue = create(:labeled_issue, project: project, labels: [old_group_ancestor_label_1])
- labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_ancestor_label_2])
+ labeled_issue = create(:labeled_issue, project: project, labels: [old_group_ancestor_label_1])
+ labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_ancestor_label_2])
- expect { service.execute }.not_to change(project.labels, :count)
- expect(labeled_issue.reload.labels).to contain_exactly(old_group_ancestor_label_1)
- expect(labeled_merge_request.reload.labels).to contain_exactly(old_group_ancestor_label_2)
+ expect { service.execute }.not_to change(project.labels, :count)
+ expect(labeled_issue.reload.labels).to contain_exactly(old_group_ancestor_label_1)
+ expect(labeled_merge_request.reload.labels).to contain_exactly(old_group_ancestor_label_2)
+ end
end
end
end
+
+ context 'with use_optimized_group_labels_query FF on' do
+ it_behaves_like 'transfer labels'
+ end
+
+ context 'with use_optimized_group_labels_query FF off' do
+ before do
+ stub_feature_flags(use_optimized_group_labels_query: false)
+ end
+
+ it_behaves_like 'transfer labels'
+ end
end
diff --git a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
new file mode 100644
index 00000000000..bdb3d0f6700
--- /dev/null
+++ b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LooseForeignKeys::BatchCleanerService do
+ include MigrationsHelpers
+
+ def create_table_structure
+ migration = ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers)
+
+ migration.create_table :_test_loose_fk_parent_table
+
+ migration.create_table :_test_loose_fk_child_table_1 do |t|
+ t.bigint :parent_id
+ end
+
+ migration.create_table :_test_loose_fk_child_table_2 do |t|
+ t.bigint :parent_id_with_different_column
+ end
+
+ migration.track_record_deletions(:_test_loose_fk_parent_table)
+ end
+
+ let(:parent_model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = '_test_loose_fk_parent_table'
+
+ include LooseForeignKey
+
+ loose_foreign_key :_test_loose_fk_child_table_1, :parent_id, on_delete: :async_delete
+ loose_foreign_key :_test_loose_fk_child_table_2, :parent_id_with_different_column, on_delete: :async_nullify
+ end
+ end
+
+ let(:child_model_1) do
+ Class.new(ApplicationRecord) do
+ self.table_name = '_test_loose_fk_child_table_1'
+ end
+ end
+
+ let(:child_model_2) do
+ Class.new(ApplicationRecord) do
+ self.table_name = '_test_loose_fk_child_table_2'
+ end
+ end
+
+ let(:loose_fk_child_table_1) { table(:_test_loose_fk_child_table_1) }
+ let(:loose_fk_child_table_2) { table(:_test_loose_fk_child_table_2) }
+ let(:parent_record_1) { parent_model.create! }
+ let(:other_parent_record) { parent_model.create! }
+
+ before(:all) do
+ create_table_structure
+ end
+
+ before do
+ parent_record_1
+
+ loose_fk_child_table_1.create!(parent_id: parent_record_1.id)
+ loose_fk_child_table_1.create!(parent_id: parent_record_1.id)
+
+ # these will not be deleted
+ loose_fk_child_table_1.create!(parent_id: other_parent_record.id)
+ loose_fk_child_table_1.create!(parent_id: other_parent_record.id)
+
+ loose_fk_child_table_2.create!(parent_id_with_different_column: parent_record_1.id)
+ loose_fk_child_table_2.create!(parent_id_with_different_column: parent_record_1.id)
+
+ # these will not be deleted
+ loose_fk_child_table_2.create!(parent_id_with_different_column: other_parent_record.id)
+ loose_fk_child_table_2.create!(parent_id_with_different_column: other_parent_record.id)
+ end
+
+ after(:all) do
+ migration = ActiveRecord::Migration.new
+ migration.drop_table :_test_loose_fk_parent_table
+ migration.drop_table :_test_loose_fk_child_table_1
+ migration.drop_table :_test_loose_fk_child_table_2
+ end
+
+ context 'when parent records are deleted' do
+ let(:deleted_records_counter) { Gitlab::Metrics.registry.get(:loose_foreign_key_processed_deleted_records) }
+
+ before do
+ parent_record_1.delete
+
+ expect(loose_fk_child_table_1.count).to eq(4)
+ expect(loose_fk_child_table_2.count).to eq(4)
+
+ described_class.new(parent_klass: parent_model,
+ deleted_parent_records: LooseForeignKeys::DeletedRecord.status_pending.all,
+ models_by_table_name: {
+ '_test_loose_fk_child_table_1' => child_model_1,
+ '_test_loose_fk_child_table_2' => child_model_2
+ }).execute
+ end
+
+ it 'cleans up the child records' do
+ expect(loose_fk_child_table_1.where(parent_id: parent_record_1.id)).to be_empty
+ expect(loose_fk_child_table_2.where(parent_id_with_different_column: nil).count).to eq(2)
+ end
+
+ it 'cleans up the pending parent DeletedRecord' do
+ expect(LooseForeignKeys::DeletedRecord.status_pending.count).to eq(0)
+ expect(LooseForeignKeys::DeletedRecord.status_processed.count).to eq(1)
+ end
+
+ it 'records the DeletedRecord status updates', :prometheus do
+ counter = Gitlab::Metrics.registry.get(:loose_foreign_key_processed_deleted_records)
+
+ expect(counter.get(table: parent_model.table_name, db_config_name: 'main')).to eq(1)
+ end
+
+ it 'does not delete unrelated records' do
+ expect(loose_fk_child_table_1.where(parent_id: other_parent_record.id).count).to eq(2)
+ expect(loose_fk_child_table_2.where(parent_id_with_different_column: other_parent_record.id).count).to eq(2)
+ end
+ end
+end
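
For orientation, the new spec above exercises LooseForeignKeys::BatchCleanerService only through its constructor and #execute. A minimal usage sketch inferred from those calls (the Project/Issue pairing and the variable names are illustrative assumptions, not taken from the spec):

# Sketch inferred from the spec above; assumes the GitLab Rails environment is loaded.
# Process all pending loose-foreign-key deletions recorded for a parent table.
pending = LooseForeignKeys::DeletedRecord.status_pending.all

LooseForeignKeys::BatchCleanerService.new(
  parent_klass: Project,                      # hypothetical parent model carrying `loose_foreign_key` definitions
  deleted_parent_records: pending,            # rows queued by track_record_deletions on the parent table
  models_by_table_name: { 'issues' => Issue } # hypothetical child table => model mapping
).execute
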
diff --git a/spec/services/loose_foreign_keys/cleaner_service_spec.rb b/spec/services/loose_foreign_keys/cleaner_service_spec.rb
new file mode 100644
index 00000000000..6f37ac49435
--- /dev/null
+++ b/spec/services/loose_foreign_keys/cleaner_service_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LooseForeignKeys::CleanerService do
+ let(:schema) { ApplicationRecord.connection.current_schema }
+ let(:deleted_records) do
+ [
+ LooseForeignKeys::DeletedRecord.new(fully_qualified_table_name: "#{schema}.projects", primary_key_value: non_existing_record_id),
+ LooseForeignKeys::DeletedRecord.new(fully_qualified_table_name: "#{schema}.projects", primary_key_value: non_existing_record_id)
+ ]
+ end
+
+ let(:loose_fk_definition) do
+ ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
+ 'projects',
+ 'issues',
+ {
+ column: 'project_id',
+ on_delete: :async_nullify
+ }
+ )
+ end
+
+ subject(:cleaner_service) do
+ described_class.new(
+ model: Issue,
+ foreign_key_definition: loose_fk_definition,
+ deleted_parent_records: deleted_records
+ )
+ end
+
+ context 'when invalid foreign key definition is passed' do
+ context 'when invalid on_delete argument was given' do
+ before do
+ loose_fk_definition.options[:on_delete] = :invalid
+ end
+
+ it 'raises KeyError' do
+ expect { cleaner_service.execute }.to raise_error(StandardError, /Invalid on_delete argument/)
+ end
+ end
+ end
+
+ describe 'query generation' do
+ context 'when single primary key is used' do
+ let(:issue) { create(:issue) }
+
+ let(:deleted_records) do
+ [
+ LooseForeignKeys::DeletedRecord.new(fully_qualified_table_name: "#{schema}.projects", primary_key_value: issue.project_id)
+ ]
+ end
+
+ it 'generates an IN query for nullifying the rows' do
+ expected_query = %{UPDATE "issues" SET "project_id" = NULL WHERE ("issues"."id") IN (SELECT "issues"."id" FROM "issues" WHERE "issues"."project_id" IN (#{issue.project_id}) LIMIT 500)}
+ expect(ApplicationRecord.connection).to receive(:execute).with(expected_query).and_call_original
+
+ cleaner_service.execute
+
+ issue.reload
+ expect(issue.project_id).to be_nil
+ end
+
+ it 'generates an IN query for deleting the rows' do
+ loose_fk_definition.options[:on_delete] = :async_delete
+
+ expected_query = %{DELETE FROM "issues" WHERE ("issues"."id") IN (SELECT "issues"."id" FROM "issues" WHERE "issues"."project_id" IN (#{issue.project_id}) LIMIT 1000)}
+ expect(ApplicationRecord.connection).to receive(:execute).with(expected_query).and_call_original
+
+ cleaner_service.execute
+
+ expect(Issue.exists?(id: issue.id)).to eq(false)
+ end
+ end
+
+ context 'when composite primary key is used' do
+ let!(:user) { create(:user) }
+ let!(:project) { create(:project) }
+
+ let(:loose_fk_definition) do
+ ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
+ 'users',
+ 'project_authorizations',
+ {
+ column: 'user_id',
+ on_delete: :async_delete
+ }
+ )
+ end
+
+ let(:deleted_records) do
+ [
+ LooseForeignKeys::DeletedRecord.new(fully_qualified_table_name: "#{schema}.users", primary_key_value: user.id)
+ ]
+ end
+
+ subject(:cleaner_service) do
+ described_class.new(
+ model: ProjectAuthorization,
+ foreign_key_definition: loose_fk_definition,
+ deleted_parent_records: deleted_records
+ )
+ end
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'generates an IN query for deleting the rows' do
+ expected_query = %{DELETE FROM "project_authorizations" WHERE ("project_authorizations"."user_id", "project_authorizations"."project_id", "project_authorizations"."access_level") IN (SELECT "project_authorizations"."user_id", "project_authorizations"."project_id", "project_authorizations"."access_level" FROM "project_authorizations" WHERE "project_authorizations"."user_id" IN (#{user.id}) LIMIT 1000)}
+ expect(ApplicationRecord.connection).to receive(:execute).with(expected_query).and_call_original
+
+ cleaner_service.execute
+
+ expect(ProjectAuthorization.exists?(user_id: user.id)).to eq(false)
+ end
+
+ context 'when the query generation is incorrect (paranoid check)' do
+ it 'raises error if the foreign key condition is missing' do
+ expect_next_instance_of(LooseForeignKeys::CleanerService) do |instance|
+ expect(instance).to receive(:delete_query).and_return('wrong query')
+ end
+
+ expect { cleaner_service.execute }.to raise_error /FATAL: foreign key condition is missing from the generated query/
+ end
+ end
+ end
+
+ context 'when with_skip_locked parameter is true' do
+ subject(:cleaner_service) do
+ described_class.new(
+ model: Issue,
+ foreign_key_definition: loose_fk_definition,
+ deleted_parent_records: deleted_records,
+ with_skip_locked: true
+ )
+ end
+
+ it 'generates a query with the SKIP LOCKED clause' do
+ expect(ApplicationRecord.connection).to receive(:execute).with(/FOR UPDATE SKIP LOCKED/).and_call_original
+
+ cleaner_service.execute
+ end
+ end
+ end
+end
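
Similarly, the cleaner spec above pins down the full keyword interface of LooseForeignKeys::CleanerService. A sketch assembled from the same calls (the primary key value 42 is a placeholder; everything else mirrors the spec):

# Sketch mirroring the spec above: nullify issues.project_id for a deleted project row.
schema = ApplicationRecord.connection.current_schema

fk = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
  'projects', 'issues',
  { column: 'project_id', on_delete: :async_nullify }      # :async_delete removes the child rows instead
)

deleted = [
  LooseForeignKeys::DeletedRecord.new(
    fully_qualified_table_name: "#{schema}.projects",
    primary_key_value: 42                                   # placeholder parent id
  )
]

LooseForeignKeys::CleanerService.new(
  model: Issue,
  foreign_key_definition: fk,
  deleted_parent_records: deleted,
  with_skip_locked: true                                    # optional; per the spec it adds FOR UPDATE SKIP LOCKED
).execute
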
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 2e6e6041fc3..fe866d73215 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -196,4 +196,108 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
end
end
end
+
+ context 'when assigning tasks to be done' do
+ let(:additional_params) do
+ { invite_source: '_invite_source_', tasks_to_be_done: %w(ci code), tasks_project_id: source.id }
+ end
+
+ before do
+ stub_experiments(invite_members_for_task: true)
+ end
+
+ it 'creates 2 task issues', :aggregate_failures do
+ expect(TasksToBeDone::CreateWorker)
+ .to receive(:perform_async)
+ .with(anything, user.id, [member.id])
+ .once
+ .and_call_original
+ expect { execute_service }.to change { source.issues.count }.by(2)
+
+ expect(source.issues).to all have_attributes(
+ project: source,
+ author: user
+ )
+ end
+
+ context 'when passing many user ids' do
+ before do
+ stub_licensed_features(multiple_issue_assignees: false)
+ end
+
+ let(:another_user) { create(:user) }
+ let(:user_ids) { [member.id, another_user.id].join(',') }
+
+ it 'still creates 2 task issues', :aggregate_failures do
+ expect(TasksToBeDone::CreateWorker)
+ .to receive(:perform_async)
+ .with(anything, user.id, array_including(member.id, another_user.id))
+ .once
+ .and_call_original
+ expect { execute_service }.to change { source.issues.count }.by(2)
+
+ expect(source.issues).to all have_attributes(
+ project: source,
+ author: user
+ )
+ end
+ end
+
+ context 'when a `tasks_project_id` is missing' do
+ let(:additional_params) do
+ { invite_source: '_invite_source_', tasks_to_be_done: %w(ci code) }
+ end
+
+ it 'does not create task issues' do
+ expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
+ execute_service
+ end
+ end
+
+ context 'when `tasks_to_be_done` are missing' do
+ let(:additional_params) do
+ { invite_source: '_invite_source_', tasks_project_id: source.id }
+ end
+
+ it 'does not create task issues' do
+ expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
+ execute_service
+ end
+ end
+
+ context 'when invalid `tasks_to_be_done` are passed' do
+ let(:additional_params) do
+ { invite_source: '_invite_source_', tasks_project_id: source.id, tasks_to_be_done: %w(invalid_task) }
+ end
+
+ it 'does not create task issues' do
+ expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
+ execute_service
+ end
+ end
+
+ context 'when invalid `tasks_project_id` is passed' do
+ let(:another_project) { create(:project) }
+ let(:additional_params) do
+ { invite_source: '_invite_source_', tasks_project_id: another_project.id, tasks_to_be_done: %w(ci code) }
+ end
+
+ it 'does not create task issues' do
+ expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
+ execute_service
+ end
+ end
+
+ context 'when a member was already invited' do
+ let(:user_ids) { create(:project_member, :invited, project: source).invite_email }
+ let(:additional_params) do
+ { invite_source: '_invite_source_', tasks_project_id: source.id, tasks_to_be_done: %w(ci code) }
+ end
+
+ it 'does not create task issues' do
+ expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
+ execute_service
+ end
+ end
+ end
end
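
As a quick reference, the shape of the params that make Members::CreateService schedule task issues, lifted from the contexts above (the invite source string is the spec's own placeholder; `source` is the project or group receiving the member):

# Params shape exercised above.
additional_params = {
  invite_source: '_invite_source_',
  tasks_to_be_done: %w(ci code),   # unknown task names are ignored and create nothing
  tasks_project_id: source.id      # must point at the source project, otherwise no issues are created
}
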
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index 478733e8aa0..7b9ae19f038 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -22,6 +22,11 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
end
it_behaves_like 'records an onboarding progress action', :user_added
+
+ it 'does not create task issues' do
+ expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
+ expect { result }.not_to change { project.issues.count }
+ end
end
context 'when email belongs to an existing user as a secondary email' do
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
index 170d99f4642..71ad23bc68c 100644
--- a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
let_it_be(:merge_request) { create(:merge_request) }
describe '#CHECKS' do
- it 'contains every subclass of the base checks service' do
+ it 'contains every subclass of the base checks service', :eager_load do
expect(described_class::CHECKS).to contain_exactly(*MergeRequests::Mergeability::CheckBaseService.subclasses)
end
end
@@ -19,7 +19,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
let(:params) { {} }
let(:success_result) { Gitlab::MergeRequests::Mergeability::CheckResult.success }
- context 'when every check is skipped' do
+ context 'when every check is skipped', :eager_load do
before do
MergeRequests::Mergeability::CheckBaseService.subclasses.each do |subclass|
expect_next_instance_of(subclass) do |service|
diff --git a/spec/services/merge_requests/retarget_chain_service_spec.rb b/spec/services/merge_requests/retarget_chain_service_spec.rb
index 87bde4a1400..187dd0cf589 100644
--- a/spec/services/merge_requests/retarget_chain_service_spec.rb
+++ b/spec/services/merge_requests/retarget_chain_service_spec.rb
@@ -45,14 +45,6 @@ RSpec.describe MergeRequests::RetargetChainService do
.from(merge_request.source_branch)
.to(merge_request.target_branch)
end
-
- context 'when FF retarget_merge_requests is disabled' do
- before do
- stub_feature_flags(retarget_merge_requests: false)
- end
-
- include_examples 'does not retarget merge request'
- end
end
context 'in the same project' do
diff --git a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
new file mode 100644
index 00000000000..a26b1be529e
--- /dev/null
+++ b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ToggleAttentionRequestedService do
+ let(:current_user) { create(:user) }
+ let(:user) { create(:user) }
+ let(:assignee_user) { create(:user) }
+ let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) }
+ let(:reviewer) { merge_request.find_reviewer(user) }
+ let(:assignee) { merge_request.find_assignee(assignee_user) }
+ let(:project) { merge_request.project }
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
+ let(:result) { service.execute }
+ let(:todo_service) { spy('todo service') }
+
+ before do
+ allow(service).to receive(:todo_service).and_return(todo_service)
+
+ project.add_developer(current_user)
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ context 'invalid permissions' do
+ let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request, user: user) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ context 'reviewer does not exist' do
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: create(:user)) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ context 'reviewer exists' do
+ before do
+ reviewer.update!(state: :reviewed)
+ end
+
+ it 'returns success' do
+ expect(result[:status]).to eq :success
+ end
+
+ it 'updates reviewers state' do
+ service.execute
+ reviewer.reload
+
+ expect(reviewer.state).to eq 'attention_requested'
+ end
+
+ it 'creates a new todo for the reviewer' do
+ expect(todo_service).to receive(:create_attention_requested_todo).with(merge_request, current_user, user)
+
+ service.execute
+ end
+ end
+
+ context 'assignee exists' do
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: assignee_user) }
+
+ before do
+ assignee.update!(state: :reviewed)
+ end
+
+ it 'returns success' do
+ expect(result[:status]).to eq :success
+ end
+
+ it 'updates assignees state' do
+ service.execute
+ assignee.reload
+
+ expect(assignee.state).to eq 'attention_requested'
+ end
+
+ it 'creates a new todo for the reviewer' do
+ expect(todo_service).to receive(:create_attention_requested_todo).with(merge_request, current_user, assignee_user)
+
+ service.execute
+ end
+ end
+
+ context 'assignee is the same as reviewer' do
+ let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [user]) }
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
+ let(:assignee) { merge_request.find_assignee(user) }
+
+ before do
+ reviewer.update!(state: :reviewed)
+ assignee.update!(state: :reviewed)
+ end
+
+ it 'updates reviewers and assignees state' do
+ service.execute
+ reviewer.reload
+ assignee.reload
+
+ expect(reviewer.state).to eq 'attention_requested'
+ expect(assignee.state).to eq 'attention_requested'
+ end
+ end
+
+ context 'state is attention_requested' do
+ before do
+ reviewer.update!(state: :attention_requested)
+ end
+
+ it 'toggles state to reviewed' do
+ service.execute
+ reviewer.reload
+
+ expect(reviewer.state).to eq "reviewed"
+ end
+
+ it 'does not create a new todo for the reviewer' do
+ expect(todo_service).not_to receive(:create_attention_requested_todo).with(merge_request, current_user, assignee_user)
+
+ service.execute
+ end
+ end
+ end
+end
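
The new spec above fixes the constructor and return shape of MergeRequests::ToggleAttentionRequestedService. A usage sketch based only on what the spec shows (merge_request, current_user and reviewer_user are assumed to exist):

# Sketch based on the spec above: toggle the attention-requested state for a reviewer or assignee.
result = MergeRequests::ToggleAttentionRequestedService.new(
  project: merge_request.project,
  current_user: current_user,       # needs access to the project, otherwise the status is :error
  merge_request: merge_request,
  user: reviewer_user               # must be a reviewer or assignee of the merge request
).execute

result[:status]                     # => :success or :error, per the spec
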
diff --git a/spec/services/namespaces/in_product_marketing_email_records_spec.rb b/spec/services/namespaces/in_product_marketing_email_records_spec.rb
new file mode 100644
index 00000000000..e5f1b275f9c
--- /dev/null
+++ b/spec/services/namespaces/in_product_marketing_email_records_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespaces::InProductMarketingEmailRecords do
+ let_it_be(:user) { create :user }
+
+ subject(:records) { described_class.new }
+
+ it 'initializes records' do
+ expect(subject.records).to match_array []
+ end
+
+ describe '#save!' do
+ before do
+ allow(Users::InProductMarketingEmail).to receive(:bulk_insert!)
+
+ records.add(user, :invite_team, 0)
+ records.add(user, :create, 1)
+ end
+
+ it 'bulk inserts added records' do
+ expect(Users::InProductMarketingEmail).to receive(:bulk_insert!).with(records.records)
+ records.save!
+ end
+
+ it 'resets its records' do
+ records.save!
+ expect(records.records).to match_array []
+ end
+ end
+
+ describe '#add' do
+ it 'adds a Users::InProductMarketingEmail record to its records' do
+ freeze_time do
+ records.add(user, :invite_team, 0)
+ records.add(user, :create, 1)
+
+ first, second = records.records
+
+ expect(first).to be_a Users::InProductMarketingEmail
+ expect(first.track.to_sym).to eq :invite_team
+ expect(first.series).to eq 0
+ expect(first.created_at).to eq Time.zone.now
+ expect(first.updated_at).to eq Time.zone.now
+
+ expect(second).to be_a Users::InProductMarketingEmail
+ expect(second.track.to_sym).to eq :create
+ expect(second.series).to eq 1
+ expect(second.created_at).to eq Time.zone.now
+ expect(second.updated_at).to eq Time.zone.now
+ end
+ end
+ end
+end
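
The records object above is a small buffer; its whole public surface appears in the spec. A sketch of the flow it tests:

# Sketch of the flow covered by the spec above.
records = Namespaces::InProductMarketingEmailRecords.new
records.add(user, :invite_team, 0)  # track and series, as in the spec
records.add(user, :create, 1)
records.save!                       # bulk inserts via Users::InProductMarketingEmail.bulk_insert! and empties #records
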
diff --git a/spec/services/namespaces/invite_team_email_service_spec.rb b/spec/services/namespaces/invite_team_email_service_spec.rb
new file mode 100644
index 00000000000..60ba91f433d
--- /dev/null
+++ b/spec/services/namespaces/invite_team_email_service_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespaces::InviteTeamEmailService do
+ let_it_be(:user) { create(:user, email_opted_in: true) }
+
+ let(:track) { described_class::TRACK }
+ let(:series) { 0 }
+
+ let(:setup_for_company) { true }
+ let(:parent_group) { nil }
+ let(:group) { create(:group, parent: parent_group) }
+
+ subject(:action) { described_class.send_email(user, group) }
+
+ before do
+ group.add_owner(user)
+ allow(group).to receive(:setup_for_company).and_return(setup_for_company)
+ allow(Notify).to receive(:in_product_marketing_email).and_return(double(deliver_later: nil))
+ end
+
+ RSpec::Matchers.define :send_invite_team_email do |*args|
+ match do
+ expect(Notify).to have_received(:in_product_marketing_email).with(*args).once
+ end
+
+ match_when_negated do
+ expect(Notify).not_to have_received(:in_product_marketing_email)
+ end
+ end
+
+ shared_examples 'unexperimented' do
+ it { is_expected.not_to send_invite_team_email }
+
+ it 'does not record sent email' do
+ expect { subject }.not_to change { Users::InProductMarketingEmail.count }
+ end
+ end
+
+ shared_examples 'candidate' do
+ it { is_expected.to send_invite_team_email(user.id, group.id, track, 0) }
+
+ it 'records sent email' do
+ expect { subject }.to change { Users::InProductMarketingEmail.count }.by(1)
+
+ expect(
+ Users::InProductMarketingEmail.where(
+ user: user,
+ track: track,
+ series: 0
+ )
+ ).to exist
+ end
+
+ it_behaves_like 'tracks assignment and records the subject', :invite_team_email, :group do
+ subject { group }
+ end
+ end
+
+ context 'when group is in control path' do
+ before do
+ stub_experiments(invite_team_email: :control)
+ end
+
+ it { is_expected.not_to send_invite_team_email }
+
+ it 'does not record sent email' do
+ expect { subject }.not_to change { Users::InProductMarketingEmail.count }
+ end
+
+ it_behaves_like 'tracks assignment and records the subject', :invite_team_email, :group do
+ subject { group }
+ end
+ end
+
+ context 'when group is in candidate path' do
+ before do
+ stub_experiments(invite_team_email: :candidate)
+ end
+
+ it_behaves_like 'candidate'
+
+ context 'when the user has not opted into marketing emails' do
+ let(:user) { create(:user, email_opted_in: false ) }
+
+ it_behaves_like 'unexperimented'
+ end
+
+ context 'when group is not top level' do
+ it_behaves_like 'unexperimented' do
+ let(:parent_group) do
+ create(:group).tap { |g| g.add_owner(user) }
+ end
+ end
+ end
+
+ context 'when group is not set up for a company' do
+ it_behaves_like 'unexperimented' do
+ let(:setup_for_company) { nil }
+ end
+ end
+
+ context 'when other users have already been added to the group' do
+ before do
+ group.add_developer(create(:user))
+ end
+
+ it_behaves_like 'unexperimented'
+ end
+
+ context 'when other users have already been invited to the group' do
+ before do
+ group.add_developer('not_a_user_yet@example.com')
+ end
+
+ it_behaves_like 'unexperimented'
+ end
+
+ context 'when the user already got sent the email' do
+ before do
+ create(:in_product_marketing_email, user: user, track: track, series: 0)
+ end
+
+ it_behaves_like 'unexperimented'
+ end
+ end
+end
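
For reference, the only entry point the spec above exercises is the class-level send_email call; the surrounding contexts enumerate when it is a no-op:

# Sketch based on the spec above.
Namespaces::InviteTeamEmailService.send_email(user, group)
# Per the contexts above, the email goes out only for an opted-in owner of a top-level group
# that is set up for a company, has no other members or invites, is in the experiment's
# candidate path, and has not already received this track/series.
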
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 48718cbc24a..fbf5b183365 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -3040,7 +3040,7 @@ RSpec.describe NotificationService, :mailer do
it 'emails only the creator' do
notification.pipeline_finished(pipeline)
- should_only_email(u_custom_notification_enabled, kind: :bcc)
+ should_only_email(u_custom_notification_enabled)
end
it_behaves_like 'project emails are disabled' do
@@ -3063,7 +3063,7 @@ RSpec.describe NotificationService, :mailer do
it 'sends to group notification email' do
notification.pipeline_finished(pipeline)
- expect(email_recipients(kind: :bcc).first).to eq(group_notification_email)
+ expect(email_recipients.first).to eq(group_notification_email)
end
end
end
@@ -3076,7 +3076,7 @@ RSpec.describe NotificationService, :mailer do
it 'emails only the creator' do
notification.pipeline_finished(pipeline)
- should_only_email(u_member, kind: :bcc)
+ should_only_email(u_member)
end
it_behaves_like 'project emails are disabled' do
@@ -3098,7 +3098,7 @@ RSpec.describe NotificationService, :mailer do
it 'sends to group notification email' do
notification.pipeline_finished(pipeline)
- expect(email_recipients(kind: :bcc).first).to eq(group_notification_email)
+ expect(email_recipients.first).to eq(group_notification_email)
end
end
end
@@ -3110,7 +3110,7 @@ RSpec.describe NotificationService, :mailer do
end
it 'emails only the creator' do
- should_only_email(u_watcher, kind: :bcc)
+ should_only_email(u_watcher)
end
end
@@ -3121,7 +3121,7 @@ RSpec.describe NotificationService, :mailer do
end
it 'emails only the creator' do
- should_only_email(u_custom_notification_unset, kind: :bcc)
+ should_only_email(u_custom_notification_unset)
end
end
@@ -3143,7 +3143,7 @@ RSpec.describe NotificationService, :mailer do
end
it 'emails only the creator' do
- should_only_email(u_custom_notification_enabled, kind: :bcc)
+ should_only_email(u_custom_notification_enabled)
end
end
@@ -3170,7 +3170,7 @@ RSpec.describe NotificationService, :mailer do
it 'emails only the creator' do
notification.pipeline_finished(pipeline, ref_status: ref_status)
- should_only_email(u_member, kind: :bcc)
+ should_only_email(u_member)
end
it_behaves_like 'project emails are disabled' do
@@ -3192,7 +3192,7 @@ RSpec.describe NotificationService, :mailer do
it 'sends to group notification email' do
notification.pipeline_finished(pipeline, ref_status: ref_status)
- expect(email_recipients(kind: :bcc).first).to eq(group_notification_email)
+ expect(email_recipients.first).to eq(group_notification_email)
end
end
end
@@ -3204,7 +3204,7 @@ RSpec.describe NotificationService, :mailer do
end
it 'emails only the creator' do
- should_only_email(u_watcher, kind: :bcc)
+ should_only_email(u_watcher)
end
end
@@ -3215,7 +3215,7 @@ RSpec.describe NotificationService, :mailer do
end
it 'emails only the creator' do
- should_only_email(u_custom_notification_unset, kind: :bcc)
+ should_only_email(u_custom_notification_unset)
end
end
@@ -3236,7 +3236,7 @@ RSpec.describe NotificationService, :mailer do
notification.pipeline_finished(pipeline, ref_status: ref_status)
- should_only_email(u_custom_notification_enabled, kind: :bcc)
+ should_only_email(u_custom_notification_enabled)
end
end
end
diff --git a/spec/services/packages/create_dependency_service_spec.rb b/spec/services/packages/create_dependency_service_spec.rb
index 261c6b395d5..55414ea68fe 100644
--- a/spec/services/packages/create_dependency_service_spec.rb
+++ b/spec/services/packages/create_dependency_service_spec.rb
@@ -58,9 +58,9 @@ RSpec.describe Packages::CreateDependencyService do
let_it_be(:rows) { [{ name: 'express', version_pattern: '^4.16.4' }] }
it 'creates dependences and links' do
- original_bulk_insert = ::Gitlab::Database.main.method(:bulk_insert)
- expect(::Gitlab::Database.main)
- .to receive(:bulk_insert) do |table, rows, return_ids: false, disable_quote: [], on_conflict: nil|
+ original_bulk_insert = ::ApplicationRecord.method(:legacy_bulk_insert)
+ expect(::ApplicationRecord)
+ .to receive(:legacy_bulk_insert) do |table, rows, return_ids: false, disable_quote: [], on_conflict: nil|
call_count = table == Packages::Dependency.table_name ? 2 : 1
call_count.times { original_bulk_insert.call(table, rows, return_ids: return_ids, disable_quote: disable_quote, on_conflict: on_conflict) }
end.twice
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index ba5729eaf59..b1beb2adb3b 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
let(:override) { {} }
let(:package_name) { "@#{namespace.path}/my-app" }
+ let(:version_data) { params.dig('versions', '1.0.1') }
subject { described_class.new(project, user, params).execute }
@@ -25,6 +26,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
.to change { Packages::Package.count }.by(1)
.and change { Packages::Package.npm.count }.by(1)
.and change { Packages::Tag.count }.by(1)
+ .and change { Packages::Npm::Metadatum.count }.by(1)
end
it_behaves_like 'assigns the package creator' do
@@ -40,6 +42,8 @@ RSpec.describe Packages::Npm::CreatePackageService do
expect(package.version).to eq(version)
end
+ it { expect(subject.npm_metadatum.package_json).to eq(version_data) }
+
it { expect(subject.name).to eq(package_name) }
it { expect(subject.version).to eq(version) }
@@ -54,6 +58,48 @@ RSpec.describe Packages::Npm::CreatePackageService do
expect { subject }.to change { Packages::PackageFileBuildInfo.count }.by(1)
end
end
+
+ context 'with a too large metadata structure' do
+ before do
+ params[:versions][version][:test] = 'test' * 10000
+ end
+
+ it 'does not create the package' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Package json structure is too large')
+ .and not_change { Packages::Package.count }
+ .and not_change { Packages::Package.npm.count }
+ .and not_change { Packages::Tag.count }
+ .and not_change { Packages::Npm::Metadatum.count }
+ end
+ end
+
+ described_class::PACKAGE_JSON_NOT_ALLOWED_FIELDS.each do |field|
+ context "with not allowed #{field} field" do
+ before do
+ params[:versions][version][field] = 'test'
+ end
+
+ it 'is persisted without the field' do
+ expect { subject }
+ .to change { Packages::Package.count }.by(1)
+ .and change { Packages::Package.npm.count }.by(1)
+ .and change { Packages::Tag.count }.by(1)
+ .and change { Packages::Npm::Metadatum.count }.by(1)
+ expect(subject.npm_metadatum.package_json[field]).to be_blank
+ end
+ end
+ end
+
+ context 'with packages_npm_abbreviated_metadata disabled' do
+ before do
+ stub_feature_flags(packages_npm_abbreviated_metadata: false)
+ end
+
+ it 'creates a package without metadatum' do
+ expect { subject }
+ .not_to change { Packages::Npm::Metadatum.count }
+ end
+ end
end
describe '#execute' do
diff --git a/spec/services/packages/update_tags_service_spec.rb b/spec/services/packages/update_tags_service_spec.rb
index 6e67489fec9..c4256699c94 100644
--- a/spec/services/packages/update_tags_service_spec.rb
+++ b/spec/services/packages/update_tags_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Packages::UpdateTagsService do
it 'is a no op' do
expect(package).not_to receive(:tags)
- expect(::Gitlab::Database.main).not_to receive(:bulk_insert)
+ expect(::ApplicationRecord).not_to receive(:legacy_bulk_insert)
subject
end
diff --git a/spec/services/projects/all_issues_count_service_spec.rb b/spec/services/projects/all_issues_count_service_spec.rb
new file mode 100644
index 00000000000..d7e35991940
--- /dev/null
+++ b/spec/services/projects/all_issues_count_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::AllIssuesCountService, :use_clean_rails_memory_store_caching do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:banned_user) { create(:user, :banned) }
+
+ subject { described_class.new(project) }
+
+ it_behaves_like 'a counter caching service'
+
+ describe '#count' do
+ it 'returns the number of all issues' do
+ create(:issue, :opened, project: project)
+ create(:issue, :opened, confidential: true, project: project)
+ create(:issue, :opened, author: banned_user, project: project)
+ create(:issue, :closed, project: project)
+
+ expect(subject.count).to eq(4)
+ end
+ end
+end
diff --git a/spec/services/projects/all_merge_requests_count_service_spec.rb b/spec/services/projects/all_merge_requests_count_service_spec.rb
new file mode 100644
index 00000000000..13954d688aa
--- /dev/null
+++ b/spec/services/projects/all_merge_requests_count_service_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::AllMergeRequestsCountService, :use_clean_rails_memory_store_caching do
+ let_it_be(:project) { create(:project) }
+
+ subject { described_class.new(project) }
+
+ it_behaves_like 'a counter caching service'
+
+ describe '#count' do
+ it 'returns the number of all merge requests' do
+ create(:merge_request,
+ :opened,
+ source_project: project,
+ target_project: project)
+ create(:merge_request,
+ :closed,
+ source_project: project,
+ target_project: project)
+ create(:merge_request,
+ :merged,
+ source_project: project,
+ target_project: project)
+
+ expect(subject.count).to eq(3)
+ end
+ end
+end
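
Both new count services above rely on the shared 'a counter caching service' examples; their use is a single call each (project is assumed to be a Project record):

# Sketch based on the two specs above.
Projects::AllIssuesCountService.new(project).count          # counts open, confidential, banned-author and closed issues
Projects::AllMergeRequestsCountService.new(project).count   # counts opened, closed and merged merge requests
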
diff --git a/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb b/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb
deleted file mode 100644
index dfe2ff9e57c..00000000000
--- a/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb
+++ /dev/null
@@ -1,133 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Projects::ContainerRepository::CacheTagsCreatedAtService, :clean_gitlab_redis_cache do
- let_it_be(:dummy_tag_class) { Struct.new(:name, :created_at) }
- let_it_be(:repository) { create(:container_repository) }
-
- let(:tags) { create_tags(5) }
- let(:service) { described_class.new(repository) }
-
- shared_examples 'not interacting with redis' do
- it 'does not interact with redis' do
- expect(::Gitlab::Redis::Cache).not_to receive(:with)
-
- subject
- end
- end
-
- describe '#populate' do
- subject { service.populate(tags) }
-
- context 'with tags' do
- it 'gets values from redis' do
- expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original
-
- expect(subject).to eq(0)
-
- tags.each { |t| expect(t.created_at).to eq(nil) }
- end
-
- context 'with cached values' do
- let(:cached_tags) { tags.first(2) }
-
- before do
- ::Gitlab::Redis::Cache.with do |redis|
- cached_tags.each do |tag|
- redis.set(cache_key(tag), rfc3339(10.days.ago))
- end
- end
- end
-
- it 'gets values from redis' do
- expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original
-
- expect(subject).to eq(2)
-
- cached_tags.each { |t| expect(t.created_at).not_to eq(nil) }
- (tags - cached_tags).each { |t| expect(t.created_at).to eq(nil) }
- end
- end
- end
-
- context 'with no tags' do
- let(:tags) { [] }
-
- it_behaves_like 'not interacting with redis'
- end
- end
-
- describe '#insert' do
- let(:max_ttl) { 90.days }
-
- subject { service.insert(tags, max_ttl) }
-
- context 'with tags' do
- let(:tag) { tags.first }
- let(:ttl) { 90.days - 3.days }
-
- before do
- travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0))
-
- tag.created_at = DateTime.rfc3339(3.days.ago.rfc3339)
- end
-
- after do
- travel_back
- end
-
- it 'inserts values in redis' do
- ::Gitlab::Redis::Cache.with do |redis|
- expect(redis)
- .to receive(:set)
- .with(cache_key(tag), rfc3339(tag.created_at), ex: ttl.to_i)
- .and_call_original
- end
-
- subject
- end
-
- context 'with some of them already cached' do
- let(:tag) { tags.first }
-
- before do
- ::Gitlab::Redis::Cache.with do |redis|
- redis.set(cache_key(tag), rfc3339(10.days.ago))
- end
- service.populate(tags)
- end
-
- it_behaves_like 'not interacting with redis'
- end
- end
-
- context 'with no tags' do
- let(:tags) { [] }
-
- it_behaves_like 'not interacting with redis'
- end
-
- context 'with no expires_in' do
- let(:max_ttl) { nil }
-
- it_behaves_like 'not interacting with redis'
- end
- end
-
- def create_tags(size)
- Array.new(size) do |i|
- dummy_tag_class.new("Tag #{i}", nil)
- end
- end
-
- def cache_key(tag)
- "container_repository:{#{repository.id}}:tag:#{tag.name}:created_at"
- end
-
- def rfc3339(date_time)
- # DateTime rfc3339 is different ActiveSupport::TimeWithZone rfc3339
- # The caching will use DateTime rfc3339
- DateTime.rfc3339(date_time.rfc3339).rfc3339
- end
-end
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 289bbf4540e..a41ba8216cc 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -41,322 +41,320 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
describe '#execute' do
subject { service.execute }
- shared_examples 'reading and removing tags' do |caching_enabled: true|
- context 'when no params are specified' do
- let(:params) { {} }
+ context 'when no params are specified' do
+ let(:params) { {} }
- it 'does not remove anything' do
- expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:execute)
- expect_no_caching
+ it 'does not remove anything' do
+ expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
+ .not_to receive(:execute)
+ expect_no_caching
- is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
- end
+ is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
end
+ end
- context 'when regex matching everything is specified' do
- shared_examples 'removes all matches' do
- it 'does remove all tags except latest' do
- expect_no_caching
+ context 'when regex matching everything is specified' do
+ shared_examples 'removes all matches' do
+ it 'does remove all tags except latest' do
+ expect_no_caching
- expect_delete(%w(A Ba Bb C D E))
+ expect_delete(%w(A Ba Bb C D E))
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
- end
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
end
+ end
+
+ let(:params) do
+ { 'name_regex_delete' => '.*' }
+ end
+ it_behaves_like 'removes all matches'
+
+ context 'with deprecated name_regex param' do
let(:params) do
- { 'name_regex_delete' => '.*' }
+ { 'name_regex' => '.*' }
end
it_behaves_like 'removes all matches'
+ end
+ end
- context 'with deprecated name_regex param' do
- let(:params) do
- { 'name_regex' => '.*' }
- end
+ context 'with invalid regular expressions' do
+ shared_examples 'handling an invalid regex' do
+ it 'keeps all tags' do
+ expect_no_caching
- it_behaves_like 'removes all matches'
+ expect(Projects::ContainerRepository::DeleteTagsService)
+ .not_to receive(:new)
+
+ subject
end
- end
- context 'with invalid regular expressions' do
- shared_examples 'handling an invalid regex' do
- it 'keeps all tags' do
- expect_no_caching
+ it { is_expected.to eq(status: :error, message: 'invalid regex') }
- expect(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:new)
+ it 'calls error tracking service' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
- subject
- end
+ subject
+ end
+ end
- it { is_expected.to eq(status: :error, message: 'invalid regex') }
+ context 'when name_regex_delete is invalid' do
+ let(:params) { { 'name_regex_delete' => '*test*' } }
- it 'calls error tracking service' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
+ it_behaves_like 'handling an invalid regex'
+ end
- subject
- end
- end
+ context 'when name_regex is invalid' do
+ let(:params) { { 'name_regex' => '*test*' } }
- context 'when name_regex_delete is invalid' do
- let(:params) { { 'name_regex_delete' => '*test*' } }
+ it_behaves_like 'handling an invalid regex'
+ end
- it_behaves_like 'handling an invalid regex'
- end
+ context 'when name_regex_keep is invalid' do
+ let(:params) { { 'name_regex_keep' => '*test*' } }
- context 'when name_regex is invalid' do
- let(:params) { { 'name_regex' => '*test*' } }
+ it_behaves_like 'handling an invalid regex'
+ end
+ end
- it_behaves_like 'handling an invalid regex'
- end
+ context 'when delete regex matching specific tags is used' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D' }
+ end
- context 'when name_regex_keep is invalid' do
- let(:params) { { 'name_regex_keep' => '*test*' } }
+ it 'does remove C and D' do
+ expect_delete(%w(C D))
- it_behaves_like 'handling an invalid regex'
- end
+ expect_no_caching
+
+ is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
end
- context 'when delete regex matching specific tags is used' do
+ context 'with overriding allow regex' do
let(:params) do
- { 'name_regex_delete' => 'C|D' }
+ { 'name_regex_delete' => 'C|D',
+ 'name_regex_keep' => 'C' }
end
- it 'does remove C and D' do
- expect_delete(%w(C D))
+ it 'does not remove C' do
+ expect_delete(%w(D))
expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
end
+ end
- context 'with overriding allow regex' do
- let(:params) do
- { 'name_regex_delete' => 'C|D',
- 'name_regex_keep' => 'C' }
- end
+ context 'with name_regex_delete overriding deprecated name_regex' do
+ let(:params) do
+ { 'name_regex' => 'C|D',
+ 'name_regex_delete' => 'D' }
+ end
- it 'does not remove C' do
- expect_delete(%w(D))
+ it 'does not remove C' do
+ expect_delete(%w(D))
- expect_no_caching
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
- end
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
end
+ end
+ end
- context 'with name_regex_delete overriding deprecated name_regex' do
- let(:params) do
- { 'name_regex' => 'C|D',
- 'name_regex_delete' => 'D' }
- end
+ context 'with allow regex value' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'name_regex_keep' => 'B.*' }
+ end
- it 'does not remove C' do
- expect_delete(%w(D))
+ it 'does not remove B*' do
+ expect_delete(%w(A C D E))
- expect_no_caching
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
- end
- end
+ is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
+ end
+ end
+
+ context 'when keeping only N tags' do
+ let(:params) do
+ { 'name_regex' => 'A|B.*|C',
+ 'keep_n' => 1 }
end
- context 'with allow regex value' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'name_regex_keep' => 'B.*' }
- end
+ it 'sorts tags by date' do
+ expect_delete(%w(Bb Ba C))
- it 'does not remove B*' do
- expect_delete(%w(A C D E))
+ expect_no_caching
- expect_no_caching
+ expect(service).to receive(:order_by_date).and_call_original
- is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
end
+ end
- context 'when keeping only N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C',
- 'keep_n' => 1 }
- end
+ context 'when not keeping N tags' do
+ let(:params) do
+ { 'name_regex' => 'A|B.*|C' }
+ end
- it 'sorts tags by date' do
- expect_delete(%w(Bb Ba C))
+ it 'does not sort tags by date' do
+ expect_delete(%w(A Ba Bb C))
- expect_no_caching
+ expect_no_caching
- expect(service).to receive(:order_by_date).and_call_original
+ expect(service).not_to receive(:order_by_date)
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
- end
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
end
+ end
- context 'when not keeping N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C' }
- end
-
- it 'does not sort tags by date' do
- expect_delete(%w(A Ba Bb C))
+ context 'when removing keeping only 3' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 3 }
+ end
- expect_no_caching
+ it 'does remove B* and C as they are the oldest' do
+ expect_delete(%w(Bb Ba C))
- expect(service).not_to receive(:order_by_date)
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
end
+ end
- context 'when removing keeping only 3' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 3 }
- end
+ context 'when removing older than 1 day' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'older_than' => '1 day' }
+ end
- it 'does remove B* and C as they are the oldest' do
- expect_delete(%w(Bb Ba C))
+ it 'does remove B* and C as they are older than 1 day' do
+ expect_delete(%w(Ba Bb C))
- expect_no_caching
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
+ is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
end
+ end
- context 'when removing older than 1 day' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'older_than' => '1 day' }
- end
+ context 'when combining all parameters' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
+ end
- it 'does remove B* and C as they are older than 1 day' do
- expect_delete(%w(Ba Bb C))
+ it 'does remove B* and C' do
+ expect_delete(%w(Bb Ba C))
- expect_no_caching
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
- end
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
end
+ end
+
+ context 'when running a container_expiration_policy' do
+ let(:user) { nil }
- context 'when combining all parameters' do
+ context 'with valid container_expiration_policy param' do
let(:params) do
{ 'name_regex_delete' => '.*',
'keep_n' => 1,
- 'older_than' => '1 day' }
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true }
end
- it 'does remove B* and C' do
- expect_delete(%w(Bb Ba C))
+ it 'succeeds without a user' do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- expect_no_caching
+ expect_caching
is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
end
end
- context 'when running a container_expiration_policy' do
- let(:user) { nil }
-
- context 'with valid container_expiration_policy param' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true }
- end
-
- it 'succeeds without a user' do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
-
- caching_enabled ? expect_caching : expect_no_caching
-
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
+ context 'without container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
end
- context 'without container_expiration_policy param' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day' }
- end
-
- it 'fails' do
- is_expected.to eq(status: :error, message: 'access denied')
- end
+ it 'fails' do
+ is_expected.to eq(status: :error, message: 'access denied')
end
end
+ end
- context 'truncating the tags list' do
- let(:params) do
- {
- 'name_regex_delete' => '.*',
- 'keep_n' => 1
- }
- end
+ context 'truncating the tags list' do
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1
+ }
+ end
- shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
- it 'returns the response' do
- expect_no_caching
+ shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
+ it 'returns the response' do
+ expect_no_caching
- result = subject
+ result = subject
- service_response = expected_service_response(
- status: status,
- original_size: original_size,
- before_truncate_size: before_truncate_size,
- after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size,
- deleted: nil
- )
+ service_response = expected_service_response(
+ status: status,
+ original_size: original_size,
+ before_truncate_size: before_truncate_size,
+ after_truncate_size: after_truncate_size,
+ before_delete_size: before_delete_size,
+ deleted: nil
+ )
- expect(result).to eq(service_response)
- end
+ expect(result).to eq(service_response)
end
+ end
- where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
- false | 10 | :success | :success | false
- false | 10 | :error | :error | false
- false | 3 | :success | :success | false
- false | 3 | :error | :error | false
- false | 0 | :success | :success | false
- false | 0 | :error | :error | false
- true | 10 | :success | :success | false
- true | 10 | :error | :error | false
- true | 3 | :success | :error | true
- true | 3 | :error | :error | true
- true | 0 | :success | :success | false
- true | 0 | :error | :error | false
- end
+ where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
+ false | 10 | :success | :success | false
+ false | 10 | :error | :error | false
+ false | 3 | :success | :success | false
+ false | 3 | :error | :error | false
+ false | 0 | :success | :success | false
+ false | 0 | :error | :error | false
+ true | 10 | :success | :success | false
+ true | 10 | :error | :error | false
+ true | 3 | :success | :error | true
+ true | 3 | :error | :error | true
+ true | 0 | :success | :success | false
+ true | 0 | :error | :error | false
+ end
- with_them do
- before do
- stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
- stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
- allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
- expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
- end
+ with_them do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
+ stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
+ allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
+ expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
end
+ end
- original_size = 7
- keep_n = 1
+ original_size = 7
+ keep_n = 1
- it_behaves_like(
- 'returning the response',
- status: params[:expected_status],
- original_size: original_size,
- before_truncate_size: original_size - keep_n,
- after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
- before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
- )
- end
+ it_behaves_like(
+ 'returning the response',
+ status: params[:expected_status],
+ original_size: original_size,
+ before_truncate_size: original_size - keep_n,
+ after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
+ before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
+ )
end
end
- context 'caching' do
+ context 'caching', :freeze_time do
let(:params) do
{
'name_regex_delete' => '.*',
@@ -381,17 +379,12 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
before do
expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0))
# We froze time so we need to set the created_at stubs again
stub_digest_config('sha256:configA', 1.hour.ago)
stub_digest_config('sha256:configB', 5.days.ago)
stub_digest_config('sha256:configC', 1.month.ago)
end
- after do
- travel_back
- end
-
it 'caches the created_at values' do
::Gitlab::Redis::Cache.with do |redis|
expect_mget(redis, tags_and_created_ats.keys)
@@ -450,32 +443,6 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
DateTime.rfc3339(date_time.rfc3339).rfc3339
end
end
-
- context 'with container_registry_expiration_policies_caching enabled for the project' do
- before do
- stub_feature_flags(container_registry_expiration_policies_caching: project)
- end
-
- it_behaves_like 'reading and removing tags', caching_enabled: true
- end
-
- context 'with container_registry_expiration_policies_caching disabled' do
- before do
- stub_feature_flags(container_registry_expiration_policies_caching: false)
- end
-
- it_behaves_like 'reading and removing tags', caching_enabled: false
- end
-
- context 'with container_registry_expiration_policies_caching not enabled for the project' do
- let_it_be(:another_project) { create(:project) }
-
- before do
- stub_feature_flags(container_registry_expiration_policies_caching: another_project)
- end
-
- it_behaves_like 'reading and removing tags', caching_enabled: false
- end
end
private
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index d7c43ac676e..2aa9be5066f 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -49,6 +49,7 @@ RSpec.describe Projects::CreateService, '#execute' do
it 'keeps them as specified' do
expect(project.name).to eq('one')
expect(project.path).to eq('two')
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -58,6 +59,7 @@ RSpec.describe Projects::CreateService, '#execute' do
it 'sets name == path' do
expect(project.path).to eq('one.two_three-four')
expect(project.name).to eq(project.path)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -67,6 +69,7 @@ RSpec.describe Projects::CreateService, '#execute' do
it 'sets path == name' do
expect(project.name).to eq('one.two_three-four')
expect(project.path).to eq(project.name)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -78,6 +81,7 @@ RSpec.describe Projects::CreateService, '#execute' do
it 'parameterizes the name' do
expect(project.name).to eq('one.two_three-four and five')
expect(project.path).to eq('one-two_three-four-and-five')
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
end
@@ -111,13 +115,14 @@ RSpec.describe Projects::CreateService, '#execute' do
end
context 'user namespace' do
- it do
+ it 'creates a project in user namespace' do
project = create_project(user, opts)
expect(project).to be_valid
expect(project.owner).to eq(user)
expect(project.team.maintainers).to include(user)
expect(project.namespace).to eq(user.namespace)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -151,6 +156,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.owner).to eq(user)
expect(project.team.maintainers).to contain_exactly(user)
expect(project.namespace).to eq(user.namespace)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -160,6 +166,7 @@ RSpec.describe Projects::CreateService, '#execute' do
project = create_project(admin, opts)
expect(project).not_to be_persisted
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
end
@@ -183,6 +190,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.namespace).to eq(group)
expect(project.team.owners).to include(user)
expect(user.authorized_projects).to include(project)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -339,6 +347,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end
imported_project
+ expect(imported_project.project_namespace).to be_in_sync_with_project(imported_project)
end
it 'stores import data and URL' do
@@ -406,6 +415,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.visibility_level).to eq(project_level)
expect(project).to be_saved
expect(project).to be_valid
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
end
@@ -424,6 +434,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.errors.messages[:visibility_level].first).to(
match('restricted by your GitLab administrator')
)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end

it 'does not allow a restricted visibility level for admins when admin mode is disabled' do
@@ -493,6 +504,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to be_valid
expect(project.owner).to eq(user)
expect(project.namespace).to eq(user.namespace)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end

context 'when another repository already exists on disk' do
@@ -522,6 +534,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to respond_to(:errors)
expect(project.errors.messages).to have_key(:base)
expect(project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end

it 'does not allow to import project when path matches existing repository on disk' do
@@ -531,6 +544,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to respond_to(:errors)
expect(project.errors.messages).to have_key(:base)
expect(project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -555,6 +569,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to respond_to(:errors)
expect(project.errors.messages).to have_key(:base)
expect(project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
end
@@ -651,7 +666,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end

context 'with an active group-level integration' do
- let!(:group_integration) { create(:prometheus_integration, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let!(:group_integration) { create(:prometheus_integration, :group, group: group, api_url: 'https://prometheus.group.com/') }
let!(:group) do
create(:group).tap do |group|
group.add_owner(user)
@@ -672,7 +687,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end

context 'with an active subgroup' do
- let!(:subgroup_integration) { create(:prometheus_integration, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup_integration) { create(:prometheus_integration, :group, group: subgroup, api_url: 'https://prometheus.subgroup.com/') }
let!(:subgroup) do
create(:group, parent: group).tap do |subgroup|
subgroup.add_owner(user)
@@ -810,11 +825,11 @@ RSpec.describe Projects::CreateService, '#execute' do
).to be_truthy
end
- it 'schedules authorization update for users with access to group' do
+ it 'schedules authorization update for users with access to group', :sidekiq_inline do
expect(AuthorizedProjectsWorker).not_to(
receive(:bulk_perform_async)
)
- expect(AuthorizedProjectUpdate::ProjectCreateWorker).to(
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to(
receive(:perform_async).and_call_original
)
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
@@ -825,7 +840,11 @@ RSpec.describe Projects::CreateService, '#execute' do
.and_call_original
)
- create_project(user, opts)
+ project = create_project(user, opts)
+
+ expect(
+ Ability.allowed?(other_user, :developer_access, project)
+ ).to be_truthy
end
end
@@ -866,6 +885,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to be_valid
expect(project.shared_runners_enabled).to eq(expected_result_for_project)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
end
@@ -886,6 +906,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to be_valid
expect(project.shared_runners_enabled).to eq(expected_result_for_project)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
end
@@ -903,6 +924,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.persisted?).to eq(false)
expect(project).to be_invalid
expect(project.errors[:shared_runners_enabled]).to include('cannot be enabled because parent group does not allow it')
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
end
@@ -922,6 +944,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to be_valid
expect(project.shared_runners_enabled).to eq(expected_result)
+ expect(project.project_namespace).to be_in_sync_with_project(project)
end
end
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 9bdd9800fcc..ac84614121a 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -331,6 +331,14 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
end
end
+
+ context 'for an archived project' do
+ before do
+ project.update!(archived: true)
+ end
+
+ it_behaves_like 'deleting the project with pipeline and build'
+ end
end

describe 'container registry' do
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index 111c1264777..6002aaf427a 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe Projects::ImportExport::ExportService do
describe '#execute' do
- let!(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
let(:project) { create(:project) }
let(:shared) { project.import_export_shared }
let!(:after_export_strategy) { Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy.new }
@@ -28,7 +29,14 @@ RSpec.describe Projects::ImportExport::ExportService do
end

it 'saves the models' do
- expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
+ saver_params = {
+ project: project,
+ current_user: user,
+ shared: shared,
+ params: {},
+ logger: an_instance_of(Gitlab::Export::Logger)
+ }
+ expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).with(saver_params).and_call_original
service.execute
end
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index f9ff959fa05..04c6349bf52 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -102,6 +102,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
it 'skips read_total_timeout', :aggregate_failures do
stub_const('GitLab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT', 0)
+ expect(ProjectCacheWorker).to receive(:perform_async).once
expect(Gitlab::Metrics::System).not_to receive(:monotonic_time)
expect(subject.execute).to include(status: :success)
end
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index eab7228307a..61edfd23700 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -207,13 +207,5 @@ RSpec.describe Projects::ParticipantsService do
end

it_behaves_like 'return project members'
-
- context 'when feature flag :linear_participants_service_ancestor_scopes is disabled' do
- before do
- stub_feature_flags(linear_participants_service_ancestor_scopes: false)
- end
-
- it_behaves_like 'return project members'
- end
end
end
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 25cf588dedf..3bd96ad19bc 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -218,8 +218,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
.to receive(:new)
.with(project, kind_of(Hash))
.exactly(3).times
- .and_return(process_service)
- expect(process_service).to receive(:execute).exactly(3).times
+ .and_call_original
subject
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index b539b01066e..c47d44002cc 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Projects::TransferService do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:group_integration) { create(:integrations_slack, group: group, project: nil, webhook: 'http://group.slack.com') }
+ let_it_be(:group_integration) { create(:integrations_slack, :group, group: group, webhook: 'http://group.slack.com') }
let(:project) { create(:project, :repository, :legacy_storage, namespace: user.namespace) }
@@ -66,8 +66,6 @@ RSpec.describe Projects::TransferService do
end

context 'when project has an associated project namespace' do
- let!(:project_namespace) { create(:project_namespace, project: project) }
-
it 'keeps project namespace in sync with project' do
transfer_result = execute_transfer
@@ -272,8 +270,6 @@ RSpec.describe Projects::TransferService do
end

context 'when project has an associated project namespace' do
- let!(:project_namespace) { create(:project_namespace, project: project) }
-
it 'keeps project namespace in sync with project' do
attempt_project_transfer
@@ -294,8 +290,6 @@ RSpec.describe Projects::TransferService do
end

context 'when project has an associated project namespace' do
- let!(:project_namespace) { create(:project_namespace, project: project) }
-
it 'keeps project namespace in sync with project' do
transfer_result = execute_transfer
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index d67b189f90e..611261cd92c 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -1326,14 +1326,25 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end

- it_behaves_like 'confidential command' do
- let(:content) { '/confidential' }
- let(:issuable) { issue }
- end
+ context '/confidential' do
+ it_behaves_like 'confidential command' do
+ let(:content) { '/confidential' }
+ let(:issuable) { issue }
+ end
- it_behaves_like 'confidential command' do
- let(:content) { '/confidential' }
- let(:issuable) { create(:incident, project: project) }
+ it_behaves_like 'confidential command' do
+ let(:content) { '/confidential' }
+ let(:issuable) { create(:incident, project: project) }
+ end
+
+ context 'when non-member is creating a new issue' do
+ let(:service) { described_class.new(project, create(:user)) }
+
+ it_behaves_like 'confidential command' do
+ let(:content) { '/confidential' }
+ let(:issuable) { build(:issue, project: project) }
+ end
+ end
end

it_behaves_like 'lock command' do
@@ -2542,4 +2553,32 @@ RSpec.describe QuickActions::InterpretService do
end
end
end
+
+ describe '#available_commands' do
+ context 'when Guest is creating a new issue' do
+ let_it_be(:guest) { create(:user) }
+
+ let(:issue) { build(:issue, project: public_project) }
+ let(:service) { described_class.new(project, guest) }
+
+ before_all do
+ public_project.add_guest(guest)
+ end
+
+ it 'includes commands to set metadata' do
+ # milestone action is only available when project has a milestone
+ milestone
+
+ available_commands = service.available_commands(issue)
+
+ expect(available_commands).to include(
+ a_hash_including(name: :label),
+ a_hash_including(name: :milestone),
+ a_hash_including(name: :copy_metadata),
+ a_hash_including(name: :assign),
+ a_hash_including(name: :due)
+ )
+ end
+ end
+ end
end
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index b987e3204ad..c2c0a4c2126 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:removed) { [labels[1]] }

it 'creates all label events in a single query' do
- expect(Gitlab::Database.main).to receive(:bulk_insert).once.and_call_original
+ expect(ApplicationRecord).to receive(:legacy_bulk_insert).once.and_call_original
expect { subject }.to change { resource.resource_label_events.count }.from(0).to(2)
end
end
diff --git a/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb b/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb
index cb42ad5b617..71b1d0993ee 100644
--- a/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb
+++ b/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb
@@ -4,18 +4,20 @@ require 'spec_helper'
RSpec.describe ResourceEvents::SyntheticLabelNotesBuilderService do
describe '#execute' do
- let!(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
- let!(:issue) { create(:issue, author: user) }
+ let_it_be(:issue) { create(:issue, author: user) }
- let!(:event1) { create(:resource_label_event, issue: issue) }
- let!(:event2) { create(:resource_label_event, issue: issue) }
- let!(:event3) { create(:resource_label_event, issue: issue) }
+ let_it_be(:event1) { create(:resource_label_event, issue: issue) }
+ let_it_be(:event2) { create(:resource_label_event, issue: issue) }
+ let_it_be(:event3) { create(:resource_label_event, issue: issue) }

it 'returns the expected synthetic notes' do
notes = ResourceEvents::SyntheticLabelNotesBuilderService.new(issue, user).execute
expect(notes.size).to eq(3)
end
+
+ it_behaves_like 'filters by paginated notes', :resource_label_event
end
end
diff --git a/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb b/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb
index 1b35e224e98..9c6b6a33b57 100644
--- a/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb
+++ b/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb
@@ -24,5 +24,7 @@ RSpec.describe ResourceEvents::SyntheticMilestoneNotesBuilderService do
'removed milestone'
])
end
+
+ it_behaves_like 'filters by paginated notes', :resource_milestone_event
end
end
diff --git a/spec/services/resource_events/synthetic_state_notes_builder_service_spec.rb b/spec/services/resource_events/synthetic_state_notes_builder_service_spec.rb
new file mode 100644
index 00000000000..79500f3768b
--- /dev/null
+++ b/spec/services/resource_events/synthetic_state_notes_builder_service_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResourceEvents::SyntheticStateNotesBuilderService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'filters by paginated notes', :resource_state_event
+ end
+end
diff --git a/spec/services/security/ci_configuration/sast_iac_create_service_spec.rb b/spec/services/security/ci_configuration/sast_iac_create_service_spec.rb
new file mode 100644
index 00000000000..deb10732b37
--- /dev/null
+++ b/spec/services/security/ci_configuration/sast_iac_create_service_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Security::CiConfiguration::SastIacCreateService, :snowplow do
+ subject(:result) { described_class.new(project, user).execute }
+
+ let(:branch_name) { 'set-sast-iac-config-1' }
+
+ let(:snowplow_event) do
+ {
+ category: 'Security::CiConfiguration::SastIacCreateService',
+ action: 'create',
+ label: ''
+ }
+ end
+
+ include_examples 'services security ci configuration create service', true
+end
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 659c21b7d4f..99047f3233b 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -267,8 +267,8 @@ RSpec.describe Spam::SpamVerdictService do
where(:verdict_value, :expected) do
::Spam::SpamConstants::ALLOW | ::Spam::SpamConstants::ALLOW
::Spam::SpamConstants::CONDITIONAL_ALLOW | ::Spam::SpamConstants::CONDITIONAL_ALLOW
- ::Spam::SpamConstants::DISALLOW | ::Spam::SpamConstants::CONDITIONAL_ALLOW
- ::Spam::SpamConstants::BLOCK_USER | ::Spam::SpamConstants::CONDITIONAL_ALLOW
+ ::Spam::SpamConstants::DISALLOW | ::Spam::SpamConstants::DISALLOW
+ ::Spam::SpamConstants::BLOCK_USER | ::Spam::SpamConstants::BLOCK_USER
end
# rubocop: enable Lint/BinaryOperatorWithIdenticalOperands
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 1a421999ffb..ce0122ae301 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -348,193 +348,6 @@ RSpec.describe SystemNoteService do
end
end

- describe 'Jira integration' do
- include JiraServiceHelper
-
- let(:project) { create(:jira_project, :repository) }
- let(:author) { create(:user) }
- let(:issue) { create(:issue, project: project) }
- let(:merge_request) { create(:merge_request, :simple, target_project: project, source_project: project) }
- let(:jira_issue) { ExternalIssue.new("JIRA-1", project)}
- let(:jira_tracker) { project.jira_integration }
- let(:commit) { project.commit }
- let(:comment_url) { jira_api_comment_url(jira_issue.id) }
- let(:success_message) { "SUCCESS: Successfully posted to http://jira.example.net." }
-
- before do
- stub_jira_integration_test
- stub_jira_urls(jira_issue.id)
- jira_integration_settings
- end
-
- def cross_reference(type, link_exists = false)
- noteable = type == 'commit' ? commit : merge_request
-
- links = []
- if link_exists
- url = if type == 'commit'
- "#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/-/commit/#{commit.id}"
- else
- "#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/-/merge_requests/#{merge_request.iid}"
- end
-
- link = double(object: { 'url' => url })
- links << link
- expect(link).to receive(:save!)
- end
-
- allow(JIRA::Resource::Remotelink).to receive(:all).and_return(links)
-
- described_class.cross_reference(jira_issue, noteable, author)
- end
-
- noteable_types = %w(merge_requests commit)
-
- noteable_types.each do |type|
- context "when noteable is a #{type}" do
- it "blocks cross reference when #{type.underscore}_events is false" do
- jira_tracker.update!("#{type}_events" => false)
-
- expect(cross_reference(type)).to eq(s_('JiraService|Events for %{noteable_model_name} are disabled.') % { noteable_model_name: type.pluralize.humanize.downcase })
- end
-
- it "creates cross reference when #{type.underscore}_events is true" do
- jira_tracker.update!("#{type}_events" => true)
-
- expect(cross_reference(type)).to eq(success_message)
- end
- end
-
- context 'when a new cross reference is created' do
- it 'creates a new comment and remote link' do
- cross_reference(type)
-
- expect(WebMock).to have_requested(:post, jira_api_comment_url(jira_issue))
- expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue))
- end
- end
-
- context 'when a link exists' do
- it 'updates a link but does not create a new comment' do
- expect(WebMock).not_to have_requested(:post, jira_api_comment_url(jira_issue))
-
- cross_reference(type, true)
- end
- end
- end
-
- describe "new reference" do
- let(:favicon_path) { "http://localhost/assets/#{find_asset('favicon.png').digest_path}" }
-
- before do
- allow(JIRA::Resource::Remotelink).to receive(:all).and_return([])
- end
-
- context 'for commits' do
- it "creates comment" do
- result = described_class.cross_reference(jira_issue, commit, author)
-
- expect(result).to eq(success_message)
- end
-
- it "creates remote link" do
- described_class.cross_reference(jira_issue, commit, author)
-
- expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
- body: hash_including(
- GlobalID: "GitLab",
- relationship: 'mentioned on',
- object: {
- url: project_commit_url(project, commit),
- title: "Commit - #{commit.title}",
- icon: { title: "GitLab", url16x16: favicon_path },
- status: { resolved: false }
- }
- )
- ).once
- end
- end
-
- context 'for issues' do
- let(:issue) { create(:issue, project: project) }
-
- it "creates comment" do
- result = described_class.cross_reference(jira_issue, issue, author)
-
- expect(result).to eq(success_message)
- end
-
- it "creates remote link" do
- described_class.cross_reference(jira_issue, issue, author)
-
- expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
- body: hash_including(
- GlobalID: "GitLab",
- relationship: 'mentioned on',
- object: {
- url: project_issue_url(project, issue),
- title: "Issue - #{issue.title}",
- icon: { title: "GitLab", url16x16: favicon_path },
- status: { resolved: false }
- }
- )
- ).once
- end
- end
-
- context 'for snippets' do
- let(:snippet) { create(:snippet, project: project) }
-
- it "creates comment" do
- result = described_class.cross_reference(jira_issue, snippet, author)
-
- expect(result).to eq(success_message)
- end
-
- it "creates remote link" do
- described_class.cross_reference(jira_issue, snippet, author)
-
- expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
- body: hash_including(
- GlobalID: "GitLab",
- relationship: 'mentioned on',
- object: {
- url: project_snippet_url(project, snippet),
- title: "Snippet - #{snippet.title}",
- icon: { title: "GitLab", url16x16: favicon_path },
- status: { resolved: false }
- }
- )
- ).once
- end
- end
- end
-
- describe "existing reference" do
- before do
- allow(JIRA::Resource::Remotelink).to receive(:all).and_return([])
- message = double('message')
- allow(message).to receive(:include?) { true }
- allow_next_instance_of(JIRA::Resource::Issue) do |instance|
- allow(instance).to receive(:comments).and_return([OpenStruct.new(body: message)])
- end
- end
-
- it "does not return success message" do
- result = described_class.cross_reference(jira_issue, commit, author)
-
- expect(result).not_to eq(success_message)
- end
-
- it 'does not try to create comment and remote link' do
- subject
-
- expect(WebMock).not_to have_requested(:post, jira_api_comment_url(jira_issue))
- expect(WebMock).not_to have_requested(:post, jira_api_remote_link_url(jira_issue))
- end
- end
- end
-
describe '.change_time_estimate' do
it 'calls TimeTrackingService' do
expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
@@ -781,6 +594,18 @@ RSpec.describe SystemNoteService do
end
end

+ describe '.resolve_incident_status' do
+ let(:incident) { build(:incident, :closed) }
+
+ it 'calls IncidentService' do
+ expect_next_instance_of(SystemNotes::IncidentService) do |service|
+ expect(service).to receive(:resolve_incident_status)
+ end
+
+ described_class.resolve_incident_status(incident, author)
+ end
+ end
+
describe '.log_resolving_alert' do
let(:alert) { build(:alert_management_alert) }
let(:monitoring_tool) { 'Prometheus' }
diff --git a/spec/services/system_notes/incident_service_spec.rb b/spec/services/system_notes/incident_service_spec.rb
index ab9b9eb2bd4..669e357b7a4 100644
--- a/spec/services/system_notes/incident_service_spec.rb
+++ b/spec/services/system_notes/incident_service_spec.rb
@@ -56,4 +56,14 @@ RSpec.describe ::SystemNotes::IncidentService do
end
end
end
+
+ describe '#resolve_incident_status' do
+ subject(:resolve_incident_status) { described_class.new(noteable: noteable, project: project, author: author).resolve_incident_status }
+
+ it 'creates a new note about resolved incident', :aggregate_failures do
+ expect { resolve_incident_status }.to change { noteable.notes.count }.by(1)
+
+ expect(noteable.notes.last.note).to eq('changed the status to **Resolved** by closing the incident')
+ end
+ end
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 71a28a89cd8..fd481aa6ddb 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -347,6 +347,23 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
end
end
+
+ context 'with external issue' do
+ let(:noteable) { ExternalIssue.new('JIRA-123', project) }
+ let(:mentioner) { project.commit }
+
+ it 'queues a background worker' do
+ expect(Integrations::CreateExternalCrossReferenceWorker).to receive(:perform_async).with(
+ project.id,
+ 'JIRA-123',
+ 'Commit',
+ mentioner.id,
+ author.id
+ )
+
+ subject
+ end
+ end
end
end
diff --git a/spec/services/tasks_to_be_done/base_service_spec.rb b/spec/services/tasks_to_be_done/base_service_spec.rb
new file mode 100644
index 00000000000..bf6be6d46e5
--- /dev/null
+++ b/spec/services/tasks_to_be_done/base_service_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe TasksToBeDone::BaseService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:assignee_one) { create(:user) }
+ let_it_be(:assignee_two) { create(:user) }
+ let_it_be(:assignee_ids) { [assignee_one.id] }
+ let_it_be(:label) { create(:label, title: 'tasks to be done:ci', project: project) }
+
+ before do
+ project.add_maintainer(current_user)
+ project.add_developer(assignee_one)
+ project.add_developer(assignee_two)
+ end
+
+ subject(:service) do
+ TasksToBeDone::CreateCiTaskService.new(
+ project: project,
+ current_user: current_user,
+ assignee_ids: assignee_ids
+ )
+ end
+
+ context 'no existing task issue', :aggregate_failures do
+ it 'creates an issue' do
+ params = {
+ assignee_ids: assignee_ids,
+ title: 'Set up CI/CD',
+ description: anything,
+ add_labels: label.title
+ }
+
+ expect(Issues::BuildService)
+ .to receive(:new)
+ .with(project: project, current_user: current_user, params: params)
+ .and_call_original
+
+ expect { service.execute }.to change(Issue, :count).by(1)
+
+ expect(project.issues.last).to have_attributes(
+ author: current_user,
+ title: params[:title],
+ assignees: [assignee_one],
+ labels: [label]
+ )
+ end
+ end
+
+ context 'an open issue with the same label already exists', :aggregate_failures do
+ let_it_be(:assignee_ids) { [assignee_two.id] }
+
+ it 'assigns the user to the existing issue' do
+ issue = create(:labeled_issue, project: project, labels: [label], assignees: [assignee_one])
+ params = { add_assignee_ids: assignee_ids }
+
+ expect(Issues::UpdateService)
+ .to receive(:new)
+ .with(project: project, current_user: current_user, params: params)
+ .and_call_original
+
+ expect { service.execute }.not_to change(Issue, :count)
+
+ expect(issue.reload.assignees).to match_array([assignee_one, assignee_two])
+ end
+ end
+end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 6a8e6dc8970..7103cb0b66a 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -1218,6 +1218,17 @@ RSpec.describe TodoService do
end
end

+ describe '#create_attention_requested_todo' do
+ let(:target) { create(:merge_request, author: author, source_project: project) }
+ let(:user) { create(:user) }
+
+ it 'creates a todo for user' do
+ service.create_attention_requested_todo(target, author, user)
+
+ should_create_todo(user: user, target: target, action: Todo::ATTENTION_REQUESTED)
+ end
+ end
+
def should_create_todo(attributes = {})
attributes.reverse_merge!(
project: project,
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 3244db4c1fb..52c7b54ed72 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Users::UpdateService do
result = update_user(user, status: { emoji: "Moo!" })
expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Emoji is not included in the list")
+ expect(result[:message]).to eq("Emoji is not a valid emoji name")
end

it 'updates user detail with provided attributes' do
diff --git a/spec/services/users/upsert_credit_card_validation_service_spec.rb b/spec/services/users/upsert_credit_card_validation_service_spec.rb
index bede30e1898..952d482f1bd 100644
--- a/spec/services/users/upsert_credit_card_validation_service_spec.rb
+++ b/spec/services/users/upsert_credit_card_validation_service_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Users::UpsertCreditCardValidationService do
credit_card_expiration_year: expiration_year,
credit_card_expiration_month: 1,
credit_card_holder_name: 'John Smith',
+ credit_card_type: 'AmericanExpress',
credit_card_mask_number: '1111'
}
end
@@ -30,7 +31,16 @@ RSpec.describe Users::UpsertCreditCardValidationService do
result = service.execute
expect(result.status).to eq(:success)
- expect(user.reload.credit_card_validated_at).to eq(credit_card_validated_time)
+
+ user.reload
+
+ expect(user.credit_card_validation).to have_attributes(
+ credit_card_validated_at: credit_card_validated_time,
+ network: 'AmericanExpress',
+ holder_name: 'John Smith',
+ last_digits: 1111,
+ expiration_date: Date.new(expiration_year, 1, 31)
+ )
end
end
@@ -97,6 +107,7 @@ RSpec.describe Users::UpsertCreditCardValidationService do
expiration_date: Date.new(expiration_year, 1, 31),
holder_name: "John Smith",
last_digits: 1111,
+ network: "AmericanExpress",
user_id: user_id
}