Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-05-20 17:34:42 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2020-05-20 17:34:42 +0300
commit9f46488805e86b1bc341ea1620b866016c2ce5ed (patch)
treef9748c7e287041e37d6da49e0a29c9511dc34768 /spec/services
parentdfc92d081ea0332d69c8aca2f0e745cb48ae5e6d (diff)
Add latest changes from gitlab-org/gitlab@13-0-stable-ee
Diffstat (limited to 'spec/services')
-rw-r--r--spec/services/alert_management/create_alert_issue_service_spec.rb152
-rw-r--r--spec/services/alert_management/process_prometheus_alert_service_spec.rb136
-rw-r--r--spec/services/alert_management/update_alert_status_service_spec.rb66
-rw-r--r--spec/services/application_settings/update_service_spec.rb2
-rw-r--r--spec/services/auth/container_registry_authentication_service_spec.rb20
-rw-r--r--spec/services/authorized_project_update/project_create_service_spec.rb142
-rw-r--r--spec/services/base_container_service_spec.rb23
-rw-r--r--spec/services/boards/issues/list_service_spec.rb2
-rw-r--r--spec/services/branches/create_service_spec.rb30
-rw-r--r--spec/services/ci/compare_accessibility_reports_service_spec.rb62
-rw-r--r--spec/services/ci/compare_test_reports_service_spec.rb7
-rw-r--r--spec/services/ci/create_job_artifacts_service_spec.rb67
-rw-r--r--spec/services/ci/create_pipeline_service/custom_config_content_spec.rb4
-rw-r--r--spec/services/ci/daily_build_group_report_result_service_spec.rb (renamed from spec/services/ci/daily_report_result_service_spec.rb)41
-rw-r--r--spec/services/ci/destroy_expired_job_artifacts_service_spec.rb26
-rw-r--r--spec/services/ci/generate_terraform_reports_service_spec.rb71
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb15
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb6
-rw-r--r--spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb19
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb25
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb57
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml47
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails.yml39
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test.yml39
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test_when_always.yml43
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml62
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml63
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml40
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_always.yml35
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure.yml35
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure.yml35
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml63
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml64
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml43
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml66
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml58
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml27
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml48
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml42
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml66
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml40
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml53
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_build_fails.yml38
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml39
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml65
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml54
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml44
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml52
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml52
-rw-r--r--spec/services/ci/pipeline_schedule_service_spec.rb32
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb19
-rw-r--r--spec/services/ci/register_job_service_spec.rb2
-rw-r--r--spec/services/ci/retry_build_service_spec.rb50
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb19
-rw-r--r--spec/services/ci/update_instance_variables_service_spec.rb230
-rw-r--r--spec/services/clusters/applications/check_upgrade_progress_service_spec.rb4
-rw-r--r--spec/services/clusters/applications/ingress_modsecurity_usage_service_spec.rb196
-rw-r--r--spec/services/clusters/applications/schedule_update_service_spec.rb6
-rw-r--r--spec/services/clusters/gcp/finalize_creation_service_spec.rb3
-rw-r--r--spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb4
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb1
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb12
-rw-r--r--spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb200
-rw-r--r--spec/services/cohorts_service_spec.rb2
-rw-r--r--spec/services/deployments/older_deployments_drop_service_spec.rb37
-rw-r--r--spec/services/design_management/delete_designs_service_spec.rb195
-rw-r--r--spec/services/design_management/design_user_notes_count_service_spec.rb43
-rw-r--r--spec/services/design_management/generate_image_versions_service_spec.rb77
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb356
-rw-r--r--spec/services/emails/confirm_service_spec.rb6
-rw-r--r--spec/services/event_create_service_spec.rb13
-rw-r--r--spec/services/git/branch_push_service_spec.rb10
-rw-r--r--spec/services/git/wiki_push_service/change_spec.rb109
-rw-r--r--spec/services/git/wiki_push_service_spec.rb338
-rw-r--r--spec/services/grafana/proxy_service_spec.rb2
-rw-r--r--spec/services/groups/create_service_spec.rb21
-rw-r--r--spec/services/groups/import_export/export_service_spec.rb40
-rw-r--r--spec/services/groups/import_export/import_service_spec.rb254
-rw-r--r--spec/services/groups/update_service_spec.rb20
-rw-r--r--spec/services/incident_management/create_issue_service_spec.rb24
-rw-r--r--spec/services/issuable/clone/attributes_rewriter_spec.rb28
-rw-r--r--spec/services/issues/close_service_spec.rb2
-rw-r--r--spec/services/issues/create_service_spec.rb86
-rw-r--r--spec/services/issues/related_branches_service_spec.rb102
-rw-r--r--spec/services/issues/resolve_discussions_spec.rb19
-rw-r--r--spec/services/issues/update_service_spec.rb30
-rw-r--r--spec/services/jira_import/start_import_service_spec.rb122
-rw-r--r--spec/services/lfs/file_transformer_spec.rb17
-rw-r--r--spec/services/merge_requests/create_service_spec.rb13
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb2
-rw-r--r--spec/services/merge_requests/rebase_service_spec.rb19
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb40
-rw-r--r--spec/services/merge_requests/squash_service_spec.rb40
-rw-r--r--spec/services/merge_requests/update_service_spec.rb3
-rw-r--r--spec/services/metrics/dashboard/clone_dashboard_service_spec.rb4
-rw-r--r--spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb4
-rw-r--r--spec/services/metrics/dashboard/transient_embed_service_spec.rb6
-rw-r--r--spec/services/metrics/users_starred_dashboards/create_service_spec.rb72
-rw-r--r--spec/services/metrics/users_starred_dashboards/delete_service_spec.rb41
-rw-r--r--spec/services/namespaces/check_storage_size_service_spec.rb159
-rw-r--r--spec/services/note_summary_spec.rb6
-rw-r--r--spec/services/notes/create_service_spec.rb56
-rw-r--r--spec/services/notes/post_process_service_spec.rb27
-rw-r--r--spec/services/notification_service_spec.rb66
-rw-r--r--spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb2
-rw-r--r--spec/services/pod_logs/base_service_spec.rb30
-rw-r--r--spec/services/pod_logs/elasticsearch_service_spec.rb32
-rw-r--r--spec/services/pod_logs/kubernetes_service_spec.rb20
-rw-r--r--spec/services/post_receive_service_spec.rb35
-rw-r--r--spec/services/projects/alerting/notify_service_spec.rb96
-rw-r--r--spec/services/projects/create_service_spec.rb98
-rw-r--r--spec/services/projects/fork_service_spec.rb8
-rw-r--r--spec/services/projects/hashed_storage/base_attachment_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/migrate_repository_service_spec.rb10
-rw-r--r--spec/services/projects/hashed_storage/rollback_repository_service_spec.rb8
-rw-r--r--spec/services/projects/import_export/export_service_spec.rb28
-rw-r--r--spec/services/projects/import_service_spec.rb22
-rw-r--r--spec/services/projects/prometheus/alerts/create_events_service_spec.rb6
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb33
-rw-r--r--spec/services/projects/propagate_service_template_spec.rb36
-rw-r--r--spec/services/projects/transfer_service_spec.rb271
-rw-r--r--spec/services/projects/update_remote_mirror_service_spec.rb37
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb46
-rw-r--r--spec/services/prometheus/proxy_service_spec.rb2
-rw-r--r--spec/services/prometheus/proxy_variable_substitution_service_spec.rb156
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb4
-rw-r--r--spec/services/releases/create_service_spec.rb3
-rw-r--r--spec/services/repository_archive_clean_up_service_spec.rb2
-rw-r--r--spec/services/resource_access_tokens/create_service_spec.rb (renamed from spec/services/resources/create_access_token_service_spec.rb)22
-rw-r--r--spec/services/resource_access_tokens/revoke_service_spec.rb111
-rw-r--r--spec/services/resource_events/change_milestone_service_spec.rb10
-rw-r--r--spec/services/resource_events/merge_into_notes_service_spec.rb2
-rw-r--r--spec/services/search/snippet_service_spec.rb50
-rw-r--r--spec/services/search_service_spec.rb86
-rw-r--r--spec/services/snippets/create_service_spec.rb154
-rw-r--r--spec/services/snippets/update_service_spec.rb90
-rw-r--r--spec/services/spam/spam_action_service_spec.rb (renamed from spec/services/spam/spam_check_service_spec.rb)87
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb65
-rw-r--r--spec/services/system_note_service_spec.rb28
-rw-r--r--spec/services/system_notes/design_management_service_spec.rb155
-rw-r--r--spec/services/template_engines/liquid_service_spec.rb126
-rw-r--r--spec/services/todo_service_spec.rb30
-rw-r--r--spec/services/update_merge_request_metrics_service_spec.rb4
-rw-r--r--spec/services/user_project_access_changed_service_spec.rb9
-rw-r--r--spec/services/users/destroy_service_spec.rb14
-rw-r--r--spec/services/users/migrate_to_ghost_user_service_spec.rb6
-rw-r--r--spec/services/verify_pages_domain_service_spec.rb2
-rw-r--r--spec/services/wiki_pages/base_service_spec.rb2
-rw-r--r--spec/services/wiki_pages/create_service_spec.rb93
-rw-r--r--spec/services/wiki_pages/destroy_service_spec.rb49
-rw-r--r--spec/services/wiki_pages/event_create_service_spec.rb87
-rw-r--r--spec/services/wiki_pages/update_service_spec.rb97
-rw-r--r--spec/services/wikis/create_attachment_service_spec.rb67
153 files changed, 6591 insertions, 1456 deletions
diff --git a/spec/services/alert_management/create_alert_issue_service_spec.rb b/spec/services/alert_management/create_alert_issue_service_spec.rb
new file mode 100644
index 00000000000..62afe777165
--- /dev/null
+++ b/spec/services/alert_management/create_alert_issue_service_spec.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AlertManagement::CreateAlertIssueService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:payload) do
+ {
+ 'annotations' => {
+ 'title' => 'Alert title'
+ },
+ 'startsAt' => '2020-04-27T10:10:22.265949279Z',
+ 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1'
+ }
+ end
+ let_it_be(:generic_alert, reload: true) { create(:alert_management_alert, :triggered, project: project, payload: payload) }
+ let_it_be(:prometheus_alert) { create(:alert_management_alert, :triggered, :prometheus, project: project, payload: payload) }
+ let(:alert) { generic_alert }
+ let(:created_issue) { Issue.last! }
+
+ describe '#execute' do
+ subject(:execute) { described_class.new(alert, user).execute }
+
+ before do
+ allow(user).to receive(:can?).and_call_original
+ allow(user).to receive(:can?)
+ .with(:create_issue, project)
+ .and_return(can_create)
+ end
+
+ shared_examples 'creating an alert' do
+ it 'creates an issue' do
+ expect { execute }.to change { project.issues.count }.by(1)
+ end
+
+ it 'returns a created issue' do
+ expect(execute.payload).to eq(issue: created_issue)
+ end
+
+ it 'has a successful status' do
+ expect(execute).to be_success
+ end
+
+ it 'updates alert.issue_id' do
+ execute
+
+ expect(alert.reload.issue_id).to eq(created_issue.id)
+ end
+
+ it 'sets issue author to the current user' do
+ execute
+
+ expect(created_issue.author).to eq(user)
+ end
+ end
+
+ context 'when a user is allowed to create an issue' do
+ let(:can_create) { true }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'checks permissions' do
+ execute
+ expect(user).to have_received(:can?).with(:create_issue, project)
+ end
+
+ context 'when the alert is prometheus alert' do
+ let(:alert) { prometheus_alert }
+
+ it_behaves_like 'creating an alert'
+ end
+
+ context 'when the alert is generic' do
+ let(:alert) { generic_alert }
+
+ it_behaves_like 'creating an alert'
+ end
+
+ context 'when issue cannot be created' do
+ let(:alert) { prometheus_alert }
+
+ before do
+ # set invalid payload for Prometheus alert
+ alert.update!(payload: {})
+ end
+
+ it 'has an unsuccessful status' do
+ expect(execute).to be_error
+ expect(execute.message).to eq('invalid alert')
+ end
+ end
+
+ context 'when alert cannot be updated' do
+ before do
+ # invalidate alert
+ too_many_hosts = Array.new(AlertManagement::Alert::HOSTS_MAX_LENGTH + 1) { |_| 'host' }
+ alert.update_columns(hosts: too_many_hosts)
+ end
+
+ it 'responds with error' do
+ expect(execute).to be_error
+ expect(execute.message).to eq('Hosts hosts array is over 255 chars')
+ end
+ end
+
+ context 'when alert already has an attached issue' do
+ let!(:issue) { create(:issue, project: project) }
+
+ before do
+ alert.update!(issue_id: issue.id)
+ end
+
+ it 'does not create yet another issue' do
+ expect { execute }.not_to change(Issue, :count)
+ end
+
+ it 'responds with error' do
+ expect(execute).to be_error
+ expect(execute.message).to eq(_('An issue already exists'))
+ end
+ end
+
+ context 'when alert_management_create_alert_issue feature flag is disabled' do
+ before do
+ stub_feature_flags(alert_management_create_alert_issue: false)
+ end
+
+ it 'responds with error' do
+ expect(execute).to be_error
+ expect(execute.message).to eq(_('You have no permissions'))
+ end
+ end
+ end
+
+ context 'when a user is not allowed to create an issue' do
+ let(:can_create) { false }
+
+ it 'checks permissions' do
+ execute
+ expect(user).to have_received(:can?).with(:create_issue, project)
+ end
+
+ it 'responds with error' do
+ expect(execute).to be_error
+ expect(execute.message).to eq(_('You have no permissions'))
+ end
+ end
+ end
+end
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
new file mode 100644
index 00000000000..73f9f103902
--- /dev/null
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AlertManagement::ProcessPrometheusAlertService do
+ let_it_be(:project) { create(:project) }
+
+ describe '#execute' do
+ subject { described_class.new(project, nil, payload).execute }
+
+ context 'when alert payload is valid' do
+ let(:parsed_alert) { Gitlab::Alerting::Alert.new(project: project, payload: payload) }
+ let(:payload) do
+ {
+ 'status' => status,
+ 'labels' => {
+ 'alertname' => 'GitalyFileServerDown',
+ 'channel' => 'gitaly',
+ 'pager' => 'pagerduty',
+ 'severity' => 's1'
+ },
+ 'annotations' => {
+ 'description' => 'Alert description',
+ 'runbook' => 'troubleshooting/gitaly-down.md',
+ 'title' => 'Alert title'
+ },
+ 'startsAt' => '2020-04-27T10:10:22.265949279Z',
+ 'endsAt' => '2020-04-27T10:20:22.265949279Z',
+ 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1',
+ 'fingerprint' => 'b6ac4d42057c43c1'
+ }
+ end
+
+ context 'when Prometheus alert status is firing' do
+ let(:status) { 'firing' }
+
+ context 'when alert with the same fingerprint already exists' do
+ let!(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: parsed_alert.gitlab_fingerprint) }
+
+ context 'when status can be changed' do
+ it 'changes status to triggered' do
+ expect { subject }.to change { alert.reload.triggered? }.to(true)
+ end
+ end
+
+ context 'when status change did not succeed' do
+ before do
+ allow(AlertManagement::Alert).to receive(:for_fingerprint).and_return([alert])
+ allow(alert).to receive(:trigger).and_return(false)
+ end
+
+ it 'writes a warning to the log' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Unable to update AlertManagement::Alert status to triggered',
+ project_id: project.id,
+ alert_id: alert.id
+ )
+
+ subject
+ end
+ end
+
+ it { is_expected.to be_success }
+ end
+
+ context 'when alert does not exist' do
+ context 'when alert can be created' do
+ it 'creates a new alert' do
+ expect { subject }.to change { AlertManagement::Alert.where(project: project).count }.by(1)
+ end
+ end
+
+ context 'when alert cannot be created' do
+ let(:errors) { double(messages: { hosts: ['hosts array is over 255 chars'] })}
+ let(:am_alert) { instance_double(AlertManagement::Alert, save: false, errors: errors) }
+
+ before do
+ allow(AlertManagement::Alert).to receive(:new).and_return(am_alert)
+ end
+
+ it 'writes a warning to the log' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Unable to create AlertManagement::Alert',
+ project_id: project.id,
+ alert_errors: { hosts: ['hosts array is over 255 chars'] }
+ )
+
+ subject
+ end
+ end
+
+ it { is_expected.to be_success }
+ end
+ end
+
+ context 'when Prometheus alert status is resolved' do
+ let(:status) { 'resolved' }
+ let!(:alert) { create(:alert_management_alert, project: project, fingerprint: parsed_alert.gitlab_fingerprint) }
+
+ context 'when status can be changed' do
+ it 'resolves an existing alert' do
+ expect { subject }.to change { alert.reload.resolved? }.to(true)
+ end
+ end
+
+ context 'when status change did not succeed' do
+ before do
+ allow(AlertManagement::Alert).to receive(:for_fingerprint).and_return([alert])
+ allow(alert).to receive(:resolve).and_return(false)
+ end
+
+ it 'writes a warning to the log' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Unable to update AlertManagement::Alert status to resolved',
+ project_id: project.id,
+ alert_id: alert.id
+ )
+
+ subject
+ end
+ end
+
+ it { is_expected.to be_success }
+ end
+ end
+
+ context 'when alert payload is invalid' do
+ let(:payload) { {} }
+
+ it 'responds with bad_request' do
+ expect(subject).to be_error
+ expect(subject.http_status).to eq(:bad_request)
+ end
+ end
+ end
+end
diff --git a/spec/services/alert_management/update_alert_status_service_spec.rb b/spec/services/alert_management/update_alert_status_service_spec.rb
new file mode 100644
index 00000000000..b287d0d1614
--- /dev/null
+++ b/spec/services/alert_management/update_alert_status_service_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe AlertManagement::UpdateAlertStatusService do
+ let(:project) { alert.project }
+ let_it_be(:user) { build(:user) }
+
+ let_it_be(:alert, reload: true) do
+ create(:alert_management_alert, :triggered)
+ end
+
+ let(:service) { described_class.new(alert, user, new_status) }
+
+ describe '#execute' do
+ shared_examples 'update failure' do |error_message|
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(response.message).to eq(error_message)
+ expect(response.payload[:alert]).to eq(alert)
+ end
+
+ it 'does not update the status' do
+ expect { response }.not_to change { alert.status }
+ end
+ end
+
+ let(:new_status) { Types::AlertManagement::StatusEnum.values['ACKNOWLEDGED'].value }
+ let(:can_update) { true }
+
+ subject(:response) { service.execute }
+
+ before do
+ allow(user).to receive(:can?)
+ .with(:update_alert_management_alert, project)
+ .and_return(can_update)
+ end
+
+ it 'returns success' do
+ expect(response).to be_success
+ expect(response.payload[:alert]).to eq(alert)
+ end
+
+ it 'updates the status' do
+ expect { response }.to change { alert.acknowledged? }.to(true)
+ end
+
+ context 'when user has no permissions' do
+ let(:can_update) { false }
+
+ include_examples 'update failure', _('You have no permissions')
+ end
+
+ context 'with no status' do
+ let(:new_status) { nil }
+
+ include_examples 'update failure', _('Invalid status')
+ end
+
+ context 'with unknown status' do
+ let(:new_status) { -1 }
+
+ include_examples 'update failure', _('Invalid status')
+ end
+ end
+end
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 069572e4dff..3a37cbc3522 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -335,7 +335,7 @@ describe ApplicationSettings::UpdateService do
end
end
- context 'when issues_create_limit is passsed' do
+ context 'when issues_create_limit is passed' do
let(:params) do
{
issues_create_limit: 600
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index 8273269c2fb..70eb35f0826 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -35,11 +35,11 @@ describe Auth::ContainerRegistryAuthenticationService do
it { expect(payload).to include('access') }
context 'a expirable' do
- let(:expires_at) { Time.at(payload['exp']) }
+ let(:expires_at) { Time.zone.at(payload['exp']) }
let(:expire_delay) { 10 }
context 'for default configuration' do
- it { expect(expires_at).not_to be_within(2.seconds).of(Time.now + expire_delay.minutes) }
+ it { expect(expires_at).not_to be_within(2.seconds).of(Time.current + expire_delay.minutes) }
end
context 'for changed configuration' do
@@ -47,7 +47,7 @@ describe Auth::ContainerRegistryAuthenticationService do
stub_application_setting(container_registry_token_expire_delay: expire_delay)
end
- it { expect(expires_at).to be_within(2.seconds).of(Time.now + expire_delay.minutes) }
+ it { expect(expires_at).to be_within(2.seconds).of(Time.current + expire_delay.minutes) }
end
end
end
@@ -205,6 +205,20 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'an inaccessible'
it_behaves_like 'not a container repository factory'
+
+ it 'logs an auth warning' do
+ expect(Gitlab::AuthLogger).to receive(:warn).with(
+ message: 'Denied container registry permissions',
+ scope_type: 'repository',
+ requested_project_path: project.full_path,
+ requested_actions: ['*'],
+ authorized_actions: [],
+ user_id: current_user.id,
+ username: current_user.username
+ )
+
+ subject
+ end
end
context 'disallow developer to delete images since registry 2.7' do
diff --git a/spec/services/authorized_project_update/project_create_service_spec.rb b/spec/services/authorized_project_update/project_create_service_spec.rb
new file mode 100644
index 00000000000..49ea538d909
--- /dev/null
+++ b/spec/services/authorized_project_update/project_create_service_spec.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe AuthorizedProjectUpdate::ProjectCreateService do
+ let_it_be(:group_parent) { create(:group, :private) }
+ let_it_be(:group) { create(:group, :private, parent: group_parent) }
+ let_it_be(:group_child) { create(:group, :private, parent: group) }
+
+ let_it_be(:group_project) { create(:project, group: group) }
+
+ let_it_be(:parent_group_user) { create(:user) }
+ let_it_be(:group_user) { create(:user) }
+ let_it_be(:child_group_user) { create(:user) }
+
+ let(:access_level) { Gitlab::Access::MAINTAINER }
+
+ subject(:service) { described_class.new(group_project) }
+
+ describe '#perform' do
+ context 'direct group members' do
+ before do
+ create(:group_member, access_level: access_level, group: group, user: group_user)
+ ProjectAuthorization.delete_all
+ end
+
+ it 'creates project authorization' do
+ expect { service.execute }.to(
+ change { ProjectAuthorization.count }.from(0).to(1))
+
+ project_authorization = ProjectAuthorization.where(
+ project_id: group_project.id,
+ user_id: group_user.id,
+ access_level: access_level)
+
+ expect(project_authorization).to exist
+ end
+ end
+
+ context 'inherited group members' do
+ before do
+ create(:group_member, access_level: access_level, group: group_parent, user: parent_group_user)
+ ProjectAuthorization.delete_all
+ end
+
+ it 'creates project authorization' do
+ expect { service.execute }.to(
+ change { ProjectAuthorization.count }.from(0).to(1))
+
+ project_authorization = ProjectAuthorization.where(
+ project_id: group_project.id,
+ user_id: parent_group_user.id,
+ access_level: access_level)
+ expect(project_authorization).to exist
+ end
+ end
+
+ context 'membership overrides' do
+ before do
+ create(:group_member, access_level: Gitlab::Access::REPORTER, group: group_parent, user: group_user)
+ create(:group_member, access_level: Gitlab::Access::DEVELOPER, group: group, user: group_user)
+ ProjectAuthorization.delete_all
+ end
+
+ it 'creates project authorization' do
+ expect { service.execute }.to(
+ change { ProjectAuthorization.count }.from(0).to(1))
+
+ project_authorization = ProjectAuthorization.where(
+ project_id: group_project.id,
+ user_id: group_user.id,
+ access_level: Gitlab::Access::DEVELOPER)
+ expect(project_authorization).to exist
+ end
+ end
+
+ context 'no group member' do
+ it 'does not create project authorization' do
+ expect { service.execute }.not_to(
+ change { ProjectAuthorization.count }.from(0))
+ end
+ end
+
+ context 'unapproved access requests' do
+ before do
+ create(:group_member, :guest, :access_request, user: group_user, group: group)
+ end
+
+ it 'does not create project authorization' do
+ expect { service.execute }.not_to(
+ change { ProjectAuthorization.count }.from(0))
+ end
+ end
+
+ context 'project has more user than BATCH_SIZE' do
+ let(:batch_size) { 2 }
+ let(:users) { create_list(:user, batch_size + 1 ) }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", batch_size)
+
+ users.each do |user|
+ create(:group_member, access_level: access_level, group: group_parent, user: user)
+ end
+
+ ProjectAuthorization.delete_all
+ end
+
+ it 'bulk creates project authorizations in batches' do
+ users.each_slice(batch_size) do |batch|
+ attributes = batch.map do |user|
+ { user_id: user.id, project_id: group_project.id, access_level: access_level }
+ end
+
+ expect(ProjectAuthorization).to(
+ receive(:insert_all).with(array_including(attributes)).and_call_original)
+ end
+
+ expect { service.execute }.to(
+ change { ProjectAuthorization.count }.from(0).to(batch_size + 1))
+ end
+ end
+
+ context 'ignores existing project authorizations' do
+ before do
+ # ProjectAuthorizations is also created because of an after_commit
+ # callback on Member model
+ create(:group_member, access_level: access_level, group: group, user: group_user)
+ end
+
+ it 'does not create project authorization' do
+ project_authorization = ProjectAuthorization.where(
+ project_id: group_project.id,
+ user_id: group_user.id,
+ access_level: access_level)
+
+ expect { service.execute }.not_to(
+ change { project_authorization.reload.exists? }.from(true))
+ end
+ end
+ end
+end
diff --git a/spec/services/base_container_service_spec.rb b/spec/services/base_container_service_spec.rb
new file mode 100644
index 00000000000..47cfb387e25
--- /dev/null
+++ b/spec/services/base_container_service_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe BaseContainerService do
+ let(:project) { Project.new }
+ let(:user) { User.new }
+
+ describe '#initialize' do
+ it 'accepts container and current_user' do
+ subject = described_class.new(container: project, current_user: user)
+
+ expect(subject.container).to eq(project)
+ expect(subject.current_user).to eq(user)
+ end
+
+ it 'treats current_user as optional' do
+ subject = described_class.new(container: project)
+
+ expect(subject.current_user).to be_nil
+ end
+ end
+end
diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb
index 33538703e92..c46ab004af6 100644
--- a/spec/services/boards/issues/list_service_spec.rb
+++ b/spec/services/boards/issues/list_service_spec.rb
@@ -87,7 +87,7 @@ describe Boards::Issues::ListService do
let!(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) }
let!(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2, p2_project]) }
let!(:opened_issue3) { create(:labeled_issue, project: project_archived, milestone: m1, title: 'Issue 3', labels: [bug]) }
- let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Reopened Issue 1', closed_at: Time.now ) }
+ let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Reopened Issue 1', closed_at: Time.current ) }
let!(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, p2_project, development]) }
let!(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
diff --git a/spec/services/branches/create_service_spec.rb b/spec/services/branches/create_service_spec.rb
index b0629c5e25a..072a86d17fc 100644
--- a/spec/services/branches/create_service_spec.rb
+++ b/spec/services/branches/create_service_spec.rb
@@ -3,39 +3,45 @@
require 'spec_helper'
describe Branches::CreateService do
- let(:user) { create(:user) }
-
subject(:service) { described_class.new(project, user) }
+ let_it_be(:project) { create(:project_empty_repo) }
+ let_it_be(:user) { create(:user) }
+
describe '#execute' do
context 'when repository is empty' do
- let(:project) { create(:project_empty_repo) }
-
it 'creates master branch' do
service.execute('my-feature', 'master')
expect(project.repository.branch_exists?('master')).to be_truthy
end
- it 'creates my-feature branch' do
- service.execute('my-feature', 'master')
+ it 'creates another-feature branch' do
+ service.execute('another-feature', 'master')
- expect(project.repository.branch_exists?('my-feature')).to be_truthy
+ expect(project.repository.branch_exists?('another-feature')).to be_truthy
end
end
- context 'when creating a branch fails' do
- let(:project) { create(:project_empty_repo) }
+ context 'when branch already exists' do
+ it 'returns an error' do
+ result = service.execute('master', 'master')
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Branch already exists')
+ end
+ end
+ context 'when incorrect reference is provided' do
before do
allow(project.repository).to receive(:add_branch).and_return(false)
end
- it 'returns an error with the branch name' do
- result = service.execute('my-feature', 'master')
+ it 'returns an error with a reference name' do
+ result = service.execute('new-feature', 'unknown')
expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Invalid reference name: my-feature")
+ expect(result[:message]).to eq('Invalid reference name: unknown')
end
end
end
diff --git a/spec/services/ci/compare_accessibility_reports_service_spec.rb b/spec/services/ci/compare_accessibility_reports_service_spec.rb
new file mode 100644
index 00000000000..aee1fd14bc5
--- /dev/null
+++ b/spec/services/ci/compare_accessibility_reports_service_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::CompareAccessibilityReportsService do
+ let(:service) { described_class.new(project) }
+ let(:project) { create(:project, :repository) }
+
+ describe '#execute' do
+ subject { service.execute(base_pipeline, head_pipeline) }
+
+ context 'when head pipeline has accessibility reports' do
+ let(:base_pipeline) { nil }
+ let(:head_pipeline) { create(:ci_pipeline, :with_accessibility_reports, project: project) }
+
+ it 'returns status and data' do
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject[:data]).to match_schema('entities/accessibility_reports_comparer')
+ end
+ end
+
+ context 'when base and head pipelines have accessibility reports' do
+ let(:base_pipeline) { create(:ci_pipeline, :with_accessibility_reports, project: project) }
+ let(:head_pipeline) { create(:ci_pipeline, :with_accessibility_reports, project: project) }
+
+ it 'returns status and data' do
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject[:data]).to match_schema('entities/accessibility_reports_comparer')
+ end
+ end
+ end
+
+ describe '#latest?' do
+ subject { service.latest?(base_pipeline, head_pipeline, data) }
+
+ let!(:base_pipeline) { nil }
+ let!(:head_pipeline) { create(:ci_pipeline, :with_accessibility_reports, project: project) }
+ let!(:key) { service.send(:key, base_pipeline, head_pipeline) }
+
+ context 'when cache key is latest' do
+ let(:data) { { key: key } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when cache key is outdated' do
+ before do
+ head_pipeline.update_column(:updated_at, 10.minutes.ago)
+ end
+
+ let(:data) { { key: key } }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when cache key is empty' do
+ let(:data) { { key: nil } }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+end
diff --git a/spec/services/ci/compare_test_reports_service_spec.rb b/spec/services/ci/compare_test_reports_service_spec.rb
index f5edd3a552d..46f4d2d42ff 100644
--- a/spec/services/ci/compare_test_reports_service_spec.rb
+++ b/spec/services/ci/compare_test_reports_service_spec.rb
@@ -38,9 +38,10 @@ describe Ci::CompareTestReportsService do
create(:ci_job_artifact, :junit_with_corrupted_data, job: build, project: project)
end
- it 'returns status and error message' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:status_reason]).to include('XML parsing failed')
+ it 'returns a parsed TestReports success status and failure on the individual suite' do
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject.dig(:data, 'status')).to eq('success')
+ expect(subject.dig(:data, 'suites', 0, 'status') ).to eq('error')
end
end
end
diff --git a/spec/services/ci/create_job_artifacts_service_spec.rb b/spec/services/ci/create_job_artifacts_service_spec.rb
index fe64a66f322..4d49923a184 100644
--- a/spec/services/ci/create_job_artifacts_service_spec.rb
+++ b/spec/services/ci/create_job_artifacts_service_spec.rb
@@ -30,6 +30,26 @@ describe Ci::CreateJobArtifactsService do
describe '#execute' do
subject { service.execute(job, artifacts_file, params, metadata_file: metadata_file) }
+ context 'locking' do
+ let(:old_job) { create(:ci_build, pipeline: create(:ci_pipeline, project: job.project, ref: job.ref)) }
+ let!(:latest_artifact) { create(:ci_job_artifact, job: old_job, locked: true) }
+ let!(:other_artifact) { create(:ci_job_artifact, locked: true) }
+
+ it 'locks the new artifact' do
+ subject
+
+ expect(Ci::JobArtifact.last).to have_attributes(locked: true)
+ end
+
+ it 'unlocks all other artifacts for the same ref' do
+ expect { subject }.to change { latest_artifact.reload.locked }.from(true).to(false)
+ end
+
+ it 'does not unlock artifacts for other refs' do
+ expect { subject }.not_to change { other_artifact.reload.locked }.from(true)
+ end
+ end
+
context 'when artifacts file is uploaded' do
it 'saves artifact for the given type' do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
@@ -157,6 +177,53 @@ describe Ci::CreateJobArtifactsService do
end
end
+ context 'when artifact type is cluster_applications' do
+ let(:artifacts_file) do
+ file_to_upload('spec/fixtures/helm/helm_list_v2_prometheus_missing.json.gz', sha256: artifacts_sha256)
+ end
+
+ let(:params) do
+ {
+ 'artifact_type' => 'cluster_applications',
+ 'artifact_format' => 'gzip'
+ }
+ end
+
+ it 'calls cluster applications parse service' do
+ expect_next_instance_of(Clusters::ParseClusterApplicationsArtifactService) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ subject
+ end
+
+ context 'when there is a deployment cluster' do
+ let(:user) { project.owner }
+
+ before do
+ job.update!(user: user)
+ end
+
+ it 'calls cluster applications parse service with job and job user', :aggregate_failures do
+ expect(Clusters::ParseClusterApplicationsArtifactService).to receive(:new).with(job, user).and_call_original
+
+ subject
+ end
+ end
+
+ context 'when ci_synchronous_artifact_parsing feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_synchronous_artifact_parsing: false)
+ end
+
+ it 'does not call parse service' do
+ expect(Clusters::ParseClusterApplicationsArtifactService).not_to receive(:new)
+
+ expect(subject[:status]).to eq(:success)
+ end
+ end
+ end
+
shared_examples 'rescues object storage error' do |klass, message, expected_message|
it "handles #{klass}" do
allow_next_instance_of(JobArtifactUploader) do |uploader|
diff --git a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
index 112b19fcbc5..5980260a08a 100644
--- a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
@@ -34,7 +34,7 @@ describe Ci::CreatePipelineService do
it 'creates a pipeline using the content passed in as param' do
expect(subject).to be_persisted
- expect(subject.builds.map(&:name)).to eq %w[rspec custom]
+ expect(subject.builds.pluck(:name)).to match_array %w[rspec custom]
expect(subject.config_source).to eq 'bridge_source'
end
@@ -59,7 +59,7 @@ describe Ci::CreatePipelineService do
it 'created a pipeline using the content passed in as param and download the artifact' do
expect(subject).to be_persisted
- expect(subject.builds.pluck(:name)).to eq %w[rspec time custom]
+ expect(subject.builds.pluck(:name)).to match_array %w[rspec time custom]
expect(subject.config_source).to eq 'bridge_source'
end
end
diff --git a/spec/services/ci/daily_report_result_service_spec.rb b/spec/services/ci/daily_build_group_report_result_service_spec.rb
index 240709bab0b..f0b72b8fd86 100644
--- a/spec/services/ci/daily_report_result_service_spec.rb
+++ b/spec/services/ci/daily_build_group_report_result_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Ci::DailyReportResultService, '#execute' do
+describe Ci::DailyBuildGroupReportResultService, '#execute' do
let!(:pipeline) { create(:ci_pipeline, created_at: '2020-02-06 00:01:10') }
let!(:rspec_job) { create(:ci_build, pipeline: pipeline, name: '3/3 rspec', coverage: 80) }
let!(:karma_job) { create(:ci_build, pipeline: pipeline, name: '2/2 karma', coverage: 90) }
@@ -11,31 +11,29 @@ describe Ci::DailyReportResultService, '#execute' do
it 'creates daily code coverage record for each job in the pipeline that has coverage value' do
described_class.new.execute(pipeline)
- Ci::DailyReportResult.find_by(title: 'rspec').tap do |coverage|
+ Ci::DailyBuildGroupReportResult.find_by(group_name: 'rspec').tap do |coverage|
expect(coverage).to have_attributes(
project_id: pipeline.project.id,
last_pipeline_id: pipeline.id,
ref_path: pipeline.source_ref_path,
- param_type: 'coverage',
- title: rspec_job.group_name,
- value: rspec_job.coverage,
+ group_name: rspec_job.group_name,
+ data: { 'coverage' => rspec_job.coverage },
date: pipeline.created_at.to_date
)
end
- Ci::DailyReportResult.find_by(title: 'karma').tap do |coverage|
+ Ci::DailyBuildGroupReportResult.find_by(group_name: 'karma').tap do |coverage|
expect(coverage).to have_attributes(
project_id: pipeline.project.id,
last_pipeline_id: pipeline.id,
ref_path: pipeline.source_ref_path,
- param_type: 'coverage',
- title: karma_job.group_name,
- value: karma_job.coverage,
+ group_name: karma_job.group_name,
+ data: { 'coverage' => karma_job.coverage },
date: pipeline.created_at.to_date
)
end
- expect(Ci::DailyReportResult.find_by(title: 'extra')).to be_nil
+ expect(Ci::DailyBuildGroupReportResult.find_by(group_name: 'extra')).to be_nil
end
context 'when there are multiple builds with the same group name that report coverage' do
@@ -45,14 +43,13 @@ describe Ci::DailyReportResultService, '#execute' do
it 'creates daily code coverage record with the average as the value' do
described_class.new.execute(pipeline)
- Ci::DailyReportResult.find_by(title: 'test').tap do |coverage|
+ Ci::DailyBuildGroupReportResult.find_by(group_name: 'test').tap do |coverage|
expect(coverage).to have_attributes(
project_id: pipeline.project.id,
last_pipeline_id: pipeline.id,
ref_path: pipeline.source_ref_path,
- param_type: 'coverage',
- title: test_job_2.group_name,
- value: 75,
+ group_name: test_job_2.group_name,
+ data: { 'coverage' => 75.0 },
date: pipeline.created_at.to_date
)
end
@@ -77,8 +74,8 @@ describe Ci::DailyReportResultService, '#execute' do
end
it "updates the existing record's coverage value and last_pipeline_id" do
- rspec_coverage = Ci::DailyReportResult.find_by(title: 'rspec')
- karma_coverage = Ci::DailyReportResult.find_by(title: 'karma')
+ rspec_coverage = Ci::DailyBuildGroupReportResult.find_by(group_name: 'rspec')
+ karma_coverage = Ci::DailyBuildGroupReportResult.find_by(group_name: 'karma')
# Bump up the coverage values
described_class.new.execute(new_pipeline)
@@ -88,12 +85,12 @@ describe Ci::DailyReportResultService, '#execute' do
expect(rspec_coverage).to have_attributes(
last_pipeline_id: new_pipeline.id,
- value: new_rspec_job.coverage
+ data: { 'coverage' => new_rspec_job.coverage }
)
expect(karma_coverage).to have_attributes(
last_pipeline_id: new_pipeline.id,
- value: new_karma_job.coverage
+ data: { 'coverage' => new_karma_job.coverage }
)
end
end
@@ -117,8 +114,8 @@ describe Ci::DailyReportResultService, '#execute' do
end
it 'updates the existing daily code coverage records' do
- rspec_coverage = Ci::DailyReportResult.find_by(title: 'rspec')
- karma_coverage = Ci::DailyReportResult.find_by(title: 'karma')
+ rspec_coverage = Ci::DailyBuildGroupReportResult.find_by(group_name: 'rspec')
+ karma_coverage = Ci::DailyBuildGroupReportResult.find_by(group_name: 'karma')
# Run another one but for the older pipeline.
# This simulates the scenario wherein the success worker
@@ -135,12 +132,12 @@ describe Ci::DailyReportResultService, '#execute' do
expect(rspec_coverage).to have_attributes(
last_pipeline_id: pipeline.id,
- value: rspec_job.coverage
+ data: { 'coverage' => rspec_job.coverage }
)
expect(karma_coverage).to have_attributes(
last_pipeline_id: pipeline.id,
- value: karma_job.coverage
+ data: { 'coverage' => karma_job.coverage }
)
end
end
diff --git a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
index fc5450ab33d..4b9f12d8fdf 100644
--- a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
+++ b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
@@ -11,8 +11,26 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
let(:service) { described_class.new }
let!(:artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
- it 'destroys expired job artifacts' do
- expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ context 'when artifact is expired' do
+ context 'when artifact is not locked' do
+ before do
+ artifact.update!(locked: false)
+ end
+
+ it 'destroys job artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+ end
+
+ context 'when artifact is locked' do
+ before do
+ artifact.update!(locked: true)
+ end
+
+ it 'does not destroy job artifact' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ end
+ end
end
context 'when artifact is not expired' do
@@ -72,7 +90,7 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
end
- let!(:artifact) { create_list(:ci_job_artifact, 2, expire_at: 1.day.ago) }
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
it 'raises an error and does not continue destroying' do
is_expected.to be_falsy
@@ -96,7 +114,7 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
end
- let!(:artifact) { create_list(:ci_job_artifact, 2, expire_at: 1.day.ago) }
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
it 'destroys all expired artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
diff --git a/spec/services/ci/generate_terraform_reports_service_spec.rb b/spec/services/ci/generate_terraform_reports_service_spec.rb
new file mode 100644
index 00000000000..4d2c60bed2c
--- /dev/null
+++ b/spec/services/ci/generate_terraform_reports_service_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::GenerateTerraformReportsService do
+ let_it_be(:project) { create(:project, :repository) }
+
+ describe '#execute' do
+ let_it_be(:merge_request) { create(:merge_request, :with_terraform_reports, source_project: project) }
+
+ subject { described_class.new(project, nil, id: merge_request.id) }
+
+ context 'when head pipeline has terraform reports' do
+ it 'returns status and data' do
+ result = subject.execute(nil, merge_request.head_pipeline)
+
+ expect(result).to match(
+ status: :parsed,
+ data: match(
+ a_hash_including('tfplan.json' => a_hash_including('create' => 0, 'update' => 1, 'delete' => 0))
+ ),
+ key: an_instance_of(Array)
+ )
+ end
+ end
+
+ context 'when head pipeline has corrupted terraform reports' do
+ it 'returns status and error message' do
+ build = create(:ci_build, pipeline: merge_request.head_pipeline, project: project)
+ create(:ci_job_artifact, :terraform_with_corrupted_data, job: build, project: project)
+
+ result = subject.execute(nil, merge_request.head_pipeline)
+
+ expect(result).to match(
+ status: :error,
+ status_reason: 'An error occurred while fetching terraform reports.',
+ key: an_instance_of(Array)
+ )
+ end
+ end
+ end
+
+ describe '#latest?' do
+ let_it_be(:head_pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+
+ subject { described_class.new(project) }
+
+ it 'returns true when cache key is latest' do
+ cache_key = subject.send(:key, nil, head_pipeline)
+
+ result = subject.latest?(nil, head_pipeline, key: cache_key)
+
+ expect(result).to eq(true)
+ end
+
+ it 'returns false when cache key is outdated' do
+ cache_key = subject.send(:key, nil, head_pipeline)
+ head_pipeline.update_column(:updated_at, 10.minutes.ago)
+
+ result = subject.latest?(nil, head_pipeline, key: cache_key)
+
+ expect(result).to eq(false)
+ end
+
+ it 'returns false when cache key is nil' do
+ result = subject.latest?(nil, head_pipeline, key: nil)
+
+ expect(result).to eq(false)
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
index b487730d07f..de3c7713ac8 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
@@ -18,7 +18,7 @@ describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection do
it 'does update existing status of processable' do
collection.set_processable_status(test_a.id, 'success', 100)
- expect(collection.status_for_names(['test-a'])).to eq('success')
+ expect(collection.status_for_names(['test-a'], dag: false)).to eq('success')
end
it 'ignores a missing processable' do
@@ -33,15 +33,18 @@ describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection do
end
describe '#status_for_names' do
- where(:names, :status) do
- %w[build-a] | 'success'
- %w[build-a build-b] | 'failed'
- %w[build-a test-a] | 'running'
+ where(:names, :status, :dag) do
+ %w[build-a] | 'success' | false
+ %w[build-a build-b] | 'failed' | false
+ %w[build-a test-a] | 'running' | false
+ %w[build-a] | 'success' | true
+ %w[build-a build-b] | 'failed' | true
+ %w[build-a test-a] | 'pending' | true
end
with_them do
it 'returns composite status of given names' do
- expect(collection.status_for_names(names)).to eq(status)
+ expect(collection.status_for_names(names, dag: dag)).to eq(status)
end
end
end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index cbeb45b92ff..3b66ecff196 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -2,13 +2,19 @@
require 'spec_helper'
require_relative 'shared_processing_service.rb'
+require_relative 'shared_processing_service_tests_with_yaml.rb'
describe Ci::PipelineProcessing::AtomicProcessingService do
before do
stub_feature_flags(ci_atomic_processing: true)
+
+ # This feature flag is implicit
+ # Atomic Processing does not process statuses differently
+ stub_feature_flags(ci_composite_status: true)
end
it_behaves_like 'Pipeline Processing Service'
+ it_behaves_like 'Pipeline Processing Service Tests With Yaml'
private
diff --git a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
index 09b462b7600..fd491bf461b 100644
--- a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
@@ -2,13 +2,30 @@
require 'spec_helper'
require_relative 'shared_processing_service.rb'
+require_relative 'shared_processing_service_tests_with_yaml.rb'
describe Ci::PipelineProcessing::LegacyProcessingService do
before do
stub_feature_flags(ci_atomic_processing: false)
end
- it_behaves_like 'Pipeline Processing Service'
+ context 'when ci_composite_status is enabled' do
+ before do
+ stub_feature_flags(ci_composite_status: true)
+ end
+
+ it_behaves_like 'Pipeline Processing Service'
+ it_behaves_like 'Pipeline Processing Service Tests With Yaml'
+ end
+
+ context 'when ci_composite_status is disabled' do
+ before do
+ stub_feature_flags(ci_composite_status: false)
+ end
+
+ it_behaves_like 'Pipeline Processing Service'
+ it_behaves_like 'Pipeline Processing Service Tests With Yaml'
+ end
private
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index ffe5eacfc48..29fa43001ae 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -816,10 +816,10 @@ shared_examples 'Pipeline Processing Service' do
context 'when a needed job is skipped', :sidekiq_inline do
let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
- let!(:deploy) do
- create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag, needs: [
- create(:ci_build_need, name: 'linux:rspec')
- ])
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
+
+ before do
+ create(:ci_build_need, build: deploy, name: 'linux:build')
end
it 'skips the jobs depending on it' do
@@ -836,6 +836,23 @@ shared_examples 'Pipeline Processing Service' do
end
end
+ context 'when a needed job is manual', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0, when: 'manual', allow_failure: true) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 1, scheduling_type: :dag) }
+
+ before do
+ create(:ci_build_need, build: deploy, name: 'linux:build')
+ end
+
+ it 'makes deploy DAG to be waiting for optional manual to finish' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(skipped created))
+ expect(all_builds.manual).to contain_exactly(linux_build)
+ expect(all_builds.created).to contain_exactly(deploy)
+ end
+ end
+
private
def all_builds
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
new file mode 100644
index 00000000000..93f83f0ea3b
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+shared_context 'Pipeline Processing Service Tests With Yaml' do
+ where(:test_file_path) do
+ Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml'))
+ end
+
+ with_them do
+ let(:test_file) { YAML.load_file(test_file_path) }
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline) }
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(test_file['config']))
+ stub_not_protect_default_branch
+ project.add_developer(user)
+ end
+
+ it 'follows transitions' do
+ expect(pipeline).to be_persisted
+ Sidekiq::Worker.drain_all # ensure that all async jobs are executed
+ check_expectation(test_file.dig('init', 'expect'), "init")
+
+ test_file['transitions'].each_with_index do |transition, idx|
+ event_on_jobs(transition['event'], transition['jobs'])
+ Sidekiq::Worker.drain_all # ensure that all async jobs are executed
+ check_expectation(transition['expect'], "transition:#{idx}")
+ end
+ end
+
+ private
+
+ def check_expectation(expectation, message)
+ expect(current_state.deep_stringify_keys).to eq(expectation), message
+ end
+
+ def current_state
+ # reload pipeline and all relations
+ pipeline.reload
+
+ {
+ pipeline: pipeline.status,
+ stages: pipeline.ordered_stages.pluck(:name, :status).to_h,
+ jobs: pipeline.statuses.latest.pluck(:name, :status).to_h
+ }
+ end
+
+ def event_on_jobs(event, job_names)
+ statuses = pipeline.statuses.latest.by_name(job_names).to_a
+ expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
+
+ statuses.each { |status| status.public_send("#{event}!") }
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml
new file mode 100644
index 00000000000..cfc456387ff
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml
@@ -0,0 +1,47 @@
+config:
+ build:
+ stage: build
+ allow_failure: true
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+ needs: [build]
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
+
+# TODO: What is the real expected behavior here?
+# Is `needs` keyword a requirement indicator or just a helper to build dependency tree?
+# How should it behave `when: on_failure` with `needs`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails.yml
new file mode 100644
index 00000000000..e71ef194c5f
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails.yml
@@ -0,0 +1,39 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+ needs: [build]
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test.yml
new file mode 100644
index 00000000000..40a80f6f53b
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test.yml
@@ -0,0 +1,39 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test_when_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test_when_always.yml
new file mode 100644
index 00000000000..b0904a027f8
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test_when_always.yml
@@ -0,0 +1,43 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ when: always
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: skipped
+ deploy: pending
+ jobs:
+ build: failed
+ test: skipped
+ deploy: pending
+
+# TODO: `test` is actually skipped, but we run `deploy`. Should we?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml
new file mode 100644
index 00000000000..a133023b12d
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml
@@ -0,0 +1,62 @@
+config:
+ build_1:
+ stage: build
+ script: exit 0
+
+ build_2:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [build_1, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build_1: pending
+ build_2: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build_1]
+ expect:
+ pipeline: running
+ stages:
+ build: running
+ test: created
+ deploy: created
+ jobs:
+ build_1: success
+ build_2: pending
+ test: created
+ deploy: created
+
+ - event: drop
+ jobs: [build_2]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: skipped
+ deploy: pending
+ jobs:
+ build_1: success
+ build_2: failed
+ test: skipped
+ deploy: pending
+
+# TODO: should we run deploy?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml
new file mode 100644
index 00000000000..4c676761e5c
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml
@@ -0,0 +1,63 @@
+config:
+ build_1:
+ stage: build
+ script: exit 0
+
+ build_2:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ when: always
+ needs: [build_1, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build_1: pending
+ build_2: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build_1]
+ expect:
+ pipeline: running
+ stages:
+ build: running
+ test: created
+ deploy: created
+ jobs:
+ build_1: success
+ build_2: pending
+ test: created
+ deploy: created
+
+ - event: drop
+ jobs: [build_2]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: skipped
+ deploy: pending
+ jobs:
+ build_1: success
+ build_2: failed
+ test: skipped
+ deploy: pending
+
+# TODO: what's the actual expected behavior here?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml
new file mode 100644
index 00000000000..ea7046262c3
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml
@@ -0,0 +1,40 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_always.yml
new file mode 100644
index 00000000000..8860f565cc7
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_always.yml
@@ -0,0 +1,35 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ when: always
+ script: exit 0
+ needs: [build]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ jobs:
+ build: pending
+ test: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: pending
+ jobs:
+ build: failed
+ test: pending
+
+# TODO: Should we run `test`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure.yml
new file mode 100644
index 00000000000..3fa5a8034a2
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure.yml
@@ -0,0 +1,35 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+ needs: [build]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ jobs:
+ build: pending
+ test: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: pending
+ jobs:
+ build: failed
+ test: pending
+
+# TODO: Should we run `test`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure.yml
new file mode 100644
index 00000000000..700d4440802
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure.yml
@@ -0,0 +1,35 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+ needs: [build]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ jobs:
+ build: pending
+ test: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ jobs:
+ build: success
+ test: skipped
+
+# TODO: Should we run `test`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml
new file mode 100644
index 00000000000..f324525bd56
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml
@@ -0,0 +1,63 @@
+config:
+ build_1:
+ stage: build
+ script: exit 0
+
+ build_2:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+ when: on_failure
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [build_1, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build_1: pending
+ build_2: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build_1, build_2]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ deploy: pending
+ jobs:
+ build_1: success
+ build_2: success
+ test: skipped
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: success
+ jobs:
+ build_1: success
+ build_2: success
+ test: skipped
+ deploy: success
+
+# TODO: should we run deploy?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml
new file mode 100644
index 00000000000..9986dbaa215
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml
@@ -0,0 +1,64 @@
+config:
+ build_1:
+ stage: build
+ script: exit 0
+
+ build_2:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+ when: on_failure
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ when: always
+ needs: [build_1, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build_1: pending
+ build_2: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build_1, build_2]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ deploy: pending
+ jobs:
+ build_1: success
+ build_2: success
+ test: skipped
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: success
+ jobs:
+ build_1: success
+ build_2: success
+ test: skipped
+ deploy: success
+
+# TODO: should we run deploy?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml
new file mode 100644
index 00000000000..8d4d9d403f1
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml
@@ -0,0 +1,43 @@
+config:
+ test:
+ stage: test
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: failed
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: failed
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml
new file mode 100644
index 00000000000..1d61cd24f8c
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml
@@ -0,0 +1,66 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: false
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: manual
+ stages:
+ test: manual
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: run
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ deploy: created
+ jobs:
+ test: running
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: success
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: success
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml
new file mode 100644
index 00000000000..d8ca563b141
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml
@@ -0,0 +1,58 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: created
+ stages:
+ test: skipped
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: run
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ deploy: created
+ jobs:
+ test: running
+ deploy: created
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: failed
+ deploy: pending
+
+# TODO: should we run deploy?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml
new file mode 100644
index 00000000000..ba0a20f49a7
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml
@@ -0,0 +1,27 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: always
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: created
+ stages:
+ test: skipped
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions: []
+
+# TODO: should we run `deploy`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
new file mode 100644
index 00000000000..d375c6a49e0
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
@@ -0,0 +1,48 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: on_failure
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: created
+ stages:
+ test: skipped
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: skipped
+ jobs:
+ test: failed
+ deploy: skipped
+
+# TODO: should we run `deploy`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml
new file mode 100644
index 00000000000..34073b92ccc
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml
@@ -0,0 +1,42 @@
+config:
+ test1:
+ stage: test
+ script: exit 0
+
+ test2:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test1, test2]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test1: pending
+ test2: manual
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [test1]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: created
+ jobs:
+ test1: success
+ test2: manual
+ deploy: created
+
+# TODO: should deploy run?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml
new file mode 100644
index 00000000000..5ace621e89c
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml
@@ -0,0 +1,66 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: pending
+ deploy: created
+ jobs:
+ build: failed
+ test: pending
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: success
+ deploy: pending
+ jobs:
+ build: failed
+ test: success
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: success
+ deploy: success
+ jobs:
+ build: failed
+ test: success
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml
new file mode 100644
index 00000000000..19524cfd3e4
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml
@@ -0,0 +1,40 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: success
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml
new file mode 100644
index 00000000000..3e081d4411b
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml
@@ -0,0 +1,53 @@
+config:
+ build:
+ stage: build
+ allow_failure: true
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: pending
+ stages:
+ build: success
+ test: skipped
+ deploy: pending
+ jobs:
+ build: failed
+ test: skipped
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: success
+ jobs:
+ build: failed
+ test: skipped
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_fails.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails.yml
new file mode 100644
index 00000000000..0618abf3524
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails.yml
@@ -0,0 +1,38 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml
new file mode 100644
index 00000000000..362ac6e4239
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml
@@ -0,0 +1,39 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml
new file mode 100644
index 00000000000..2ffa35b56d7
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml
@@ -0,0 +1,65 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: false
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: manual
+ stages:
+ test: manual
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: run
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ deploy: created
+ jobs:
+ test: running
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: success
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: success
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml
new file mode 100644
index 00000000000..088fab5ca09
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml
@@ -0,0 +1,54 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: skipped
+ deploy: pending
+ jobs:
+ test: manual
+ deploy: pending
+
+transitions:
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: skipped
+ deploy: success
+ jobs:
+ test: manual
+ deploy: success
+
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: pending
+ deploy: success
+ jobs:
+ test: pending
+ deploy: success
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: failed
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml
new file mode 100644
index 00000000000..2b30316aef6
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml
@@ -0,0 +1,44 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: on_failure
+ script: exit 0
+
+init:
+ expect:
+ pipeline: skipped
+ stages:
+ test: skipped
+ deploy: skipped
+ jobs:
+ test: manual
+ deploy: skipped
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: skipped
+ jobs:
+ test: pending
+ deploy: skipped
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: skipped
+ jobs:
+ test: failed
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml
new file mode 100644
index 00000000000..1751cbb2023
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml
@@ -0,0 +1,52 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: pending
+ deploy: created
+ jobs:
+ build: failed
+ test: pending
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: success
+ deploy: skipped
+ jobs:
+ build: failed
+ test: success
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml
new file mode 100644
index 00000000000..15afe1ce8e1
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml
@@ -0,0 +1,52 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ deploy: pending
+ jobs:
+ build: success
+ test: skipped
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: success
+ jobs:
+ build: success
+ test: skipped
+ deploy: success
diff --git a/spec/services/ci/pipeline_schedule_service_spec.rb b/spec/services/ci/pipeline_schedule_service_spec.rb
index f7590720f66..867ed0acc0d 100644
--- a/spec/services/ci/pipeline_schedule_service_spec.rb
+++ b/spec/services/ci/pipeline_schedule_service_spec.rb
@@ -25,38 +25,6 @@ describe Ci::PipelineScheduleService do
subject
end
- context 'when ci_pipeline_schedule_async feature flag is disabled' do
- before do
- stub_feature_flags(ci_pipeline_schedule_async: false)
- end
-
- it 'runs RunPipelineScheduleWorker synchronously' do
- expect_next_instance_of(RunPipelineScheduleWorker) do |worker|
- expect(worker).to receive(:perform).with(schedule.id, schedule.owner.id)
- end
-
- subject
- end
-
- it 'calls Garbage Collection manually' do
- expect(GC).to receive(:start)
-
- subject
- end
-
- context 'when ci_pipeline_schedule_force_gc feature flag is disabled' do
- before do
- stub_feature_flags(ci_pipeline_schedule_force_gc: false)
- end
-
- it 'does not call Garbage Collection manually' do
- expect(GC).not_to receive(:start)
-
- subject
- end
- end
- end
-
context 'when owner is nil' do
let(:schedule) { create(:ci_pipeline_schedule, project: project, owner: nil) }
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index 6f5a070d73d..40ae1c4029b 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -33,25 +33,6 @@ describe Ci::ProcessPipelineService do
end
end
- context 'with a pipeline which has processables with nil scheduling_type', :clean_gitlab_redis_shared_state do
- let!(:build1) { create_build('build1') }
- let!(:build2) { create_build('build2') }
- let!(:build3) { create_build('build3', scheduling_type: :dag) }
- let!(:build3_on_build2) { create(:ci_build_need, build: build3, name: 'build2') }
-
- before do
- pipeline.processables.update_all(scheduling_type: nil)
- end
-
- it 'populates scheduling_type before processing' do
- process_pipeline
-
- expect(build1.scheduling_type).to eq('stage')
- expect(build2.scheduling_type).to eq('stage')
- expect(build3.scheduling_type).to eq('dag')
- end
- end
-
def process_pipeline
described_class.new(pipeline).execute
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 2da1350e2af..c0f854df9b7 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -571,7 +571,7 @@ module Ci
end
describe '#register_success' do
- let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) }
+ let!(:current_time) { Time.zone.local(2018, 4, 5, 14, 0, 0) }
let!(:attempt_counter) { double('Gitlab::Metrics::NullMetric') }
let!(:job_queue_duration_seconds) { double('Gitlab::Metrics::NullMetric') }
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 86b68dc3ade..0aa603b24ae 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -22,9 +22,9 @@ describe Ci::RetryBuildService do
described_class.new(project, user)
end
- CLONE_ACCESSORS = described_class::CLONE_ACCESSORS
+ clone_accessors = described_class::CLONE_ACCESSORS
- REJECT_ACCESSORS =
+ reject_accessors =
%i[id status user token token_encrypted coverage trace runner
artifacts_expire_at
created_at updated_at started_at finished_at queued_at erased_by
@@ -34,13 +34,13 @@ describe Ci::RetryBuildService do
job_artifacts_container_scanning job_artifacts_dast
job_artifacts_license_management job_artifacts_license_scanning
job_artifacts_performance job_artifacts_lsif
- job_artifacts_terraform
+ job_artifacts_terraform job_artifacts_cluster_applications
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
job_artifacts_network_referee job_artifacts_dotenv
- job_artifacts_cobertura needs].freeze
+ job_artifacts_cobertura needs job_artifacts_accessibility].freeze
- IGNORE_ACCESSORS =
+ ignore_accessors =
%i[type lock_version target_url base_tags trace_sections
commit_id deployment erased_by_id project_id
runner_id tag_taggings taggings tags trigger_request_id
@@ -63,6 +63,9 @@ describe Ci::RetryBuildService do
end
before do
+      # Test the behaviour of the deprecated artifact correctly because it can still be in use
+ stub_feature_flags(drop_license_management_artifact: false)
+
# Make sure that build has both `stage_id` and `stage` because FactoryBot
# can reset one of the fields when assigning another. We plan to deprecate
# and remove legacy `stage` column in the future.
@@ -88,7 +91,7 @@ describe Ci::RetryBuildService do
end
end
- CLONE_ACCESSORS.each do |attribute|
+ clone_accessors.each do |attribute|
it "clones #{attribute} build attribute" do
expect(attribute).not_to be_in(forbidden_associations), "association #{attribute} must be `belongs_to`"
expect(build.send(attribute)).not_to be_nil
@@ -118,7 +121,7 @@ describe Ci::RetryBuildService do
end
describe 'reject accessors' do
- REJECT_ACCESSORS.each do |attribute|
+ reject_accessors.each do |attribute|
it "does not clone #{attribute} build attribute" do
expect(new_build.send(attribute)).not_to eq build.send(attribute)
end
@@ -126,8 +129,8 @@ describe Ci::RetryBuildService do
end
it 'has correct number of known attributes' do
- processed_accessors = CLONE_ACCESSORS + REJECT_ACCESSORS
- known_accessors = processed_accessors + IGNORE_ACCESSORS
+ processed_accessors = clone_accessors + reject_accessors
+ known_accessors = processed_accessors + ignore_accessors
# :tag_list is a special case, this accessor does not exist
# in reflected associations, comes from `act_as_taggable` and
@@ -190,6 +193,35 @@ describe Ci::RetryBuildService do
expect(subsequent_build.reload).to be_created
end
end
+
+ context 'when pipeline has other builds' do
+ let!(:stage2) { create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'deploy') }
+ let!(:build2) { create(:ci_build, pipeline: pipeline, stage_id: stage.id ) }
+ let!(:deploy) { create(:ci_build, pipeline: pipeline, stage_id: stage2.id) }
+ let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) }
+
+ context 'when build has nil scheduling_type' do
+ before do
+ build.pipeline.processables.update_all(scheduling_type: nil)
+ build.reload
+ end
+
+ it 'populates scheduling_type of processables' do
+ expect(new_build.scheduling_type).to eq('stage')
+ expect(build.reload.scheduling_type).to eq('stage')
+ expect(build2.reload.scheduling_type).to eq('stage')
+ expect(deploy.reload.scheduling_type).to eq('dag')
+ end
+ end
+
+ context 'when build has scheduling_type' do
+ it 'does not call populate_scheduling_type!' do
+ expect_any_instance_of(Ci::Pipeline).not_to receive(:ensure_scheduling_type!)
+
+ expect(new_build.scheduling_type).to eq('stage')
+ end
+ end
+ end
end
context 'when user does not have ability to execute build' do
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 81a0b05f2c7..8e85e68d4fc 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -261,6 +261,25 @@ describe Ci::RetryPipelineService, '#execute' do
service.execute(pipeline)
end
+
+ context 'when pipeline has processables with nil scheduling_type' do
+ let!(:build1) { create_build('build1', :success, 0) }
+ let!(:build2) { create_build('build2', :failed, 0) }
+ let!(:build3) { create_build('build3', :failed, 1) }
+ let!(:build3_needs_build1) { create(:ci_build_need, build: build3, name: build1.name) }
+
+ before do
+ statuses.update_all(scheduling_type: nil)
+ end
+
+ it 'populates scheduling_type of processables' do
+ service.execute(pipeline)
+
+ expect(build1.reload.scheduling_type).to eq('stage')
+ expect(build2.reload.scheduling_type).to eq('stage')
+ expect(build3.reload.scheduling_type).to eq('dag')
+ end
+ end
end
context 'when user is not allowed to retry pipeline' do
diff --git a/spec/services/ci/update_instance_variables_service_spec.rb b/spec/services/ci/update_instance_variables_service_spec.rb
new file mode 100644
index 00000000000..93f6e5d3ea8
--- /dev/null
+++ b/spec/services/ci/update_instance_variables_service_spec.rb
@@ -0,0 +1,230 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::UpdateInstanceVariablesService do
+ let(:params) { { variables_attributes: variables_attributes } }
+
+ subject { described_class.new(params) }
+
+ describe '#execute' do
+ context 'without variables' do
+ let(:variables_attributes) { [] }
+
+ it { expect(subject.execute).to be_truthy }
+ end
+
+ context 'with insert only variables' do
+ let(:variables_attributes) do
+ [
+ { key: 'var_a', secret_value: 'dummy_value_for_a', protected: true },
+ { key: 'var_b', secret_value: 'dummy_value_for_b', protected: false }
+ ]
+ end
+
+ it { expect(subject.execute).to be_truthy }
+
+ it 'persists all the records' do
+ expect { subject.execute }
+ .to change { Ci::InstanceVariable.count }
+ .by variables_attributes.size
+ end
+
+ it 'persists attributes' do
+ subject.execute
+
+ expect(Ci::InstanceVariable.all).to contain_exactly(
+ have_attributes(key: 'var_a', secret_value: 'dummy_value_for_a', protected: true),
+ have_attributes(key: 'var_b', secret_value: 'dummy_value_for_b', protected: false)
+ )
+ end
+ end
+
+ context 'with update only variables' do
+ let!(:var_a) { create(:ci_instance_variable) }
+ let!(:var_b) { create(:ci_instance_variable, protected: false) }
+
+ let(:variables_attributes) do
+ [
+ {
+ id: var_a.id,
+ key: var_a.key,
+ secret_value: 'new_dummy_value_for_a',
+ protected: var_a.protected?.to_s
+ },
+ {
+ id: var_b.id,
+ key: 'var_b_key',
+ secret_value: 'new_dummy_value_for_b',
+ protected: 'true'
+ }
+ ]
+ end
+
+ it { expect(subject.execute).to be_truthy }
+
+ it 'does not change the count' do
+ expect { subject.execute }
+ .not_to change { Ci::InstanceVariable.count }
+ end
+
+ it 'updates the records in place', :aggregate_failures do
+ subject.execute
+
+ expect(var_a.reload).to have_attributes(secret_value: 'new_dummy_value_for_a')
+
+ expect(var_b.reload).to have_attributes(
+ key: 'var_b_key', secret_value: 'new_dummy_value_for_b', protected: true)
+ end
+ end
+
+ context 'with insert and update variables' do
+ let!(:var_a) { create(:ci_instance_variable) }
+
+ let(:variables_attributes) do
+ [
+ {
+ id: var_a.id,
+ key: var_a.key,
+ secret_value: 'new_dummy_value_for_a',
+ protected: var_a.protected?.to_s
+ },
+ {
+ key: 'var_b',
+ secret_value: 'dummy_value_for_b',
+ protected: true
+ }
+ ]
+ end
+
+ it { expect(subject.execute).to be_truthy }
+
+ it 'inserts only one record' do
+ expect { subject.execute }
+ .to change { Ci::InstanceVariable.count }.by 1
+ end
+
+ it 'persists all the records', :aggregate_failures do
+ subject.execute
+ var_b = Ci::InstanceVariable.find_by(key: 'var_b')
+
+ expect(var_a.reload.secret_value).to eq('new_dummy_value_for_a')
+ expect(var_b.secret_value).to eq('dummy_value_for_b')
+ end
+ end
+
+ context 'with insert, update, and destroy variables' do
+ let!(:var_a) { create(:ci_instance_variable) }
+ let!(:var_b) { create(:ci_instance_variable) }
+
+ let(:variables_attributes) do
+ [
+ {
+ id: var_a.id,
+ key: var_a.key,
+ secret_value: 'new_dummy_value_for_a',
+ protected: var_a.protected?.to_s
+ },
+ {
+ id: var_b.id,
+ key: var_b.key,
+ secret_value: 'dummy_value_for_b',
+ protected: var_b.protected?.to_s,
+ '_destroy' => 'true'
+ },
+ {
+ key: 'var_c',
+ secret_value: 'dummy_value_for_c',
+ protected: true
+ }
+ ]
+ end
+
+ it { expect(subject.execute).to be_truthy }
+
+ it 'persists all the records', :aggregate_failures do
+ subject.execute
+ var_c = Ci::InstanceVariable.find_by(key: 'var_c')
+
+ expect(var_a.reload.secret_value).to eq('new_dummy_value_for_a')
+ expect { var_b.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect(var_c.secret_value).to eq('dummy_value_for_c')
+ end
+ end
+
+ context 'with invalid variables' do
+ let!(:var_a) { create(:ci_instance_variable, secret_value: 'dummy_value_for_a') }
+
+ let(:variables_attributes) do
+ [
+ {
+ key: '...?',
+ secret_value: 'nice_value'
+ },
+ {
+ id: var_a.id,
+ key: var_a.key,
+ secret_value: 'new_dummy_value_for_a',
+ protected: var_a.protected?.to_s
+ },
+ {
+ key: var_a.key,
+ secret_value: 'other_value'
+ }
+ ]
+ end
+
+ it { expect(subject.execute).to be_falsey }
+
+ it 'does not insert any records' do
+ expect { subject.execute }
+ .not_to change { Ci::InstanceVariable.count }
+ end
+
+ it 'does not update existing records' do
+ subject.execute
+
+ expect(var_a.reload.secret_value).to eq('dummy_value_for_a')
+ end
+
+ it 'returns errors' do
+ subject.execute
+
+ expect(subject.errors).to match_array(
+ [
+ "Key (#{var_a.key}) has already been taken",
+ "Key can contain only letters, digits and '_'."
+ ])
+ end
+ end
+
+ context 'when deleting non existing variables' do
+ let(:variables_attributes) do
+ [
+ {
+ id: 'some-id',
+ key: 'some_key',
+ secret_value: 'other_value',
+ '_destroy' => 'true'
+ }
+ ]
+ end
+
+ it { expect { subject.execute }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+
+ context 'when updating non existing variables' do
+ let(:variables_attributes) do
+ [
+ {
+ id: 'some-id',
+ key: 'some_key',
+ secret_value: 'other_value'
+ }
+ ]
+ end
+
+ it { expect { subject.execute }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+ end
+end
diff --git a/spec/services/clusters/applications/check_upgrade_progress_service_spec.rb b/spec/services/clusters/applications/check_upgrade_progress_service_spec.rb
index c08b618fe6a..29ee897454a 100644
--- a/spec/services/clusters/applications/check_upgrade_progress_service_spec.rb
+++ b/spec/services/clusters/applications/check_upgrade_progress_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Clusters::Applications::CheckUpgradeProgressService do
- RESCHEDULE_PHASES = ::Gitlab::Kubernetes::Pod::PHASES -
+ reschedule_phashes = ::Gitlab::Kubernetes::Pod::PHASES -
[::Gitlab::Kubernetes::Pod::SUCCEEDED, ::Gitlab::Kubernetes::Pod::FAILED, ::Gitlab].freeze
let(:application) { create(:clusters_applications_prometheus, :updating) }
@@ -89,6 +89,6 @@ describe Clusters::Applications::CheckUpgradeProgressService do
end
end
- RESCHEDULE_PHASES.each { |phase| it_behaves_like 'a not yet terminated upgrade', phase }
+ reschedule_phashes.each { |phase| it_behaves_like 'a not yet terminated upgrade', phase }
end
end
diff --git a/spec/services/clusters/applications/ingress_modsecurity_usage_service_spec.rb b/spec/services/clusters/applications/ingress_modsecurity_usage_service_spec.rb
deleted file mode 100644
index d456284f76a..00000000000
--- a/spec/services/clusters/applications/ingress_modsecurity_usage_service_spec.rb
+++ /dev/null
@@ -1,196 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Clusters::Applications::IngressModsecurityUsageService do
- describe '#execute' do
- ADO_MODSEC_KEY = Clusters::Applications::IngressModsecurityUsageService::ADO_MODSEC_KEY
-
- let(:project_with_ci_var) { create(:environment).project }
- let(:project_with_pipeline_var) { create(:environment).project }
-
- subject { described_class.new.execute }
-
- context 'with multiple projects' do
- let(:pipeline1) { create(:ci_pipeline, :with_job, project: project_with_pipeline_var) }
- let(:pipeline2) { create(:ci_pipeline, :with_job, project: project_with_ci_var) }
-
- let!(:deployment_with_pipeline_var) do
- create(
- :deployment,
- :success,
- environment: project_with_pipeline_var.environments.first,
- project: project_with_pipeline_var,
- deployable: pipeline1.builds.last
- )
- end
- let!(:deployment_with_project_var) do
- create(
- :deployment,
- :success,
- environment: project_with_ci_var.environments.first,
- project: project_with_ci_var,
- deployable: pipeline2.builds.last
- )
- end
-
- context 'mixed data' do
- let!(:ci_variable) { create(:ci_variable, project: project_with_ci_var, key: ADO_MODSEC_KEY, value: "On") }
- let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline1, key: ADO_MODSEC_KEY, value: "Off") }
-
- it 'gathers variable data' do
- expect(subject[:ingress_modsecurity_blocking]).to eq(1)
- expect(subject[:ingress_modsecurity_disabled]).to eq(1)
- end
- end
-
- context 'blocking' do
- let(:modsec_values) { { key: ADO_MODSEC_KEY, value: "On" } }
-
- let!(:ci_variable) { create(:ci_variable, project: project_with_ci_var, **modsec_values) }
- let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline1, **modsec_values) }
-
- it 'gathers variable data' do
- expect(subject[:ingress_modsecurity_blocking]).to eq(2)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- end
- end
-
- context 'disabled' do
- let(:modsec_values) { { key: ADO_MODSEC_KEY, value: "Off" } }
-
- let!(:ci_variable) { create(:ci_variable, project: project_with_ci_var, **modsec_values) }
- let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline1, **modsec_values) }
-
- it 'gathers variable data' do
- expect(subject[:ingress_modsecurity_blocking]).to eq(0)
- expect(subject[:ingress_modsecurity_disabled]).to eq(2)
- end
- end
- end
-
- context 'when set as both ci and pipeline variables' do
- let(:modsec_values) { { key: ADO_MODSEC_KEY, value: "Off" } }
-
- let(:pipeline) { create(:ci_pipeline, :with_job, project: project_with_ci_var) }
- let!(:deployment) do
- create(
- :deployment,
- :success,
- environment: project_with_ci_var.environments.first,
- project: project_with_ci_var,
- deployable: pipeline.builds.last
- )
- end
-
- let!(:ci_variable) { create(:ci_variable, project: project_with_ci_var, **modsec_values) }
- let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline, **modsec_values) }
-
- it 'wont double-count projects' do
- expect(subject[:ingress_modsecurity_blocking]).to eq(0)
- expect(subject[:ingress_modsecurity_disabled]).to eq(1)
- end
-
- it 'gives precedence to pipeline variable' do
- pipeline_variable.update(value: "On")
-
- expect(subject[:ingress_modsecurity_blocking]).to eq(1)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- end
- end
-
- context 'when a project has multiple environments' do
- let(:modsec_values) { { key: ADO_MODSEC_KEY, value: "On" } }
-
- let!(:env1) { project_with_pipeline_var.environments.first }
- let!(:env2) { create(:environment, project: project_with_pipeline_var) }
-
- let!(:pipeline_with_2_deployments) do
- create(:ci_pipeline, :with_job, project: project_with_ci_var).tap do |pip|
- pip.builds << build(:ci_build, pipeline: pip, project: project_with_pipeline_var)
- end
- end
-
- let!(:deployment1) do
- create(
- :deployment,
- :success,
- environment: env1,
- project: project_with_pipeline_var,
- deployable: pipeline_with_2_deployments.builds.last
- )
- end
- let!(:deployment2) do
- create(
- :deployment,
- :success,
- environment: env2,
- project: project_with_pipeline_var,
- deployable: pipeline_with_2_deployments.builds.last
- )
- end
-
- context 'when set as ci variable' do
- let!(:ci_variable) { create(:ci_variable, project: project_with_pipeline_var, **modsec_values) }
-
- it 'gathers variable data' do
- expect(subject[:ingress_modsecurity_blocking]).to eq(2)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- end
- end
-
- context 'when set as pipeline variable' do
- let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline_with_2_deployments, **modsec_values) }
-
- it 'gathers variable data' do
- expect(subject[:ingress_modsecurity_blocking]).to eq(2)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- end
- end
- end
-
- context 'when an environment has multiple deployments' do
- let!(:env) { project_with_pipeline_var.environments.first }
-
- let!(:pipeline_first) do
- create(:ci_pipeline, :with_job, project: project_with_pipeline_var).tap do |pip|
- pip.builds << build(:ci_build, pipeline: pip, project: project_with_pipeline_var)
- end
- end
- let!(:pipeline_last) do
- create(:ci_pipeline, :with_job, project: project_with_pipeline_var).tap do |pip|
- pip.builds << build(:ci_build, pipeline: pip, project: project_with_pipeline_var)
- end
- end
-
- let!(:deployment_first) do
- create(
- :deployment,
- :success,
- environment: env,
- project: project_with_pipeline_var,
- deployable: pipeline_first.builds.last
- )
- end
- let!(:deployment_last) do
- create(
- :deployment,
- :success,
- environment: env,
- project: project_with_pipeline_var,
- deployable: pipeline_last.builds.last
- )
- end
-
- context 'when set as pipeline variable' do
- let!(:first_pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline_first, key: ADO_MODSEC_KEY, value: "On") }
- let!(:last_pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline_last, key: ADO_MODSEC_KEY, value: "Off") }
-
- it 'gives precedence to latest deployment' do
- expect(subject[:ingress_modsecurity_blocking]).to eq(0)
- expect(subject[:ingress_modsecurity_disabled]).to eq(1)
- end
- end
- end
- end
-end
diff --git a/spec/services/clusters/applications/schedule_update_service_spec.rb b/spec/services/clusters/applications/schedule_update_service_spec.rb
index 0764f5b6a97..eb1006ce8e0 100644
--- a/spec/services/clusters/applications/schedule_update_service_spec.rb
+++ b/spec/services/clusters/applications/schedule_update_service_spec.rb
@@ -13,10 +13,10 @@ describe Clusters::Applications::ScheduleUpdateService do
context 'when application is able to be updated' do
context 'when the application was recently scheduled' do
it 'schedules worker with a backoff delay' do
- application = create(:clusters_applications_prometheus, :installed, last_update_started_at: Time.now + 5.minutes)
+ application = create(:clusters_applications_prometheus, :installed, last_update_started_at: Time.current + 5.minutes)
service = described_class.new(application, project)
- expect(::ClusterUpdateAppWorker).to receive(:perform_in).with(described_class::BACKOFF_DELAY, application.name, application.id, project.id, Time.now).once
+ expect(::ClusterUpdateAppWorker).to receive(:perform_in).with(described_class::BACKOFF_DELAY, application.name, application.id, project.id, Time.current).once
service.execute
end
@@ -27,7 +27,7 @@ describe Clusters::Applications::ScheduleUpdateService do
application = create(:clusters_applications_prometheus, :installed)
service = described_class.new(application, project)
- expect(::ClusterUpdateAppWorker).to receive(:perform_async).with(application.name, application.id, project.id, Time.now).once
+ expect(::ClusterUpdateAppWorker).to receive(:perform_async).with(application.name, application.id, project.id, Time.current).once
service.execute
end
diff --git a/spec/services/clusters/gcp/finalize_creation_service_spec.rb b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
index 43dbea959a2..4d1548c9786 100644
--- a/spec/services/clusters/gcp/finalize_creation_service_spec.rb
+++ b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
@@ -108,8 +108,7 @@ describe Clusters::Gcp::FinalizeCreationService, '#execute' do
}
)
- stub_kubeclient_get_cluster_role_binding_error(api_url, 'gitlab-admin')
- stub_kubeclient_create_cluster_role_binding(api_url)
+ stub_kubeclient_put_cluster_role_binding(api_url, 'gitlab-admin')
end
end
diff --git a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
index 9238f7debd0..e9f7f015293 100644
--- a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
+++ b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
@@ -120,8 +120,8 @@ describe Clusters::Kubernetes::ConfigureIstioIngressService, '#execute' do
expect(certificate.subject.to_s).to include(serverless_domain_cluster.knative.hostname)
- expect(certificate.not_before).to be_within(1.minute).of(Time.now)
- expect(certificate.not_after).to be_within(1.minute).of(Time.now + 1000.years)
+ expect(certificate.not_before).to be_within(1.minute).of(Time.current)
+ expect(certificate.not_after).to be_within(1.minute).of(Time.current + 1000.years)
expect(WebMock).to have_requested(:put, api_url + '/api/v1/namespaces/istio-system/secrets/istio-ingressgateway-ca-certs').with(
body: hash_including(
diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
index 3982d2310d8..6d8b1617c17 100644
--- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
@@ -28,7 +28,6 @@ describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute' do
stub_kubeclient_get_secret_error(api_url, 'gitlab-token')
stub_kubeclient_create_secret(api_url)
- stub_kubeclient_get_role_binding(api_url, "gitlab-#{namespace}", namespace: namespace)
stub_kubeclient_put_role_binding(api_url, "gitlab-#{namespace}", namespace: namespace)
stub_kubeclient_get_namespace(api_url, namespace: namespace)
stub_kubeclient_get_service_account_error(api_url, "#{namespace}-service-account", namespace: namespace)
diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
index 8fa22422074..4bcd5c6933e 100644
--- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
@@ -83,8 +83,7 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
before do
cluster.platform_kubernetes.rbac!
- stub_kubeclient_get_cluster_role_binding_error(api_url, cluster_role_binding_name)
- stub_kubeclient_create_cluster_role_binding(api_url)
+ stub_kubeclient_put_cluster_role_binding(api_url, cluster_role_binding_name)
end
it_behaves_like 'creates service account and token'
@@ -92,9 +91,8 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
it 'creates a cluster role binding with cluster-admin access' do
subject
- expect(WebMock).to have_requested(:post, api_url + "/apis/rbac.authorization.k8s.io/v1/clusterrolebindings").with(
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/gitlab-admin").with(
body: hash_including(
- kind: 'ClusterRoleBinding',
metadata: { name: 'gitlab-admin' },
roleRef: {
apiGroup: 'rbac.authorization.k8s.io',
@@ -143,8 +141,7 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
before do
cluster.platform_kubernetes.rbac!
- stub_kubeclient_get_role_binding_error(api_url, role_binding_name, namespace: namespace)
- stub_kubeclient_create_role_binding(api_url, namespace: namespace)
+ stub_kubeclient_put_role_binding(api_url, role_binding_name, namespace: namespace)
stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME, namespace: namespace)
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace)
@@ -166,9 +163,8 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
it 'creates a namespaced role binding with edit access' do
subject
- expect(WebMock).to have_requested(:post, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings").with(
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{role_binding_name}").with(
body: hash_including(
- kind: 'RoleBinding',
metadata: { name: "gitlab-#{namespace}", namespace: "#{namespace}" },
roleRef: {
apiGroup: 'rbac.authorization.k8s.io',
diff --git a/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb b/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb
new file mode 100644
index 00000000000..f14c929554a
--- /dev/null
+++ b/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb
@@ -0,0 +1,200 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::ParseClusterApplicationsArtifactService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe 'RELEASE_NAMES' do
+ it 'is included in Cluster application names', :aggregate_failures do
+ described_class::RELEASE_NAMES.each do |release_name|
+ expect(Clusters::Cluster::APPLICATIONS).to include(release_name)
+ end
+ end
+ end
+
+ describe '.new' do
+ let(:job) { build(:ci_build) }
+
+ it 'sets the project and current user', :aggregate_failures do
+ service = described_class.new(job, user)
+
+ expect(service.project).to eq(job.project)
+ expect(service.current_user).to eq(user)
+ end
+ end
+
+ describe '#execute' do
+ let_it_be(:cluster, reload: true) { create(:cluster, projects: [project]) }
+ let_it_be(:deployment, reload: true) { create(:deployment, cluster: cluster) }
+
+ let(:job) { deployment.deployable }
+ let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job) }
+
+ context 'when cluster_applications_artifact feature flag is disabled' do
+ before do
+ stub_feature_flags(cluster_applications_artifact: false)
+ end
+
+ it 'does not call Gitlab::Kubernetes::Helm::Parsers::ListV2 and returns success immediately' do
+ expect(Gitlab::Kubernetes::Helm::Parsers::ListV2).not_to receive(:new)
+
+ result = described_class.new(job, user).execute(artifact)
+
+ expect(result[:status]).to eq(:success)
+ end
+ end
+
+ context 'when cluster_applications_artifact feature flag is enabled for project' do
+ before do
+ stub_feature_flags(cluster_applications_artifact: job.project)
+ end
+
+ it 'calls Gitlab::Kubernetes::Helm::Parsers::ListV2' do
+ expect(Gitlab::Kubernetes::Helm::Parsers::ListV2).to receive(:new).and_call_original
+
+ result = described_class.new(job, user).execute(artifact)
+
+ expect(result[:status]).to eq(:success)
+ end
+
+ context 'artifact is not of cluster_applications type' do
+ let(:artifact) { create(:ci_job_artifact, :archive) }
+ let(:job) { artifact.job }
+
+ it 'raise ArgumentError' do
+ expect do
+ described_class.new(job, user).execute(artifact)
+ end.to raise_error(ArgumentError, 'Artifact is not cluster_applications file type')
+ end
+ end
+
+ context 'artifact exceeds acceptable size' do
+ it 'returns an error' do
+ stub_const("#{described_class}::MAX_ACCEPTABLE_ARTIFACT_SIZE", 1.byte)
+
+ result = described_class.new(job, user).execute(artifact)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Cluster_applications artifact too big. Maximum allowable size: 1 Byte')
+ end
+ end
+
+ context 'job has no deployment cluster' do
+ let(:job) { build(:ci_build) }
+
+ it 'returns an error' do
+ result = described_class.new(job, user).execute(artifact)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No deployment cluster found for this job')
+ end
+ end
+
+ context 'job has deployment cluster' do
+ context 'current user does not have access to deployment cluster' do
+ let(:other_user) { create(:user) }
+
+ it 'returns an error' do
+ result = described_class.new(job, other_user).execute(artifact)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No deployment cluster found for this job')
+ end
+ end
+
+ context 'release is missing' do
+ let(:fixture) { 'spec/fixtures/helm/helm_list_v2_prometheus_missing.json.gz' }
+ let(:file) { fixture_file_upload(Rails.root.join(fixture)) }
+ let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }
+
+ context 'application does not exist' do
+ it 'does not create or destroy an application' do
+ expect do
+ described_class.new(job, user).execute(artifact)
+ end.not_to change(Clusters::Applications::Prometheus, :count)
+ end
+ end
+
+ context 'application exists' do
+ before do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'marks the application as uninstalled' do
+ described_class.new(job, user).execute(artifact)
+
+ cluster.application_prometheus.reload
+ expect(cluster.application_prometheus).to be_uninstalled
+ end
+ end
+ end
+
+ context 'release is deployed' do
+ let(:fixture) { 'spec/fixtures/helm/helm_list_v2_prometheus_deployed.json.gz' }
+ let(:file) { fixture_file_upload(Rails.root.join(fixture)) }
+ let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }
+
+ context 'application does not exist' do
+ it 'creates an application and marks it as installed' do
+ expect do
+ described_class.new(job, user).execute(artifact)
+ end.to change(Clusters::Applications::Prometheus, :count)
+
+ expect(cluster.application_prometheus).to be_persisted
+ expect(cluster.application_prometheus).to be_installed
+ end
+ end
+
+ context 'application exists' do
+ before do
+ create(:clusters_applications_prometheus, :errored, cluster: cluster)
+ end
+
+ it 'marks the application as installed' do
+ described_class.new(job, user).execute(artifact)
+
+ expect(cluster.application_prometheus).to be_installed
+ end
+ end
+ end
+
+ context 'release is failed' do
+ let(:fixture) { 'spec/fixtures/helm/helm_list_v2_prometheus_failed.json.gz' }
+ let(:file) { fixture_file_upload(Rails.root.join(fixture)) }
+ let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }
+
+ context 'application does not exist' do
+ it 'creates an application and marks it as errored' do
+ expect do
+ described_class.new(job, user).execute(artifact)
+ end.to change(Clusters::Applications::Prometheus, :count)
+
+ expect(cluster.application_prometheus).to be_persisted
+ expect(cluster.application_prometheus).to be_errored
+ expect(cluster.application_prometheus.status_reason).to eq('Helm release failed to install')
+ end
+ end
+
+ context 'application exists' do
+ before do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'marks the application as errored' do
+ described_class.new(job, user).execute(artifact)
+
+ expect(cluster.application_prometheus).to be_errored
+ expect(cluster.application_prometheus.status_reason).to eq('Helm release failed to install')
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/cohorts_service_spec.rb b/spec/services/cohorts_service_spec.rb
index 38f441fbc4d..b2f82a1153c 100644
--- a/spec/services/cohorts_service_spec.rb
+++ b/spec/services/cohorts_service_spec.rb
@@ -13,7 +13,7 @@ describe CohortsService do
6.times do |months_ago|
months_ago_time = (months_ago * 2).months.ago
- create(:user, created_at: months_ago_time, last_activity_on: Time.now)
+ create(:user, created_at: months_ago_time, last_activity_on: Time.current)
create(:user, created_at: months_ago_time, last_activity_on: months_ago_time)
end
diff --git a/spec/services/deployments/older_deployments_drop_service_spec.rb b/spec/services/deployments/older_deployments_drop_service_spec.rb
index 44e9af07e46..4c9bcf90533 100644
--- a/spec/services/deployments/older_deployments_drop_service_spec.rb
+++ b/spec/services/deployments/older_deployments_drop_service_spec.rb
@@ -66,6 +66,43 @@ describe Deployments::OlderDeploymentsDropService do
expect(deployable.reload.failed?).to be_truthy
end
+ context 'when older deployable is a manual job' do
+ let(:older_deployment) { create(:deployment, :created, environment: environment, deployable: build) }
+ let(:build) { create(:ci_build, :manual) }
+
+ it 'does not drop any builds nor track the exception' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ expect { subject }.not_to change { Ci::Build.failed.count }
+ end
+ end
+
+ context 'when deployable.drop raises RuntimeError' do
+ before do
+ allow_any_instance_of(Ci::Build).to receive(:drop).and_raise(RuntimeError)
+ end
+
+ it 'does not drop an older deployment and tracks the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(kind_of(RuntimeError), subject_id: deployment.id, deployment_id: older_deployment.id)
+
+ expect { subject }.not_to change { Ci::Build.failed.count }
+ end
+ end
+
+ context 'when ActiveRecord::StaleObjectError is raised' do
+ before do
+ allow_any_instance_of(Ci::Build)
+ .to receive(:drop).and_raise(ActiveRecord::StaleObjectError)
+ end
+
+ it 'resets the object via Gitlab::OptimisticLocking' do
+ allow_any_instance_of(Ci::Build).to receive(:reset).at_least(:once)
+
+ subject
+ end
+ end
+
context 'and there is no deployable for that older deployment' do
let(:older_deployment) { create(:deployment, :running, environment: environment, deployable: nil) }
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
new file mode 100644
index 00000000000..2c0c1570cb4
--- /dev/null
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -0,0 +1,195 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe DesignManagement::DeleteDesignsService do
+ include DesignManagementTestHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:user) { create(:user) }
+ let(:designs) { create_designs }
+
+ subject(:service) { described_class.new(project, user, issue: issue, designs: designs) }
+
+ # Defined as a method so that the response is not cached. We also construct
+ # a new service executor each time to avoid the intermediate cached values
+ # it constructs during its execution.
+ def run_service(delenda = nil)
+ service = described_class.new(project, user, issue: issue, designs: delenda || designs)
+ service.execute
+ end
+
+ let(:response) { run_service }
+
+ shared_examples 'a service error' do
+ it 'returns an error', :aggregate_failures do
+ expect(response).to include(status: :error)
+ end
+ end
+
+ shared_examples 'a top-level error' do
+ let(:expected_error) { StandardError }
+ it 'raises an expected error', :aggregate_failures do
+ expect { run_service }.to raise_error(expected_error)
+ end
+ end
+
+ shared_examples 'a success' do
+ it 'returns successfully', :aggregate_failures do
+ expect(response).to include(status: :success)
+ end
+
+ it 'saves the user as the author' do
+ version = response[:version]
+
+ expect(version.author).to eq(user)
+ end
+ end
+
+ before do
+ enable_design_management(enabled)
+ project.add_developer(user)
+ end
+
+ describe "#execute" do
+ context "when the feature is not available" do
+ let(:enabled) { false }
+
+ it_behaves_like "a service error"
+ end
+
+ context "when the feature is available" do
+ let(:enabled) { true }
+
+ it 'is able to delete designs' do
+ expect(service.send(:can_delete_designs?)).to be true
+ end
+
+ context 'no designs were passed' do
+ let(:designs) { [] }
+
+ it_behaves_like "a top-level error"
+
+ it 'does not log any events' do
+ counter = ::Gitlab::UsageDataCounters::DesignsCounter
+ expect { run_service rescue nil }.not_to change { counter.totals }
+ end
+ end
+
+ context 'one design is passed' do
+ before do
+ create_designs(2)
+ end
+
+ let!(:designs) { create_designs(1) }
+
+ it 'removes that design' do
+ expect { run_service }.to change { issue.designs.current.count }.from(3).to(2)
+ end
+
+ it 'logs a deletion event' do
+ counter = ::Gitlab::UsageDataCounters::DesignsCounter
+ expect { run_service }.to change { counter.read(:delete) }.by(1)
+ end
+
+ it 'informs the new-version-worker' do
+ expect(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer)
+
+ run_service
+ end
+
+ it 'creates a new version' do
+ expect { run_service }.to change { DesignManagement::Version.where(issue: issue).count }.by(1)
+ end
+
+ it 'returns the new version' do
+ version = response[:version]
+
+ expect(version).to eq(DesignManagement::Version.for_issue(issue).ordered.first)
+ end
+
+ it_behaves_like "a success"
+
+ it 'removes the design from the current design list' do
+ run_service
+
+ expect(issue.designs.current).not_to include(designs.first)
+ end
+
+ it 'marks the design as deleted' do
+ expect { run_service }
+ .to change { designs.first.deleted? }.from(false).to(true)
+ end
+ end
+
+ context 'more than one design is passed' do
+ before do
+ create_designs(1)
+ end
+
+ let!(:designs) { create_designs(2) }
+
+ it 'removes those designs' do
+ expect { run_service }
+ .to change { issue.designs.current.count }.from(3).to(1)
+ end
+
+ it 'logs the correct number of deletion events' do
+ counter = ::Gitlab::UsageDataCounters::DesignsCounter
+ expect { run_service }.to change { counter.read(:delete) }.by(2)
+ end
+
+ it_behaves_like "a success"
+
+ context 'after executing the service' do
+ let(:deleted_designs) { designs.map(&:reset) }
+
+ let!(:version) { run_service[:version] }
+
+ it 'removes the removed designs from the current design list' do
+ expect(issue.designs.current).not_to include(*deleted_designs)
+ end
+
+ it 'does not make the designs impossible to find' do
+ expect(issue.designs).to include(*deleted_designs)
+ end
+
+ it 'associates the new version with all the designs' do
+ current_versions = deleted_designs.map { |d| d.most_recent_action.version }
+ expect(current_versions).to all(eq version)
+ end
+
+ it 'marks all deleted designs as deleted' do
+ expect(deleted_designs).to all(be_deleted)
+ end
+
+ it 'marks all deleted designs with the same deletion version' do
+ expect(deleted_designs.map { |d| d.most_recent_action.version_id }.uniq)
+ .to have_attributes(size: 1)
+ end
+ end
+ end
+
+ describe 'scalability' do
+ before do
+ run_service(create_designs(1)) # ensure project, issue, etc are created
+ end
+
+ it 'makes the same number of DB requests for one design as for several' do
+ one = create_designs(1)
+ many = create_designs(5)
+
+ baseline = ActiveRecord::QueryRecorder.new { run_service(one) }
+
+ expect { run_service(many) }.not_to exceed_query_limit(baseline)
+ end
+ end
+ end
+ end
+
+ private
+
+ def create_designs(how_many = 2)
+ create_list(:design, how_many, :with_lfs_file, issue: issue)
+ end
+end
diff --git a/spec/services/design_management/design_user_notes_count_service_spec.rb b/spec/services/design_management/design_user_notes_count_service_spec.rb
new file mode 100644
index 00000000000..62211a4dd0f
--- /dev/null
+++ b/spec/services/design_management/design_user_notes_count_service_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe DesignManagement::DesignUserNotesCountService, :use_clean_rails_memory_store_caching do
+ let_it_be(:design) { create(:design, :with_file) }
+
+ subject { described_class.new(design) }
+
+ it_behaves_like 'a counter caching service'
+
+ describe '#count' do
+ it 'returns the count of notes' do
+ create_list(:diff_note_on_design, 3, noteable: design)
+
+ expect(subject.count).to eq(3)
+ end
+ end
+
+ describe '#cache_key' do
+ it 'contains the `VERSION` and `design.id`' do
+ expect(subject.cache_key).to eq(['designs', 'notes_count', DesignManagement::DesignUserNotesCountService::VERSION, design.id])
+ end
+ end
+
+ describe 'cache invalidation' do
+ it 'changes when a new note is created' do
+ new_note_attrs = attributes_for(:diff_note_on_design, noteable: design)
+
+ expect do
+ Notes::CreateService.new(design.project, create(:user), new_note_attrs).execute
+ end.to change { subject.count }.by(1)
+ end
+
+ it 'changes when a note is destroyed' do
+ note = create(:diff_note_on_design, noteable: design)
+
+ expect do
+ Notes::DestroyService.new(note.project, note.author).execute(note)
+ end.to change { subject.count }.by(-1)
+ end
+ end
+end
diff --git a/spec/services/design_management/generate_image_versions_service_spec.rb b/spec/services/design_management/generate_image_versions_service_spec.rb
new file mode 100644
index 00000000000..cd021c8d7d3
--- /dev/null
+++ b/spec/services/design_management/generate_image_versions_service_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe DesignManagement::GenerateImageVersionsService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:version) { create(:design, :with_lfs_file, issue: issue).versions.first }
+ let_it_be(:action) { version.actions.first }
+
+ describe '#execute' do
+ it 'generates the image' do
+ expect { described_class.new(version).execute }
+ .to change { action.reload.image_v432x230.file }
+ .from(nil).to(CarrierWave::SanitizedFile)
+ end
+
+ it 'skips generating image versions if the mime type is not whitelisted' do
+ stub_const('DesignManagement::DesignV432x230Uploader::MIME_TYPE_WHITELIST', [])
+
+ described_class.new(version).execute
+
+ expect(action.reload.image_v432x230.file).to eq(nil)
+ end
+
+ it 'skips generating image versions if the design file size is too large' do
+ stub_const("#{described_class.name}::MAX_DESIGN_SIZE", 1.byte)
+
+ described_class.new(version).execute
+
+ expect(action.reload.image_v432x230.file).to eq(nil)
+ end
+
+ it 'returns the status' do
+ result = described_class.new(version).execute
+
+ expect(result[:status]).to eq(:success)
+ end
+
+ it 'returns the version' do
+ result = described_class.new(version).execute
+
+ expect(result[:version]).to eq(version)
+ end
+
+ it 'logs if the raw image cannot be found' do
+ version.designs.first.update(filename: 'foo.png')
+
+ expect(Gitlab::AppLogger).to receive(:error).with("No design file found for Action: #{action.id}")
+
+ described_class.new(version).execute
+ end
+
+ context 'when an error is encountered when generating the image versions' do
+ before do
+ expect_next_instance_of(DesignManagement::DesignV432x230Uploader) do |uploader|
+ expect(uploader).to receive(:cache!).and_raise(CarrierWave::DownloadError, 'foo')
+ end
+ end
+
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error).with('foo')
+
+ described_class.new(version).execute
+ end
+
+ it 'tracks the error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(CarrierWave::DownloadError),
+ project_id: project.id, version_id: version.id, design_id: version.designs.first.id
+ )
+
+ described_class.new(version).execute
+ end
+ end
+ end
+end
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
new file mode 100644
index 00000000000..013d5473860
--- /dev/null
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -0,0 +1,356 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe DesignManagement::SaveDesignsService do
+ include DesignManagementTestHelpers
+ include ConcurrentHelpers
+
+ let_it_be(:developer) { create(:user) }
+ let(:project) { issue.project }
+ let(:issue) { create(:issue) }
+ let(:user) { developer }
+ let(:files) { [rails_sample] }
+ let(:design_repository) { ::Gitlab::GlRepository::DESIGN.repository_resolver.call(project) }
+ let(:rails_sample_name) { 'rails_sample.jpg' }
+ let(:rails_sample) { sample_image(rails_sample_name) }
+ let(:dk_png) { sample_image('dk.png') }
+
+ def sample_image(filename)
+    fixture_file_upload("spec/fixtures/#{filename}")
+ end
+
+ before do
+ project.add_developer(developer)
+ end
+
+ def run_service(files_to_upload = nil)
+ design_files = files_to_upload || files
+ design_files.each(&:rewind)
+
+ service = described_class.new(project, user,
+ issue: issue,
+ files: design_files)
+ service.execute
+ end
+
+ # Randomly alter the content of files.
+ # This allows the files to be updated by the service, as unmodified
+ # files are rejected.
+ def touch_files(files_to_touch = nil)
+ design_files = files_to_touch || files
+
+ design_files.each do |f|
+ f.tempfile.write(SecureRandom.random_bytes)
+ end
+ end
+
+ let(:response) { run_service }
+
+ shared_examples 'a service error' do
+ it 'returns an error', :aggregate_failures do
+ expect(response).to match(a_hash_including(status: :error))
+ end
+ end
+
+ shared_examples 'an execution error' do
+ it 'returns an error', :aggregate_failures do
+ expect { service.execute }.to raise_error(some_error)
+ end
+ end
+
+ describe '#execute' do
+ context 'when the feature is not available' do
+ before do
+ enable_design_management(false)
+ end
+
+ it_behaves_like 'a service error'
+ end
+
+ context 'when the feature is available' do
+ before do
+ enable_design_management(true)
+ end
+
+ describe 'repository existence' do
+ def repository_exists
+      # Expire the memoized value as the service creates its own instance
+ design_repository.expire_exists_cache
+ design_repository.exists?
+ end
+
+ it 'creates a design repository when it did not exist' do
+ expect { run_service }.to change { repository_exists }.from(false).to(true)
+ end
+ end
+
+ it 'updates the creation count' do
+ counter = Gitlab::UsageDataCounters::DesignsCounter
+ expect { run_service }.to change { counter.read(:create) }.by(1)
+ end
+
+ it 'creates a commit in the repository' do
+ run_service
+
+ expect(design_repository.commit).to have_attributes(
+ author: user,
+ message: include(rails_sample_name)
+ )
+ end
+
+ it 'can run the same command in parallel' do
+ blocks = Array.new(10).map do
+ unique_files = %w(rails_sample.jpg dk.png)
+ .map { |name| RenameableUpload.unique_file(name) }
+
+ -> { run_service(unique_files) }
+ end
+
+ expect { run_parallel(blocks) }.to change(DesignManagement::Version, :count).by(10)
+ end
+
+ it 'causes diff_refs not to be nil' do
+ expect(response).to include(
+ designs: all(have_attributes(diff_refs: be_present))
+ )
+ end
+
+ it 'creates a design & a version for the filename if it did not exist' do
+ expect(issue.designs.size).to eq(0)
+
+ updated_designs = response[:designs]
+
+ expect(updated_designs.size).to eq(1)
+ expect(updated_designs.first.versions.size).to eq(1)
+ end
+
+ it 'saves the user as the author' do
+ updated_designs = response[:designs]
+
+ expect(updated_designs.first.versions.first.author).to eq(user)
+ end
+
+ describe 'saving the file to LFS' do
+ before do
+ expect_next_instance_of(Lfs::FileTransformer) do |transformer|
+ expect(transformer).to receive(:lfs_file?).and_return(true)
+ end
+ end
+
+ it 'saves the design to LFS' do
+ expect { run_service }.to change { LfsObject.count }.by(1)
+ end
+
+ it 'saves the repository_type of the LfsObjectsProject as design' do
+ expect do
+ run_service
+ end.to change { project.lfs_objects_projects.count }.from(0).to(1)
+
+ expect(project.lfs_objects_projects.first.repository_type).to eq('design')
+ end
+ end
+
+ context 'when a design is being updated' do
+ before do
+ run_service
+ touch_files
+ end
+
+ it 'creates a new version for the existing design and updates the file' do
+ expect(issue.designs.size).to eq(1)
+ expect(DesignManagement::Version.for_designs(issue.designs).size).to eq(1)
+
+ updated_designs = response[:designs]
+
+ expect(updated_designs.size).to eq(1)
+ expect(updated_designs.first.versions.size).to eq(2)
+ end
+
+ it 'increments the update counter' do
+ counter = Gitlab::UsageDataCounters::DesignsCounter
+ expect { run_service }.to change { counter.read(:update) }.by 1
+ end
+
+ context 'when uploading a new design' do
+ it 'does not link the new version to the existing design' do
+ existing_design = issue.designs.first
+
+ updated_designs = run_service([dk_png])[:designs]
+
+ expect(existing_design.versions.reload.size).to eq(1)
+ expect(updated_designs.size).to eq(1)
+ expect(updated_designs.first.versions.size).to eq(1)
+ end
+ end
+ end
+
+ context 'when a design has not changed since its previous version' do
+ before do
+ run_service
+ end
+
+ it 'does not create a new version' do
+ expect { run_service }.not_to change { issue.design_versions.count }
+ end
+
+ it 'returns the design in `skipped_designs` instead of `designs`' do
+ response = run_service
+
+ expect(response[:designs]).to be_empty
+ expect(response[:skipped_designs].size).to eq(1)
+ end
+ end
+
+ context 'when doing a mixture of updates and creations' do
+ let(:files) { [rails_sample, dk_png] }
+
+ before do
+ # Create just the first one, which we will later update.
+ run_service([files.first])
+ touch_files([files.first])
+ end
+
+ it 'counts one creation and one update' do
+ counter = Gitlab::UsageDataCounters::DesignsCounter
+ expect { run_service }
+ .to change { counter.read(:create) }.by(1)
+ .and change { counter.read(:update) }.by(1)
+ end
+
+ it 'creates a single commit' do
+ commit_count = -> do
+ design_repository.expire_all_method_caches
+ design_repository.commit_count
+ end
+
+ expect { run_service }.to change { commit_count.call }.by(1)
+ end
+
+ it 'enqueues just one new version worker' do
+ expect(::DesignManagement::NewVersionWorker)
+ .to receive(:perform_async).once.with(Integer)
+
+ run_service
+ end
+ end
+
+ context 'when uploading multiple files' do
+ let(:files) { [rails_sample, dk_png] }
+
+ it 'returns information about both designs in the response' do
+ expect(response).to include(designs: have_attributes(size: 2), status: :success)
+ end
+
+ it 'creates 2 designs with a single version' do
+ expect { run_service }.to change { issue.designs.count }.from(0).to(2)
+
+ expect(DesignManagement::Version.for_designs(issue.designs).size).to eq(1)
+ end
+
+ it 'increments the creation count by 2' do
+ counter = Gitlab::UsageDataCounters::DesignsCounter
+ expect { run_service }.to change { counter.read(:create) }.by 2
+ end
+
+ it 'enqueues a new version worker' do
+ expect(::DesignManagement::NewVersionWorker)
+ .to receive(:perform_async).once.with(Integer)
+
+ run_service
+ end
+
+ it 'creates a single commit' do
+ commit_count = -> do
+ design_repository.expire_all_method_caches
+ design_repository.commit_count
+ end
+
+ expect { run_service }.to change { commit_count.call }.by(1)
+ end
+
+ it 'only does 5 gitaly calls', :request_store, :sidekiq_might_not_need_inline do
+ allow(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer)
+ service = described_class.new(project, user, issue: issue, files: files)
+ # Some unrelated calls that are usually cached or happen only once
+ service.__send__(:repository).create_if_not_exists
+ service.__send__(:repository).has_visible_content?
+
+ request_count = -> { Gitlab::GitalyClient.get_request_count }
+
+ # An exists?, a check for existing blobs, default branch, an after_commit
+ # callback on LfsObjectsProject
+ expect { service.execute }.to change(&request_count).by(4)
+ end
+
+ context 'when uploading too many files' do
+ let(:files) { Array.new(DesignManagement::SaveDesignsService::MAX_FILES + 1) { dk_png } }
+
+ it 'returns the correct error' do
+ expect(response[:message]).to match(/only \d+ files are allowed simultaneously/i)
+ end
+ end
+ end
+
+ context 'when the user is not allowed to upload designs' do
+ let(:user) { create(:user) }
+
+ it_behaves_like 'a service error'
+ end
+
+ describe 'failure modes' do
+ let(:service) { described_class.new(project, user, issue: issue, files: files) }
+ let(:response) { service.execute }
+
+ before do
+ expect(service).to receive(:run_actions).and_raise(some_error)
+ end
+
+ context 'when creating the commit fails' do
+ let(:some_error) { Gitlab::Git::BaseError }
+
+ it_behaves_like 'an execution error'
+ end
+
+ context 'when creating the versions fails' do
+ let(:some_error) { ActiveRecord::RecordInvalid }
+
+ it_behaves_like 'a service error'
+ end
+ end
+
+ context "when a design already existed in the repo but we didn't know about it in the database" do
+ let(:filename) { rails_sample_name }
+
+ before do
+ path = File.join(build(:design, issue: issue, filename: filename).full_path)
+ design_repository.create_if_not_exists
+ design_repository.create_file(user, path, 'something fake',
+ branch_name: 'master',
+ message: 'Somehow created without being tracked in db')
+ end
+
+ it 'creates the design and a new version for it' do
+ first_updated_design = response[:designs].first
+
+ expect(first_updated_design.filename).to eq(filename)
+ expect(first_updated_design.versions.size).to eq(1)
+ end
+ end
+
+ describe 'scalability', skip: 'See: https://gitlab.com/gitlab-org/gitlab/-/issues/213169' do
+ before do
+ run_service([sample_image('banana_sample.gif')]) # ensure project, issue, etc are created
+ end
+
+ it 'runs the same queries for all requests, regardless of number of files' do
+ one = [dk_png]
+ two = [rails_sample, dk_png]
+
+ baseline = ActiveRecord::QueryRecorder.new { run_service(one) }
+
+ expect { run_service(two) }.not_to exceed_query_limit(baseline)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/emails/confirm_service_spec.rb b/spec/services/emails/confirm_service_spec.rb
index 6a274ca9dfe..973d2731b2f 100644
--- a/spec/services/emails/confirm_service_spec.rb
+++ b/spec/services/emails/confirm_service_spec.rb
@@ -8,10 +8,10 @@ describe Emails::ConfirmService do
subject(:service) { described_class.new(user) }
describe '#execute' do
- it 'sends a confirmation email again' do
+ it 'enqueues a background job to send confirmation email again' do
email = user.emails.create(email: 'new@email.com')
- mail = service.execute(email)
- expect(mail.subject).to eq('Confirmation instructions')
+
+ expect { service.execute(email) }.to have_enqueued_job.on_queue('mailers')
end
end
end
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index 0a8a4d5bf58..987b4ad68f7 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -162,16 +162,25 @@ describe EventCreateService do
context "The action is #{action}" do
let(:event) { service.wiki_event(meta, user, action) }
- it 'creates the event' do
+ it 'creates the event', :aggregate_failures do
expect(event).to have_attributes(
wiki_page?: true,
valid?: true,
persisted?: true,
action: action,
- wiki_page: wiki_page
+ wiki_page: wiki_page,
+ author: user
)
end
+ it 'is idempotent', :aggregate_failures do
+ expect { event }.to change(Event, :count).by(1)
+ duplicate = nil
+ expect { duplicate = service.wiki_event(meta, user, action) }.not_to change(Event, :count)
+
+ expect(duplicate).to eq(event)
+ end
+
context 'the feature is disabled' do
before do
stub_feature_flags(wiki_events: false)
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index acd14005c69..6ecc1a62ff3 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -291,7 +291,7 @@ describe Git::BranchPushService, services: true do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
- it "defaults to the pushing user if the commit's author is not known", :sidekiq_might_not_need_inline do
+ it "defaults to the pushing user if the commit's author is not known", :sidekiq_inline, :use_clean_rails_redis_caching do
allow(commit).to receive_messages(
author_name: 'unknown name',
author_email: 'unknown@email.com'
@@ -315,7 +315,7 @@ describe Git::BranchPushService, services: true do
let(:issue) { create :issue, project: project }
let(:commit_author) { create :user }
let(:commit) { project.commit }
- let(:commit_time) { Time.now }
+ let(:commit_time) { Time.current }
before do
project.add_developer(commit_author)
@@ -336,7 +336,7 @@ describe Git::BranchPushService, services: true do
end
context "while saving the 'first_mentioned_in_commit_at' metric for an issue" do
- it 'sets the metric for referenced issues', :sidekiq_might_not_need_inline do
+ it 'sets the metric for referenced issues', :sidekiq_inline, :use_clean_rails_redis_caching do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
expect(issue.reload.metrics.first_mentioned_in_commit_at).to be_like_time(commit_time)
@@ -397,7 +397,7 @@ describe Git::BranchPushService, services: true do
allow(project).to receive(:default_branch).and_return('not-master')
end
- it "creates cross-reference notes", :sidekiq_might_not_need_inline do
+ it "creates cross-reference notes", :sidekiq_inline, :use_clean_rails_redis_caching do
expect(SystemNoteService).to receive(:cross_reference).with(issue, closing_commit, commit_author)
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
@@ -438,7 +438,7 @@ describe Git::BranchPushService, services: true do
context "mentioning an issue" do
let(:message) { "this is some work.\n\nrelated to JIRA-1" }
- it "initiates one api call to jira server to mention the issue", :sidekiq_might_not_need_inline do
+ it "initiates one api call to jira server to mention the issue", :sidekiq_inline, :use_clean_rails_redis_caching do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
diff --git a/spec/services/git/wiki_push_service/change_spec.rb b/spec/services/git/wiki_push_service/change_spec.rb
new file mode 100644
index 00000000000..547874270ab
--- /dev/null
+++ b/spec/services/git/wiki_push_service/change_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Git::WikiPushService::Change do
+ subject { described_class.new(project_wiki, change, raw_change) }
+
+ let(:project_wiki) { double('ProjectWiki') }
+ let(:raw_change) { double('RawChange', new_path: new_path, old_path: old_path, operation: operation) }
+ let(:change) { { oldrev: generate(:sha), newrev: generate(:sha) } }
+
+ let(:new_path) do
+ case operation
+ when :deleted
+ nil
+ else
+ generate(:wiki_filename)
+ end
+ end
+
+ let(:old_path) do
+ case operation
+ when :added
+ nil
+ when :deleted, :renamed
+ generate(:wiki_filename)
+ else
+ new_path
+ end
+ end
+
+ describe '#page' do
+ context 'the page does not exist' do
+ before do
+ expect(project_wiki).to receive(:find_page).with(String, String).and_return(nil)
+ end
+
+ %i[added deleted renamed modified].each do |op|
+ context "the operation is #{op}" do
+ let(:operation) { op }
+
+ it { is_expected.to have_attributes(page: be_nil) }
+ end
+ end
+ end
+
+ context 'the page can be found' do
+ let(:wiki_page) { double('WikiPage') }
+
+ before do
+ expect(project_wiki).to receive(:find_page).with(slug, revision).and_return(wiki_page)
+ end
+
+ context 'the page has been deleted' do
+ let(:operation) { :deleted }
+ let(:slug) { old_path.chomp('.md') }
+ let(:revision) { change[:oldrev] }
+
+ it { is_expected.to have_attributes(page: wiki_page) }
+ end
+
+ %i[added renamed modified].each do |op|
+ let(:operation) { op }
+ let(:slug) { new_path.chomp('.md') }
+ let(:revision) { change[:newrev] }
+
+ it { is_expected.to have_attributes(page: wiki_page) }
+ end
+ end
+ end
+
+ describe '#last_known_slug' do
+ context 'the page has been created' do
+ let(:operation) { :added }
+
+ it { is_expected.to have_attributes(last_known_slug: new_path.chomp('.md')) }
+ end
+
+ %i[renamed modified deleted].each do |op|
+ context "the operation is #{op}" do
+ let(:operation) { op }
+
+ it { is_expected.to have_attributes(last_known_slug: old_path.chomp('.md')) }
+ end
+ end
+ end
+
+ describe '#event_action' do
+ context 'the page is deleted' do
+ let(:operation) { :deleted }
+
+ it { is_expected.to have_attributes(event_action: Event::DESTROYED) }
+ end
+
+ context 'the page is added' do
+ let(:operation) { :added }
+
+ it { is_expected.to have_attributes(event_action: Event::CREATED) }
+ end
+
+ %i[renamed modified].each do |op|
+ context "the page is #{op}" do
+ let(:operation) { op }
+
+ it { is_expected.to have_attributes(event_action: Event::UPDATED) }
+ end
+ end
+ end
+end
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
new file mode 100644
index 00000000000..cdb1dc5a435
--- /dev/null
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -0,0 +1,338 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Git::WikiPushService, services: true do
+ include RepoHelpers
+
+ let_it_be(:key_id) { create(:key, user: current_user).shell_id }
+ let_it_be(:project) { create(:project, :wiki_repo) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:git_wiki) { project.wiki.wiki }
+ let_it_be(:repository) { git_wiki.repository }
+
+ describe '#execute' do
+ context 'the push contains more than the permitted number of changes' do
+ def run_service
+ process_changes { described_class::MAX_CHANGES.succ.times { write_new_page } }
+ end
+
+ it 'creates only MAX_CHANGES events' do
+ expect { run_service }.to change(Event, :count).by(described_class::MAX_CHANGES)
+ end
+ end
+
+ context 'default_branch collides with a tag' do
+ it 'creates only one event' do
+ base_sha = current_sha
+ write_new_page
+
+ service = create_service(base_sha, ['refs/heads/master', 'refs/tags/master'])
+
+ expect { service.execute }.to change(Event, :count).by(1)
+ end
+ end
+
+ describe 'successfully creating events' do
+ let(:count) { Event::WIKI_ACTIONS.size }
+
+ def run_service
+ wiki_page_a = create(:wiki_page, project: project)
+ wiki_page_b = create(:wiki_page, project: project)
+
+ process_changes do
+ write_new_page
+ update_page(wiki_page_a.title)
+ delete_page(wiki_page_b.page.path)
+ end
+ end
+
+ it 'creates one event for every wiki action' do
+ expect { run_service }.to change(Event, :count).by(count)
+ end
+
+ it 'handles all known actions' do
+ run_service
+
+ expect(Event.last(count).pluck(:action)).to match_array(Event::WIKI_ACTIONS)
+ end
+ end
+
+ context 'two pages have been created' do
+ def run_service
+ process_changes do
+ write_new_page
+ write_new_page
+ end
+ end
+
+ it 'creates two events' do
+ expect { run_service }.to change(Event, :count).by(2)
+ end
+
+ it 'creates two metadata records' do
+ expect { run_service }.to change(WikiPage::Meta, :count).by(2)
+ end
+
+ it 'creates appropriate events' do
+ run_service
+
+ expect(Event.last(2)).to all(have_attributes(wiki_page?: true, action: Event::CREATED))
+ end
+ end
+
+ context 'a non-page file as been added' do
+ it 'does not create events, or WikiPage metadata' do
+ expect do
+ process_changes { write_non_page }
+ end.not_to change { [Event.count, WikiPage::Meta.count] }
+ end
+ end
+
+ context 'one page, and one non-page have been created' do
+ def run_service
+ process_changes do
+ write_new_page
+ write_non_page
+ end
+ end
+
+ it 'creates a wiki page creation event' do
+ expect { run_service }.to change(Event, :count).by(1)
+
+ expect(Event.last).to have_attributes(wiki_page?: true, action: Event::CREATED)
+ end
+
+ it 'creates one metadata record' do
+ expect { run_service }.to change(WikiPage::Meta, :count).by(1)
+ end
+ end
+
+ context 'one page has been added, and then updated' do
+ def run_service
+ process_changes do
+ title = write_new_page
+ update_page(title)
+ end
+ end
+
+ it 'creates just a single event' do
+ expect { run_service }.to change(Event, :count).by(1)
+ end
+
+ it 'creates just one metadata record' do
+ expect { run_service }.to change(WikiPage::Meta, :count).by(1)
+ end
+
+ it 'creates a new wiki page creation event' do
+ run_service
+
+ expect(Event.last).to have_attributes(
+ wiki_page?: true,
+ action: Event::CREATED
+ )
+ end
+ end
+
+ context 'when a page we already know about has been updated' do
+ let(:wiki_page) { create(:wiki_page, project: project) }
+
+ before do
+ create(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page)
+ end
+
+ def run_service
+ process_changes { update_page(wiki_page.title) }
+ end
+
+ it 'does not create a new meta-data record' do
+ expect { run_service }.not_to change(WikiPage::Meta, :count)
+ end
+
+ it 'creates a new event' do
+ expect { run_service }.to change(Event, :count).by(1)
+ end
+
+ it 'adds an update event' do
+ run_service
+
+ expect(Event.last).to have_attributes(
+ wiki_page?: true,
+ action: Event::UPDATED
+ )
+ end
+ end
+
+ context 'when a page we do not know about has been updated' do
+ def run_service
+ wiki_page = create(:wiki_page, project: project)
+ process_changes { update_page(wiki_page.title) }
+ end
+
+ it 'creates a new meta-data record' do
+ expect { run_service }.to change(WikiPage::Meta, :count).by(1)
+ end
+
+ it 'creates a new event' do
+ expect { run_service }.to change(Event, :count).by(1)
+ end
+
+ it 'adds an update event' do
+ run_service
+
+ expect(Event.last).to have_attributes(
+ wiki_page?: true,
+ action: Event::UPDATED
+ )
+ end
+ end
+
+ context 'when a page we do not know about has been deleted' do
+ def run_service
+ wiki_page = create(:wiki_page, project: project)
+ process_changes { delete_page(wiki_page.page.path) }
+ end
+
+ it 'create a new meta-data record' do
+ expect { run_service }.to change(WikiPage::Meta, :count).by(1)
+ end
+
+ it 'creates a new event' do
+ expect { run_service }.to change(Event, :count).by(1)
+ end
+
+ it 'adds an update event' do
+ run_service
+
+ expect(Event.last).to have_attributes(
+ wiki_page?: true,
+ action: Event::DESTROYED
+ )
+ end
+ end
+
+ it 'calls log_error for every event we cannot create' do
+ base_sha = current_sha
+ count = 3
+ count.times { write_new_page }
+ message = 'something went very very wrong'
+ allow_next_instance_of(WikiPages::EventCreateService, current_user) do |service|
+ allow(service).to receive(:execute)
+ .with(String, WikiPage, Integer)
+ .and_return(ServiceResponse.error(message: message))
+ end
+
+ service = create_service(base_sha)
+
+ expect(service).to receive(:log_error).exactly(count).times.with(message)
+
+ service.execute
+ end
+
+ describe 'feature flags' do
+ shared_examples 'a no-op push' do
+ it 'does not create any events' do
+ expect { process_changes { write_new_page } }.not_to change(Event, :count)
+ end
+
+ it 'does not even look for events to process' do
+ base_sha = current_sha
+ write_new_page
+
+ service = create_service(base_sha)
+
+ expect(service).not_to receive(:changed_files)
+
+ service.execute
+ end
+ end
+
+ context 'the wiki_events feature is disabled' do
+ before do
+ stub_feature_flags(wiki_events: false)
+ end
+
+ it_behaves_like 'a no-op push'
+ end
+
+ context 'the wiki_events_on_git_push feature is disabled' do
+ before do
+ stub_feature_flags(wiki_events_on_git_push: false)
+ end
+
+ it_behaves_like 'a no-op push'
+
+ context 'but is enabled for a given project' do
+ before do
+ stub_feature_flags(wiki_events_on_git_push: project)
+ end
+
+ it 'creates events' do
+ expect { process_changes { write_new_page } }.to change(Event, :count).by(1)
+ end
+ end
+ end
+ end
+ end
+
+ # In order to construct the correct GitPostReceive object that represents the
+ # changes we are applying, we need to describe the changes between old-ref and
+ # new-ref. Old ref (the base sha) we have to capture before we perform any
+ # changes. Once the changes have been applied, we can execute the service to
+ # process them.
+ def process_changes(&block)
+ base_sha = current_sha
+ yield
+ create_service(base_sha).execute
+ end
+
+ def create_service(base, refs = ['refs/heads/master'])
+ changes = post_received(base, refs).changes
+ described_class.new(project, current_user, changes: changes)
+ end
+
+ def post_received(base, refs)
+ change_str = refs.map { |ref| +"#{base} #{current_sha} #{ref}" }.join("\n")
+ post_received = ::Gitlab::GitPostReceive.new(project, key_id, change_str, {})
+ allow(post_received).to receive(:identify).with(key_id).and_return(current_user)
+
+ post_received
+ end
+
+ def current_sha
+ repository.gitaly_ref_client.find_branch('master')&.dereferenced_target&.id || Gitlab::Git::BLANK_SHA
+ end
+
+ # It is important not to re-use the WikiPage services here, since they create
+ # events - these helper methods below are intended to simulate actions on the repo
+ # that have not gone through our services.
+
+ def write_new_page
+ generate(:wiki_page_title).tap { |t| git_wiki.write_page(t, 'markdown', 'Hello', commit_details) }
+ end
+
+ # We write something to the wiki-repo that is not a page - as, for example, an
+ # attachment. This will appear as a raw-diff change, but wiki.find_page will
+ # return nil.
+ def write_non_page
+ params = {
+ file_name: 'attachment.log',
+ file_content: 'some stuff',
+ branch_name: 'master'
+ }
+ ::Wikis::CreateAttachmentService.new(container: project, current_user: project.owner, params: params).execute
+ end
+
+ def update_page(title)
+ page = git_wiki.page(title: title)
+ git_wiki.update_page(page.path, title, 'markdown', 'Hey', commit_details)
+ end
+
+ def delete_page(path)
+ git_wiki.delete_page(path, commit_details)
+ end
+
+ def commit_details
+ create(:git_wiki_commit_details, author: current_user)
+ end
+end
diff --git a/spec/services/grafana/proxy_service_spec.rb b/spec/services/grafana/proxy_service_spec.rb
index 694d531c9fc..8cb7210524a 100644
--- a/spec/services/grafana/proxy_service_spec.rb
+++ b/spec/services/grafana/proxy_service_spec.rb
@@ -66,7 +66,7 @@ describe Grafana::ProxyService do
context 'with caching', :use_clean_rails_memory_store_caching do
context 'when value not present in cache' do
it 'returns nil' do
- expect(ReactiveCachingWorker)
+ expect(ExternalServiceReactiveCachingWorker)
.to receive(:perform_async)
.with(service.class, service.id, *cache_params)
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index 5cde9a3ed45..c0e876cce33 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -24,6 +24,27 @@ describe Groups::CreateService, '#execute' do
end
end
+ context 'creating a group with `default_branch_protection` attribute' do
+ let(:params) { group_params.merge(default_branch_protection: Gitlab::Access::PROTECTION_NONE) }
+ let(:service) { described_class.new(user, params) }
+ let(:created_group) { service.execute }
+
+ context 'for users who have the ability to create a group with `default_branch_protection`' do
+ it 'creates group with the specified branch protection level' do
+ expect(created_group.default_branch_protection).to eq(Gitlab::Access::PROTECTION_NONE)
+ end
+ end
+
+ context 'for users who do not have the ability to create a group with `default_branch_protection`' do
+ it 'does not create the group with the specified branch protection level' do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection) { false }
+
+ expect(created_group.default_branch_protection).not_to eq(Gitlab::Access::PROTECTION_NONE)
+ end
+ end
+ end
+
describe 'creating a top level group' do
let(:service) { described_class.new(user, group_params) }
diff --git a/spec/services/groups/import_export/export_service_spec.rb b/spec/services/groups/import_export/export_service_spec.rb
index 56c7121cc34..7bad68b4e00 100644
--- a/spec/services/groups/import_export/export_service_spec.rb
+++ b/spec/services/groups/import_export/export_service_spec.rb
@@ -11,7 +11,7 @@ describe Groups::ImportExport::ExportService do
let(:export_service) { described_class.new(group: group, user: user) }
it 'enqueues an export job' do
expect(GroupExportWorker).to receive(:perform_async).with(user.id, group.id, {})
export_service.async_execute
end
@@ -49,12 +49,36 @@ describe Groups::ImportExport::ExportService do
FileUtils.rm_rf(archive_path)
end
- it 'saves the models' do
+ it 'saves the version' do
+ expect(Gitlab::ImportExport::VersionSaver).to receive(:new).and_call_original
+
+ service.execute
+ end
+
+ it 'saves the models using ndjson tree saver' do
+ stub_feature_flags(group_export_ndjson: true)
+
+ expect(Gitlab::ImportExport::Group::TreeSaver).to receive(:new).and_call_original
+
+ service.execute
+ end
+
+ it 'saves the models using legacy tree saver' do
+ stub_feature_flags(group_export_ndjson: false)
+
expect(Gitlab::ImportExport::Group::LegacyTreeSaver).to receive(:new).and_call_original
service.execute
end
+ it 'notifies the user' do
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:group_was_exported)
+ end
+
+ service.execute
+ end
+
context 'when saver succeeds' do
it 'saves the group in the file system' do
service.execute
@@ -98,16 +122,26 @@ describe Groups::ImportExport::ExportService do
context 'when export fails' do
context 'when file saver fails' do
- it 'removes the remaining exported data' do
+ before do
allow_next_instance_of(Gitlab::ImportExport::Saver) do |saver|
allow(saver).to receive(:save).and_return(false)
end
+ end
+ it 'removes the remaining exported data' do
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
expect(group.import_export_upload).to be_nil
expect(File.exist?(shared.archive_path)).to eq(false)
end
+
+ it 'notifies the user about failed group export' do
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:group_was_not_exported)
+ end
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ end
end
context 'when file compression fails' do
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index d95bba38b3e..256e0a1b3c5 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -3,17 +3,16 @@
require 'spec_helper'
describe Groups::ImportExport::ImportService do
- describe '#execute' do
+ context 'with group_import_ndjson feature flag disabled' do
let(:user) { create(:admin) }
let(:group) { create(:group) }
- let(:service) { described_class.new(group: group, user: user) }
- let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
-
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
- subject { service.execute }
+ subject(:service) { described_class.new(group: group, user: user) }
before do
+ stub_feature_flags(group_import_ndjson: false)
+
ImportExportUpload.create(group: group, import_file: import_file)
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
@@ -21,84 +20,227 @@ describe Groups::ImportExport::ImportService do
allow(import_logger).to receive(:info)
end
- context 'when user has correct permissions' do
- it 'imports group structure successfully' do
- expect(subject).to be_truthy
- end
+ context 'with a json file' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
- it 'removes import file' do
- subject
+ it 'uses LegacyTreeRestorer to import the file' do
+ expect(Gitlab::ImportExport::Group::LegacyTreeRestorer).to receive(:new).and_call_original
- expect(group.import_export_upload.import_file.file).to be_nil
+ service.execute
end
+ end
- it 'logs the import success' do
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- ).once
+ context 'with a ndjson file' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
- subject
+ it 'fails to import' do
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error, 'Incorrect JSON format')
end
end
+ end
+
+ context 'with group_import_ndjson feature flag enabled' do
+ before do
+ stub_feature_flags(group_import_ndjson: true)
+ end
+
+ context 'when importing a ndjson export' do
+ let(:user) { create(:admin) }
+ let(:group) { create(:group) }
+ let(:service) { described_class.new(group: group, user: user) }
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
- context 'when user does not have correct permissions' do
- let(:user) { create(:user) }
+ let(:import_logger) { instance_double(Gitlab::Import::Logger) }
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- )
+ subject { service.execute }
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ before do
+ ImportExportUpload.create(group: group, import_file: import_file)
+
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ allow(import_logger).to receive(:error)
+ allow(import_logger).to receive(:info)
end
- it 'tracks the error' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
+ context 'when user has correct permissions' do
+ it 'imports group structure successfully' do
+ expect(subject).to be_truthy
+ end
+
+ it 'removes import file' do
+ subject
- expect(shared).to receive(:error) do |param|
- expect(param.message).to include 'does not have required permissions for'
+ expect(group.import_export_upload.import_file.file).to be_nil
end
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ it 'logs the import success' do
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ ).once
+
+ subject
+ end
end
- end
- context 'when there are errors with the import file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/symlink_export.tar.gz') }
+ context 'when user does not have correct permissions' do
+ let(:user) { create(:user) }
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- ).once
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ )
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
+
+ it 'tracks the error' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
+
+ expect(shared).to receive(:error) do |param|
+ expect(param.message).to include 'does not have required permissions for'
+ end
+
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
end
- end
- context 'when there are errors with the sub-relations' do
- let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
+ context 'when there are errors with the import file' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/symlink_export.tar.gz') }
+
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ ).once
+
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+
+ context 'when there are errors with the sub-relations' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
+
+ it 'successfully imports the group' do
+ expect(subject).to be_truthy
+ end
+
+ it 'logs the import success' do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ )
- it 'successfully imports the group' do
- expect(subject).to be_truthy
+ subject
+ end
end
+ end
+
+ context 'when importing a json export' do
+ let(:user) { create(:admin) }
+ let(:group) { create(:group) }
+ let(:service) { described_class.new(group: group, user: user) }
+ let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
+
+ let(:import_logger) { instance_double(Gitlab::Import::Logger) }
+
+ subject { service.execute }
+
+ before do
+ ImportExportUpload.create(group: group, import_file: import_file)
- it 'logs the import success' do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ allow(import_logger).to receive(:error)
+ allow(import_logger).to receive(:info)
+ end
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- )
+ context 'when user has correct permissions' do
+ it 'imports group structure successfully' do
+ expect(subject).to be_truthy
+ end
+
+ it 'removes import file' do
+ subject
+
+ expect(group.import_export_upload.import_file.file).to be_nil
+ end
+
+ it 'logs the import success' do
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ ).once
+
+ subject
+ end
+ end
+
+ context 'when user does not have correct permissions' do
+ let(:user) { create(:user) }
+
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ )
+
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
- subject
+ it 'tracks the error' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
+
+ expect(shared).to receive(:error) do |param|
+ expect(param.message).to include 'does not have required permissions for'
+ end
+
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+
+ context 'when there are errors with the import file' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/legacy_symlink_export.tar.gz') }
+
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ ).once
+
+ expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+
+ context 'when there are errors with the sub-relations' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export_invalid_subrelations.tar.gz') }
+
+ it 'successfully imports the group' do
+ expect(subject).to be_truthy
+ end
+
+ it 'logs the import success' do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ )
+
+ subject
+ end
end
end
end
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 1aa7e06182b..b17d78505d1 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -148,6 +148,26 @@ describe Groups::UpdateService do
end
end
+ context 'updating default_branch_protection' do
+ let(:service) do
+ described_class.new(internal_group, user, default_branch_protection: Gitlab::Access::PROTECTION_NONE)
+ end
+
+ context 'for users who have the ability to update default_branch_protection' do
+ it 'updates the attribute' do
+ internal_group.add_owner(user)
+
+ expect { service.execute }.to change { internal_group.default_branch_protection }.to(Gitlab::Access::PROTECTION_NONE)
+ end
+ end
+
+ context 'for users who do not have the ability to update default_branch_protection' do
+ it 'does not update the attribute' do
+ expect { service.execute }.not_to change { internal_group.default_branch_protection }
+ end
+ end
+ end
+
context 'rename group' do
let!(:service) { described_class.new(internal_group, user, path: SecureRandom.hex) }
diff --git a/spec/services/incident_management/create_issue_service_spec.rb b/spec/services/incident_management/create_issue_service_spec.rb
index 4c7fb682193..5a3721f00b8 100644
--- a/spec/services/incident_management/create_issue_service_spec.rb
+++ b/spec/services/incident_management/create_issue_service_spec.rb
@@ -6,7 +6,7 @@ describe IncidentManagement::CreateIssueService do
let(:project) { create(:project, :repository, :private) }
let_it_be(:user) { User.alert_bot }
let(:service) { described_class.new(project, alert_payload) }
- let(:alert_starts_at) { Time.now }
+ let(:alert_starts_at) { Time.current }
let(:alert_title) { 'TITLE' }
let(:alert_annotations) { { title: alert_title } }
@@ -281,18 +281,28 @@ describe IncidentManagement::CreateIssueService do
setting.update!(create_issue: false)
end
- it 'returns an error' do
- expect(service)
- .to receive(:log_error)
- .with(error_message('setting disabled'))
+ context 'when skip_settings_check is false (default)' do
+ it 'returns an error' do
+ expect(service)
+ .to receive(:log_error)
+ .with(error_message('setting disabled'))
+
+ expect(subject).to eq(status: :error, message: 'setting disabled')
+ end
+ end
+
+ context 'when skip_settings_check is true' do
+ subject { service.execute(skip_settings_check: true) }
- expect(subject).to eq(status: :error, message: 'setting disabled')
+ it 'creates an issue' do
+ expect { subject }.to change(Issue, :count).by(1)
+ end
end
end
private
- def build_alert_payload(annotations: {}, starts_at: Time.now)
+ def build_alert_payload(annotations: {}, starts_at: Time.current)
{
'annotations' => annotations.stringify_keys
}.tap do |payload|
diff --git a/spec/services/issuable/clone/attributes_rewriter_spec.rb b/spec/services/issuable/clone/attributes_rewriter_spec.rb
index 9111b19d7b7..fb520f828fa 100644
--- a/spec/services/issuable/clone/attributes_rewriter_spec.rb
+++ b/spec/services/issuable/clone/attributes_rewriter_spec.rb
@@ -89,7 +89,7 @@ describe Issuable::Clone::AttributesRewriter do
create_event(milestone1_project1)
create_event(milestone2_project1)
- create_event(milestone1_project1, 'remove')
+ create_event(nil, 'remove')
create_event(milestone3_project1)
end
@@ -101,7 +101,7 @@ describe Issuable::Clone::AttributesRewriter do
expect_milestone_event(new_issue_milestone_events.first, milestone: milestone1_project2, action: 'add', state: 'opened')
expect_milestone_event(new_issue_milestone_events.second, milestone: milestone2_project2, action: 'add', state: 'opened')
- expect_milestone_event(new_issue_milestone_events.third, milestone: milestone1_project2, action: 'remove', state: 'opened')
+ expect_milestone_event(new_issue_milestone_events.third, milestone: nil, action: 'remove', state: 'opened')
end
def create_event(milestone, action = 'add')
@@ -109,10 +109,32 @@ describe Issuable::Clone::AttributesRewriter do
end
def expect_milestone_event(event, expected_attrs)
- expect(event.milestone_id).to eq(expected_attrs[:milestone].id)
+ expect(event.milestone_id).to eq(expected_attrs[:milestone]&.id)
expect(event.action).to eq(expected_attrs[:action])
expect(event.state).to eq(expected_attrs[:state])
end
end
+
+ context 'with existing state events' do
+ let!(:event1) { create(:resource_state_event, issue: original_issue, state: 'opened') }
+ let!(:event2) { create(:resource_state_event, issue: original_issue, state: 'closed') }
+ let!(:event3) { create(:resource_state_event, issue: original_issue, state: 'reopened') }
+
+ it 'copies existing state events as expected' do
+ subject.execute
+
+ state_events = new_issue.reload.resource_state_events
+ expect(state_events.size).to eq(3)
+
+ expect_state_event(state_events.first, issue: new_issue, state: 'opened')
+ expect_state_event(state_events.second, issue: new_issue, state: 'closed')
+ expect_state_event(state_events.third, issue: new_issue, state: 'reopened')
+ end
+
+ def expect_state_event(event, expected_attrs)
+ expect(event.issue_id).to eq(expected_attrs[:issue]&.id)
+ expect(event.state).to eq(expected_attrs[:state])
+ end
+ end
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 86377e054c1..6fc1928d47b 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -146,7 +146,7 @@ describe Issues::CloseService do
context 'when `metrics.first_mentioned_in_commit_at` is already set' do
before do
- issue.metrics.update!(first_mentioned_in_commit_at: Time.now)
+ issue.metrics.update!(first_mentioned_in_commit_at: Time.current)
end
it 'does not update the metrics' do
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index bd50d6b1001..7a251e03e51 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -110,6 +110,31 @@ describe Issues::CreateService do
end
end
+ context 'when labels is nil' do
+ let(:opts) do
+ { title: 'Title',
+ description: 'Description',
+ labels: nil }
+ end
+
+ it 'does not assign label' do
+ expect(issue.labels).to be_empty
+ end
+ end
+
+ context 'when labels is nil and label_ids is present' do
+ let(:opts) do
+ { title: 'Title',
+ description: 'Description',
+ labels: nil,
+ label_ids: labels.map(&:id) }
+ end
+
+ it 'assigns group labels' do
+ expect(issue.labels).to match_array labels
+ end
+ end
+
context 'when milestone belongs to different project' do
let(:milestone) { create(:milestone) }
@@ -368,6 +393,8 @@ describe Issues::CreateService do
end
context 'checking spam' do
+ include_context 'includes Spam constants'
+
let(:title) { 'Legit issue' }
let(:description) { 'please fix' }
let(:opts) do
@@ -378,11 +405,13 @@ describe Issues::CreateService do
}
end
+ subject { described_class.new(project, user, opts) }
+
before do
stub_feature_flags(allow_possible_spam: false)
end
- context 'when recaptcha was verified' do
+ context 'when reCAPTCHA was verified' do
let(:log_user) { user }
let(:spam_logs) { create_list(:spam_log, 2, user: log_user, title: title) }
let(:target_spam_log) { spam_logs.last }
@@ -391,7 +420,7 @@ describe Issues::CreateService do
opts[:recaptcha_verified] = true
opts[:spam_log_id] = target_spam_log.id
- expect(Spam::AkismetService).not_to receive(:new)
+ expect(Spam::SpamVerdictService).not_to receive(:new)
end
it 'does not mark an issue as spam' do
@@ -402,7 +431,7 @@ describe Issues::CreateService do
expect(issue).to be_valid
end
- it 'does not assign a spam_log to an issue' do
+ it 'does not assign a spam_log to the issue' do
expect(issue.spam_log).to be_nil
end
@@ -419,17 +448,42 @@ describe Issues::CreateService do
end
end
- context 'when recaptcha was not verified' do
+ context 'when reCAPTCHA was not verified' do
before do
- expect_next_instance_of(Spam::SpamCheckService) do |spam_service|
+ expect_next_instance_of(Spam::SpamActionService) do |spam_service|
expect(spam_service).to receive_messages(check_for_spam?: true)
end
end
- context 'when akismet detects spam' do
+ context 'when SpamVerdictService requires reCAPTCHA' do
before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(REQUIRE_RECAPTCHA)
+ end
+ end
+
+ it 'does not mark the issue as spam' do
+ expect(issue).not_to be_spam
+ end
+
+ it 'marks the issue as needing reCAPTCHA' do
+ expect(issue.needs_recaptcha?).to be_truthy
+ end
+
+ it 'invalidates the issue' do
+ expect(issue).to be_invalid
+ end
+
+ it 'creates a new spam_log' do
+ expect { issue }
+ .to have_spam_log(title: title, description: description, user_id: user.id, noteable_type: 'Issue')
+ end
+ end
+
+ context 'when SpamVerdictService disallows creation' do
+ before do
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(DISALLOW)
end
end
@@ -438,6 +492,10 @@ describe Issues::CreateService do
expect(issue).to be_spam
end
+ it 'does not mark the issue as needing reCAPTCHA' do
+ expect(issue.needs_recaptcha?).to be_falsey
+ end
+
it 'invalidates the issue' do
expect(issue).to be_invalid
end
@@ -457,7 +515,11 @@ describe Issues::CreateService do
expect(issue).not_to be_spam
end
- it '​creates a valid issue' do
+ it 'does not mark the issue as needing reCAPTCHA' do
+ expect(issue.needs_recaptcha?).to be_falsey
+ end
+
+ it 'creates a valid issue' do
expect(issue).to be_valid
end
@@ -468,10 +530,10 @@ describe Issues::CreateService do
end
end
- context 'when akismet does not detect spam' do
+ context 'when the SpamVerdictService allows creation' do
before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: false)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(ALLOW)
end
end
diff --git a/spec/services/issues/related_branches_service_spec.rb b/spec/services/issues/related_branches_service_spec.rb
index eae35f12560..9f72e499414 100644
--- a/spec/services/issues/related_branches_service_spec.rb
+++ b/spec/services/issues/related_branches_service_spec.rb
@@ -3,39 +3,103 @@
require 'spec_helper'
describe Issues::RelatedBranchesService do
- let(:user) { create(:admin) }
- let(:issue) { create(:issue) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let(:user) { developer }
subject { described_class.new(issue.project, user) }
+ before do
+ issue.project.add_developer(developer)
+ end
+
describe '#execute' do
+ let(:sha) { 'abcdef' }
+ let(:repo) { issue.project.repository }
+ let(:project) { issue.project }
+ let(:branch_info) { subject.execute(issue) }
+
+ def make_branch
+ double('Branch', dereferenced_target: double('Target', sha: sha))
+ end
+
before do
- allow(issue.project.repository).to receive(:branch_names).and_return(["mpempe", "#{issue.iid}mepmep", issue.to_branch_name, "#{issue.iid}-branch"])
+ allow(repo).to receive(:branch_names).and_return(branch_names)
end
- it "selects the right branches when there are no referenced merge requests" do
- expect(subject.execute(issue)).to eq([issue.to_branch_name, "#{issue.iid}-branch"])
+ context 'no branches are available' do
+ let(:branch_names) { [] }
+
+ it 'returns an empty array' do
+ expect(branch_info).to be_empty
+ end
end
- it "selects the right branches when there is a referenced merge request" do
- merge_request = create(:merge_request, { description: "Closes ##{issue.iid}",
- source_project: issue.project,
- source_branch: "#{issue.iid}-branch" })
- merge_request.create_cross_references!(user)
+ context 'branches are available' do
+ let(:missing_branch) { "#{issue.to_branch_name}-missing" }
+ let(:unreadable_branch_name) { "#{issue.to_branch_name}-unreadable" }
+ let(:pipeline) { build(:ci_pipeline, :success, project: project) }
+ let(:unreadable_pipeline) { build(:ci_pipeline, :running) }
+
+ let(:branch_names) do
+ [
+ generate(:branch),
+ "#{issue.iid}doesnt-match",
+ issue.to_branch_name,
+ missing_branch,
+ unreadable_branch_name
+ ]
+ end
+
+ before do
+ {
+ issue.to_branch_name => pipeline,
+ unreadable_branch_name => unreadable_pipeline
+ }.each do |name, pipeline|
+ allow(repo).to receive(:find_branch).with(name).and_return(make_branch)
+ allow(project).to receive(:pipeline_for).with(name, sha).and_return(pipeline)
+ end
+
+ allow(repo).to receive(:find_branch).with(missing_branch).and_return(nil)
+ end
+
+ it 'selects relevant branches, along with pipeline status where available' do
+ expect(branch_info).to contain_exactly(
+ { name: issue.to_branch_name, pipeline_status: an_instance_of(Gitlab::Ci::Status::Success) },
+ { name: missing_branch, pipeline_status: be_nil },
+ { name: unreadable_branch_name, pipeline_status: be_nil }
+ )
+ end
+
+ context 'the user has access to otherwise unreadable pipelines' do
+ let(:user) { create(:admin) }
+
+ it 'returns info a developer could not see' do
+ expect(branch_info.pluck(:pipeline_status)).to include(an_instance_of(Gitlab::Ci::Status::Running))
+ end
+ end
+
+ it 'excludes branches referenced in merge requests' do
+ merge_request = create(:merge_request, { description: "Closes #{issue.to_reference}",
+ source_project: issue.project,
+ source_branch: issue.to_branch_name })
+ merge_request.create_cross_references!(user)
- referenced_merge_requests = Issues::ReferencedMergeRequestsService
- .new(issue.project, user)
- .referenced_merge_requests(issue)
+ referenced_merge_requests = Issues::ReferencedMergeRequestsService
+ .new(issue.project, user)
+ .referenced_merge_requests(issue)
- expect(referenced_merge_requests).not_to be_empty
- expect(subject.execute(issue)).to eq([issue.to_branch_name])
+ expect(referenced_merge_requests).not_to be_empty
+ expect(branch_info.pluck(:name)).not_to include(merge_request.source_branch)
+ end
end
- it 'excludes stable branches from the related branches' do
- allow(issue.project.repository).to receive(:branch_names)
- .and_return(["#{issue.iid}-0-stable"])
+ context 'one of the branches is stable' do
+ let(:branch_names) { ["#{issue.iid}-0-stable"] }
- expect(subject.execute(issue)).to eq []
+ it 'is excluded' do
+ expect(branch_info).to be_empty
+ end
end
end
end
diff --git a/spec/services/issues/resolve_discussions_spec.rb b/spec/services/issues/resolve_discussions_spec.rb
index f12a3820b8d..ec6624db6fc 100644
--- a/spec/services/issues/resolve_discussions_spec.rb
+++ b/spec/services/issues/resolve_discussions_spec.rb
@@ -3,19 +3,20 @@
require 'spec_helper.rb'
describe Issues::ResolveDiscussions do
- class DummyService < Issues::BaseService
- include ::Issues::ResolveDiscussions
-
- def initialize(*args)
- super
- filter_resolve_discussion_params
- end
- end
-
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
before do
+ stub_const('DummyService', Class.new(Issues::BaseService))
+ DummyService.class_eval do
+ include ::Issues::ResolveDiscussions
+
+ def initialize(*args)
+ super
+ filter_resolve_discussion_params
+ end
+ end
+
project.add_developer(user)
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index c32bef5a1a5..80039049bc3 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -510,7 +510,7 @@ describe Issues::UpdateService, :mailer do
end
it 'updates updated_at' do
- expect(issue.reload.updated_at).to be > Time.now
+ expect(issue.reload.updated_at).to be > Time.current
end
end
end
@@ -842,5 +842,33 @@ describe Issues::UpdateService, :mailer do
let(:open_issuable) { issue }
let(:closed_issuable) { create(:closed_issue, project: project) }
end
+
+ context 'real-time updates' do
+ let(:update_params) { { assignee_ids: [user2.id] } }
+
+ context 'when broadcast_issue_updates is enabled' do
+ before do
+ stub_feature_flags(broadcast_issue_updates: true)
+ end
+
+ it 'broadcasts to the issues channel' do
+ expect(IssuesChannel).to receive(:broadcast_to).with(issue, event: 'updated')
+
+ update_issue(update_params)
+ end
+ end
+
+ context 'when broadcast_issue_updates is disabled' do
+ before do
+ stub_feature_flags(broadcast_issue_updates: false)
+ end
+
+ it 'does not broadcast to the issues channel' do
+ expect(IssuesChannel).not_to receive(:broadcast_to)
+
+ update_issue(update_params)
+ end
+ end
+ end
end
end
diff --git a/spec/services/jira_import/start_import_service_spec.rb b/spec/services/jira_import/start_import_service_spec.rb
index 90f38945a9f..759e4f3363f 100644
--- a/spec/services/jira_import/start_import_service_spec.rb
+++ b/spec/services/jira_import/start_import_service_spec.rb
@@ -3,113 +3,89 @@
require 'spec_helper'
describe JiraImport::StartImportService do
+ include JiraServiceHelper
+
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
let(:key) { 'KEY' }
subject { described_class.new(user, project, key).execute }
- context 'when feature flag disabled' do
+ context 'when an error is returned from the project validation' do
before do
- stub_feature_flags(jira_issue_import: false)
+ allow(project).to receive(:validate_jira_import_settings!)
+ .and_raise(Projects::ImportService::Error, 'Jira import feature is disabled.')
end
it_behaves_like 'responds with error', 'Jira import feature is disabled.'
end
- context 'when feature flag enabled' do
+ context 'when project validation is ok' do
+ let!(:jira_service) { create(:jira_service, project: project, active: true) }
+
before do
- stub_feature_flags(jira_issue_import: true)
+ stub_jira_service_test
+ allow(project).to receive(:validate_jira_import_settings!)
end
- context 'when user does not have permissions to run the import' do
- before do
- create(:jira_service, project: project, active: true)
+ context 'when Jira project key is not provided' do
+ let(:key) { '' }
- project.add_developer(user)
- end
-
- it_behaves_like 'responds with error', 'You do not have permissions to run the import.'
+ it_behaves_like 'responds with error', 'Unable to find Jira project to import data from.'
end
- context 'when user has permission to run import' do
- before do
- project.add_maintainer(user)
- end
+ context 'when correct data provided' do
+ let(:fake_key) { 'some-key' }
- context 'when Jira service was not setup' do
- it_behaves_like 'responds with error', 'Jira integration not configured.'
- end
+ subject { described_class.new(user, project, fake_key).execute }
- context 'when Jira service exists' do
- let!(:jira_service) { create(:jira_service, project: project, active: true) }
+ context 'when import is already running' do
+ let_it_be(:jira_import_state) { create(:jira_import_state, :started, project: project) }
- context 'when Jira project key is not provided' do
- let(:key) { '' }
+ it_behaves_like 'responds with error', 'Jira import is already running.'
+ end
- it_behaves_like 'responds with error', 'Unable to find Jira project to import data from.'
+ context 'when everything is ok' do
+ it 'returns success response' do
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_success
end
- context 'when issues feature are disabled' do
- let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
+ it 'schedules Jira import' do
+ subject
- it_behaves_like 'responds with error', 'Cannot import because issues are not available in this project.'
+ expect(project.latest_jira_import).to be_scheduled
end
- context 'when correct data provided' do
- let(:fake_key) { 'some-key' }
-
- subject { described_class.new(user, project, fake_key).execute }
-
- context 'when import is already running' do
- let_it_be(:jira_import_state) { create(:jira_import_state, :started, project: project) }
+ it 'creates Jira import data' do
+ jira_import = subject.payload[:import_data]
- it_behaves_like 'responds with error', 'Jira import is already running.'
- end
-
- context 'when everything is ok' do
- it 'returns success response' do
- expect(subject).to be_a(ServiceResponse)
- expect(subject).to be_success
- end
-
- it 'schedules jira import' do
- subject
-
- expect(project.latest_jira_import).to be_scheduled
- end
- end
-
- it 'creates jira import data' do
- jira_import = subject.payload[:import_data]
-
- expect(jira_import.jira_project_xid).to eq(0)
- expect(jira_import.jira_project_name).to eq(fake_key)
- expect(jira_import.jira_project_key).to eq(fake_key)
- expect(jira_import.user).to eq(user)
- end
+ expect(jira_import.jira_project_xid).to eq(0)
+ expect(jira_import.jira_project_name).to eq(fake_key)
+ expect(jira_import.jira_project_key).to eq(fake_key)
+ expect(jira_import.user).to eq(user)
+ end
- it 'creates jira import label' do
- expect { subject }.to change { Label.count }.by(1)
- end
+ it 'creates Jira import label' do
+ expect { subject }.to change { Label.count }.by(1)
+ end
- it 'creates jira label title with correct number' do
- jira_import = subject.payload[:import_data]
+ it 'creates Jira label title with correct number' do
+ jira_import = subject.payload[:import_data]
- label_title = "jira-import::#{jira_import.jira_project_key}-1"
- expect(jira_import.label.title).to eq(label_title)
- end
+ label_title = "jira-import::#{jira_import.jira_project_key}-1"
+ expect(jira_import.label.title).to eq(label_title)
+ end
+ end
- context 'when multiple jira imports for same jira project' do
- let!(:jira_imports) { create_list(:jira_import_state, 3, :finished, project: project, jira_project_key: fake_key)}
+ context 'when multiple Jira imports for same Jira project' do
+ let!(:jira_imports) { create_list(:jira_import_state, 3, :finished, project: project, jira_project_key: fake_key)}
- it 'creates jira label title with correct number' do
- jira_import = subject.payload[:import_data]
+ it 'creates Jira label title with correct number' do
+ jira_import = subject.payload[:import_data]
- label_title = "jira-import::#{jira_import.jira_project_key}-4"
- expect(jira_import.label.title).to eq(label_title)
- end
- end
+ label_title = "jira-import::#{jira_import.jira_project_key}-4"
+ expect(jira_import.label.title).to eq(label_title)
end
end
end
diff --git a/spec/services/lfs/file_transformer_spec.rb b/spec/services/lfs/file_transformer_spec.rb
index 9973d64930b..13d9c369c42 100644
--- a/spec/services/lfs/file_transformer_spec.rb
+++ b/spec/services/lfs/file_transformer_spec.rb
@@ -81,6 +81,23 @@ describe Lfs::FileTransformer do
expect(LfsObject.last.file.read).to eq file_content
end
+
+ context 'when repository is a design repository' do
+ let(:file_path) { "/#{DesignManagement.designs_directory}/test_file.lfs" }
+ let(:repository) { project.design_repository }
+
+ it "creates an LfsObject with the file's content" do
+ subject.new_file(file_path, file)
+
+ expect(LfsObject.last.file.read).to eq(file_content)
+ end
+
+ it 'saves the correct repository_type to LfsObjectsProject' do
+ subject.new_file(file_path, file)
+
+ expect(project.lfs_objects_projects.first.repository_type).to eq('design')
+ end
+ end
end
context "when doesn't use LFS" do
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index dc34546a599..9155db16d17 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -224,19 +224,6 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
- context 'when ci_use_merge_request_ref feature flag is false' do
- before do
- stub_feature_flags(ci_use_merge_request_ref: false)
- end
-
- it 'create legacy detached merge request pipeline for non-fork merge request' do
- merge_request.reload
-
- expect(merge_request.actual_head_pipeline)
- .to be_legacy_detached_merge_request_pipeline
- end
- end
-
context 'when there are no commits between source branch and target branch' do
let(:opts) do
{
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index fa7f745d8a0..bcad822b1dc 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -118,7 +118,7 @@ describe MergeRequests::MergeService do
it 'closes GitLab issue tracker issues' do
issue = create :issue, project: project
- commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.now, authored_date: Time.now)
+ commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
allow(merge_request).to receive(:commits).and_return([commit])
merge_request.cache_merge_request_closes_issues!
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 22df3b84243..69d555f838d 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -72,12 +72,15 @@ describe MergeRequests::RebaseService do
it_behaves_like 'sequence of failure and success'
context 'when unexpected error occurs' do
+ let(:exception) { RuntimeError.new('Something went wrong') }
+ let(:merge_request_ref) { merge_request.to_reference(full: true) }
+
before do
- allow(repository).to receive(:gitaly_operation_client).and_raise('Something went wrong')
+ allow(repository).to receive(:gitaly_operation_client).and_raise(exception)
end
it 'saves a generic error message' do
- subject.execute(merge_request)
+ service.execute(merge_request)
expect(merge_request.reload.merge_error).to eq(described_class::REBASE_ERROR)
end
@@ -86,6 +89,18 @@ describe MergeRequests::RebaseService do
expect(service.execute(merge_request)).to match(status: :error,
message: described_class::REBASE_ERROR)
end
+
+ it 'logs the error' do
+ expect(service).to receive(:log_error).with(exception: exception, message: described_class::REBASE_ERROR, save_message_on_model: true).and_call_original
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception,
+ class: described_class.to_s,
+ merge_request: merge_request_ref,
+ merge_request_id: merge_request.id,
+ message: described_class::REBASE_ERROR,
+ save_message_on_model: true).and_call_original
+
+ service.execute(merge_request)
+ end
end
context 'with git command failure' do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 4f052fa3edb..94e65d895ac 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -94,6 +94,31 @@ describe MergeRequests::RefreshService do
expect(@fork_build_failed_todo).to be_done
end
+ context 'when a merge error exists' do
+ let(:error_message) { 'This is a merge error' }
+
+ before do
+ @merge_request = create(:merge_request,
+ source_project: @project,
+ source_branch: 'feature',
+ target_branch: 'master',
+ target_project: @project,
+ merge_error: error_message)
+ end
+
+ it 'clears merge errors when pushing to the source branch' do
+ expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/feature') }
+ .to change { @merge_request.reload.merge_error }
+ .from(error_message)
+ .to(nil)
+ end
+
+ it 'does not clear merge errors when pushing to the target branch' do
+ expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') }
+ .not_to change { @merge_request.reload.merge_error }
+ end
+ end
+
it 'reloads source branch MRs memoization' do
refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
@@ -209,19 +234,6 @@ describe MergeRequests::RefreshService do
end
end
- context 'when ci_use_merge_request_ref feature flag is false' do
- before do
- stub_feature_flags(ci_use_merge_request_ref: false)
- end
-
- it 'create legacy detached merge request pipeline for non-fork merge request' do
- subject
-
- expect(@merge_request.pipelines_for_merge_request.first)
- .to be_legacy_detached_merge_request_pipeline
- end
- end
-
context "when branch pipeline was created before a detaced merge request pipeline has been created" do
before do
create(:ci_pipeline, project: @merge_request.source_project,
@@ -623,7 +635,7 @@ describe MergeRequests::RefreshService do
references: [issue],
author_name: commit_author.name,
author_email: commit_author.email,
- committed_date: Time.now
+ committed_date: Time.current
)
allow_any_instance_of(MergeRequest).to receive(:commits).and_return(CommitCollection.new(@project, [commit], 'feature'))
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index cb278eec692..a53314ed737 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -141,15 +141,14 @@ describe MergeRequests::SquashService do
let(:merge_request) { merge_request_with_only_new_files }
let(:error) { 'A test error' }
- context 'with gitaly enabled' do
+ context 'with an error in Gitaly UserSquash RPC' do
before do
allow(repository.gitaly_operation_client).to receive(:user_squash)
.and_raise(Gitlab::Git::Repository::GitError, error)
end
- it 'logs the stage and output' do
- expect(service).to receive(:log_error).with(log_error)
- expect(service).to receive(:log_error).with(error)
+ it 'logs the error' do
+ expect(service).to receive(:log_error).with(exception: an_instance_of(Gitlab::Git::Repository::GitError), message: 'Failed to squash merge request')
service.execute
end
@@ -158,19 +157,42 @@ describe MergeRequests::SquashService do
expect(service.execute).to match(status: :error, message: a_string_including('squash'))
end
end
+
+ context 'with an error in squash in progress check' do
+ before do
+ allow(repository).to receive(:squash_in_progress?)
+ .and_raise(Gitlab::Git::Repository::GitError, error)
+ end
+
+ it 'logs the stage and output' do
+ expect(service).to receive(:log_error).with(exception: an_instance_of(Gitlab::Git::Repository::GitError), message: 'Failed to check squash in progress')
+
+ service.execute
+ end
+
+ it 'returns an error' do
+ expect(service.execute).to match(status: :error, message: 'An error occurred while checking whether another squash is in progress.')
+ end
+ end
end
context 'when any other exception is thrown' do
let(:merge_request) { merge_request_with_only_new_files }
- let(:error) { 'A test error' }
+ let(:merge_request_ref) { merge_request.to_reference(full: true) }
+ let(:exception) { RuntimeError.new('A test error') }
before do
- allow(merge_request.target_project.repository).to receive(:squash).and_raise(error)
+ allow(merge_request.target_project.repository).to receive(:squash).and_raise(exception)
end
- it 'logs the MR reference and exception' do
- expect(service).to receive(:log_error).with(a_string_including("#{project.full_path}#{merge_request.to_reference}"))
- expect(service).to receive(:log_error).with(error)
+ it 'logs the error' do
+ expect(service).to receive(:log_error).with(exception: exception, message: 'Failed to squash merge request').and_call_original
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception,
+ class: described_class.to_s,
+ merge_request: merge_request_ref,
+ merge_request_id: merge_request.id,
+ message: 'Failed to squash merge request',
+ save_message_on_model: false).and_call_original
service.execute
end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 8c1800c495f..2b934b24757 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -92,6 +92,7 @@ describe MergeRequests::UpdateService, :mailer do
labels: [],
mentioned_users: [user2],
assignees: [user3],
+ milestone: nil,
total_time_spent: 0,
description: "FYI #{user2.to_reference}"
}
@@ -452,7 +453,7 @@ describe MergeRequests::UpdateService, :mailer do
end
it 'updates updated_at' do
- expect(merge_request.reload.updated_at).to be > Time.now
+ expect(merge_request.reload.updated_at).to be > Time.current
end
end
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
index b386159541a..3d26ab2ede5 100644
--- a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
@@ -5,8 +5,6 @@ require 'spec_helper'
describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- STAGES = ::Gitlab::Metrics::Dashboard::Stages
-
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:environment) { create(:environment, project: project) }
@@ -83,7 +81,7 @@ describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_stor
allow(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).and_return(double(process: file_content_hash))
end
- it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH, [STAGES::CommonMetricsInserter, STAGES::CustomMetricsInserter, STAGES::Sorter]
+ it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH, [::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter, ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter, ::Gitlab::Metrics::Dashboard::Stages::Sorter]
context 'selected branch already exists' do
let(:branch) { 'existing_branch' }
diff --git a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
index 034d6aba5d6..3547e1f0f8c 100644
--- a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
@@ -154,7 +154,7 @@ describe Metrics::Dashboard::GrafanaMetricEmbedService do
context 'when value not present in cache' do
it 'returns nil' do
- expect(ReactiveCachingWorker)
+ expect(ExternalServiceReactiveCachingWorker)
.to receive(:perform_async)
.with(service.class, service.id, *cache_params)
@@ -217,7 +217,7 @@ describe Metrics::Dashboard::DatasourceNameParser do
include GrafanaApiHelpers
let(:grafana_url) { valid_grafana_dashboard_link('https://gitlab.grafana.net') }
- let(:grafana_dashboard) { JSON.parse(fixture_file('grafana/dashboard_response.json'), symbolize_names: true) }
+ let(:grafana_dashboard) { Gitlab::Json.parse(fixture_file('grafana/dashboard_response.json'), symbolize_names: true) }
subject { described_class.new(grafana_url, grafana_dashboard).parse }
diff --git a/spec/services/metrics/dashboard/transient_embed_service_spec.rb b/spec/services/metrics/dashboard/transient_embed_service_spec.rb
index 4982f56cddc..125fff7c23c 100644
--- a/spec/services/metrics/dashboard/transient_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/transient_embed_service_spec.rb
@@ -67,6 +67,12 @@ describe Metrics::Dashboard::TransientEmbedService, :use_clean_rails_memory_stor
expect(get_type_for_embed(alt_embed)).to eq('area-chart')
end
+ context 'when embed_json cannot be parsed as json' do
+ let(:embed_json) { '' }
+
+ it_behaves_like 'misconfigured dashboard service response', :unprocessable_entity
+ end
+
private
def get_embed_json(type = 'line-graph')
diff --git a/spec/services/metrics/users_starred_dashboards/create_service_spec.rb b/spec/services/metrics/users_starred_dashboards/create_service_spec.rb
new file mode 100644
index 00000000000..eac4965ba44
--- /dev/null
+++ b/spec/services/metrics/users_starred_dashboards/create_service_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::UsersStarredDashboards::CreateService do
+ let_it_be(:user) { create(:user) }
+ let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
+ let(:service_instance) { described_class.new(user, project, dashboard_path) }
+ let(:project) { create(:project) }
+ let(:starred_dashboard_params) do
+ {
+ user: user,
+ project: project,
+ dashboard_path: dashboard_path
+ }
+ end
+
+ shared_examples 'prevented starred dashboard creation' do |message|
+ it 'returns error response', :aggregate_failures do
+ expect(Metrics::UsersStarredDashboard).not_to receive(:new)
+
+ response = service_instance.execute
+
+ expect(response.status).to be :error
+ expect(response.message).to eql message
+ end
+ end
+
+ describe '.execute' do
+ context 'with anonymous user' do
+ it_behaves_like 'prevented starred dashboard creation', 'You are not authorized to add star to this dashboard'
+ end
+
+ context 'with reporter user' do
+ before do
+ project.add_reporter(user)
+ end
+
+ context 'incorrect dashboard_path' do
+ let(:dashboard_path) { 'something_incorrect.yml' }
+
+ it_behaves_like 'prevented starred dashboard creation', 'Dashboard with requested path can not be found'
+ end
+
+ context 'with valid dashboard path' do
+ it 'creates starred dashboard and returns success response', :aggregate_failures do
+ expect_next_instance_of(Metrics::UsersStarredDashboard, starred_dashboard_params) do |starred_dashboard|
+ expect(starred_dashboard).to receive(:save).and_return true
+ end
+
+ response = service_instance.execute
+
+ expect(response.status).to be :success
+ end
+
+ context 'Metrics::UsersStarredDashboard has validation errors' do
+ it 'returns error response', :aggregate_failures do
+ expect_next_instance_of(Metrics::UsersStarredDashboard, starred_dashboard_params) do |starred_dashboard|
+ expect(starred_dashboard).to receive(:save).and_return(false)
+ expect(starred_dashboard).to receive(:errors).and_return(double(messages: { base: ['Model validation error'] }))
+ end
+
+ response = service_instance.execute
+
+ expect(response.status).to be :error
+ expect(response.message).to eql(base: ['Model validation error'])
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/metrics/users_starred_dashboards/delete_service_spec.rb b/spec/services/metrics/users_starred_dashboards/delete_service_spec.rb
new file mode 100644
index 00000000000..68a2fef5931
--- /dev/null
+++ b/spec/services/metrics/users_starred_dashboards/delete_service_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::UsersStarredDashboards::DeleteService do
+ subject(:service_instance) { described_class.new(user, project, dashboard_path) }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ describe '#execute' do
+ let_it_be(:user_starred_dashboard_1) { create(:metrics_users_starred_dashboard, user: user, project: project, dashboard_path: 'dashboard_1') }
+ let_it_be(:user_starred_dashboard_2) { create(:metrics_users_starred_dashboard, user: user, project: project) }
+ let_it_be(:other_user_starred_dashboard) { create(:metrics_users_starred_dashboard, project: project) }
+ let_it_be(:other_project_starred_dashboard) { create(:metrics_users_starred_dashboard, user: user) }
+
+ context 'without dashboard_path' do
+ let(:dashboard_path) { nil }
+
+ it 'does not scope user starred dashboards by dashboard path' do
+ result = service_instance.execute
+
+ expect(result.success?).to be_truthy
+ expect(result.payload[:deleted_rows]).to be(2)
+ expect(Metrics::UsersStarredDashboard.all).to contain_exactly(other_user_starred_dashboard, other_project_starred_dashboard)
+ end
+ end
+
+ context 'with dashboard_path' do
+ let(:dashboard_path) { 'dashboard_1' }
+
+ it 'does scope user starred dashboards by dashboard path' do
+ result = service_instance.execute
+
+ expect(result.success?).to be_truthy
+ expect(result.payload[:deleted_rows]).to be(1)
+ expect(Metrics::UsersStarredDashboard.all).to contain_exactly(user_starred_dashboard_2, other_user_starred_dashboard, other_project_starred_dashboard)
+ end
+ end
+ end
+end
diff --git a/spec/services/namespaces/check_storage_size_service_spec.rb b/spec/services/namespaces/check_storage_size_service_spec.rb
new file mode 100644
index 00000000000..50359ef90ab
--- /dev/null
+++ b/spec/services/namespaces/check_storage_size_service_spec.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Namespaces::CheckStorageSizeService, '#execute' do
+ let(:namespace) { build_stubbed(:namespace) }
+ let(:user) { build(:user, namespace: namespace) }
+ let(:service) { described_class.new(namespace, user) }
+ let(:current_size) { 150.megabytes }
+ let(:limit) { 100.megabytes }
+
+ subject(:response) { service.execute }
+
+ before do
+ allow(namespace).to receive(:root_ancestor).and_return(namespace)
+
+ root_storage_size = instance_double("RootStorageSize",
+ current_size: current_size,
+ limit: limit,
+ usage_ratio: limit == 0 ? 0 : current_size.to_f / limit.to_f,
+ above_size_limit?: current_size > limit
+ )
+
+ expect(Namespace::RootStorageSize).to receive(:new).and_return(root_storage_size)
+ end
+
+ context 'feature flag' do
+ it 'is successful when disabled' do
+ stub_feature_flags(namespace_storage_limit: false)
+
+ expect(response).to be_success
+ end
+
+ it 'errors when enabled' do
+ stub_feature_flags(namespace_storage_limit: true)
+
+ expect(response).to be_error
+ end
+
+ it 'is successful when feature flag is activated for another namespace' do
+ stub_feature_flags(namespace_storage_limit: build(:namespace))
+
+ expect(response).to be_success
+ end
+
+ it 'errors when feature flag is activated for the current namespace' do
+ stub_feature_flags(namespace_storage_limit: namespace )
+
+ expect(response).to be_error
+ expect(response.message).to be_present
+ end
+ end
+
+ context 'when limit is set to 0' do
+ let(:limit) { 0 }
+
+ it 'is successful and has no payload' do
+ expect(response).to be_success
+ expect(response.payload).to be_empty
+ end
+ end
+
+ context 'when current size is below threshold' do
+ let(:current_size) { 10.megabytes }
+
+ it 'is successful and has no payload' do
+ expect(response).to be_success
+ expect(response.payload).to be_empty
+ end
+ end
+
+ context 'when not admin of the namespace' do
+ let(:other_namespace) { build_stubbed(:namespace) }
+
+ subject(:response) { described_class.new(other_namespace, user).execute }
+
+ before do
+ allow(other_namespace).to receive(:root_ancestor).and_return(other_namespace)
+ end
+
+ it 'errors and has no payload' do
+ expect(response).to be_error
+ expect(response.payload).to be_empty
+ end
+ end
+
+ context 'when providing the child namespace' do
+ let(:namespace) { build_stubbed(:group) }
+ let(:child_namespace) { build_stubbed(:group, parent: namespace) }
+
+ subject(:response) { described_class.new(child_namespace, user).execute }
+
+ before do
+ allow(child_namespace).to receive(:root_ancestor).and_return(namespace)
+ namespace.add_owner(user)
+ end
+
+ it 'uses the root namespace' do
+ expect(response).to be_error
+ end
+ end
+
+ describe 'payload alert_level' do
+ subject { service.execute.payload[:alert_level] }
+
+ context 'when above info threshold' do
+ let(:current_size) { 50.megabytes }
+
+ it { is_expected.to eq(:info) }
+ end
+
+ context 'when above warning threshold' do
+ let(:current_size) { 75.megabytes }
+
+ it { is_expected.to eq(:warning) }
+ end
+
+ context 'when above alert threshold' do
+ let(:current_size) { 95.megabytes }
+
+ it { is_expected.to eq(:alert) }
+ end
+
+ context 'when above error threshold' do
+ let(:current_size) { 100.megabytes }
+
+ it { is_expected.to eq(:error) }
+ end
+ end
+
+ describe 'payload explanation_message' do
+ subject(:response) { service.execute.payload[:explanation_message] }
+
+ context 'when above limit' do
+ let(:current_size) { 110.megabytes }
+
+ it 'returns message with read-only warning' do
+ expect(response).to include("#{namespace.name} is now read-only")
+ end
+ end
+
+ context 'when below limit' do
+ let(:current_size) { 60.megabytes }
+
+ it { is_expected.to include('If you reach 100% storage capacity') }
+ end
+ end
+
+ describe 'payload usage_message' do
+ let(:current_size) { 60.megabytes }
+
+ subject(:response) { service.execute.payload[:usage_message] }
+
+ it 'returns current usage information' do
+ expect(response).to include("60 MB of 100 MB")
+ expect(response).to include("60%")
+ end
+ end
+end
diff --git a/spec/services/note_summary_spec.rb b/spec/services/note_summary_spec.rb
index aa4e41f4d8c..038e0cdb703 100644
--- a/spec/services/note_summary_spec.rb
+++ b/spec/services/note_summary_spec.rb
@@ -25,18 +25,18 @@ describe NoteSummary do
it 'returns note hash' do
Timecop.freeze do
expect(create_note_summary.note).to eq(noteable: noteable, project: project, author: user, note: 'note',
- created_at: Time.now)
+ created_at: Time.current)
end
end
context 'when noteable is a commit' do
- let(:noteable) { build(:commit, system_note_timestamp: Time.at(43)) }
+ let(:noteable) { build(:commit, system_note_timestamp: Time.zone.at(43)) }
it 'returns note hash specific to commit' do
expect(create_note_summary.note).to eq(
noteable: nil, project: project, author: user, note: 'note',
noteable_type: 'Commit', commit_id: noteable.id,
- created_at: Time.at(43)
+ created_at: Time.zone.at(43)
)
end
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index c461dd700ec..39d6fd26e31 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -342,6 +342,60 @@ describe Notes::CreateService do
end
end
+ context 'design note' do
+ subject(:service) { described_class.new(project, user, params) }
+
+ let_it_be(:design) { create(:design, :with_file) }
+ let_it_be(:project) { design.project }
+ let_it_be(:user) { project.owner }
+ let_it_be(:params) do
+ {
+ type: 'DiffNote',
+ noteable: design,
+ note: "A message",
+ position: {
+ old_path: design.full_path,
+ new_path: design.full_path,
+ position_type: 'image',
+ width: '100',
+ height: '100',
+ x: '50',
+ y: '50',
+ base_sha: design.diff_refs.base_sha,
+ start_sha: design.diff_refs.base_sha,
+ head_sha: design.diff_refs.head_sha
+ }
+ }
+ end
+
+ it 'can create diff notes for designs' do
+ note = service.execute
+
+ expect(note).to be_a(DiffNote)
+ expect(note).to be_persisted
+ expect(note.noteable).to eq(design)
+ end
+
+ it 'sends a notification about this note', :sidekiq_might_not_need_inline do
+ notifier = double
+ allow(::NotificationService).to receive(:new).and_return(notifier)
+
+ expect(notifier)
+ .to receive(:new_note)
+ .with have_attributes(noteable: design)
+
+ service.execute
+ end
+
+ it 'correctly builds the position of the note' do
+ note = service.execute
+
+ expect(note.position.new_path).to eq(design.full_path)
+ expect(note.position.old_path).to eq(design.full_path)
+ expect(note.position.diff_refs).to eq(design.diff_refs)
+ end
+ end
+
context 'note with emoji only' do
it 'creates regular note' do
opts = {
@@ -371,7 +425,7 @@ describe Notes::CreateService do
expect do
existing_note
- Timecop.freeze(Time.now + 1.minute) { subject }
+ Timecop.freeze(Time.current + 1.minute) { subject }
existing_note.reload
end.to change { existing_note.type }.from(nil).to('DiscussionNote')
diff --git a/spec/services/notes/post_process_service_spec.rb b/spec/services/notes/post_process_service_spec.rb
index 99db7897664..d564cacd2d8 100644
--- a/spec/services/notes/post_process_service_spec.rb
+++ b/spec/services/notes/post_process_service_spec.rb
@@ -43,5 +43,32 @@ describe Notes::PostProcessService do
described_class.new(@note).execute
end
end
+
+ context 'when the noteable is a design' do
+ let_it_be(:noteable) { create(:design, :with_file) }
+ let_it_be(:discussion_note) { create_note }
+
+ subject { described_class.new(note).execute }
+
+ def create_note(in_reply_to: nil)
+ create(:diff_note_on_design, noteable: noteable, in_reply_to: in_reply_to)
+ end
+
+ context 'when the note is the start of a new discussion' do
+ let(:note) { discussion_note }
+
+ it 'creates a new system note' do
+ expect { subject }.to change { Note.system.count }.by(1)
+ end
+ end
+
+ context 'when the note is a reply within a discussion' do
+ let_it_be(:note) { create_note(in_reply_to: discussion_note) }
+
+ it 'does not create a new system note' do
+ expect { subject }.not_to change { Note.system.count }
+ end
+ end
+ end
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 163ca0b9bc3..2a7166e3895 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -240,6 +240,17 @@ describe NotificationService, :mailer do
end
end
+ describe '#unknown_sign_in' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:ip) { '127.0.0.1' }
+
+ subject { notification.unknown_sign_in(user, ip) }
+
+ it 'sends email to the user' do
+ expect { subject }.to have_enqueued_email(user, ip, mail: 'unknown_sign_in_email')
+ end
+ end
+
describe 'Notes' do
context 'issue note' do
let(:project) { create(:project, :private) }
@@ -698,9 +709,60 @@ describe NotificationService, :mailer do
end
end
end
+
+ context 'when notified of a new design diff note' do
+ include DesignManagementTestHelpers
+
+ let_it_be(:design) { create(:design, :with_file) }
+ let_it_be(:project) { design.project }
+ let_it_be(:dev) { create(:user) }
+ let_it_be(:stranger) { create(:user) }
+ let_it_be(:note) do
+ create(:diff_note_on_design,
+ noteable: design,
+ note: "Hello #{dev.to_reference}, G'day #{stranger.to_reference}")
+ end
+ let(:mailer) { double(deliver_later: true) }
+
+ context 'design management is enabled' do
+ before do
+ enable_design_management
+ project.add_developer(dev)
+ allow(Notify).to receive(:note_design_email) { mailer }
+ end
+
+ it 'sends new note notifications' do
+ expect(subject).to receive(:send_new_note_notifications).with(note)
+
+ subject.new_note(note)
+ end
+
+ it 'sends a mail to the developer' do
+ expect(Notify)
+ .to receive(:note_design_email).with(dev.id, note.id, 'mentioned')
+
+ subject.new_note(note)
+ end
+
+ it 'does not notify non-developers' do
+ expect(Notify)
+ .not_to receive(:note_design_email).with(stranger.id, note.id)
+
+ subject.new_note(note)
+ end
+ end
+
+ context 'design management is disabled' do
+ it 'does not notify the user' do
+ expect(Notify).not_to receive(:note_design_email)
+
+ subject.new_note(note)
+ end
+ end
+ end
end
- describe '#send_new_release_notifications', :deliver_mails_inline, :sidekiq_inline do
+ describe '#send_new_release_notifications', :deliver_mails_inline do
context 'when recipients for a new release exist' do
let(:release) { create(:release) }
@@ -712,7 +774,7 @@ describe NotificationService, :mailer do
recipient_2 = NotificationRecipient.new(user_2, :custom, custom_action: :new_release)
allow(NotificationRecipients::BuildService).to receive(:build_new_release_recipients).and_return([recipient_1, recipient_2])
- release
+ notification.send_new_release_notifications(release)
should_email(user_1)
should_email(user_2)
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 63fd0978c97..22fcc6b9a79 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -119,7 +119,7 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do
cert = OpenSSL::X509::Certificate.new
cert.subject = cert.issuer = OpenSSL::X509::Name.parse(subject)
- cert.not_before = Time.now
+ cert.not_before = Time.current
cert.not_after = 1.year.from_now
cert.public_key = key.public_key
cert.serial = 0x0
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
index 3ec5dc68c60..bc4989b59d9 100644
--- a/spec/services/pod_logs/base_service_spec.rb
+++ b/spec/services/pod_logs/base_service_spec.rb
@@ -103,6 +103,36 @@ describe ::PodLogs::BaseService do
expect(result[:container_name]).to eq(container_name)
end
end
+
+ context 'when pod_name is not a string' do
+ let(:params) do
+ {
+ 'pod_name' => { something_that_is: :not_a_string }
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid pod_name')
+ end
+ end
+
+ context 'when container_name is not a string' do
+ let(:params) do
+ {
+ 'container_name' => { something_that_is: :not_a_string }
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid container_name')
+ end
+ end
end
describe '#get_pod_names' do
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
index e3efce1134b..8060d07461a 100644
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -158,6 +158,21 @@ describe ::PodLogs::ElasticsearchService do
end
end
+ context 'with search provided and invalid' do
+ let(:params) do
+ {
+ 'search' => { term: "foo-bar" }
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_search, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq("Invalid search parameter")
+ end
+ end
+
context 'with search not provided' do
let(:params) do
{}
@@ -188,6 +203,21 @@ describe ::PodLogs::ElasticsearchService do
end
end
+ context 'with cursor provided and invalid' do
+ let(:params) do
+ {
+ 'cursor' => { term: "foo-bar" }
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_cursor, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq("Invalid cursor parameter")
+ end
+ end
+
context 'with cursor not provided' do
let(:params) do
{}
@@ -225,7 +255,7 @@ describe ::PodLogs::ElasticsearchService do
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
- .with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
+ .with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor, chart_above_v2: true)
.and_return({ logs: expected_logs, cursor: expected_cursor })
result = subject.send(:pod_logs, result_arg)
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
index da89c7ee117..a1f7645323b 100644
--- a/spec/services/pod_logs/kubernetes_service_spec.rb
+++ b/spec/services/pod_logs/kubernetes_service_spec.rb
@@ -218,7 +218,7 @@ describe ::PodLogs::KubernetesService do
end
it 'returns error if pod_name was specified but does not exist' do
- result = subject.send(:check_pod_name, pod_name: 'another_pod', pods: [pod_name])
+ result = subject.send(:check_pod_name, pod_name: 'another-pod', pods: [pod_name])
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Pod does not exist')
@@ -230,6 +230,13 @@ describe ::PodLogs::KubernetesService do
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('pod_name cannot be larger than 253 chars')
end
+
+ it 'returns error if pod_name is in invalid format' do
+ result = subject.send(:check_pod_name, pod_name: "Invalid_pod_name", pods: [pod_name])
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('pod_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character')
+ end
end
describe '#check_container_name' do
@@ -287,5 +294,16 @@ describe ::PodLogs::KubernetesService do
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('container_name cannot be larger than 253 chars')
end
+
+ it 'returns error if container_name is in invalid format' do
+ result = subject.send(:check_container_name,
+ container_name: "Invalid_container_name",
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('container_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character')
+ end
end
end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index b4f48696b15..25f4122f134 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -166,6 +166,41 @@ describe PostReceiveService do
expect(subject).to include(build_alert_message(message))
end
end
+
+ context 'storage size limit alerts' do
+ let(:check_storage_size_response) { ServiceResponse.success }
+
+ before do
+ expect_next_instance_of(Namespaces::CheckStorageSizeService, project.namespace, user) do |check_storage_size_service|
+ expect(check_storage_size_service).to receive(:execute).and_return(check_storage_size_response)
+ end
+ end
+
+ context 'when there is no payload' do
+ it 'adds no alert' do
+ expect(subject.size).to eq(1)
+ end
+ end
+
+ context 'when there is payload' do
+ let(:check_storage_size_response) do
+ ServiceResponse.success(
+ payload: {
+ alert_level: :info,
+ usage_message: "Usage",
+ explanation_message: "Explanation"
+ }
+ )
+ end
+
+ it 'adds an alert' do
+ response = subject
+
+ expect(response.size).to eq(2)
+ expect(response).to include(build_alert_message("##### INFO #####\nUsage\nExplanation"))
+ end
+ end
+ end
end
context 'with PersonalSnippet' do
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index f08ecd397ec..b88f0ef5149 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -12,11 +12,16 @@ describe Projects::Alerting::NotifyService do
shared_examples 'processes incident issues' do |amount|
let(:create_incident_service) { spy }
+ let(:new_alert) { instance_double(AlertManagement::Alert, id: 503, persisted?: true) }
it 'processes issues' do
+ expect(AlertManagement::Alert)
+ .to receive(:create)
+ .and_return(new_alert)
+
expect(IncidentManagement::ProcessAlertWorker)
.to receive(:perform_async)
- .with(project.id, kind_of(Hash))
+ .with(project.id, kind_of(Hash), new_alert.id)
.exactly(amount).times
Sidekiq::Testing.inline! do
@@ -59,15 +64,26 @@ describe Projects::Alerting::NotifyService do
end
end
+ shared_examples 'NotifyService does not create alert' do
+ it 'does not create alert' do
+ expect { subject }.not_to change(AlertManagement::Alert, :count)
+ end
+ end
+
describe '#execute' do
let(:token) { 'invalid-token' }
- let(:starts_at) { Time.now.change(usec: 0) }
+ let(:starts_at) { Time.current.change(usec: 0) }
let(:service) { described_class.new(project, nil, payload) }
let(:payload_raw) do
{
- 'title' => 'alert title',
- 'start_time' => starts_at.rfc3339
- }
+ title: 'alert title',
+ start_time: starts_at.rfc3339,
+ severity: 'low',
+ monitoring_tool: 'GitLab RSpec',
+ service: 'GitLab Test Suite',
+ description: 'Very detailed description',
+ hosts: ['1.1.1.1', '2.2.2.2']
+ }.with_indifferent_access
end
let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
@@ -88,6 +104,73 @@ describe Projects::Alerting::NotifyService do
.and_return(incident_management_setting)
end
+ context 'with valid payload' do
+ let(:last_alert_attributes) do
+ AlertManagement::Alert.last.attributes
+ .except('id', 'iid', 'created_at', 'updated_at')
+ .with_indifferent_access
+ end
+
+ it 'creates AlertManagement::Alert' do
+ expect { subject }.to change(AlertManagement::Alert, :count).by(1)
+ end
+
+ it 'created alert has all data properly assigned' do
+ subject
+
+ expect(last_alert_attributes).to match(
+ project_id: project.id,
+ title: payload_raw.fetch(:title),
+ started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
+ severity: payload_raw.fetch(:severity),
+ status: AlertManagement::Alert::STATUSES[:triggered],
+ events: 1,
+ hosts: payload_raw.fetch(:hosts),
+ payload: payload_raw.with_indifferent_access,
+ issue_id: nil,
+ description: payload_raw.fetch(:description),
+ monitoring_tool: payload_raw.fetch(:monitoring_tool),
+ service: payload_raw.fetch(:service),
+ fingerprint: nil,
+ ended_at: nil
+ )
+ end
+
+ context 'with a minimal payload' do
+ let(:payload_raw) do
+ {
+ title: 'alert title',
+ start_time: starts_at.rfc3339
+ }
+ end
+
+ it 'creates AlertManagement::Alert' do
+ expect { subject }.to change(AlertManagement::Alert, :count).by(1)
+ end
+
+ it 'created alert has all data properly assigned' do
+ subject
+
+ expect(last_alert_attributes).to match(
+ project_id: project.id,
+ title: payload_raw.fetch(:title),
+ started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
+ severity: 'critical',
+ status: AlertManagement::Alert::STATUSES[:triggered],
+ events: 1,
+ hosts: [],
+ payload: payload_raw.with_indifferent_access,
+ issue_id: nil,
+ description: nil,
+ monitoring_tool: nil,
+ service: nil,
+ fingerprint: nil,
+ ended_at: nil
+ )
+ end
+ end
+ end
+
it_behaves_like 'does not process incident issues'
context 'issue enabled' do
@@ -103,6 +186,7 @@ describe Projects::Alerting::NotifyService do
end
it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
+ it_behaves_like 'NotifyService does not create alert'
end
end
@@ -115,12 +199,14 @@ describe Projects::Alerting::NotifyService do
context 'with invalid token' do
it_behaves_like 'does not process incident issues due to error', http_status: :unauthorized
+ it_behaves_like 'NotifyService does not create alert'
end
context 'with deactivated Alerts Service' do
let!(:alerts_service) { create(:alerts_service, :inactive, project: project) }
it_behaves_like 'does not process incident issues due to error', http_status: :forbidden
+ it_behaves_like 'NotifyService does not create alert'
end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 1feea27eebc..e542f1e9108 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -489,6 +489,104 @@ describe Projects::CreateService, '#execute' do
end
end
+ it_behaves_like 'measurable service' do
+ before do
+ opts.merge!(
+ current_user: user,
+ path: 'foo'
+ )
+ end
+
+ let(:base_log_data) do
+ {
+ class: Projects::CreateService.name,
+ current_user: user.name,
+ project_full_path: "#{user.namespace.full_path}/#{opts[:path]}"
+ }
+ end
+
+ after do
+ create_project(user, opts)
+ end
+ end
+
+ context 'with specialized_project_authorization_workers' do
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:opts) do
+ {
+ name: 'GitLab',
+ namespace_id: group.id
+ }
+ end
+
+ before do
+ group.add_maintainer(user)
+ group.add_developer(other_user)
+ end
+
+ it 'updates authorization for current_user' do
+ expect(Users::RefreshAuthorizedProjectsService).to(
+ receive(:new).with(user).and_call_original
+ )
+
+ project = create_project(user, opts)
+
+ expect(
+ Ability.allowed?(user, :read_project, project)
+ ).to be_truthy
+ end
+
+ it 'schedules authorization update for users with access to group' do
+ expect(AuthorizedProjectsWorker).not_to(
+ receive(:bulk_perform_async)
+ )
+ expect(AuthorizedProjectUpdate::ProjectCreateWorker).to(
+ receive(:perform_async).and_call_original
+ )
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour, array_including([user.id], [other_user.id]))
+ .and_call_original
+ )
+
+ create_project(user, opts)
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(specialized_project_authorization_workers: false)
+ end
+
+ it 'updates authorization for current_user' do
+ expect(Users::RefreshAuthorizedProjectsService).to(
+ receive(:new).with(user).and_call_original
+ )
+
+ project = create_project(user, opts)
+
+ expect(
+ Ability.allowed?(user, :read_project, project)
+ ).to be_truthy
+ end
+
+ it 'uses AuthorizedProjectsWorker' do
+ expect(AuthorizedProjectsWorker).to(
+ receive(:bulk_perform_async).with(array_including([user.id], [other_user.id])).and_call_original
+ )
+ expect(AuthorizedProjectUpdate::ProjectCreateWorker).not_to(
+ receive(:perform_async)
+ )
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to(
+ receive(:bulk_perform_in)
+ )
+
+ create_project(user, opts)
+ end
+ end
+ end
+
def create_project(user, opts)
Projects::CreateService.new(user, opts).execute
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index c8354f6ba4e..112a41c773b 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -320,7 +320,13 @@ describe Projects::ForkService do
allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
.and_return(::Gitlab::Git::BLANK_SHA)
- Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage')
+ storage_move = create(
+ :project_repository_storage_move,
+ :scheduled,
+ project: project,
+ destination_storage_name: 'test_second_storage'
+ )
+ Projects::UpdateRepositoryStorageService.new(storage_move).execute
fork_after_move = fork_project(project)
pool_repository_before_move = PoolRepository.joins(:shard)
.find_by(source_project: project, shards: { name: 'default' })
diff --git a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
index 34c37be6703..070dd5fc1b8 100644
--- a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
+++ b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
@@ -31,7 +31,7 @@ describe Projects::HashedStorage::BaseAttachmentService do
expect(Dir.exist?(target_path)).to be_truthy
Timecop.freeze do
- suffix = Time.now.utc.to_i
+ suffix = Time.current.utc.to_i
subject.send(:discard_path!, target_path)
expected_renamed_path = "#{target_path}-#{suffix}"
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index 71be335c11d..f1eaf8324e0 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -6,7 +6,7 @@ describe Projects::HashedStorage::MigrateRepositoryService do
include GitHelpers
let(:gitlab_shell) { Gitlab::Shell.new }
- let(:project) { create(:project, :legacy_storage, :repository, :wiki_repo) }
+ let(:project) { create(:project, :legacy_storage, :repository, :wiki_repo, :design_repo) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::Hashed.new(project) }
@@ -45,11 +45,12 @@ describe Projects::HashedStorage::MigrateRepositoryService do
end
context 'when succeeds' do
- it 'renames project and wiki repositories' do
+ it 'renames project, wiki and design repositories' do
service.execute
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
+ expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_truthy
end
it 'updates project to be hashed and not read-only' do
@@ -59,9 +60,10 @@ describe Projects::HashedStorage::MigrateRepositoryService do
expect(project.repository_read_only).to be_falsey
end
- it 'move operation is called for both repositories' do
+ it 'move operation is called for all repositories' do
expect_move_repository(old_disk_path, new_disk_path)
expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
+ expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
service.execute
end
@@ -86,6 +88,7 @@ describe Projects::HashedStorage::MigrateRepositoryService do
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
+ expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_falsey
expect(project.repository_read_only?).to be_falsey
end
@@ -97,6 +100,7 @@ describe Projects::HashedStorage::MigrateRepositoryService do
it 'does not try to move nil repository over existing' do
expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
+ expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
service.execute
end
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
index 6dcd2ff4555..1c0f446d9cf 100644
--- a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
@@ -6,7 +6,7 @@ describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis
include GitHelpers
let(:gitlab_shell) { Gitlab::Shell.new }
- let(:project) { create(:project, :repository, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
+ let(:project) { create(:project, :repository, :wiki_repo, :design_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::Hashed.new(project) }
@@ -45,11 +45,12 @@ describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis
end
context 'when succeeds' do
- it 'renames project and wiki repositories' do
+ it 'renames project, wiki and design repositories' do
service.execute
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
+ expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_truthy
end
it 'updates project to be legacy and not read-only' do
@@ -62,6 +63,7 @@ describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis
it 'move operation is called for both repositories' do
expect_move_repository(old_disk_path, new_disk_path)
expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
+ expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
service.execute
end
@@ -86,6 +88,7 @@ describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
+ expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_falsey
expect(project.repository_read_only?).to be_falsey
end
@@ -97,6 +100,7 @@ describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis
it 'does not try to move nil repository over existing' do
expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
+ expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
service.execute
end
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index e00507d1827..5f496cb1e56 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -7,9 +7,10 @@ describe Projects::ImportExport::ExportService do
let!(:user) { create(:user) }
let(:project) { create(:project) }
let(:shared) { project.import_export_shared }
- let(:service) { described_class.new(project, user) }
let!(:after_export_strategy) { Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy.new }
+ subject(:service) { described_class.new(project, user) }
+
before do
project.add_maintainer(user)
end
@@ -46,8 +47,8 @@ describe Projects::ImportExport::ExportService do
# in the corresponding EE spec.
skip if Gitlab.ee?
- # once for the normal repo, once for the wiki
- expect(Gitlab::ImportExport::RepoSaver).to receive(:new).twice.and_call_original
+ # once for the normal repo, once for the wiki repo, and once for the design repo
+ expect(Gitlab::ImportExport::RepoSaver).to receive(:new).exactly(3).times.and_call_original
service.execute
end
@@ -58,6 +59,12 @@ describe Projects::ImportExport::ExportService do
service.execute
end
+ it 'saves the design repo' do
+ expect(Gitlab::ImportExport::DesignRepoSaver).to receive(:new).and_call_original
+
+ service.execute
+ end
+
it 'saves the lfs objects' do
expect(Gitlab::ImportExport::LfsSaver).to receive(:new).and_call_original
@@ -177,5 +184,20 @@ describe Projects::ImportExport::ExportService do
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
end
end
+
+ it_behaves_like 'measurable service' do
+ let(:base_log_data) do
+ {
+ class: described_class.name,
+ current_user: user.name,
+ project_full_path: project.full_path,
+ file_path: shared.export_path
+ }
+ end
+
+ after do
+ service.execute(after_export_strategy)
+ end
+ end
end
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index af8118f9b11..ca6750b373d 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -264,13 +264,33 @@ describe Projects::ImportService do
it 'fails with port 25' do
project.import_url = "https://github.com:25/vim/vim.git"
- result = described_class.new(project, user).execute
+ result = subject.execute
expect(result[:status]).to eq :error
expect(result[:message]).to include('Only allowed ports are 80, 443')
end
end
+ it_behaves_like 'measurable service' do
+ let(:base_log_data) do
+ {
+ class: described_class.name,
+ current_user: user.name,
+ project_full_path: project.full_path,
+ import_type: project.import_type,
+ file_path: project.import_source
+ }
+ end
+
+ before do
+ project.import_type = 'github'
+ end
+
+ after do
+ subject.execute
+ end
+ end
+
def stub_github_omniauth_provider
provider = OpenStruct.new(
'name' => 'github',
diff --git a/spec/services/projects/prometheus/alerts/create_events_service_spec.rb b/spec/services/projects/prometheus/alerts/create_events_service_spec.rb
index 1d726db6ce3..35f23afd7a2 100644
--- a/spec/services/projects/prometheus/alerts/create_events_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/create_events_service_spec.rb
@@ -50,7 +50,7 @@ describe Projects::Prometheus::Alerts::CreateEventsService do
let(:events) { service.execute }
context 'with a firing payload' do
- let(:started_at) { truncate_to_second(Time.now) }
+ let(:started_at) { truncate_to_second(Time.current) }
let(:firing_event) { alert_payload(status: 'firing', started_at: started_at) }
let(:alerts_payload) { { 'alerts' => [firing_event] } }
@@ -87,7 +87,7 @@ describe Projects::Prometheus::Alerts::CreateEventsService do
end
context 'with a resolved payload' do
- let(:started_at) { truncate_to_second(Time.now) }
+ let(:started_at) { truncate_to_second(Time.current) }
let(:ended_at) { started_at + 1 }
let(:payload_key) { PrometheusAlertEvent.payload_key_for(alert.prometheus_metric_id, utc_rfc3339(started_at)) }
let(:resolved_event) { alert_payload(status: 'resolved', started_at: started_at, ended_at: ended_at) }
@@ -285,7 +285,7 @@ describe Projects::Prometheus::Alerts::CreateEventsService do
private
- def alert_payload(status: 'firing', started_at: Time.now, ended_at: Time.now, gitlab_alert_id: alert.prometheus_metric_id, title: nil, environment: nil)
+ def alert_payload(status: 'firing', started_at: Time.current, ended_at: Time.current, gitlab_alert_id: alert.prometheus_metric_id, title: nil, environment: nil)
payload = {}
payload['status'] = status if status
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index dce96dda1e3..009543f9016 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -217,6 +217,32 @@ describe Projects::Prometheus::Alerts::NotifyService do
end
end
+ context 'process Alert Management alerts' do
+ let(:process_service) { instance_double(AlertManagement::ProcessPrometheusAlertService) }
+
+ before do
+ create(:prometheus_service, project: project)
+ create(:project_alerting_setting, project: project, token: token)
+ end
+
+ context 'with multiple firing alerts and resolving alerts' do
+ let(:payload_raw) do
+ payload_for(firing: [alert_firing, alert_firing], resolved: [alert_resolved])
+ end
+
+ it 'processes Prometheus alerts' do
+ expect(AlertManagement::ProcessPrometheusAlertService)
+ .to receive(:new)
+ .with(project, nil, kind_of(Hash))
+ .exactly(3).times
+ .and_return(process_service)
+ expect(process_service).to receive(:execute).exactly(3).times
+
+ subject
+ end
+ end
+ end
+
context 'process incident issues' do
before do
create(:prometheus_service, project: project)
@@ -286,6 +312,13 @@ describe Projects::Prometheus::Alerts::NotifyService do
it_behaves_like 'no notifications', http_status: :bad_request
+ it 'does not process Prometheus alerts' do
+ expect(AlertManagement::ProcessPrometheusAlertService)
+ .not_to receive(:new)
+
+ subject
+ end
+
it 'does not process issues' do
expect(IncidentManagement::ProcessPrometheusAlertWorker)
.not_to receive(:perform_async)
diff --git a/spec/services/projects/propagate_service_template_spec.rb b/spec/services/projects/propagate_service_template_spec.rb
index 2c3effec617..7188ac5f733 100644
--- a/spec/services/projects/propagate_service_template_spec.rb
+++ b/spec/services/projects/propagate_service_template_spec.rb
@@ -8,16 +8,19 @@ describe Projects::PropagateServiceTemplate do
PushoverService.create(
template: true,
active: true,
+ push_events: false,
properties: {
device: 'MyDevice',
sound: 'mic',
priority: 4,
user_key: 'asdf',
api_key: '123456789'
- })
+ }
+ )
end
let!(:project) { create(:project) }
+ let(:excluded_attributes) { %w[id project_id template created_at updated_at title description] }
it 'creates services for projects' do
expect(project.pushover_service).to be_nil
@@ -35,7 +38,7 @@ describe Projects::PropagateServiceTemplate do
properties: {
bamboo_url: 'http://gitlab.com',
username: 'mic',
- password: "password",
+ password: 'password',
build_key: 'build'
}
)
@@ -54,7 +57,7 @@ describe Projects::PropagateServiceTemplate do
properties: {
bamboo_url: 'http://gitlab.com',
username: 'mic',
- password: "password",
+ password: 'password',
build_key: 'build'
}
)
@@ -70,6 +73,33 @@ describe Projects::PropagateServiceTemplate do
described_class.propagate(service_template)
expect(project.pushover_service.properties).to eq(service_template.properties)
+
+ expect(project.pushover_service.attributes.except(*excluded_attributes))
+ .to eq(service_template.attributes.except(*excluded_attributes))
+ end
+
+ context 'service with data fields' do
+ let(:service_template) do
+ JiraService.create!(
+ template: true,
+ active: true,
+ push_events: false,
+ url: 'http://jira.instance.com',
+ username: 'user',
+ password: 'secret'
+ )
+ end
+
+ it 'creates the service containing the template attributes' do
+ described_class.propagate(service_template)
+
+ expect(project.jira_service.attributes.except(*excluded_attributes))
+ .to eq(service_template.attributes.except(*excluded_attributes))
+
+ excluded_attributes = %w[id service_id created_at updated_at]
+ expect(project.jira_service.data_fields.attributes.except(*excluded_attributes))
+ .to eq(service_template.data_fields.attributes.except(*excluded_attributes))
+ end
end
describe 'bulk update', :use_sql_query_cache do
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index f17ddb22d22..0e2431c0e44 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -9,18 +9,26 @@ describe Projects::TransferService do
let(:group) { create(:group) }
let(:project) { create(:project, :repository, :legacy_storage, namespace: user.namespace) }
+ subject(:execute_transfer) { described_class.new(project, user).execute(group) }
+
context 'namespace -> namespace' do
before do
- allow_any_instance_of(Gitlab::UploadsTransfer)
- .to receive(:move_project).and_return(true)
- allow_any_instance_of(Gitlab::PagesTransfer)
- .to receive(:move_project).and_return(true)
+ allow_next_instance_of(Gitlab::UploadsTransfer) do |service|
+ allow(service).to receive(:move_project).and_return(true)
+ end
+ allow_next_instance_of(Gitlab::PagesTransfer) do |service|
+ allow(service).to receive(:move_project).and_return(true)
+ end
+
group.add_owner(user)
- @result = transfer_project(project, user, group)
end
- it { expect(@result).to be_truthy }
- it { expect(project.namespace).to eq(group) }
+ it 'updates the namespace' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be_truthy
+ expect(project.namespace).to eq(group)
+ end
end
context 'when transfer succeeds' do
@@ -31,26 +39,29 @@ describe Projects::TransferService do
it 'sends notifications' do
expect_any_instance_of(NotificationService).to receive(:project_was_moved)
- transfer_project(project, user, group)
+ execute_transfer
end
it 'invalidates the user\'s personal_project_count cache' do
expect(user).to receive(:invalidate_personal_projects_count)
- transfer_project(project, user, group)
+ execute_transfer
end
it 'executes system hooks' do
- transfer_project(project, user, group) do |service|
+ expect_next_instance_of(described_class) do |service|
expect(service).to receive(:execute_system_hooks)
end
+
+ execute_transfer
end
it 'moves the disk path', :aggregate_failures do
old_path = project.repository.disk_path
old_full_path = project.repository.full_path
- transfer_project(project, user, group)
+ execute_transfer
+
project.reload_repository!
expect(project.repository.disk_path).not_to eq(old_path)
@@ -60,13 +71,13 @@ describe Projects::TransferService do
end
it 'updates project full path in .git/config' do
- transfer_project(project, user, group)
+ execute_transfer
expect(rugged_config['gitlab.fullpath']).to eq "#{group.full_path}/#{project.path}"
end
it 'updates storage location' do
- transfer_project(project, user, group)
+ execute_transfer
expect(project.project_repository).to have_attributes(
disk_path: "#{group.full_path}/#{project.path}",
@@ -80,7 +91,7 @@ describe Projects::TransferService do
def attempt_project_transfer(&block)
expect do
- transfer_project(project, user, group, &block)
+ execute_transfer
end.to raise_error(ActiveRecord::ActiveRecordError)
end
@@ -138,13 +149,15 @@ describe Projects::TransferService do
end
context 'namespace -> no namespace' do
- before do
- @result = transfer_project(project, user, nil)
- end
+ let(:group) { nil }
+
+ it 'does not allow the project transfer' do
+ transfer_result = execute_transfer
- it { expect(@result).to eq false }
- it { expect(project.namespace).to eq(user.namespace) }
- it { expect(project.errors.messages[:new_namespace].first).to eq 'Please select a new namespace for your project.' }
+ expect(transfer_result).to eq false
+ expect(project.namespace).to eq(user.namespace)
+ expect(project.errors.messages[:new_namespace].first).to eq 'Please select a new namespace for your project.'
+ end
end
context 'disallow transferring of project with tags' do
@@ -156,18 +169,18 @@ describe Projects::TransferService do
project.container_repositories << container_repository
end
- subject { transfer_project(project, user, group) }
-
- it { is_expected.to be_falsey }
+ it 'does not allow the project transfer' do
+ expect(execute_transfer).to eq false
+ end
end
context 'namespace -> not allowed namespace' do
- before do
- @result = transfer_project(project, user, group)
- end
+ it 'does not allow the project transfer' do
+ transfer_result = execute_transfer
- it { expect(@result).to eq false }
- it { expect(project.namespace).to eq(user.namespace) }
+ expect(transfer_result).to eq false
+ expect(project.namespace).to eq(user.namespace)
+ end
end
context 'namespace which contains orphan repository with same projects path name' do
@@ -177,99 +190,94 @@ describe Projects::TransferService do
group.add_owner(user)
TestEnv.create_bare_repository(fake_repo_path)
-
- @result = transfer_project(project, user, group)
end
after do
FileUtils.rm_rf(fake_repo_path)
end
- it { expect(@result).to eq false }
- it { expect(project.namespace).to eq(user.namespace) }
- it { expect(project.errors[:new_namespace]).to include('Cannot move project') }
+ it 'does not allow the project transfer' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to eq false
+ expect(project.namespace).to eq(user.namespace)
+ expect(project.errors[:new_namespace]).to include('Cannot move project')
+ end
end
context 'target namespace containing the same project name' do
before do
group.add_owner(user)
- project.update(name: 'new_name')
+ create(:project, name: project.name, group: group, path: 'other')
+ end
- create(:project, name: 'new_name', group: group, path: 'other')
+ it 'does not allow the project transfer' do
+ transfer_result = execute_transfer
- @result = transfer_project(project, user, group)
+ expect(transfer_result).to eq false
+ expect(project.namespace).to eq(user.namespace)
+ expect(project.errors[:new_namespace]).to include('Project with same name or path in target namespace already exists')
end
-
- it { expect(@result).to eq false }
- it { expect(project.namespace).to eq(user.namespace) }
- it { expect(project.errors[:new_namespace]).to include('Project with same name or path in target namespace already exists') }
end
context 'target namespace containing the same project path' do
before do
group.add_owner(user)
-
create(:project, name: 'other-name', path: project.path, group: group)
-
- @result = transfer_project(project, user, group)
end
- it { expect(@result).to eq false }
- it { expect(project.namespace).to eq(user.namespace) }
- it { expect(project.errors[:new_namespace]).to include('Project with same name or path in target namespace already exists') }
+ it 'does not allow the project transfer' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to eq false
+ expect(project.namespace).to eq(user.namespace)
+ expect(project.errors[:new_namespace]).to include('Project with same name or path in target namespace already exists')
+ end
end
context 'target namespace allows developers to create projects' do
let(:group) { create(:group, project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
context 'the user is a member of the target namespace with developer permissions' do
- subject(:transfer_project_result) { transfer_project(project, user, group) }
-
before do
group.add_developer(user)
end
it 'does not allow project transfer to the target namespace' do
- expect(transfer_project_result).to eq false
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to eq false
expect(project.namespace).to eq(user.namespace)
expect(project.errors[:new_namespace]).to include('Transfer failed, please contact an admin.')
end
end
end
- def transfer_project(project, user, new_namespace)
- service = Projects::TransferService.new(project, user)
-
- yield(service) if block_given?
-
- service.execute(new_namespace)
- end
-
context 'visibility level' do
- let(:internal_group) { create(:group, :internal) }
+ let(:group) { create(:group, :internal) }
before do
- internal_group.add_owner(user)
+ group.add_owner(user)
end
context 'when namespace visibility level < project visibility level' do
- let(:public_project) { create(:project, :public, :repository, namespace: user.namespace) }
+ let(:project) { create(:project, :public, :repository, namespace: user.namespace) }
before do
- transfer_project(public_project, user, internal_group)
+ execute_transfer
end
- it { expect(public_project.visibility_level).to eq(internal_group.visibility_level) }
+ it { expect(project.visibility_level).to eq(group.visibility_level) }
end
context 'when namespace visibility level > project visibility level' do
- let(:private_project) { create(:project, :private, :repository, namespace: user.namespace) }
+ let(:project) { create(:project, :private, :repository, namespace: user.namespace) }
before do
- transfer_project(private_project, user, internal_group)
+ execute_transfer
end
- it { expect(private_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) }
+ it { expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) }
end
end
@@ -277,9 +285,11 @@ describe Projects::TransferService do
it 'delegates transfer to Labels::TransferService' do
group.add_owner(user)
- expect_any_instance_of(Labels::TransferService).to receive(:execute).once.and_call_original
+ expect_next_instance_of(Labels::TransferService, user, project.group, project) do |labels_transfer_service|
+ expect(labels_transfer_service).to receive(:execute).once.and_call_original
+ end
- transfer_project(project, user, group)
+ execute_transfer
end
end
@@ -287,49 +297,52 @@ describe Projects::TransferService do
it 'delegates transfer to Milestones::TransferService' do
group.add_owner(user)
- expect(Milestones::TransferService).to receive(:new).with(user, project.group, project).and_call_original
- expect_any_instance_of(Milestones::TransferService).to receive(:execute).once
+ expect_next_instance_of(Milestones::TransferService, user, project.group, project) do |milestones_transfer_service|
+ expect(milestones_transfer_service).to receive(:execute).once.and_call_original
+ end
- transfer_project(project, user, group)
+ execute_transfer
end
end
context 'when hashed storage in use' do
- let!(:hashed_project) { create(:project, :repository, namespace: user.namespace) }
- let!(:old_disk_path) { hashed_project.repository.disk_path }
+ let!(:project) { create(:project, :repository, namespace: user.namespace) }
+ let!(:old_disk_path) { project.repository.disk_path }
before do
group.add_owner(user)
end
it 'does not move the disk path', :aggregate_failures do
- new_full_path = "#{group.full_path}/#{hashed_project.path}"
+ new_full_path = "#{group.full_path}/#{project.path}"
- transfer_project(hashed_project, user, group)
- hashed_project.reload_repository!
+ execute_transfer
- expect(hashed_project.repository).to have_attributes(
+ project.reload_repository!
+
+ expect(project.repository).to have_attributes(
disk_path: old_disk_path,
full_path: new_full_path
)
- expect(hashed_project.disk_path).to eq(old_disk_path)
+ expect(project.disk_path).to eq(old_disk_path)
end
it 'does not move the disk path when the transfer fails', :aggregate_failures do
- old_full_path = hashed_project.full_path
+ old_full_path = project.full_path
expect_next_instance_of(described_class) do |service|
allow(service).to receive(:execute_system_hooks).and_raise('foo')
end
- expect { transfer_project(hashed_project, user, group) }.to raise_error('foo')
- hashed_project.reload_repository!
+ expect { execute_transfer }.to raise_error('foo')
+
+ project.reload_repository!
- expect(hashed_project.repository).to have_attributes(
+ expect(project.repository).to have_attributes(
disk_path: old_disk_path,
full_path: old_full_path
)
- expect(hashed_project.disk_path).to eq(old_disk_path)
+ expect(project.disk_path).to eq(old_disk_path)
end
end
@@ -344,18 +357,102 @@ describe Projects::TransferService do
end
it 'refreshes the permissions of the old and new namespace' do
- transfer_project(project, owner, group)
+ execute_transfer
expect(group_member.authorized_projects).to include(project)
expect(owner.authorized_projects).to include(project)
end
it 'only schedules a single job for every user' do
- expect(UserProjectAccessChangedService).to receive(:new)
- .with([owner.id, group_member.id])
- .and_call_original
+ expect_next_instance_of(UserProjectAccessChangedService, [owner.id, group_member.id]) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ execute_transfer
+ end
+ end
+
+ describe 'transferring a design repository' do
+ subject { described_class.new(project, user) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ def design_repository
+ project.design_repository
+ end
+
+ it 'does not create a design repository' do
+ expect(subject.execute(group)).to be true
+
+ project.clear_memoization(:design_repository)
+
+ expect(design_repository.exists?).to be false
+ end
- transfer_project(project, owner, group)
+ describe 'when the project has a design repository' do
+ let(:project_repo_path) { "#{project.path}#{::Gitlab::GlRepository::DESIGN.path_suffix}" }
+ let(:old_full_path) { "#{user.namespace.full_path}/#{project_repo_path}" }
+ let(:new_full_path) { "#{group.full_path}/#{project_repo_path}" }
+
+ context 'with legacy storage' do
+ let(:project) { create(:project, :repository, :legacy_storage, :design_repo, namespace: user.namespace) }
+
+ it 'moves the repository' do
+ expect(subject.execute(group)).to be true
+
+ project.clear_memoization(:design_repository)
+
+ expect(design_repository).to have_attributes(
+ disk_path: new_full_path,
+ full_path: new_full_path
+ )
+ end
+
+ it 'does not move the repository when an error occurs', :aggregate_failures do
+ allow(subject).to receive(:execute_system_hooks).and_raise('foo')
+ expect { subject.execute(group) }.to raise_error('foo')
+
+ project.clear_memoization(:design_repository)
+
+ expect(design_repository).to have_attributes(
+ disk_path: old_full_path,
+ full_path: old_full_path
+ )
+ end
+ end
+
+ context 'with hashed storage' do
+ let(:project) { create(:project, :repository, namespace: user.namespace) }
+
+ it 'does not move the repository' do
+ old_disk_path = design_repository.disk_path
+
+ expect(subject.execute(group)).to be true
+
+ project.clear_memoization(:design_repository)
+
+ expect(design_repository).to have_attributes(
+ disk_path: old_disk_path,
+ full_path: new_full_path
+ )
+ end
+
+ it 'does not move the repository when an error occurs' do
+ old_disk_path = design_repository.disk_path
+
+ allow(subject).to receive(:execute_system_hooks).and_raise('foo')
+ expect { subject.execute(group) }.to raise_error('foo')
+
+ project.clear_memoization(:design_repository)
+
+ expect(design_repository).to have_attributes(
+ disk_path: old_disk_path,
+ full_path: old_full_path
+ )
+ end
+ end
end
end
diff --git a/spec/services/projects/update_remote_mirror_service_spec.rb b/spec/services/projects/update_remote_mirror_service_spec.rb
index 4396ccab584..38c2dc0780e 100644
--- a/spec/services/projects/update_remote_mirror_service_spec.rb
+++ b/spec/services/projects/update_remote_mirror_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Projects::UpdateRemoteMirrorService do
let(:project) { create(:project, :repository) }
let(:remote_project) { create(:forked_project_with_submodules) }
- let(:remote_mirror) { project.remote_mirrors.create!(url: remote_project.http_url_to_repo, enabled: true, only_protected_branches: false) }
+ let(:remote_mirror) { create(:remote_mirror, project: project, enabled: true) }
let(:remote_name) { remote_mirror.remote_name }
subject(:service) { described_class.new(project, project.creator) }
@@ -16,7 +16,9 @@ describe Projects::UpdateRemoteMirrorService do
before do
project.repository.add_branch(project.owner, 'existing-branch', 'master')
- allow(remote_mirror).to receive(:update_repository).and_return(true)
+ allow(remote_mirror)
+ .to receive(:update_repository)
+ .and_return(double(divergent_refs: []))
end
it 'ensures the remote exists' do
@@ -53,7 +55,7 @@ describe Projects::UpdateRemoteMirrorService do
it 'marks the mirror as failed and raises the error when an unexpected error occurs' do
allow(project.repository).to receive(:fetch_remote).and_raise('Badly broken')
- expect { execute! }.to raise_error /Badly broken/
+ expect { execute! }.to raise_error(/Badly broken/)
expect(remote_mirror).to be_failed
expect(remote_mirror.last_error).to include('Badly broken')
@@ -83,32 +85,21 @@ describe Projects::UpdateRemoteMirrorService do
end
end
- context 'when syncing all branches' do
- it 'push all the branches the first time' do
+ context 'when there are divergent refs' do
+ before do
stub_fetch_remote(project, remote_name: remote_name, ssh_auth: remote_mirror)
-
- expect(remote_mirror).to receive(:update_repository).with({})
-
- execute!
end
- end
- context 'when only syncing protected branches' do
- it 'sync updated protected branches' do
- stub_fetch_remote(project, remote_name: remote_name, ssh_auth: remote_mirror)
- protected_branch = create_protected_branch(project)
- remote_mirror.only_protected_branches = true
-
- expect(remote_mirror)
- .to receive(:update_repository)
- .with(only_branches_matching: [protected_branch.name])
+ it 'marks the mirror as failed and sets an error message' do
+ response = double(divergent_refs: %w[refs/heads/master refs/heads/develop])
+ expect(remote_mirror).to receive(:update_repository).and_return(response)
execute!
- end
- def create_protected_branch(project)
- branch_name = project.repository.branch_names.find { |n| n != 'existing-branch' }
- create(:protected_branch, project: project, name: branch_name)
+ expect(remote_mirror).to be_failed
+ expect(remote_mirror.last_error).to include("Some refs have diverged")
+ expect(remote_mirror.last_error).to include("refs/heads/master\n")
+ expect(remote_mirror.last_error).to include("refs/heads/develop")
end
end
end
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index 05555fa76f7..28b79bc61d9 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -5,17 +5,20 @@ require 'spec_helper'
describe Projects::UpdateRepositoryStorageService do
include Gitlab::ShellAdapter
- subject { described_class.new(project) }
+ subject { described_class.new(repository_storage_move) }
describe "#execute" do
- let(:time) { Time.now }
+ let(:time) { Time.current }
before do
allow(Time).to receive(:now).and_return(time)
+ allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%w[default test_second_storage])
end
context 'without wiki and design repository' do
let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: false) }
+ let(:destination) { 'test_second_storage' }
+ let(:repository_storage_move) { create(:project_repository_storage_move, :scheduled, project: project, destination_storage_name: destination) }
let!(:checksum) { project.repository.checksum }
let(:project_repository_double) { double(:repository) }
@@ -41,9 +44,9 @@ describe Projects::UpdateRepositoryStorageService do
expect(project_repository_double).to receive(:checksum)
.and_return(checksum)
- result = subject.execute('test_second_storage')
+ result = subject.execute
- expect(result[:status]).to eq(:success)
+ expect(result).to be_success
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('test_second_storage')
expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
@@ -52,11 +55,13 @@ describe Projects::UpdateRepositoryStorageService do
end
context 'when the filesystems are the same' do
+ let(:destination) { project.repository_storage }
+
it 'bails out and does nothing' do
- result = subject.execute(project.repository_storage)
+ result = subject.execute
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to match(/SameFilesystemError/)
+ expect(result).to be_error
+ expect(result.message).to match(/SameFilesystemError/)
end
end
@@ -72,9 +77,9 @@ describe Projects::UpdateRepositoryStorageService do
.and_raise(Gitlab::Git::CommandError)
expect(GitlabShellWorker).not_to receive(:perform_async)
- result = subject.execute('test_second_storage')
+ result = subject.execute
- expect(result[:status]).to eq(:error)
+ expect(result).to be_error
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
@@ -93,9 +98,9 @@ describe Projects::UpdateRepositoryStorageService do
.and_return('not matching checksum')
expect(GitlabShellWorker).not_to receive(:perform_async)
- result = subject.execute('test_second_storage')
+ result = subject.execute
- expect(result[:status]).to eq(:error)
+ expect(result).to be_error
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
@@ -115,9 +120,9 @@ describe Projects::UpdateRepositoryStorageService do
expect(project_repository_double).to receive(:checksum)
.and_return(checksum)
- result = subject.execute('test_second_storage')
+ result = subject.execute
- expect(result[:status]).to eq(:success)
+ expect(result).to be_success
expect(project.repository_storage).to eq('test_second_storage')
expect(project.reload_pool_repository).to be_nil
end
@@ -128,11 +133,26 @@ describe Projects::UpdateRepositoryStorageService do
include_examples 'moves repository to another storage', 'wiki' do
let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: true) }
let(:repository) { project.wiki.repository }
+ let(:destination) { 'test_second_storage' }
+ let(:repository_storage_move) { create(:project_repository_storage_move, :scheduled, project: project, destination_storage_name: destination) }
before do
project.create_wiki
end
end
end
+
+ context 'with design repository' do
+ include_examples 'moves repository to another storage', 'design' do
+ let(:project) { create(:project, :repository, repository_read_only: true) }
+ let(:repository) { project.design_repository }
+ let(:destination) { 'test_second_storage' }
+ let(:repository_storage_move) { create(:project_repository_storage_move, :scheduled, project: project, destination_storage_name: destination) }
+
+ before do
+ project.design_repository.create_if_not_exists
+ end
+ end
+ end
end
end
diff --git a/spec/services/prometheus/proxy_service_spec.rb b/spec/services/prometheus/proxy_service_spec.rb
index 5a036194d01..656ccea10de 100644
--- a/spec/services/prometheus/proxy_service_spec.rb
+++ b/spec/services/prometheus/proxy_service_spec.rb
@@ -117,7 +117,7 @@ describe Prometheus::ProxyService do
context 'when value not present in cache' do
it 'returns nil' do
- expect(ReactiveCachingWorker)
+ expect(ExternalServiceReactiveCachingWorker)
.to receive(:perform_async)
.with(subject.class, subject.id, *opts)
diff --git a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb b/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
index 9978c631366..82ea356d599 100644
--- a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
+++ b/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
@@ -6,7 +6,7 @@ describe Prometheus::ProxyVariableSubstitutionService do
describe '#execute' do
let_it_be(:environment) { create(:environment) }
- let(:params_keys) { { query: 'up{environment="%{ci_environment_slug}"}' } }
+ let(:params_keys) { { query: 'up{environment="{{ci_environment_slug}}"}' } }
let(:params) { ActionController::Parameters.new(params_keys).permit! }
let(:result) { subject.execute }
@@ -32,21 +32,13 @@ describe Prometheus::ProxyVariableSubstitutionService do
expect(params).to eq(
ActionController::Parameters.new(
- query: 'up{environment="%{ci_environment_slug}"}'
+ query: 'up{environment="{{ci_environment_slug}}"}'
).permit!
)
end
end
context 'with predefined variables' do
- let(:params_keys) { { query: 'up{%{environment_filter}}' } }
-
- it_behaves_like 'success' do
- let(:expected_query) do
- %Q[up{container_name!="POD",environment="#{environment.slug}"}]
- end
- end
-
context 'with nil query' do
let(:params_keys) { {} }
@@ -64,18 +56,6 @@ describe Prometheus::ProxyVariableSubstitutionService do
let(:expected_query) { %Q[up{environment="#{environment.slug}"}] }
end
end
-
- context 'with ruby and liquid formats' do
- let(:params_keys) do
- { query: 'up{%{environment_filter},env2="{{ci_environment_slug}}"}' }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) do
- %Q[up{container_name!="POD",environment="#{environment.slug}",env2="#{environment.slug}"}]
- end
- end
- end
end
context 'with custom variables' do
@@ -92,20 +72,6 @@ describe Prometheus::ProxyVariableSubstitutionService do
let(:expected_query) { %q[up{pod_name="pod1"}] }
end
- context 'with ruby variable interpolation format' do
- let(:params_keys) do
- {
- query: 'up{pod_name="%{pod_name}"}',
- variables: ['pod_name', pod_name]
- }
- end
-
- it_behaves_like 'success' do
- # Custom variables cannot be used with the Ruby interpolation format.
- let(:expected_query) { "up{pod_name=\"%{pod_name}\"}" }
- end
- end
-
context 'with predefined variables in variables parameter' do
let(:params_keys) do
{
@@ -142,62 +108,47 @@ describe Prometheus::ProxyVariableSubstitutionService do
end
it_behaves_like 'success' do
- let(:expected_query) { 'up{pod_name=""}' }
+ let(:expected_query) { 'up{pod_name="{{pod_name}}"}' }
end
end
+ end
- context 'with ruby and liquid variables' do
+ context 'gsub variable substitution tolerance for weirdness' do
+ context 'with whitespace around variable' do
let(:params_keys) do
{
- query: 'up{env1="%{ruby_variable}",env2="{{ liquid_variable }}"}',
- variables: %w(ruby_variable value liquid_variable env_slug)
+ query: 'up{' \
+ "env1={{ ci_environment_slug}}," \
+ "env2={{ci_environment_slug }}," \
+ "{{ environment_filter }}" \
+ '}'
}
end
it_behaves_like 'success' do
- # It should replace only liquid variables with their values
- let(:expected_query) { %q[up{env1="%{ruby_variable}",env2="env_slug"}] }
+ let(:expected_query) do
+ 'up{' \
+ "env1=#{environment.slug}," \
+ "env2=#{environment.slug}," \
+ "container_name!=\"POD\",environment=\"#{environment.slug}\"" \
+ '}'
+ end
end
end
- end
-
- context 'with liquid tags and ruby format variables' do
- let(:params_keys) do
- {
- query: 'up{ {% if true %}env1="%{ci_environment_slug}",' \
- 'env2="{{ci_environment_slug}}"{% endif %} }'
- }
- end
-
- # The following spec will fail and should be changed to a 'success' spec
- # once we remove support for the Ruby interpolation format.
- # https://gitlab.com/gitlab-org/gitlab/issues/37990
- #
- # Liquid tags `{% %}` cannot be used currently because the Ruby `%`
- # operator raises an error when it encounters a Liquid `{% %}` tag in the
- # string.
- #
- # Once we remove support for the Ruby format, users can start using
- # Liquid tags.
-
- it_behaves_like 'error', 'Malformed string'
- end
- context 'ruby template rendering' do
- let(:params_keys) do
- { query: 'up{env=%{ci_environment_slug},%{environment_filter}}' }
- end
+ context 'with empty variables' do
+ let(:params_keys) do
+ { query: "up{env1={{}},env2={{ }}}" }
+ end
- it_behaves_like 'success' do
- let(:expected_query) do
- "up{env=#{environment.slug},container_name!=\"POD\"," \
- "environment=\"#{environment.slug}\"}"
+ it_behaves_like 'success' do
+ let(:expected_query) { "up{env1={{}},env2={{ }}}" }
end
end
context 'with multiple occurrences of variable in string' do
let(:params_keys) do
- { query: 'up{env1=%{ci_environment_slug},env2=%{ci_environment_slug}}' }
+ { query: "up{env1={{ci_environment_slug}},env2={{ci_environment_slug}}}" }
end
it_behaves_like 'success' do
@@ -207,7 +158,7 @@ describe Prometheus::ProxyVariableSubstitutionService do
context 'with multiple variables in string' do
let(:params_keys) do
- { query: 'up{env=%{ci_environment_slug},%{environment_filter}}' }
+ { query: "up{env={{ci_environment_slug}},{{environment_filter}}}" }
end
it_behaves_like 'success' do
@@ -219,69 +170,22 @@ describe Prometheus::ProxyVariableSubstitutionService do
end
context 'with unknown variables in string' do
- let(:params_keys) { { query: 'up{env=%{env_slug}}' } }
-
- it_behaves_like 'success' do
- let(:expected_query) { 'up{env=%{env_slug}}' }
- end
- end
-
- # This spec is needed if there are multiple keys in the context provided
- # by `Gitlab::Prometheus::QueryVariables.call(environment)` which is
- # passed to the Ruby `%` operator.
- # If the number of keys in the context is one, there is no need for
- # this spec.
- context 'with extra variables in context' do
- let(:params_keys) { { query: 'up{env=%{ci_environment_slug}}' } }
+ let(:params_keys) { { query: "up{env={{env_slug}}}" } }
it_behaves_like 'success' do
- let(:expected_query) { "up{env=#{environment.slug}}" }
- end
-
- it 'has more than one variable in context' do
- expect(Gitlab::Prometheus::QueryVariables.call(environment).size).to be > 1
+ let(:expected_query) { "up{env={{env_slug}}}" }
end
end
- # The ruby % operator will not replace known variables if there are unknown
- # variables also in the string. It doesn't raise an error
- # (though the `sprintf` and `format` methods do).
context 'with unknown and known variables in string' do
let(:params_keys) do
- { query: 'up{env=%{ci_environment_slug},other_env=%{env_slug}}' }
+ { query: "up{env={{ci_environment_slug}},other_env={{env_slug}}}" }
end
it_behaves_like 'success' do
- let(:expected_query) { 'up{env=%{ci_environment_slug},other_env=%{env_slug}}' }
+ let(:expected_query) { "up{env=#{environment.slug},other_env={{env_slug}}}" }
end
end
-
- context 'when rendering raises error' do
- context 'when TypeError is raised' do
- let(:params_keys) { { query: '{% a %}' } }
-
- it_behaves_like 'error', 'Malformed string'
- end
-
- context 'when ArgumentError is raised' do
- let(:params_keys) { { query: '%<' } }
-
- it_behaves_like 'error', 'Malformed string'
- end
- end
- end
-
- context 'when liquid template rendering raises error' do
- before do
- liquid_service = instance_double(TemplateEngines::LiquidService)
-
- allow(TemplateEngines::LiquidService).to receive(:new).and_return(liquid_service)
- allow(liquid_service).to receive(:render).and_raise(
- TemplateEngines::LiquidService::RenderError, 'error message'
- )
- end
-
- it_behaves_like 'error', 'error message'
end
end
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 36f9966c0ef..a9de0a747f6 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -361,7 +361,7 @@ describe QuickActions::InterpretService do
expect(updates).to eq(spend_time: {
duration: 3600,
user_id: developer.id,
- spent_at: DateTime.now.to_date
+ spent_at: DateTime.current.to_date
})
end
@@ -379,7 +379,7 @@ describe QuickActions::InterpretService do
expect(updates).to eq(spend_time: {
duration: -1800,
user_id: developer.id,
- spent_at: DateTime.now.to_date
+ spent_at: DateTime.current.to_date
})
end
end
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index 255f044db90..d0859500440 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -20,6 +20,8 @@ describe Releases::CreateService do
describe '#execute' do
shared_examples 'a successful release creation' do
it 'creates a new release' do
+ expected_job_count = MailScheduler::NotificationServiceWorker.jobs.size + 1
+
result = service.execute
expect(project.releases.count).to eq(1)
@@ -30,6 +32,7 @@ describe Releases::CreateService do
expect(result[:release].name).to eq(name)
expect(result[:release].author).to eq(user)
expect(result[:release].sha).to eq(tag_sha)
+ expect(MailScheduler::NotificationServiceWorker.jobs.size).to eq(expected_job_count)
end
end
diff --git a/spec/services/repository_archive_clean_up_service_spec.rb b/spec/services/repository_archive_clean_up_service_spec.rb
index c0b286ac675..80b177a0174 100644
--- a/spec/services/repository_archive_clean_up_service_spec.rb
+++ b/spec/services/repository_archive_clean_up_service_spec.rb
@@ -110,6 +110,8 @@ describe RepositoryArchiveCleanUpService do
def create_temporary_files(dir, extensions, mtime)
FileUtils.mkdir_p(dir)
+ # rubocop: disable Rails/TimeZone
FileUtils.touch(extensions.map { |ext| File.join(dir, "sample.#{ext}") }, mtime: Time.now - mtime)
+ # rubocop: enable Rails/TimeZone
end
end
diff --git a/spec/services/resources/create_access_token_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index 8c108d9937a..57e7e4e66de 100644
--- a/spec/services/resources/create_access_token_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-describe Resources::CreateAccessTokenService do
- subject { described_class.new(resource_type, resource, user, params).execute }
+describe ResourceAccessTokens::CreateService do
+ subject { described_class.new(user, resource, params).execute }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :private) }
@@ -12,7 +12,7 @@ describe Resources::CreateAccessTokenService do
describe '#execute' do
# Created shared_examples as it will easy to include specs for group bots in https://gitlab.com/gitlab-org/gitlab/-/issues/214046
shared_examples 'fails when user does not have the permission to create a Resource Bot' do
- before do
+ before_all do
resource.add_developer(user)
end
@@ -56,7 +56,7 @@ describe Resources::CreateAccessTokenService do
end
context 'when user provides value' do
- let(:params) { { name: 'Random bot' } }
+ let_it_be(:params) { { name: 'Random bot' } }
it 'overrides the default value' do
response = subject
@@ -83,12 +83,12 @@ describe Resources::CreateAccessTokenService do
response = subject
access_token = response.payload[:access_token]
- expect(access_token.scopes).to eq(Gitlab::Auth::API_SCOPES + Gitlab::Auth::REPOSITORY_SCOPES + Gitlab::Auth.registry_scopes - [:read_user])
+ expect(access_token.scopes).to eq(Gitlab::Auth.resource_bot_scopes)
end
end
context 'when user provides scope explicitly' do
- let(:params) { { scopes: Gitlab::Auth::REPOSITORY_SCOPES } }
+ let_it_be(:params) { { scopes: Gitlab::Auth::REPOSITORY_SCOPES } }
it 'overrides the default value' do
response = subject
@@ -109,7 +109,7 @@ describe Resources::CreateAccessTokenService do
end
context 'when user provides value' do
- let(:params) { { expires_at: Date.today + 1.month } }
+ let_it_be(:params) { { expires_at: Date.today + 1.month } }
it 'overrides the default value' do
response = subject
@@ -120,7 +120,7 @@ describe Resources::CreateAccessTokenService do
end
context 'when invalid scope is passed' do
- let(:params) { { scopes: [:invalid_scope] } }
+ let_it_be(:params) { { scopes: [:invalid_scope] } }
it 'returns error' do
response = subject
@@ -145,14 +145,14 @@ describe Resources::CreateAccessTokenService do
end
context 'when resource is a project' do
- let(:resource_type) { 'project' }
- let(:resource) { project }
+ let_it_be(:resource_type) { 'project' }
+ let_it_be(:resource) { project }
it_behaves_like 'fails when user does not have the permission to create a Resource Bot'
it_behaves_like 'fails when flag is disabled'
context 'user with valid permission' do
- before do
+ before_all do
resource.add_maintainer(user)
end
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
new file mode 100644
index 00000000000..3ce82745b9e
--- /dev/null
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ResourceAccessTokens::RevokeService do
+ subject { described_class.new(user, resource, access_token).execute }
+
+ let_it_be(:user) { create(:user) }
+ let(:access_token) { create(:personal_access_token, user: resource_bot) }
+
+ describe '#execute' do
+ # Created shared_examples as it will be easy to include specs for group bots in https://gitlab.com/gitlab-org/gitlab/-/issues/214046
+ shared_examples 'revokes access token' do
+ it { expect(subject.success?).to be true }
+
+ it { expect(subject.message).to eq("Revoked access token: #{access_token.name}") }
+
+ it 'revokes token access' do
+ subject
+
+ expect(access_token.reload.revoked?).to be true
+ end
+
+ it 'removes membership of bot user' do
+ subject
+
+ expect(resource.reload.users).not_to include(resource_bot)
+ end
+
+ it 'transfers issuables of bot user to ghost user' do
+ issue = create(:issue, author: resource_bot)
+
+ subject
+
+ expect(issue.reload.author.ghost?).to be true
+ end
+ end
+
+ shared_examples 'rollback revoke steps' do
+ it 'does not revoke the access token' do
+ subject
+
+ expect(access_token.reload.revoked?).to be false
+ end
+
+ it 'does not remove bot from member list' do
+ subject
+
+ expect(resource.reload.users).to include(resource_bot)
+ end
+
+ it 'does not transfer issuables of bot user to ghost user' do
+ issue = create(:issue, author: resource_bot)
+
+ subject
+
+ expect(issue.reload.author.ghost?).to be false
+ end
+ end
+
+ context 'when resource is a project' do
+ let_it_be(:resource) { create(:project, :private) }
+ let_it_be(:resource_bot) { create(:user, :project_bot) }
+
+ before_all do
+ resource.add_maintainer(user)
+ resource.add_maintainer(resource_bot)
+ end
+
+ it_behaves_like 'revokes access token'
+
+ context 'when revoke fails' do
+ context 'invalid resource type' do
+ subject { described_class.new(user, resource, access_token).execute }
+
+ let_it_be(:resource) { double }
+ let_it_be(:resource_bot) { create(:user, :project_bot) }
+
+ it 'returns error response' do
+ response = subject
+
+ expect(response.success?).to be false
+ expect(response.message).to eq("Failed to find bot user")
+ end
+
+ it { expect { subject }.not_to change(access_token.reload, :revoked) }
+ end
+
+ context 'when removal of bot member fails' do
+ before do
+ allow_next_instance_of(::Members::DestroyService) do |service|
+ allow(service).to receive(:execute).and_return(false)
+ end
+ end
+
+ it_behaves_like 'rollback revoke steps'
+ end
+
+ context 'when migration to ghost user fails' do
+ before do
+ allow_next_instance_of(::Users::MigrateToGhostUserService) do |service|
+ allow(service).to receive(:execute).and_return(false)
+ end
+ end
+
+ it_behaves_like 'rollback revoke steps'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/resource_events/change_milestone_service_spec.rb b/spec/services/resource_events/change_milestone_service_spec.rb
index bc634fadadd..dec01d0db8d 100644
--- a/spec/services/resource_events/change_milestone_service_spec.rb
+++ b/spec/services/resource_events/change_milestone_service_spec.rb
@@ -3,11 +3,9 @@
require 'spec_helper'
describe ResourceEvents::ChangeMilestoneService do
- it_behaves_like 'a milestone events creator' do
- let(:resource) { create(:issue) }
- end
-
- it_behaves_like 'a milestone events creator' do
- let(:resource) { create(:merge_request) }
+ [:issue, :merge_request].each do |issuable|
+ it_behaves_like 'a milestone events creator' do
+ let(:resource) { create(issuable) }
+ end
end
end
diff --git a/spec/services/resource_events/merge_into_notes_service_spec.rb b/spec/services/resource_events/merge_into_notes_service_spec.rb
index 6bad1b86fca..2664a27244d 100644
--- a/spec/services/resource_events/merge_into_notes_service_spec.rb
+++ b/spec/services/resource_events/merge_into_notes_service_spec.rb
@@ -21,7 +21,7 @@ describe ResourceEvents::MergeIntoNotesService do
let_it_be(:resource) { create(:issue, project: project) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
- let(:time) { Time.now }
+ let(:time) { Time.current }
describe '#execute' do
it 'merges label events into notes in order of created_at' do
diff --git a/spec/services/search/snippet_service_spec.rb b/spec/services/search/snippet_service_spec.rb
index 430c71880a3..cb2bb0c43fd 100644
--- a/spec/services/search/snippet_service_spec.rb
+++ b/spec/services/search/snippet_service_spec.rb
@@ -3,59 +3,67 @@
require 'spec_helper'
describe Search::SnippetService do
- let(:author) { create(:author) }
- let(:project) { create(:project, :public) }
+ let_it_be(:author) { create(:author) }
+ let_it_be(:project) { create(:project, :public) }
- let!(:public_snippet) { create(:snippet, :public, content: 'password: XXX') }
- let!(:internal_snippet) { create(:snippet, :internal, content: 'password: XXX') }
- let!(:private_snippet) { create(:snippet, :private, content: 'password: XXX', author: author) }
+ let_it_be(:public_snippet) { create(:snippet, :public, title: 'Foo Bar Title') }
+ let_it_be(:internal_snippet) { create(:snippet, :internal, title: 'Foo Bar Title') }
+ let_it_be(:private_snippet) { create(:snippet, :private, title: 'Foo Bar Title', author: author) }
- let!(:project_public_snippet) { create(:snippet, :public, project: project, content: 'password: XXX') }
- let!(:project_internal_snippet) { create(:snippet, :internal, project: project, content: 'password: XXX') }
- let!(:project_private_snippet) { create(:snippet, :private, project: project, content: 'password: XXX') }
+ let_it_be(:project_public_snippet) { create(:snippet, :public, project: project, title: 'Foo Bar Title') }
+ let_it_be(:project_internal_snippet) { create(:snippet, :internal, project: project, title: 'Foo Bar Title') }
+ let_it_be(:project_private_snippet) { create(:snippet, :private, project: project, title: 'Foo Bar Title') }
+
+ let_it_be(:user) { create(:user) }
describe '#execute' do
context 'unauthenticated' do
it 'returns public snippets only' do
- search = described_class.new(nil, search: 'password')
+ search = described_class.new(nil, search: 'bar')
results = search.execute
- expect(results.objects('snippet_blobs')).to match_array [public_snippet, project_public_snippet]
+ expect(results.objects('snippet_titles')).to match_array [public_snippet, project_public_snippet]
end
end
context 'authenticated' do
it 'returns only public & internal snippets for regular users' do
- user = create(:user)
- search = described_class.new(user, search: 'password')
+ search = described_class.new(user, search: 'bar')
results = search.execute
- expect(results.objects('snippet_blobs')).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet]
+ expect(results.objects('snippet_titles')).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet]
end
it 'returns public, internal snippets and project private snippets for project members' do
- member = create(:user)
- project.add_developer(member)
- search = described_class.new(member, search: 'password')
+ project.add_developer(user)
+ search = described_class.new(user, search: 'bar')
results = search.execute
- expect(results.objects('snippet_blobs')).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet, project_private_snippet]
+ expect(results.objects('snippet_titles')).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet, project_private_snippet]
end
it 'returns public, internal and private snippets where user is the author' do
- search = described_class.new(author, search: 'password')
+ search = described_class.new(author, search: 'bar')
results = search.execute
- expect(results.objects('snippet_blobs')).to match_array [public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet]
+ expect(results.objects('snippet_titles')).to match_array [public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet]
end
it 'returns all snippets when user is admin' do
admin = create(:admin)
- search = described_class.new(admin, search: 'password')
+ search = described_class.new(admin, search: 'bar')
results = search.execute
- expect(results.objects('snippet_blobs')).to match_array [public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet, project_private_snippet]
+ expect(results.objects('snippet_titles')).to match_array [public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet, project_private_snippet]
end
end
end
+
+ describe '#scope' do
+ it 'always scopes to snippet_titles' do
+ search = described_class.new(user, search: 'bar')
+
+ expect(search.scope).to eq 'snippet_titles'
+ end
+ end
end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 97d7ca6e1ad..0333eb85fb6 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -18,7 +18,9 @@ describe SearchService do
let(:group_project) { create(:project, group: accessible_group, name: 'group_project') }
let(:public_project) { create(:project, :public, name: 'public_project') }
- subject(:search_service) { described_class.new(user, search: search, scope: scope, page: 1) }
+ let(:per_page) { described_class::DEFAULT_PER_PAGE }
+
+ subject(:search_service) { described_class.new(user, search: search, scope: scope, page: 1, per_page: per_page) }
before do
accessible_project.add_maintainer(user)
@@ -151,7 +153,7 @@ describe SearchService do
it 'returns the default scope' do
scope = described_class.new(user, snippets: 'true', scope: 'projects').scope
- expect(scope).to eq 'snippet_blobs'
+ expect(scope).to eq 'snippet_titles'
end
end
@@ -159,7 +161,7 @@ describe SearchService do
it 'returns the default scope' do
scope = described_class.new(user, snippets: 'true').scope
- expect(scope).to eq 'snippet_blobs'
+ expect(scope).to eq 'snippet_titles'
end
end
end
@@ -222,7 +224,7 @@ describe SearchService do
search_results = described_class.new(
user,
snippets: 'true',
- search: snippet.content).search_results
+ search: snippet.title).search_results
expect(search_results).to be_a Gitlab::SnippetSearchResults
end
@@ -240,6 +242,76 @@ describe SearchService do
end
describe '#search_objects' do
+ context 'handling per_page param' do
+ let(:search) { '' }
+ let(:scope) { nil }
+
+ context 'when nil' do
+ let(:per_page) { nil }
+
+ it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
+ expect_any_instance_of(Gitlab::SearchResults)
+ .to receive(:objects)
+ .with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
+ .and_call_original
+
+ subject.search_objects
+ end
+ end
+
+ context 'when empty string' do
+ let(:per_page) { '' }
+
+ it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
+ expect_any_instance_of(Gitlab::SearchResults)
+ .to receive(:objects)
+ .with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
+ .and_call_original
+
+ subject.search_objects
+ end
+ end
+
+ context 'when negative' do
+ let(:per_page) { '-1' }
+
+ it "defaults to #{described_class::DEFAULT_PER_PAGE}" do
+ expect_any_instance_of(Gitlab::SearchResults)
+ .to receive(:objects)
+ .with(anything, hash_including(per_page: described_class::DEFAULT_PER_PAGE))
+ .and_call_original
+
+ subject.search_objects
+ end
+ end
+
+ context 'when present' do
+ let(:per_page) { '50' }
+
+ it "converts to integer and passes to search results" do
+ expect_any_instance_of(Gitlab::SearchResults)
+ .to receive(:objects)
+ .with(anything, hash_including(per_page: 50))
+ .and_call_original
+
+ subject.search_objects
+ end
+ end
+
+ context "when greater than #{described_class::MAX_PER_PAGE}" do
+ let(:per_page) { described_class::MAX_PER_PAGE + 1 }
+
+ it "passes #{described_class::MAX_PER_PAGE}" do
+ expect_any_instance_of(Gitlab::SearchResults)
+ .to receive(:objects)
+ .with(anything, hash_including(per_page: described_class::MAX_PER_PAGE))
+ .and_call_original
+
+ subject.search_objects
+ end
+ end
+ end
+
context 'with accessible project_id' do
it 'returns objects in the project' do
search_objects = described_class.new(
@@ -270,7 +342,7 @@ describe SearchService do
search_objects = described_class.new(
user,
snippets: 'true',
- search: snippet.content).search_objects
+ search: snippet.title).search_objects
expect(search_objects.first).to eq snippet
end
@@ -383,7 +455,7 @@ describe SearchService do
let(:readable) { create(:project_snippet, project: accessible_project) }
let(:unreadable) { create(:project_snippet, project: inaccessible_project) }
let(:unredacted_results) { ar_relation(ProjectSnippet, readable, unreadable) }
- let(:scope) { 'snippet_blobs' }
+ let(:scope) { 'snippet_titles' }
it 'redacts the inaccessible snippet' do
expect(result).to contain_exactly(readable)
@@ -394,7 +466,7 @@ describe SearchService do
let(:readable) { create(:personal_snippet, :private, author: user) }
let(:unreadable) { create(:personal_snippet, :private) }
let(:unredacted_results) { ar_relation(PersonalSnippet, readable, unreadable) }
- let(:scope) { 'snippet_blobs' }
+ let(:scope) { 'snippet_titles' }
it 'redacts the inaccessible snippet' do
expect(result).to contain_exactly(readable)
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index c1a8a026b90..786fc3ec8dd 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -74,47 +74,6 @@ describe Snippets::CreateService do
end
end
- shared_examples 'spam check is performed' do
- shared_examples 'marked as spam' do
- it 'marks a snippet as spam' do
- expect(snippet).to be_spam
- end
-
- it 'invalidates the snippet' do
- expect(snippet).to be_invalid
- end
-
- it 'creates a new spam_log' do
- expect { snippet }
- .to have_spam_log(title: snippet.title, noteable_type: snippet.class.name)
- end
-
- it 'assigns a spam_log to an issue' do
- expect(snippet.spam_log).to eq(SpamLog.last)
- end
- end
-
- let(:extra_opts) do
- { visibility_level: Gitlab::VisibilityLevel::PUBLIC, request: double(:request, env: {}) }
- end
-
- before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
- end
- end
-
- [true, false, nil].each do |allow_possible_spam|
- context "when recaptcha_disabled flag is #{allow_possible_spam.inspect}" do
- before do
- stub_feature_flags(allow_possible_spam: allow_possible_spam) unless allow_possible_spam.nil?
- end
-
- it_behaves_like 'marked as spam'
- end
- end
- end
-
shared_examples 'snippet create data is tracked' do
let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
@@ -169,8 +128,8 @@ describe Snippets::CreateService do
expect { subject }.not_to change { Snippet.count }
end
- it 'returns the error' do
- expect(snippet.errors.full_messages).to include('Repository could not be created')
+ it 'returns a generic creation error' do
+ expect(snippet.errors[:repository]).to eq ['Error creating the snippet - Repository could not be created']
end
it 'does not return a snippet with an id' do
@@ -178,6 +137,14 @@ describe Snippets::CreateService do
end
end
+ context 'when repository creation fails with invalid file name' do
+ let(:extra_opts) { { file_name: 'invalid://file/name/here' } }
+
+ it 'returns an appropriate error' do
+ expect(snippet.errors[:repository]).to eq ['Error creating the snippet - Invalid file name']
+ end
+ end
+
context 'when the commit action fails' do
before do
allow_next_instance_of(SnippetRepository) do |instance|
@@ -209,11 +176,11 @@ describe Snippets::CreateService do
subject
end
- it 'returns the error' do
+ it 'returns a generic error' do
response = subject
expect(response).to be_error
- expect(response.payload[:snippet].errors.full_messages).to eq ['foobar']
+ expect(response.payload[:snippet].errors[:repository]).to eq ['Error creating the snippet']
end
end
@@ -228,36 +195,14 @@ describe Snippets::CreateService do
expect(snippet.repository_exists?).to be_falsey
end
end
-
- context 'when feature flag :version_snippets is disabled' do
- before do
- stub_feature_flags(version_snippets: false)
- end
-
- it 'does not create snippet repository' do
- expect do
- subject
- end.to change(Snippet, :count).by(1)
-
- expect(snippet.repository_exists?).to be_falsey
- end
-
- it 'does not try to commit files' do
- expect_next_instance_of(described_class) do |instance|
- expect(instance).not_to receive(:create_commit)
- end
-
- subject
- end
- end
end
- shared_examples 'after_save callback to store_mentions' do
+ shared_examples 'after_save callback to store_mentions' do |mentionable_class|
context 'when mentionable attributes change' do
let(:extra_opts) { { description: "Description with #{user.to_reference}" } }
it 'saves mentions' do
- expect_next_instance_of(Snippet) do |instance|
+ expect_next_instance_of(mentionable_class) do |instance|
expect(instance).to receive(:store_mentions!).and_call_original
end
expect(snippet.user_mentions.count).to eq 1
@@ -266,7 +211,7 @@ describe Snippets::CreateService do
context 'when mentionable attributes do not change' do
it 'does not call store_mentions' do
- expect_next_instance_of(Snippet) do |instance|
+ expect_next_instance_of(mentionable_class) do |instance|
expect(instance).not_to receive(:store_mentions!)
end
expect(snippet.user_mentions.count).to eq 0
@@ -277,7 +222,7 @@ describe Snippets::CreateService do
it 'does not call store_mentions' do
base_opts.delete(:title)
- expect_next_instance_of(Snippet) do |instance|
+ expect_next_instance_of(mentionable_class) do |instance|
expect(instance).not_to receive(:store_mentions!)
end
expect(snippet.valid?).to be false
@@ -294,11 +239,25 @@ describe Snippets::CreateService do
it_behaves_like 'a service that creates a snippet'
it_behaves_like 'public visibility level restrictions apply'
- it_behaves_like 'spam check is performed'
+ it_behaves_like 'snippets spam check is performed'
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
- it_behaves_like 'after_save callback to store_mentions'
+ it_behaves_like 'after_save callback to store_mentions', ProjectSnippet
+
+ context 'when uploaded files are passed to the service' do
+ let(:extra_opts) { { files: ['foo'] } }
+
+ it 'does not move uploaded files to the snippet' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:move_temporary_files).and_call_original
+ end
+
+ expect_any_instance_of(FileMover).not_to receive(:execute)
+
+ subject
+ end
+ end
end
context 'when PersonalSnippet' do
@@ -306,12 +265,55 @@ describe Snippets::CreateService do
it_behaves_like 'a service that creates a snippet'
it_behaves_like 'public visibility level restrictions apply'
- it_behaves_like 'spam check is performed'
+ it_behaves_like 'snippets spam check is performed'
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
- pending('See https://gitlab.com/gitlab-org/gitlab/issues/30742') do
- it_behaves_like 'after_save callback to store_mentions'
+ it_behaves_like 'after_save callback to store_mentions', PersonalSnippet
+
+ context 'when the snippet description contains files' do
+ include FileMoverHelpers
+
+ let(:title) { 'Title' }
+ let(:picture_secret) { SecureRandom.hex }
+ let(:text_secret) { SecureRandom.hex }
+ let(:picture_file) { "/-/system/user/#{creator.id}/#{picture_secret}/picture.jpg" }
+ let(:text_file) { "/-/system/user/#{creator.id}/#{text_secret}/text.txt" }
+ let(:files) { [picture_file, text_file] }
+ let(:description) do
+ "Description with picture: ![picture](/uploads#{picture_file}) and "\
+ "text: [text.txt](/uploads#{text_file})"
+ end
+
+ before do
+ allow(FileUtils).to receive(:mkdir_p)
+ allow(FileUtils).to receive(:move)
+ end
+
+ let(:extra_opts) { { description: description, title: title, files: files } }
+
+ it 'stores the snippet description correctly' do
+ stub_file_mover(text_file)
+ stub_file_mover(picture_file)
+
+ snippet = subject.payload[:snippet]
+
+ expected_description = "Description with picture: "\
+ "![picture](/uploads/-/system/personal_snippet/#{snippet.id}/#{picture_secret}/picture.jpg) and "\
+ "text: [text.txt](/uploads/-/system/personal_snippet/#{snippet.id}/#{text_secret}/text.txt)"
+
+ expect(snippet.description).to eq(expected_description)
+ end
+
+ context 'when there is a validation error' do
+ let(:title) { nil }
+
+ it 'does not move uploaded files to the snippet' do
+ expect_any_instance_of(described_class).not_to receive(:move_temporary_files)
+
+ subject
+ end
+ end
end
end
end
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index 05fb725c065..38747ae907f 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -7,7 +7,7 @@ describe Snippets::UpdateService do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create :user, admin: true }
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
- let(:options) do
+ let(:base_opts) do
{
title: 'Test snippet',
file_name: 'snippet.rb',
@@ -15,6 +15,8 @@ describe Snippets::UpdateService do
visibility_level: visibility_level
}
end
+ let(:extra_opts) { {} }
+ let(:options) { base_opts.merge(extra_opts) }
let(:updater) { user }
let(:service) { Snippets::UpdateService.new(project, updater, options) }
@@ -85,7 +87,7 @@ describe Snippets::UpdateService do
end
context 'when update fails' do
- let(:options) { { title: '' } }
+ let(:extra_opts) { { title: '' } }
it 'does not increment count' do
expect { subject }.not_to change { counter.read(:update) }
@@ -112,25 +114,16 @@ describe Snippets::UpdateService do
expect(blob.data).to eq options[:content]
end
- context 'when the repository does not exist' do
- it 'does not try to commit file' do
- allow(snippet).to receive(:repository_exists?).and_return(false)
-
- expect(service).not_to receive(:create_commit)
-
- subject
- end
- end
-
- context 'when feature flag is disabled' do
+ context 'when the repository creation fails' do
before do
- stub_feature_flags(version_snippets: false)
+ allow(snippet).to receive(:repository_exists?).and_return(false)
end
- it 'does not create repository' do
- subject
+ it 'raises an error' do
+ response = subject
- expect(snippet.repository).not_to exist
+ expect(response).to be_error
+ expect(response.payload[:snippet].errors[:repository].to_sentence).to eq 'Error updating the snippet - Repository could not be created'
end
it 'does not try to commit file' do
@@ -205,14 +198,24 @@ describe Snippets::UpdateService do
end
end
- it 'rolls back any snippet modifications' do
- option_keys = options.stringify_keys.keys
- orig_attrs = snippet.attributes.select { |k, v| k.in?(option_keys) }
+ context 'with snippet modifications' do
+ let(:option_keys) { options.stringify_keys.keys }
- subject
+ it 'rolls back any snippet modifications' do
+ orig_attrs = snippet.attributes.select { |k, v| k.in?(option_keys) }
+
+ subject
+
+ persisted_attrs = snippet.reload.attributes.select { |k, v| k.in?(option_keys) }
+ expect(orig_attrs).to eq persisted_attrs
+ end
+
+ it 'keeps any snippet modifications' do
+ subject
- current_attrs = snippet.attributes.select { |k, v| k.in?(option_keys) }
- expect(orig_attrs).to eq current_attrs
+ instance_attrs = snippet.attributes.select { |k, v| k.in?(option_keys) }
+ expect(options.stringify_keys).to eq instance_attrs
+ end
end
end
@@ -270,6 +273,35 @@ describe Snippets::UpdateService do
end
end
+ shared_examples 'committable attributes' do
+ context 'when file_name is updated' do
+ let(:extra_opts) { { file_name: 'snippet.rb' } }
+
+ it 'commits to repository' do
+ expect(service).to receive(:create_commit)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when content is updated' do
+ let(:extra_opts) { { content: 'puts "hello world"' } }
+
+ it 'commits to repository' do
+ expect(service).to receive(:create_commit)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when content or file_name is not updated' do
+ let(:options) { { title: 'Test snippet' } }
+
+ it 'does not perform any commit' do
+ expect(service).not_to receive(:create_commit)
+ expect(subject).to be_success
+ end
+ end
+ end
+
context 'when Project Snippet' do
let_it_be(:project) { create(:project) }
let!(:snippet) { create(:project_snippet, :repository, author: user, project: project) }
@@ -283,6 +315,12 @@ describe Snippets::UpdateService do
it_behaves_like 'snippet update data is tracked'
it_behaves_like 'updates repository content'
it_behaves_like 'commit operation fails'
+ it_behaves_like 'committable attributes'
+ it_behaves_like 'snippets spam check is performed' do
+ before do
+ subject
+ end
+ end
context 'when snippet does not have a repository' do
let!(:snippet) { create(:project_snippet, author: user, project: project) }
@@ -301,6 +339,12 @@ describe Snippets::UpdateService do
it_behaves_like 'snippet update data is tracked'
it_behaves_like 'updates repository content'
it_behaves_like 'commit operation fails'
+ it_behaves_like 'committable attributes'
+ it_behaves_like 'snippets spam check is performed' do
+ before do
+ subject
+ end
+ end
context 'when snippet does not have a repository' do
let!(:snippet) { create(:personal_snippet, author: user, project: project) }
diff --git a/spec/services/spam/spam_check_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index 3d0cb1447bd..560833aba97 100644
--- a/spec/services/spam/spam_check_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-describe Spam::SpamCheckService do
+describe Spam::SpamActionService do
+ include_context 'includes Spam constants'
+
let(:fake_ip) { '1.2.3.4' }
let(:fake_user_agent) { 'fake-user-agent' }
let(:fake_referrer) { 'fake-http-referrer' }
@@ -15,7 +17,7 @@ describe Spam::SpamCheckService do
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
- let_it_be(:issue) { create(:issue, project: project, author: user) }
+ let(:issue) { create(:issue, project: project, author: user) }
before do
issue.spam = false
@@ -51,7 +53,7 @@ describe Spam::SpamCheckService do
shared_examples 'only checks for spam if a request is provided' do
context 'when request is missing' do
- let(:request) { nil }
+ subject { described_class.new(spammable: issue, request: nil) }
it "doesn't check as spam" do
subject
@@ -70,21 +72,28 @@ describe Spam::SpamCheckService do
describe '#execute' do
let(:request) { double(:request, env: env) }
+ let(:fake_verdict_service) { double(:spam_verdict_service) }
+ let(:allowlisted) { false }
let_it_be(:existing_spam_log) { create(:spam_log, user: user, recaptcha_verified: false) }
subject do
described_service = described_class.new(spammable: issue, request: request)
- described_service.execute(user_id: user.id, api: nil, recaptcha_verified: recaptcha_verified, spam_log_id: existing_spam_log.id)
+ allow(described_service).to receive(:allowlisted?).and_return(allowlisted)
+ described_service.execute(user: user, api: nil, recaptcha_verified: recaptcha_verified, spam_log_id: existing_spam_log.id)
end
- context 'when recaptcha was already verified' do
+ before do
+ allow(Spam::SpamVerdictService).to receive(:new).and_return(fake_verdict_service)
+ end
+
+ context 'when reCAPTCHA was already verified' do
let(:recaptcha_verified) { true }
- it "updates spam log and doesn't check Akismet" do
+ it "doesn't check with the SpamVerdictService" do
aggregate_failures do
- expect(SpamLog).not_to receive(:create!)
- expect(an_instance_of(described_class)).not_to receive(:check)
+ expect(SpamLog).to receive(:verify_recaptcha!)
+ expect(fake_verdict_service).not_to receive(:execute)
end
subject
@@ -95,18 +104,12 @@ describe Spam::SpamCheckService do
end
end
- context 'when recaptcha was not verified' do
+ context 'when reCAPTCHA was not verified' do
let(:recaptcha_verified) { false }
context 'when spammable attributes have not changed' do
before do
issue.closed_at = Time.zone.now
-
- allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: true))
- end
-
- it 'returns false' do
- expect(subject).to be_falsey
end
it 'does not create a spam log' do
@@ -120,9 +123,19 @@ describe Spam::SpamCheckService do
issue.description = 'SPAM!'
end
- context 'when indicated as spam by Akismet' do
+ context 'if allowlisted' do
+ let(:allowlisted) { true }
+
+ it 'does not perform spam check' do
+ expect(Spam::SpamVerdictService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'when disallowed by the spam verdict service' do
before do
- allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: true))
+ allow(fake_verdict_service).to receive(:execute).and_return(DISALLOW)
end
context 'when allow_possible_spam feature flag is false' do
@@ -150,13 +163,45 @@ describe Spam::SpamCheckService do
end
end
- context 'when not indicated as spam by Akismet' do
+ context 'when spam verdict service requires reCAPTCHA' do
before do
- allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: false))
+ allow(fake_verdict_service).to receive(:execute).and_return(REQUIRE_RECAPTCHA)
+ end
+
+ context 'when allow_possible_spam feature flag is false' do
+ before do
+ stub_feature_flags(allow_possible_spam: false)
+ end
+
+ it_behaves_like 'only checks for spam if a request is provided'
+
+ it 'does not mark as spam' do
+ subject
+
+ expect(issue).not_to be_spam
+ end
+
+ it 'marks as needing reCAPTCHA' do
+ subject
+
+ expect(issue.needs_recaptcha?).to be_truthy
+ end
end
- it 'returns false' do
- expect(subject).to be_falsey
+ context 'when allow_possible_spam feature flag is true' do
+ it_behaves_like 'only checks for spam if a request is provided'
+
+ it 'does not mark as needing reCAPTCHA' do
+ subject
+
+ expect(issue.needs_recaptcha).to be_falsey
+ end
+ end
+ end
+
+ context 'when spam verdict service allows creation' do
+ before do
+ allow(fake_verdict_service).to receive(:execute).and_return(ALLOW)
end
it 'does not create a spam log' do
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
new file mode 100644
index 00000000000..93460a5e7d7
--- /dev/null
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Spam::SpamVerdictService do
+ include_context 'includes Spam constants'
+
+ let(:fake_ip) { '1.2.3.4' }
+ let(:fake_user_agent) { 'fake-user-agent' }
+ let(:fake_referrer) { 'fake-http-referrer' }
+ let(:env) do
+ { 'action_dispatch.remote_ip' => fake_ip,
+ 'HTTP_USER_AGENT' => fake_user_agent,
+ 'HTTP_REFERRER' => fake_referrer }
+ end
+ let(:request) { double(:request, env: env) }
+
+ let(:check_for_spam) { true }
+ let(:issue) { build(:issue) }
+ let(:service) do
+ described_class.new(target: issue, request: request, options: {})
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ before do
+ allow_next_instance_of(Spam::AkismetService) do |service|
+ allow(service).to receive(:spam?).and_return(spam_verdict)
+ end
+ end
+
+ context 'if Akismet considers it spam' do
+ let(:spam_verdict) { true }
+
+ context 'if reCAPTCHA is enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: true)
+ end
+
+ it 'requires reCAPTCHA' do
+ expect(subject).to eq REQUIRE_RECAPTCHA
+ end
+ end
+
+ context 'if reCAPTCHA is not enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: false)
+ end
+
+ it 'disallows the change' do
+ expect(subject).to eq DISALLOW
+ end
+ end
+ end
+
+ context 'if Akismet does not consider it spam' do
+ let(:spam_verdict) { false }
+
+ it 'allows the change' do
+ expect(subject).to eq ALLOW
+ end
+ end
+ end
+end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 5b87ec022ae..66f9b5d092f 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -6,6 +6,7 @@ describe SystemNoteService do
include Gitlab::Routing
include RepoHelpers
include AssetsHelpers
+ include DesignManagementTestHelpers
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group) }
@@ -462,7 +463,8 @@ describe SystemNoteService do
describe "existing reference" do
before do
allow(JIRA::Resource::Remotelink).to receive(:all).and_return([])
- message = "[#{author.name}|http://localhost/#{author.username}] mentioned this issue in [a commit of #{project.full_path}|http://localhost/#{project.full_path}/-/commit/#{commit.id}]:\n'#{commit.title.chomp}'"
+ message = double('message')
+ allow(message).to receive(:include?) { true }
allow_next_instance_of(JIRA::Resource::Issue) do |instance|
allow(instance).to receive(:comments).and_return([OpenStruct.new(body: message)])
end
@@ -635,4 +637,28 @@ describe SystemNoteService do
described_class.auto_resolve_prometheus_alert(noteable, project, author)
end
end
+
+ describe '.design_version_added' do
+ let(:version) { create(:design_version) }
+
+ it 'calls DesignManagementService' do
+ expect_next_instance_of(SystemNotes::DesignManagementService) do |service|
+ expect(service).to receive(:design_version_added).with(version)
+ end
+
+ described_class.design_version_added(version)
+ end
+ end
+
+ describe '.design_discussion_added' do
+ let(:discussion_note) { create(:diff_note_on_design) }
+
+ it 'calls DesignManagementService' do
+ expect_next_instance_of(SystemNotes::DesignManagementService) do |service|
+ expect(service).to receive(:design_discussion_added).with(discussion_note)
+ end
+
+ described_class.design_discussion_added(discussion_note)
+ end
+ end
end
diff --git a/spec/services/system_notes/design_management_service_spec.rb b/spec/services/system_notes/design_management_service_spec.rb
new file mode 100644
index 00000000000..08511e62341
--- /dev/null
+++ b/spec/services/system_notes/design_management_service_spec.rb
@@ -0,0 +1,155 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SystemNotes::DesignManagementService do
+ let(:project) { create(:project) }
+ let(:issue) { create(:issue, project: project) }
+
+ let(:instance) { described_class.new(noteable: instance_noteable, project: instance_project, author: instance_author) }
+
+ describe '#design_version_added' do
+ let(:instance_noteable) { version.issue }
+ let(:instance_project) { version.issue.project }
+ let(:instance_author) { version.author }
+
+ subject { instance.design_version_added(version) }
+
+ # default (valid) parameters:
+ let(:n_designs) { 3 }
+ let(:designs) { create_list(:design, n_designs, issue: issue) }
+ let(:user) { build(:user) }
+ let(:version) do
+ create(:design_version, issue: issue, designs: designs)
+ end
+
+ before do
+ # Avoid needing to call into gitaly
+ allow(version).to receive(:author).and_return(user)
+ end
+
+ context 'with one kind of event' do
+ before do
+ DesignManagement::Action
+ .where(design: designs).update_all(event: :modification)
+ end
+
+ it 'makes just one note' do
+ expect(subject).to contain_exactly(Note)
+ end
+
+ it 'adds a new system note' do
+ expect { subject }.to change { Note.system.count }.by(1)
+ end
+ end
+
+ context 'with a mixture of events' do
+ let(:n_designs) { DesignManagement::Action.events.size }
+
+ before do
+ designs.each_with_index do |design, i|
+ design.actions.update_all(event: i)
+ end
+ end
+
+ it 'makes one note for each kind of event' do
+ expect(subject).to have_attributes(size: n_designs)
+ end
+
+ it 'adds a system note for each kind of event' do
+ expect { subject }.to change { Note.system.count }.by(n_designs)
+ end
+ end
+
+ describe 'icons' do
+ where(:action) do
+ [
+ [:creation],
+ [:modification],
+ [:deletion]
+ ]
+ end
+
+ with_them do
+ before do
+ version.actions.update_all(event: action)
+ end
+
+ subject(:metadata) do
+ instance.design_version_added(version)
+ .first.system_note_metadata
+ end
+
+ it 'has a valid action' do
+ expect(::SystemNoteHelper::ICON_NAMES_BY_ACTION)
+ .to include(metadata.action)
+ end
+ end
+ end
+
+ context 'it succeeds' do
+ where(:action, :icon, :human_description) do
+ [
+ [:creation, 'designs_added', 'added'],
+ [:modification, 'designs_modified', 'updated'],
+ [:deletion, 'designs_removed', 'removed']
+ ]
+ end
+
+ with_them do
+ before do
+ version.actions.update_all(event: action)
+ end
+
+ let(:anchor_tag) { %r{ <a[^>]*>#{link}</a>} }
+ let(:href) { instance.send(:designs_path, { version: version.id }) }
+ let(:link) { "#{n_designs} designs" }
+
+ subject(:note) { instance.design_version_added(version).first }
+
+ it 'has the correct data' do
+ expect(note)
+ .to be_system
+ .and have_attributes(
+ system_note_metadata: have_attributes(action: icon),
+ note: include(human_description)
+ .and(include link)
+ .and(include href),
+ note_html: a_string_matching(anchor_tag)
+ )
+ end
+ end
+ end
+ end
+
+ describe '#design_discussion_added' do
+ let(:instance_noteable) { design.issue }
+ let(:instance_project) { design.issue.project }
+ let(:instance_author) { discussion_note.author }
+
+ subject { instance.design_discussion_added(discussion_note) }
+
+ let(:design) { create(:design, :with_file, issue: issue) }
+ let(:author) { create(:user) }
+ let(:discussion_note) do
+ create(:diff_note_on_design, noteable: design, author: author)
+ end
+ let(:action) { 'designs_discussion_added' }
+
+ it_behaves_like 'a system note' do
+ let(:noteable) { discussion_note.noteable.issue }
+ end
+
+ it 'adds a new system note' do
+ expect { subject }.to change { Note.system.count }.by(1)
+ end
+
+ it 'has the correct note text' do
+ href = instance.send(:designs_path,
+ { vueroute: design.filename, anchor: ActionView::RecordIdentifier.dom_id(discussion_note) }
+ )
+
+ expect(subject.note).to eq("started a discussion on [#{design.filename}](#{href})")
+ end
+ end
+end
diff --git a/spec/services/template_engines/liquid_service_spec.rb b/spec/services/template_engines/liquid_service_spec.rb
deleted file mode 100644
index 7c5262bc264..00000000000
--- a/spec/services/template_engines/liquid_service_spec.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe TemplateEngines::LiquidService do
- describe '#render' do
- let(:template) { 'up{env={{ci_environment_slug}}}' }
- let(:result) { subject }
-
- let_it_be(:slug) { 'env_slug' }
-
- let_it_be(:context) do
- {
- ci_environment_slug: slug,
- environment_filter: "container_name!=\"POD\",environment=\"#{slug}\""
- }
- end
-
- subject { described_class.new(template).render(context) }
-
- it 'with symbol keys in context it substitutes variables' do
- expect(result).to include("up{env=#{slug}")
- end
-
- context 'with multiple occurrences of variable in template' do
- let(:template) do
- 'up{env1={{ci_environment_slug}},env2={{ci_environment_slug}}}'
- end
-
- it 'substitutes variables' do
- expect(result).to eq("up{env1=#{slug},env2=#{slug}}")
- end
- end
-
- context 'with multiple variables in template' do
- let(:template) do
- 'up{env={{ci_environment_slug}},' \
- '{{environment_filter}}}'
- end
-
- it 'substitutes all variables' do
- expect(result).to eq(
- "up{env=#{slug}," \
- "container_name!=\"POD\",environment=\"#{slug}\"}"
- )
- end
- end
-
- context 'with unknown variables in template' do
- let(:template) { 'up{env={{env_slug}}}' }
-
- it 'does not substitute unknown variables' do
- expect(result).to eq("up{env=}")
- end
- end
-
- context 'with extra variables in context' do
- let(:template) { 'up{env={{ci_environment_slug}}}' }
-
- it 'substitutes variables' do
- # If context has only 1 key, there is no need for this spec.
- expect(context.count).to be > 1
- expect(result).to eq("up{env=#{slug}}")
- end
- end
-
- context 'with unknown and known variables in template' do
- let(:template) { 'up{env={{ci_environment_slug}},other_env={{env_slug}}}' }
-
- it 'substitutes known variables' do
- expect(result).to eq("up{env=#{slug},other_env=}")
- end
- end
-
- context 'Liquid errors' do
- shared_examples 'raises RenderError' do |message|
- it do
- expect { result }.to raise_error(described_class::RenderError, message)
- end
- end
-
- context 'when liquid raises error' do
- let(:template) { 'up{env={{ci_environment_slug}}' }
- let(:liquid_template) { Liquid::Template.new }
-
- before do
- allow(Liquid::Template).to receive(:parse).with(template).and_return(liquid_template)
- allow(liquid_template).to receive(:render!).and_raise(exception, message)
- end
-
- context 'raises Liquid::MemoryError' do
- let(:exception) { Liquid::MemoryError }
- let(:message) { 'Liquid error: Memory limits exceeded' }
-
- it_behaves_like 'raises RenderError', 'Memory limit exceeded while rendering template'
- end
-
- context 'raises Liquid::Error' do
- let(:exception) { Liquid::Error }
- let(:message) { 'Liquid error: Generic error message' }
-
- it_behaves_like 'raises RenderError', 'Error rendering query'
- end
- end
-
- context 'with template that is expensive to render' do
- let(:template) do
- '{% assign loop_count = 1000 %}'\
- '{% assign padStr = "0" %}'\
- '{% assign number_to_pad = "1" %}'\
- '{% assign strLength = number_to_pad | size %}'\
- '{% assign padLength = loop_count | minus: strLength %}'\
- '{% if padLength > 0 %}'\
- ' {% assign padded = number_to_pad %}'\
- ' {% for position in (1..padLength) %}'\
- ' {% assign padded = padded | prepend: padStr %}'\
- ' {% endfor %}'\
- ' {{ padded }}'\
- '{% endif %}'
- end
-
- it_behaves_like 'raises RenderError', 'Memory limit exceeded while rendering template'
- end
- end
- end
-end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 9b92590cb63..4894cf12372 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -895,6 +895,36 @@ describe TodoService do
end
end
+ describe 'Designs' do
+ include DesignManagementTestHelpers
+
+ let(:issue) { create(:issue, project: project) }
+ let(:design) { create(:design, issue: issue) }
+
+ before do
+ enable_design_management
+
+ project.add_guest(author)
+ project.add_developer(john_doe)
+ end
+
+ let(:note) do
+ build(:diff_note_on_design,
+ noteable: design,
+ author: author,
+ note: "Hey #{john_doe.to_reference}")
+ end
+
+ it 'creates a todo for mentioned user on new diff note' do
+ service.new_note(note, author)
+
+ should_create_todo(user: john_doe,
+ target: design,
+ action: Todo::MENTIONED,
+ note: note)
+ end
+ end
+
describe '#update_note' do
let(:noteable) { create(:issue, project: project) }
let(:note) { create(:note, project: project, note: mentions, noteable: noteable) }
diff --git a/spec/services/update_merge_request_metrics_service_spec.rb b/spec/services/update_merge_request_metrics_service_spec.rb
index bb07dfa1a0e..1aaf5e712f9 100644
--- a/spec/services/update_merge_request_metrics_service_spec.rb
+++ b/spec/services/update_merge_request_metrics_service_spec.rb
@@ -9,7 +9,7 @@ describe MergeRequestMetricsService do
it 'updates metrics' do
user = create(:user)
service = described_class.new(metrics)
- event = double(Event, author_id: user.id, created_at: Time.now)
+ event = double(Event, author_id: user.id, created_at: Time.current)
service.merge(event)
@@ -22,7 +22,7 @@ describe MergeRequestMetricsService do
it 'updates metrics' do
user = create(:user)
service = described_class.new(metrics)
- event = double(Event, author_id: user.id, created_at: Time.now)
+ event = double(Event, author_id: user.id, created_at: Time.current)
service.close(event)
diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb
index 902ed723e09..f27eeb74265 100644
--- a/spec/services/user_project_access_changed_service_spec.rb
+++ b/spec/services/user_project_access_changed_service_spec.rb
@@ -17,5 +17,14 @@ describe UserProjectAccessChangedService do
described_class.new([1, 2]).execute(blocking: false)
end
+
+ it 'permits low-priority operation' do
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
+ receive(:bulk_perform_in).with(described_class::DELAY, [[1], [2]])
+ )
+
+ described_class.new([1, 2]).execute(blocking: false,
+ priority: described_class::LOW_PRIORITY)
+ end
end
end
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index 216d9170274..6e4b293286b 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -42,13 +42,11 @@ describe Users::DestroyService do
it 'calls the bulk snippet destroy service for the user personal snippets' do
repo1 = create(:personal_snippet, :repository, author: user).snippet_repository
- repo2 = create(:project_snippet, :repository, author: user).snippet_repository
- repo3 = create(:project_snippet, :repository, project: project, author: user).snippet_repository
+ repo2 = create(:project_snippet, :repository, project: project, author: user).snippet_repository
aggregate_failures do
expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_truthy
expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_truthy
- expect(gitlab_shell.repository_exists?(repo3.shard_name, repo3.disk_path + '.git')).to be_truthy
end
# Call made when destroying user personal projects
@@ -59,17 +57,23 @@ describe Users::DestroyService do
# project snippets where projects are not user personal
# ones
expect(Snippets::BulkDestroyService).to receive(:new)
- .with(admin, user.snippets).and_call_original
+ .with(admin, user.snippets.only_personal_snippets).and_call_original
service.execute(user)
aggregate_failures do
expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_falsey
expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_falsey
- expect(gitlab_shell.repository_exists?(repo3.shard_name, repo3.disk_path + '.git')).to be_falsey
end
end
+ it 'does not delete project snippets that the user is the author of' do
+ repo = create(:project_snippet, :repository, author: user).snippet_repository
+ service.execute(user)
+ expect(gitlab_shell.repository_exists?(repo.shard_name, repo.disk_path + '.git')).to be_truthy
+ expect(User.ghost.snippets).to include(repo.snippet)
+ end
+
context 'when an error is raised deleting snippets' do
it 'does not delete user' do
snippet = create(:personal_snippet, :repository, author: user)
diff --git a/spec/services/users/migrate_to_ghost_user_service_spec.rb b/spec/services/users/migrate_to_ghost_user_service_spec.rb
index 40206775aed..a7d7c16a66f 100644
--- a/spec/services/users/migrate_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_to_ghost_user_service_spec.rb
@@ -78,6 +78,12 @@ describe Users::MigrateToGhostUserService do
end
end
+ context 'snippets' do
+ include_examples "migrating a deleted user's associated records to the ghost user", Snippet do
+ let(:created_record) { create(:snippet, project: project, author: user) }
+ end
+ end
+
context "when record migration fails with a rollback exception" do
before do
expect_any_instance_of(ActiveRecord::Associations::CollectionProxy)
diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb
index f2b3b44d223..3f08ae84c14 100644
--- a/spec/services/verify_pages_domain_service_spec.rb
+++ b/spec/services/verify_pages_domain_service_spec.rb
@@ -348,7 +348,7 @@ describe VerifyPagesDomainService do
end
it 'does not shorten any grace period' do
- grace = Time.now + 1.year
+ grace = Time.current + 1.year
domain.update!(enabled_until: grace)
disallow_resolver!
diff --git a/spec/services/wiki_pages/base_service_spec.rb b/spec/services/wiki_pages/base_service_spec.rb
index 4c44c195ac8..fede86a5192 100644
--- a/spec/services/wiki_pages/base_service_spec.rb
+++ b/spec/services/wiki_pages/base_service_spec.rb
@@ -10,7 +10,7 @@ describe WikiPages::BaseService do
counter = Gitlab::UsageDataCounters::WikiPageCounter
error = counter::UnknownEvent
- let(:subject) { bad_service_class.new(project, user, {}) }
+ let(:subject) { bad_service_class.new(container: project, current_user: user) }
context 'the class implements usage_counter_action incorrectly' do
let(:bad_service_class) do
diff --git a/spec/services/wiki_pages/create_service_spec.rb b/spec/services/wiki_pages/create_service_spec.rb
index d63d62e9492..2a17805110e 100644
--- a/spec/services/wiki_pages/create_service_spec.rb
+++ b/spec/services/wiki_pages/create_service_spec.rb
@@ -3,96 +3,5 @@
require 'spec_helper'
describe WikiPages::CreateService do
- let(:project) { create(:project, :wiki_repo) }
- let(:user) { create(:user) }
- let(:page_title) { 'Title' }
-
- let(:opts) do
- {
- title: page_title,
- content: 'Content for wiki page',
- format: 'markdown'
- }
- end
-
- subject(:service) { described_class.new(project, user, opts) }
-
- before do
- project.add_developer(user)
- end
-
- describe '#execute' do
- it 'creates wiki page with valid attributes' do
- page = service.execute
-
- expect(page).to be_valid
- expect(page.title).to eq(opts[:title])
- expect(page.content).to eq(opts[:content])
- expect(page.format).to eq(opts[:format].to_sym)
- end
-
- it 'executes webhooks' do
- expect(service).to receive(:execute_hooks).once.with(WikiPage)
-
- service.execute
- end
-
- it 'counts wiki page creation' do
- counter = Gitlab::UsageDataCounters::WikiPageCounter
-
- expect { service.execute }.to change { counter.read(:create) }.by 1
- end
-
- shared_examples 'correct event created' do
- it 'creates appropriate events' do
- expect { service.execute }.to change { Event.count }.by 1
-
- expect(Event.recent.first).to have_attributes(
- action: Event::CREATED,
- target: have_attributes(canonical_slug: page_title)
- )
- end
- end
-
- context 'the new page is at the top level' do
- let(:page_title) { 'root-level-page' }
-
- include_examples 'correct event created'
- end
-
- context 'the new page is in a subsection' do
- let(:page_title) { 'subsection/page' }
-
- include_examples 'correct event created'
- end
-
- context 'the feature is disabled' do
- before do
- stub_feature_flags(wiki_events: false)
- end
-
- it 'does not record the activity' do
- expect { service.execute }.not_to change(Event, :count)
- end
- end
-
- context 'when the options are bad' do
- let(:page_title) { '' }
-
- it 'does not count a creation event' do
- counter = Gitlab::UsageDataCounters::WikiPageCounter
-
- expect { service.execute }.not_to change { counter.read(:create) }
- end
-
- it 'does not record the activity' do
- expect { service.execute }.not_to change(Event, :count)
- end
-
- it 'reports the error' do
- expect(service.execute).to be_invalid
- .and have_attributes(errors: be_present)
- end
- end
- end
+ it_behaves_like 'WikiPages::CreateService#execute', :project
end
diff --git a/spec/services/wiki_pages/destroy_service_spec.rb b/spec/services/wiki_pages/destroy_service_spec.rb
index e205bedfdb9..b6fee1fd896 100644
--- a/spec/services/wiki_pages/destroy_service_spec.rb
+++ b/spec/services/wiki_pages/destroy_service_spec.rb
@@ -3,52 +3,5 @@
require 'spec_helper'
describe WikiPages::DestroyService do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:page) { create(:wiki_page) }
-
- subject(:service) { described_class.new(project, user) }
-
- before do
- project.add_developer(user)
- end
-
- describe '#execute' do
- it 'executes webhooks' do
- expect(service).to receive(:execute_hooks).once.with(page)
-
- service.execute(page)
- end
-
- it 'increments the delete count' do
- counter = Gitlab::UsageDataCounters::WikiPageCounter
-
- expect { service.execute(page) }.to change { counter.read(:delete) }.by 1
- end
-
- it 'creates a new wiki page deletion event' do
- expect { service.execute(page) }.to change { Event.count }.by 1
-
- expect(Event.recent.first).to have_attributes(
- action: Event::DESTROYED,
- target: have_attributes(canonical_slug: page.slug)
- )
- end
-
- it 'does not increment the delete count if the deletion failed' do
- counter = Gitlab::UsageDataCounters::WikiPageCounter
-
- expect { service.execute(nil) }.not_to change { counter.read(:delete) }
- end
- end
-
- context 'the feature is disabled' do
- before do
- stub_feature_flags(wiki_events: false)
- end
-
- it 'does not record the activity' do
- expect { service.execute(page) }.not_to change(Event, :count)
- end
- end
+ it_behaves_like 'WikiPages::DestroyService#execute', :project
end
diff --git a/spec/services/wiki_pages/event_create_service_spec.rb b/spec/services/wiki_pages/event_create_service_spec.rb
new file mode 100644
index 00000000000..cf971b0a02c
--- /dev/null
+++ b/spec/services/wiki_pages/event_create_service_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe WikiPages::EventCreateService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(user) }
+
+ describe '#execute' do
+ let_it_be(:page) { create(:wiki_page, project: project) }
+ let(:slug) { generate(:sluggified_title) }
+ let(:action) { Event::CREATED }
+ let(:response) { subject.execute(slug, page, action) }
+
+ context 'feature flag is not enabled' do
+ before do
+ stub_feature_flags(wiki_events: false)
+ end
+
+ it 'does not error' do
+ expect(response).to be_success
+ .and have_attributes(message: /No event created/)
+ end
+
+ it 'does not create an event' do
+ expect { response }.not_to change(Event, :count)
+ end
+ end
+
+ context 'the user is nil' do
+ subject { described_class.new(nil) }
+
+ it 'raises an error on construction' do
+ expect { subject }.to raise_error ArgumentError
+ end
+ end
+
+ context 'the action is illegal' do
+ let(:action) { Event::WIKI_ACTIONS.max + 1 }
+
+ it 'returns an error' do
+ expect(response).to be_error
+ end
+
+ it 'does not create an event' do
+ expect { response }.not_to change(Event, :count)
+ end
+
+ it 'does not create a metadata record' do
+ expect { response }.not_to change(WikiPage::Meta, :count)
+ end
+ end
+
+ it 'returns a successful response' do
+ expect(response).to be_success
+ end
+
+ context 'the action is a deletion' do
+ let(:action) { Event::DESTROYED }
+
+ it 'does not synchronize the wiki metadata timestamps with the git commit' do
+ expect_next_instance_of(WikiPage::Meta) do |instance|
+ expect(instance).not_to receive(:synch_times_with_page)
+ end
+
+ response
+ end
+ end
+
+ it 'creates a wiki page event' do
+ expect { response }.to change(Event, :count).by(1)
+ end
+
+ it 'returns an event in the payload' do
+ expect(response.payload).to include(event: have_attributes(author: user, wiki_page?: true, action: action))
+ end
+
+ it 'records the slug for the page' do
+ response
+ meta = WikiPage::Meta.find_or_create(page.slug, page)
+
+ expect(meta.slugs.pluck(:slug)).to include(slug)
+ end
+ end
+end
diff --git a/spec/services/wiki_pages/update_service_spec.rb b/spec/services/wiki_pages/update_service_spec.rb
index ece714ee8e5..ac629a96f9a 100644
--- a/spec/services/wiki_pages/update_service_spec.rb
+++ b/spec/services/wiki_pages/update_service_spec.rb
@@ -3,100 +3,5 @@
require 'spec_helper'
describe WikiPages::UpdateService do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:page) { create(:wiki_page) }
- let(:page_title) { 'New Title' }
-
- let(:opts) do
- {
- content: 'New content for wiki page',
- format: 'markdown',
- message: 'New wiki message',
- title: page_title
- }
- end
-
- subject(:service) { described_class.new(project, user, opts) }
-
- before do
- project.add_developer(user)
- end
-
- describe '#execute' do
- it 'updates the wiki page' do
- updated_page = service.execute(page)
-
- expect(updated_page).to be_valid
- expect(updated_page.message).to eq(opts[:message])
- expect(updated_page.content).to eq(opts[:content])
- expect(updated_page.format).to eq(opts[:format].to_sym)
- expect(updated_page.title).to eq(page_title)
- end
-
- it 'executes webhooks' do
- expect(service).to receive(:execute_hooks).once.with(WikiPage)
-
- service.execute(page)
- end
-
- it 'counts edit events' do
- counter = Gitlab::UsageDataCounters::WikiPageCounter
-
- expect { service.execute page }.to change { counter.read(:update) }.by 1
- end
-
- shared_examples 'adds activity event' do
- it 'adds a new wiki page activity event' do
- expect { service.execute(page) }.to change { Event.count }.by 1
-
- expect(Event.recent.first).to have_attributes(
- action: Event::UPDATED,
- wiki_page: page,
- target_title: page.title
- )
- end
- end
-
- context 'the page is at the top level' do
- let(:page_title) { 'Top level page' }
-
- include_examples 'adds activity event'
- end
-
- context 'the page is in a subsection' do
- let(:page_title) { 'Subsection / secondary page' }
-
- include_examples 'adds activity event'
- end
-
- context 'the feature is disabled' do
- before do
- stub_feature_flags(wiki_events: false)
- end
-
- it 'does not record the activity' do
- expect { service.execute(page) }.not_to change(Event, :count)
- end
- end
-
- context 'when the options are bad' do
- let(:page_title) { '' }
-
- it 'does not count an edit event' do
- counter = Gitlab::UsageDataCounters::WikiPageCounter
-
- expect { service.execute page }.not_to change { counter.read(:update) }
- end
-
- it 'does not record the activity' do
- expect { service.execute page }.not_to change(Event, :count)
- end
-
- it 'reports the error' do
- expect(service.execute(page)).to be_invalid
- .and have_attributes(errors: be_present)
- end
- end
- end
+ it_behaves_like 'WikiPages::UpdateService#execute', :project
end
diff --git a/spec/services/wikis/create_attachment_service_spec.rb b/spec/services/wikis/create_attachment_service_spec.rb
index 7a73a0a555f..4adfaa24874 100644
--- a/spec/services/wikis/create_attachment_service_spec.rb
+++ b/spec/services/wikis/create_attachment_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
describe Wikis::CreateAttachmentService do
- let(:project) { create(:project, :wiki_repo) }
+ let(:container) { create(:project, :wiki_repo) }
let(:user) { create(:user) }
let(:file_name) { 'filename.txt' }
let(:file_path_regex) { %r{#{described_class::ATTACHMENT_PATH}/\h{32}/#{file_name}} }
@@ -15,25 +15,21 @@ describe Wikis::CreateAttachmentService do
end
let(:opts) { file_opts }
- subject(:service) { described_class.new(project, user, opts) }
+ subject(:service) { described_class.new(container: container, current_user: user, params: opts) }
before do
- project.add_developer(user)
+ container.add_developer(user)
end
describe 'initialization' do
context 'author commit info' do
it 'does not raise error if user is nil' do
- service = described_class.new(project, nil, opts)
+ service = described_class.new(container: container, current_user: nil, params: opts)
expect(service.instance_variable_get(:@author_email)).to be_nil
expect(service.instance_variable_get(:@author_name)).to be_nil
end
- it 'fills file_path from the repository uploads folder' do
- expect(service.instance_variable_get(:@file_path)).to match(file_path_regex)
- end
-
context 'when no author info provided' do
it 'fills author_email and author_name from current_user info' do
expect(service.instance_variable_get(:@author_email)).to eq user.email
@@ -73,7 +69,7 @@ describe Wikis::CreateAttachmentService do
context 'branch name' do
context 'when no branch provided' do
it 'sets the branch from the wiki default_branch' do
- expect(service.instance_variable_get(:@branch_name)).to eq project.wiki.default_branch
+ expect(service.instance_variable_get(:@branch_name)).to eq container.wiki.default_branch
end
end
@@ -151,7 +147,7 @@ describe Wikis::CreateAttachmentService do
context 'when user' do
shared_examples 'wiki attachment user validations' do
it 'returns error' do
- result = described_class.new(project, user2, opts).execute
+ result = described_class.new(container: container, current_user: user2, params: opts).execute
expect(result[:status]).to eq :error
expect(result[:message]).to eq 'You are not allowed to push to the wiki'
@@ -172,54 +168,5 @@ describe Wikis::CreateAttachmentService do
end
end
- describe '#execute' do
- let(:wiki) { project.wiki }
-
- subject(:service_execute) { service.execute[:result] }
-
- context 'creates branch if it does not exists' do
- let(:branch_name) { 'new_branch' }
- let(:opts) { file_opts.merge(branch_name: branch_name) }
-
- it do
- expect(wiki.repository.branches).to be_empty
- expect { service.execute }.to change { wiki.repository.branches.count }.by(1)
- expect(wiki.repository.branches.first.name).to eq branch_name
- end
- end
-
- it 'adds file to the repository' do
- expect(wiki.repository.ls_files('HEAD')).to be_empty
-
- service.execute
-
- files = wiki.repository.ls_files('HEAD')
- expect(files.count).to eq 1
- expect(files.first).to match(file_path_regex)
- end
-
- context 'returns' do
- before do
- allow(SecureRandom).to receive(:hex).and_return('fixed_hex')
-
- service_execute
- end
-
- it 'returns the file name' do
- expect(service_execute[:file_name]).to eq file_name
- end
-
- it 'returns the path where file was stored' do
- expect(service_execute[:file_path]).to eq 'uploads/fixed_hex/filename.txt'
- end
-
- it 'returns the branch where the file was pushed' do
- expect(service_execute[:branch]).to eq wiki.default_branch
- end
-
- it 'returns the commit id' do
- expect(service_execute[:commit]).not_to be_empty
- end
- end
- end
+ it_behaves_like 'Wikis::CreateAttachmentService#execute', :project
end