Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-02-08 18:08:59 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2023-02-08 18:08:59 +0300
commitc6c5dd8848b78528d7ad7f044a0c95be629d372e (patch)
tree261577e229ade85472353eb5b380c1e4fed9bc60 /spec
parentd0aeb5df3d6b06165355b023a25b79c7bd74a27d (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--spec/factories/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation.rb9
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js70
-rw-r--r--spec/lib/api/ci/helpers/runner_helpers_spec.rb7
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb72
-rw-r--r--spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb65
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb11
-rw-r--r--spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb336
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb13
-rw-r--r--spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb34
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_base_spec.rb88
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_creator_spec.rb38
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb38
-rw-r--r--spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb36
-rw-r--r--spec/lib/gitlab/database/async_indexes_spec.rb57
-rw-r--r--spec/lib/gitlab/database/load_balancing/sticking_spec.rb443
-rw-r--r--spec/lib/gitlab/redis/db_load_balancing_spec.rb52
-rw-r--r--spec/migrations/20210603222333_remove_builds_email_service_from_services_spec.rb24
-rw-r--r--spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb45
-rw-r--r--spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb22
-rw-r--r--spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb76
-rw-r--r--spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb67
-rw-r--r--spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb31
-rw-r--r--spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb66
-rw-r--r--spec/migrations/20210804150320_create_base_work_item_types_spec.rb43
-rw-r--r--spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb137
-rw-r--r--spec/migrations/20210811122206_update_external_project_bots_spec.rb25
-rw-r--r--spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb62
-rw-r--r--spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb21
-rw-r--r--spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb35
-rw-r--r--spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb88
-rw-r--r--spec/migrations/add_triggers_to_integrations_type_new_spec.rb77
-rw-r--r--spec/migrations/add_upvotes_count_index_to_issues_spec.rb22
-rw-r--r--spec/migrations/associate_existing_dast_builds_with_variables_spec.rb10
-rw-r--r--spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb108
-rw-r--r--spec/migrations/backfill_integrations_type_new_spec.rb38
-rw-r--r--spec/migrations/backfill_issues_upvotes_count_spec.rb35
-rw-r--r--spec/migrations/backfill_stage_event_hash_spec.rb103
-rw-r--r--spec/migrations/cleanup_remaining_orphan_invites_spec.rb37
-rw-r--r--spec/migrations/confirm_security_bot_spec.rb38
-rw-r--r--spec/migrations/disable_expiration_policies_linked_to_no_container_images_spec.rb46
-rw-r--r--spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb63
-rw-r--r--spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb42
-rw-r--r--spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb34
-rw-r--r--spec/migrations/orphaned_invite_tokens_cleanup_spec.rb50
-rw-r--r--spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb62
-rw-r--r--spec/migrations/remove_duplicate_dast_site_tokens_spec.rb53
-rw-r--r--spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb53
-rw-r--r--spec/migrations/rename_services_to_integrations_spec.rb255
-rw-r--r--spec/migrations/replace_external_wiki_triggers_spec.rb132
-rw-r--r--spec/migrations/reschedule_delete_orphaned_deployments_spec.rb74
-rw-r--r--spec/migrations/reset_job_token_scope_enabled_again_spec.rb25
-rw-r--r--spec/migrations/reset_job_token_scope_enabled_spec.rb25
-rw-r--r--spec/migrations/reset_severity_levels_to_new_default_spec.rb33
-rw-r--r--spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb10
-rw-r--r--spec/migrations/schedule_security_setting_creation_spec.rb58
-rw-r--r--spec/migrations/set_default_job_token_scope_true_spec.rb33
-rw-r--r--spec/migrations/steal_merge_request_diff_commit_users_migration_spec.rb29
-rw-r--r--spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb102
-rw-r--r--spec/models/ci/build_spec.rb9
-rw-r--r--spec/models/ci/pipeline_spec.rb318
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb28
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb4
-rw-r--r--spec/requests/api/project_attributes.yml1
-rw-r--r--spec/requests/api/projects_spec.rb2
-rw-r--r--spec/serializers/integrations/field_entity_spec.rb6
-rw-r--r--spec/services/ci/pipeline_schedule_service_spec.rb4
-rw-r--r--spec/services/ci/register_job_service_spec.rb1161
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb74
-rw-r--r--spec/views/notify/user_deactivated_email.html.haml_spec.rb56
-rw-r--r--spec/views/notify/user_deactivated_email.text.erb_spec.rb58
-rw-r--r--spec/workers/run_pipeline_schedule_worker_spec.rb24
84 files changed, 1919 insertions, 3610 deletions
diff --git a/spec/factories/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation.rb b/spec/factories/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation.rb
new file mode 100644
index 00000000000..a61b5cde7a0
--- /dev/null
+++ b/spec/factories/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :postgres_async_foreign_key_validation,
+ class: 'Gitlab::Database::AsyncForeignKeys::PostgresAsyncForeignKeyValidation' do
+ sequence(:name) { |n| "fk_users_id_#{n}" }
+ table_name { "users" }
+ end
+end
diff --git a/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js b/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
index 4a995e2fde1..d2dd4afe09e 100644
--- a/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
@@ -1,15 +1,10 @@
-import hljs from 'highlight.js/lib/core';
-import languageLoader from '~/content_editor/services/highlight_js_language_loader';
+import hljs from 'highlight.js';
import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
import { highlight } from '~/vue_shared/components/source_viewer/workers/highlight_utils';
+import { LINES_PER_CHUNK, NEWLINE } from '~/vue_shared/components/source_viewer/constants';
-jest.mock('highlight.js/lib/core', () => ({
- highlight: jest.fn().mockReturnValue({}),
- registerLanguage: jest.fn(),
-}));
-
-jest.mock('~/content_editor/services/highlight_js_language_loader', () => ({
- javascript: jest.fn().mockReturnValue({ default: jest.fn() }),
+jest.mock('highlight.js', () => ({
+ highlight: jest.fn().mockReturnValue({ value: 'highlighted content' }),
}));
jest.mock('~/vue_shared/components/source_viewer/plugins/index', () => ({
@@ -17,28 +12,61 @@ jest.mock('~/vue_shared/components/source_viewer/plugins/index', () => ({
}));
const fileType = 'text';
-const content = 'function test() { return true };';
+const rawContent = 'function test() { return true }; \n // newline';
+const highlightedContent = 'highlighted content';
const language = 'javascript';
describe('Highlight utility', () => {
- beforeEach(() => highlight(fileType, content, language));
-
- it('loads the language', () => {
- expect(languageLoader.javascript).toHaveBeenCalled();
- });
+ beforeEach(() => highlight(fileType, rawContent, language));
it('registers the plugins', () => {
expect(registerPlugins).toHaveBeenCalled();
});
- it('registers the language', () => {
- expect(hljs.registerLanguage).toHaveBeenCalledWith(
- language,
- languageLoader[language]().default,
+ it('highlights the content', () => {
+ expect(hljs.highlight).toHaveBeenCalledWith(rawContent, { language });
+ });
+
+ it('splits the content into chunks', () => {
+ const contentArray = Array.from({ length: 140 }, () => 'newline'); // simulate 140 lines of code
+
+ const chunks = [
+ {
+ language,
+ highlightedContent,
+ rawContent: contentArray.slice(0, 70).join(NEWLINE), // first 70 lines
+ startingFrom: 0,
+ totalLines: LINES_PER_CHUNK,
+ },
+ {
+ language,
+ highlightedContent: '',
+ rawContent: contentArray.slice(70, 140).join(NEWLINE), // last 70 lines
+ startingFrom: 70,
+ totalLines: LINES_PER_CHUNK,
+ },
+ ];
+
+ expect(highlight(fileType, contentArray.join(NEWLINE), language)).toEqual(
+ expect.arrayContaining(chunks),
);
});
+});
- it('highlights the content', () => {
- expect(hljs.highlight).toHaveBeenCalledWith(content, { language });
+describe('unsupported languages', () => {
+ const unsupportedLanguage = 'some_unsupported_language';
+
+ beforeEach(() => highlight(fileType, rawContent, unsupportedLanguage));
+
+ it('does not register plugins', () => {
+ expect(registerPlugins).not.toHaveBeenCalled();
+ });
+
+ it('does not attempt to highlight the content', () => {
+ expect(hljs.highlight).not.toHaveBeenCalled();
+ });
+
+ it('does not return a result', () => {
+ expect(highlight(fileType, rawContent, unsupportedLanguage)).toBe(undefined);
});
});
diff --git a/spec/lib/api/ci/helpers/runner_helpers_spec.rb b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
index d32f7e4f0be..c36c8d23e88 100644
--- a/spec/lib/api/ci/helpers/runner_helpers_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
@@ -34,6 +34,7 @@ RSpec.describe API::Ci::Helpers::Runner, feature_category: :runner do
context 'when runner info is present' do
let(:name) { 'runner' }
+ let(:system_id) { 's_c2d22f638c25' }
let(:version) { '1.2.3' }
let(:revision) { '10.0' }
let(:platform) { 'test' }
@@ -42,6 +43,7 @@ RSpec.describe API::Ci::Helpers::Runner, feature_category: :runner do
let(:config) { { 'gpus' => 'all' } }
let(:runner_params) do
{
+ system_id: system_id,
'info' =>
{
'name' => name,
@@ -59,7 +61,10 @@ RSpec.describe API::Ci::Helpers::Runner, feature_category: :runner do
subject(:details) { runner_helper.get_runner_details_from_request }
it 'extracts the runner details', :aggregate_failures do
- expect(details.keys).to match_array(%w(name version revision platform architecture executor config ip_address))
+ expect(details.keys).to match_array(
+ %w(system_id name version revision platform architecture executor config ip_address)
+ )
+ expect(details['system_id']).to eq(system_id)
expect(details['name']).to eq(name)
expect(details['version']).to eq(version)
expect(details['revision']).to eq(revision)
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index 6801d16d13e..b5046bb9983 100644
--- a/spec/lib/api/ci/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe API::Ci::Helpers::Runner do
allow(helper).to receive(:env).and_return({})
end
- describe '#current_job' do
+ describe '#current_job', feature_category: :continuous_integration do
let(:build) { create(:ci_build, :running) }
it 'handles sticking of a build when a build ID is specified' do
@@ -38,7 +38,7 @@ RSpec.describe API::Ci::Helpers::Runner do
end
end
- describe '#current_runner' do
+ describe '#current_runner', feature_category: :runner do
let(:runner) { create(:ci_runner, token: 'foo') }
it 'handles sticking of a runner if a token is specified' do
@@ -67,7 +67,73 @@ RSpec.describe API::Ci::Helpers::Runner do
end
end
- describe '#track_runner_authentication', :prometheus do
+ describe '#current_runner_machine', :freeze_time, feature_category: :runner_fleet do
+ let(:runner) { create(:ci_runner, token: 'foo') }
+ let(:runner_machine) { create(:ci_runner_machine, runner: runner, machine_xid: 'bar', contacted_at: 1.hour.ago) }
+
+ subject(:current_runner_machine) { helper.current_runner_machine }
+
+ context 'with create_runner_machine FF enabled' do
+ before do
+ stub_feature_flags(create_runner_machine: true)
+ end
+
+ it 'does not return runner machine if no system_id specified' do
+ allow(helper).to receive(:params).and_return(token: runner.token)
+
+ is_expected.to be_nil
+ end
+
+ context 'when runner machine already exists' do
+ before do
+ allow(helper).to receive(:params).and_return(token: runner.token, system_id: runner_machine.machine_xid)
+ end
+
+ it { is_expected.to eq(runner_machine) }
+
+ it 'does not update the contacted_at field' do
+ expect(current_runner_machine.contacted_at).to eq 1.hour.ago
+ end
+ end
+
+ it 'creates a new runner machine if one could be not be found', :aggregate_failures do
+ allow(helper).to receive(:params).and_return(token: runner.token, system_id: 'new_system_id')
+
+ expect { current_runner_machine }.to change { Ci::RunnerMachine.count }.by(1)
+
+ expect(current_runner_machine).not_to be_nil
+ expect(current_runner_machine.machine_xid).to eq('new_system_id')
+ expect(current_runner_machine.contacted_at).to eq(Time.current)
+ expect(current_runner_machine.runner).to eq(runner)
+ end
+ end
+
+ context 'with create_runner_machine FF disabled' do
+ before do
+ stub_feature_flags(create_runner_machine: false)
+ end
+
+ it 'does not return runner machine if no system_id specified' do
+ allow(helper).to receive(:params).and_return(token: runner.token)
+
+ is_expected.to be_nil
+ end
+
+ context 'when runner machine can not be found' do
+ before do
+ allow(helper).to receive(:params).and_return(token: runner.token, system_id: 'new_system_id')
+ end
+
+ it 'does not create a new runner machine', :aggregate_failures do
+ expect { current_runner_machine }.not_to change { Ci::RunnerMachine.count }
+
+ expect(current_runner_machine).to be_nil
+ end
+ end
+ end
+ end
+
+ describe '#track_runner_authentication', :prometheus, feature_category: :runner do
subject { helper.track_runner_authentication }
let(:runner) { create(:ci_runner, token: 'foo') }
diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
deleted file mode 100644
index 96adea03d43..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20210602155110 do
- let!(:jira_integration_temp) { described_class::JiraServiceTemp }
- let!(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp }
- let!(:atlassian_host) { 'https://api.atlassian.net' }
- let!(:mixedcase_host) { 'https://api.AtlassiaN.nEt' }
- let!(:server_host) { 'https://my.server.net' }
-
- let(:jira_integration) { jira_integration_temp.create!(type: 'JiraService', active: true, category: 'issue_tracker') }
-
- subject { described_class.new }
-
- def create_tracker_data(options = {})
- jira_tracker_data_temp.create!({ service_id: jira_integration.id }.merge(options))
- end
-
- describe '#perform' do
- context do
- it 'ignores if deployment already set' do
- tracker_data = create_tracker_data(url: atlassian_host, deployment_type: 'server')
-
- expect(subject).not_to receive(:collect_deployment_type)
-
- subject.perform(tracker_data.id, tracker_data.id)
-
- expect(tracker_data.reload.deployment_type).to eq 'server'
- end
-
- it 'ignores if no url is set' do
- tracker_data = create_tracker_data(deployment_type: 'unknown')
-
- expect(subject).to receive(:collect_deployment_type)
-
- subject.perform(tracker_data.id, tracker_data.id)
-
- expect(tracker_data.reload.deployment_type).to eq 'unknown'
- end
- end
-
- context 'when tracker is valid' do
- let!(:tracker_1) { create_tracker_data(url: atlassian_host, deployment_type: 0) }
- let!(:tracker_2) { create_tracker_data(url: mixedcase_host, deployment_type: 0) }
- let!(:tracker_3) { create_tracker_data(url: server_host, deployment_type: 0) }
- let!(:tracker_4) { create_tracker_data(api_url: server_host, deployment_type: 0) }
- let!(:tracker_nextbatch) { create_tracker_data(api_url: atlassian_host, deployment_type: 0) }
-
- it 'sets the proper deployment_type', :aggregate_failures do
- subject.perform(tracker_1.id, tracker_4.id)
-
- expect(tracker_1.reload.deployment_cloud?).to be_truthy
- expect(tracker_2.reload.deployment_cloud?).to be_truthy
- expect(tracker_3.reload.deployment_server?).to be_truthy
- expect(tracker_4.reload.deployment_server?).to be_truthy
- expect(tracker_nextbatch.reload.deployment_unknown?).to be_truthy
- end
- end
-
- it_behaves_like 'marks background migration job records' do
- let(:arguments) { [1, 4] }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
index 15956d2ea80..876eb070745 100644
--- a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210602155110 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210826171758 do
let(:namespaces_table) { table(:namespaces) }
let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) }
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
index 019c6d54068..ad9b54608c6 100644
--- a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210602155110 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210826171758 do
let(:namespaces_table) { table(:namespaces) }
let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) }
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index 456d0165fe6..80fd86e90bb 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 20210602155110,
+RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 20210826171758,
feature_category: :source_code_management do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb
index b084e3fe885..7142aea3ab2 100644
--- a/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillUpvotesCountOnIssues, schema: 20210701111909 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillUpvotesCountOnIssues, schema: 20210826171758 do
let(:award_emoji) { table(:award_emoji) }
let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
index 0d9d9eb929c..5ffe665f0ad 100644
--- a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, schema: 20210602155110 do
+RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, schema: 20210826171758 do
let(:lfs_objects_projects) { table(:lfs_objects_projects) }
let(:lfs_objects) { table(:lfs_objects) }
let(:projects) { table(:projects) }
diff --git a/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb
index c4039b85459..8f058c875a2 100644
--- a/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb
+++ b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migration, schema: 20210617161348 do
+RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migration, schema: 20210826171758 do
let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) }
@@ -10,17 +10,14 @@ RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migratio
before do
create_deployment!(environment.id, project.id)
- create_deployment!(non_existing_record_id, project.id)
end
it 'deletes only orphaned deployments' do
expect(valid_deployments.pluck(:id)).not_to be_empty
- expect(orphaned_deployments.pluck(:id)).not_to be_empty
subject.perform(table(:deployments).minimum(:id), table(:deployments).maximum(:id))
expect(valid_deployments.pluck(:id)).not_to be_empty
- expect(orphaned_deployments.pluck(:id)).to be_empty
end
it 'marks jobs as done' do
@@ -29,15 +26,9 @@ RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migratio
arguments: [table(:deployments).minimum(:id), table(:deployments).minimum(:id)]
)
- second_job = background_migration_jobs.create!(
- class_name: 'DeleteOrphanedDeployments',
- arguments: [table(:deployments).maximum(:id), table(:deployments).maximum(:id)]
- )
-
subject.perform(table(:deployments).minimum(:id), table(:deployments).minimum(:id))
expect(first_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:succeeded])
- expect(second_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:pending])
end
private
diff --git a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
index 66e16b16270..8f3ef44e00c 100644
--- a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
+++ b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210602155110 do
+RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210826171758 do
let!(:background_migration_jobs) { table(:background_migration_jobs) }
let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let!(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb
index 51a09d50a19..586e75ffb37 100644
--- a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb
+++ b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTable,
- :suppress_gitlab_schemas_validate_connection, schema: 20210730104800 do
+ :suppress_gitlab_schemas_validate_connection, schema: 20210826171758 do
it 'correctly extracts project topics into separate table' do
namespaces = table(:namespaces)
projects = table(:projects)
diff --git a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
index 4d7c836cff4..b252df4ecff 100644
--- a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics,
- :suppress_gitlab_schemas_validate_connection, schema: 20210602155110 do
+ :suppress_gitlab_schemas_validate_connection, schema: 20210826171758 do
it 'correctly migrates project taggings context from tags to topics' do
taggings = table(:taggings)
diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
index fe45eaac3b7..08fde0d0ff4 100644
--- a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'webauthn/u2f_migrator'
-RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210602155110 do
+RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210826171758 do
let(:users) { table(:users) }
let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) }
diff --git a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
index cafddb6aeaf..71cf58a933f 100644
--- a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
+++ b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210602155110 do
+RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210826171758 do
let(:enabled) { 20 }
let(:disabled) { 0 }
diff --git a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
index 7261758e010..b8c3bf8f3ac 100644
--- a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210602155110 do
+RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210826171758 do
let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
diff --git a/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb b/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb
index 4599491b580..f16ae489b78 100644
--- a/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::UpdateUsersWhereTwoFactorAuthRequiredFromGroup, :migration, schema: 20210602155110 do
+RSpec.describe Gitlab::BackgroundMigration::UpdateUsersWhereTwoFactorAuthRequiredFromGroup, :migration, schema: 20210826171758 do
include MigrationHelpers::NamespacesHelpers
let(:group_with_2fa_parent) { create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true) }
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
new file mode 100644
index 00000000000..a5365ae53b8
--- /dev/null
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -0,0 +1,336 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :pipeline_authoring do
+ let_it_be(:project) { create_default(:project, :repository, create_tag: 'test').freeze }
+ let_it_be(:user) { create(:user) }
+
+ let(:pipeline) { build(:ci_empty_pipeline, :created, project: project) }
+
+ describe '#predefined_variables' do
+ subject { described_class.new(pipeline).predefined_variables }
+
+ it 'includes all predefined variables in a valid order' do
+ keys = subject.pluck(:key)
+
+ expect(keys).to contain_exactly(*%w[
+ CI_PIPELINE_IID
+ CI_PIPELINE_SOURCE
+ CI_PIPELINE_CREATED_AT
+ CI_COMMIT_SHA
+ CI_COMMIT_SHORT_SHA
+ CI_COMMIT_BEFORE_SHA
+ CI_COMMIT_REF_NAME
+ CI_COMMIT_REF_SLUG
+ CI_COMMIT_BRANCH
+ CI_COMMIT_MESSAGE
+ CI_COMMIT_TITLE
+ CI_COMMIT_DESCRIPTION
+ CI_COMMIT_REF_PROTECTED
+ CI_COMMIT_TIMESTAMP
+ CI_COMMIT_AUTHOR
+ CI_BUILD_REF
+ CI_BUILD_BEFORE_SHA
+ CI_BUILD_REF_NAME
+ CI_BUILD_REF_SLUG
+ ])
+ end
+
+ context 'when the pipeline is running for a tag' do
+ let(:pipeline) { build(:ci_empty_pipeline, :created, project: project, ref: 'test', tag: true) }
+
+ it 'includes all predefined variables in a valid order' do
+ keys = subject.pluck(:key)
+
+ expect(keys).to contain_exactly(*%w[
+ CI_PIPELINE_IID
+ CI_PIPELINE_SOURCE
+ CI_PIPELINE_CREATED_AT
+ CI_COMMIT_SHA
+ CI_COMMIT_SHORT_SHA
+ CI_COMMIT_BEFORE_SHA
+ CI_COMMIT_REF_NAME
+ CI_COMMIT_REF_SLUG
+ CI_COMMIT_MESSAGE
+ CI_COMMIT_TITLE
+ CI_COMMIT_DESCRIPTION
+ CI_COMMIT_REF_PROTECTED
+ CI_COMMIT_TIMESTAMP
+ CI_COMMIT_AUTHOR
+ CI_BUILD_REF
+ CI_BUILD_BEFORE_SHA
+ CI_BUILD_REF_NAME
+ CI_BUILD_REF_SLUG
+ CI_COMMIT_TAG
+ CI_COMMIT_TAG_MESSAGE
+ CI_BUILD_TAG
+ ])
+ end
+ end
+
+ context 'when merge request is present' do
+ let_it_be(:assignees) { create_list(:user, 2) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:labels) { create_list(:label, 2) }
+
+ let(:merge_request) do
+ create(:merge_request, :simple,
+ source_project: project,
+ target_project: project,
+ assignees: assignees,
+ milestone: milestone,
+ labels: labels)
+ end
+
+ context 'when pipeline for merge request is created' do
+ let(:pipeline) do
+ create(:ci_pipeline, :detached_merge_request_pipeline,
+ ci_ref_presence: false,
+ user: user,
+ merge_request: merge_request)
+ end
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'exposes merge request pipeline variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
+ 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
+ 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
+ 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED' => ProtectedBranch.protected?(
+ merge_request.target_project,
+ merge_request.target_branch
+ ).to_s,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => '',
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => '',
+ 'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
+ 'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
+ 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
+ 'CI_MERGE_REQUEST_EVENT_TYPE' => 'detached',
+ 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true))
+ end
+
+ it 'exposes diff variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DIFF_ID' => merge_request.merge_request_diff.id.to_s,
+ 'CI_MERGE_REQUEST_DIFF_BASE_SHA' => merge_request.merge_request_diff.base_commit_sha)
+ end
+
+ context 'without assignee' do
+ let(:assignees) { [] }
+
+ it 'does not expose assignee variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_ASSIGNEES')
+ end
+ end
+
+ context 'without milestone' do
+ let(:milestone) { nil }
+
+ it 'does not expose milestone variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_MILESTONE')
+ end
+ end
+
+ context 'without labels' do
+ let(:labels) { [] }
+
+ it 'does not expose labels variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_LABELS')
+ end
+ end
+ end
+
+ context 'when pipeline on branch is created' do
+ let(:pipeline) do
+ create(:ci_pipeline, project: project, user: user, ref: 'feature')
+ end
+
+ context 'when a merge request is created' do
+ before do
+ merge_request
+ end
+
+ context 'when user has access to project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'merge request references are returned matching the pipeline' do
+ expect(subject.to_hash).to include(
+ 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true))
+ end
+ end
+
+ context 'when user does not have access to project' do
+ it 'CI_OPEN_MERGE_REQUESTS is not returned' do
+ expect(subject.to_hash).not_to have_key('CI_OPEN_MERGE_REQUESTS')
+ end
+ end
+ end
+
+      context 'when no merge request is created' do
+ it 'CI_OPEN_MERGE_REQUESTS is not returned' do
+ expect(subject.to_hash).not_to have_key('CI_OPEN_MERGE_REQUESTS')
+ end
+ end
+ end
+
+ context 'with merged results' do
+ let(:pipeline) do
+ create(:ci_pipeline, :merged_result_pipeline, merge_request: merge_request)
+ end
+
+ it 'exposes merge request pipeline variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
+ 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
+ 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
+ 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED' => ProtectedBranch.protected?(
+ merge_request.target_project,
+ merge_request.target_branch
+ ).to_s,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => merge_request.target_branch_sha,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => merge_request.source_branch_sha,
+ 'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
+ 'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
+ 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
+ 'CI_MERGE_REQUEST_EVENT_TYPE' => 'merged_result')
+ end
+
+ it 'exposes diff variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DIFF_ID' => merge_request.merge_request_diff.id.to_s,
+ 'CI_MERGE_REQUEST_DIFF_BASE_SHA' => merge_request.merge_request_diff.base_commit_sha)
+ end
+ end
+ end
+
+ context 'when source is external pull request' do
+ let(:pipeline) do
+ create(:ci_pipeline, source: :external_pull_request_event, external_pull_request: pull_request)
+ end
+
+ let(:pull_request) { create(:external_pull_request, project: project) }
+
+ it 'exposes external pull request pipeline variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_EXTERNAL_PULL_REQUEST_IID' => pull_request.pull_request_iid.to_s,
+ 'CI_EXTERNAL_PULL_REQUEST_SOURCE_REPOSITORY' => pull_request.source_repository,
+ 'CI_EXTERNAL_PULL_REQUEST_TARGET_REPOSITORY' => pull_request.target_repository,
+ 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_SHA' => pull_request.source_sha,
+ 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA' => pull_request.target_sha,
+ 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME' => pull_request.source_branch,
+ 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_NAME' => pull_request.target_branch
+ )
+ end
+ end
+
+ describe 'variable CI_KUBERNETES_ACTIVE' do
+ context 'when pipeline.has_kubernetes_active? is true' do
+ before do
+ allow(pipeline).to receive(:has_kubernetes_active?).and_return(true)
+ end
+
+ it "is included with value 'true'" do
+ expect(subject.to_hash).to include('CI_KUBERNETES_ACTIVE' => 'true')
+ end
+ end
+
+ context 'when pipeline.has_kubernetes_active? is false' do
+ before do
+ allow(pipeline).to receive(:has_kubernetes_active?).and_return(false)
+ end
+
+ it 'is not included' do
+ expect(subject.to_hash).not_to have_key('CI_KUBERNETES_ACTIVE')
+ end
+ end
+ end
+
+ describe 'variable CI_GITLAB_FIPS_MODE' do
+ context 'when FIPS flag is enabled' do
+ before do
+ allow(Gitlab::FIPS).to receive(:enabled?).and_return(true)
+ end
+
+ it "is included with value 'true'" do
+ expect(subject.to_hash).to include('CI_GITLAB_FIPS_MODE' => 'true')
+ end
+ end
+
+ context 'when FIPS flag is disabled' do
+ before do
+ allow(Gitlab::FIPS).to receive(:enabled?).and_return(false)
+ end
+
+ it 'is not included' do
+ expect(subject.to_hash).not_to have_key('CI_GITLAB_FIPS_MODE')
+ end
+ end
+ end
+
+ context 'when tag is not found' do
+ let(:pipeline) do
+ create(:ci_pipeline, project: project, ref: 'not_found_tag', tag: true)
+ end
+
+ it 'does not expose tag variables' do
+ expect(subject.to_hash.keys)
+ .not_to include(
+ 'CI_COMMIT_TAG',
+ 'CI_COMMIT_TAG_MESSAGE',
+ 'CI_BUILD_TAG'
+ )
+ end
+ end
+
+ context 'without a commit' do
+ let(:pipeline) { build(:ci_empty_pipeline, :created, sha: nil) }
+
+ it 'does not expose commit variables' do
+ expect(subject.to_hash.keys)
+ .not_to include(
+ 'CI_COMMIT_SHA',
+ 'CI_COMMIT_SHORT_SHA',
+ 'CI_COMMIT_BEFORE_SHA',
+ 'CI_COMMIT_REF_NAME',
+ 'CI_COMMIT_REF_SLUG',
+ 'CI_COMMIT_BRANCH',
+ 'CI_COMMIT_TAG',
+ 'CI_COMMIT_MESSAGE',
+ 'CI_COMMIT_TITLE',
+ 'CI_COMMIT_DESCRIPTION',
+ 'CI_COMMIT_REF_PROTECTED',
+ 'CI_COMMIT_TIMESTAMP',
+ 'CI_COMMIT_AUTHOR')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 5aa752ee429..bbd3dc54e6a 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -166,8 +166,14 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
end
before do
+ pipeline_variables_builder = double(
+ ::Gitlab::Ci::Variables::Builder::Pipeline,
+ predefined_variables: [var('C', 3), var('D', 3)]
+ )
+
allow(builder).to receive(:predefined_variables) { [var('A', 1), var('B', 1)] }
allow(pipeline.project).to receive(:predefined_variables) { [var('B', 2), var('C', 2)] }
+ allow(builder).to receive(:pipeline_variables_builder) { pipeline_variables_builder }
allow(pipeline).to receive(:predefined_variables) { [var('C', 3), var('D', 3)] }
allow(job).to receive(:runner) { double(predefined_variables: [var('D', 4), var('E', 4)]) }
allow(builder).to receive(:kubernetes_variables) { [var('E', 5), var('F', 5)] }
@@ -635,8 +641,13 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
end
before do
+ pipeline_variables_builder = double(
+ ::Gitlab::Ci::Variables::Builder::Pipeline,
+ predefined_variables: [var('B', 2), var('C', 2)]
+ )
+
allow(pipeline.project).to receive(:predefined_variables) { [var('A', 1), var('B', 1)] }
- allow(pipeline).to receive(:predefined_variables) { [var('B', 2), var('C', 2)] }
+ allow(builder).to receive(:pipeline_variables_builder) { pipeline_variables_builder }
allow(builder).to receive(:secret_instance_variables) { [var('C', 3), var('D', 3)] }
allow(builder).to receive(:secret_group_variables) { [var('D', 4), var('E', 4)] }
allow(builder).to receive(:secret_project_variables) { [var('E', 5), var('F', 5)] }
diff --git a/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb b/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb
new file mode 100644
index 00000000000..23313387400
--- /dev/null
+++ b/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::AsyncForeignKeys::PostgresAsyncForeignKeyValidation, type: :model,
+ feature_category: :database do
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
+ describe 'validations' do
+ let_it_be(:fk_validation) { create(:postgres_async_foreign_key_validation) }
+ let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH }
+ let(:last_error_limit) { described_class::MAX_LAST_ERROR_LENGTH }
+
+ subject { fk_validation }
+
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_uniqueness_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) }
+ it { is_expected.to validate_presence_of(:table_name) }
+ it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) }
+ it { is_expected.to validate_length_of(:last_error).is_at_most(last_error_limit) }
+ end
+
+ describe 'scopes' do
+ let!(:failed_validation) { create(:postgres_async_foreign_key_validation, attempts: 1) }
+ let!(:new_validation) { create(:postgres_async_foreign_key_validation) }
+
+ describe '.ordered' do
+ subject { described_class.ordered }
+
+ it { is_expected.to eq([new_validation, failed_validation]) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/async_indexes/index_base_spec.rb b/spec/lib/gitlab/database/async_indexes/index_base_spec.rb
new file mode 100644
index 00000000000..a4c9e2c2880
--- /dev/null
+++ b/spec/lib/gitlab/database/async_indexes/index_base_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::AsyncIndexes::IndexBase, feature_category: :database do
+ include ExclusiveLeaseHelpers
+
+ describe '#perform' do
+ subject { described_class.new(async_index) }
+
+ let(:async_index) { create(:postgres_async_index) }
+
+ let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] }
+ let(:connection) { model.connection }
+
+ let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
+ let(:lease_key) { "gitlab/database/indexing/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ describe '#preconditions_met?' do
+ it 'raises errors if preconditions is not defined' do
+ expect { subject.perform }.to raise_error NotImplementedError, 'must implement preconditions_met?'
+ end
+ end
+
+ describe '#action_type' do
+ before do
+ allow(subject).to receive(:preconditions_met?).and_return(true)
+ end
+
+ it 'raises errors if action_type is not defined' do
+ expect { subject.perform }.to raise_error NotImplementedError, 'must implement action_type'
+ end
+ end
+
+ context 'with error handling' do
+ before do
+ allow(subject).to receive(:preconditions_met?).and_return(true)
+ allow(subject).to receive(:action_type).and_return('test')
+ allow(async_index.connection).to receive(:execute).and_call_original
+
+ allow(async_index.connection)
+ .to receive(:execute)
+ .with(async_index.definition)
+ .and_raise(ActiveRecord::StatementInvalid)
+ end
+
+ context 'on production' do
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false)
+ end
+
+ it 'increases execution attempts' do
+ expect { subject.perform }.to change { async_index.attempts }.by(1)
+
+ expect(async_index.last_error).to be_present
+ expect(async_index).not_to be_destroyed
+ end
+
+ it 'logs an error message including the index_name' do
+ expect(Gitlab::AppLogger)
+ .to receive(:error)
+ .with(a_hash_including(:message, :index_name))
+ .and_call_original
+
+ subject.perform
+ end
+ end
+
+ context 'on development' do
+ it 'also raises errors' do
+ expect { subject.perform }
+ .to raise_error(ActiveRecord::StatementInvalid)
+ .and change { async_index.attempts }.by(1)
+
+ expect(async_index.last_error).to be_present
+ expect(async_index).not_to be_destroyed
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
index 207aedd1a38..f980e458f14 100644
--- a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
+RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator, feature_category: :database do
include ExclusiveLeaseHelpers
describe '#perform' do
@@ -35,6 +35,24 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
subject.perform
end
+
+ it 'removes the index preparation record from postgres_async_indexes' do
+ expect(async_index).to receive(:destroy!).and_call_original
+
+ expect { subject.perform }.to change { index_model.count }.by(-1)
+ end
+
+ it 'logs an appropriate message' do
+ expected_message = 'Skipping index creation since preconditions are not met. The queuing entry will be deleted'
+
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
+
+ subject.perform
+
+ expect(Gitlab::AppLogger)
+ .to have_received(:info)
+ .with(a_hash_including(message: expected_message))
+ end
end
it 'creates the index while controlling statement timeout' do
@@ -47,7 +65,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
end
it 'removes the index preparation record from postgres_async_indexes' do
- expect(async_index).to receive(:destroy).and_call_original
+ expect(async_index).to receive(:destroy!).and_call_original
expect { subject.perform }.to change { index_model.count }.by(-1)
end
@@ -55,9 +73,23 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
it 'skips logic if not able to acquire exclusive lease' do
expect(lease).to receive(:try_obtain).ordered.and_return(false)
expect(connection).not_to receive(:execute).with(/CREATE INDEX/)
- expect(async_index).not_to receive(:destroy)
+ expect(async_index).not_to receive(:destroy!)
expect { subject.perform }.not_to change { index_model.count }
end
+
+ it 'logs messages around execution' do
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
+
+ subject.perform
+
+ expect(Gitlab::AppLogger)
+ .to have_received(:info)
+ .with(a_hash_including(message: 'Starting async index creation'))
+
+ expect(Gitlab::AppLogger)
+ .to have_received(:info)
+ .with(a_hash_including(message: 'Finished async index creation'))
+ end
end
end
diff --git a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
index 11039ad4f7e..c70acf53dc2 100644
--- a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do
+RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor, feature_category: :database do
include ExclusiveLeaseHelpers
describe '#perform' do
@@ -39,6 +39,24 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do
subject.perform
end
+
+ it 'removes the index preparation record from postgres_async_indexes' do
+ expect(async_index).to receive(:destroy!).and_call_original
+
+ expect { subject.perform }.to change { index_model.count }.by(-1)
+ end
+
+ it 'logs an appropriate message' do
+ expected_message = 'Skipping index removal since preconditions are not met. The queuing entry will be deleted'
+
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
+
+ subject.perform
+
+ expect(Gitlab::AppLogger)
+ .to have_received(:info)
+ .with(a_hash_including(message: expected_message))
+ end
end
it 'creates the index while controlling lock timeout' do
@@ -53,7 +71,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do
end
it 'removes the index preparation record from postgres_async_indexes' do
- expect(async_index).to receive(:destroy).and_call_original
+ expect(async_index).to receive(:destroy!).and_call_original
expect { subject.perform }.to change { index_model.count }.by(-1)
end
@@ -61,9 +79,23 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do
it 'skips logic if not able to acquire exclusive lease' do
expect(lease).to receive(:try_obtain).ordered.and_return(false)
expect(connection).not_to receive(:execute).with(/DROP INDEX/)
- expect(async_index).not_to receive(:destroy)
+ expect(async_index).not_to receive(:destroy!)
expect { subject.perform }.not_to change { index_model.count }
end
+
+ it 'logs messages around execution' do
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
+
+ subject.perform
+
+ expect(Gitlab::AppLogger)
+ .to have_received(:info)
+ .with(a_hash_including(message: 'Starting async index removal'))
+
+ expect(Gitlab::AppLogger)
+ .to have_received(:info)
+ .with(a_hash_including(message: 'Finished async index removal'))
+ end
end
end
diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
index 52f5e37eff2..7c5c368fcb5 100644
--- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
+RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers, feature_category: :database do
let(:migration) { ActiveRecord::Migration.new.extend(described_class) }
let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
let(:connection) { ApplicationRecord.connection }
diff --git a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
index 806d57af4b3..5e9d4f78a4a 100644
--- a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
@@ -2,12 +2,13 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model do
+RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model, feature_category: :database do
it { is_expected.to be_a Gitlab::Database::SharedModel }
describe 'validations' do
let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH }
let(:definition_limit) { described_class::MAX_DEFINITION_LENGTH }
+ let(:last_error_limit) { described_class::MAX_LAST_ERROR_LENGTH }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) }
@@ -15,11 +16,12 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model
it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) }
it { is_expected.to validate_presence_of(:definition) }
it { is_expected.to validate_length_of(:definition).is_at_most(definition_limit) }
+ it { is_expected.to validate_length_of(:last_error).is_at_most(last_error_limit) }
end
describe 'scopes' do
- let!(:async_index_creation) { create(:postgres_async_index) }
- let!(:async_index_destruction) { create(:postgres_async_index, :with_drop) }
+ let_it_be(:async_index_creation) { create(:postgres_async_index) }
+ let_it_be(:async_index_destruction) { create(:postgres_async_index, :with_drop) }
describe '.to_create' do
subject { described_class.to_create }
@@ -32,5 +34,33 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model
it { is_expected.to contain_exactly(async_index_destruction) }
end
+
+ describe '.ordered' do
+ before do
+ async_index_creation.update!(attempts: 3)
+ end
+
+ subject { described_class.ordered.limit(1) }
+
+ it { is_expected.to contain_exactly(async_index_destruction) }
+ end
+ end
+
+ describe '#handle_exception!' do
+ let_it_be_with_reload(:async_index_creation) { create(:postgres_async_index) }
+
+ let(:error) { instance_double(StandardError, message: 'Oups', backtrace: %w[this that]) }
+
+ subject { async_index_creation.handle_exception!(error) }
+
+ it 'increases the attempts number' do
+ expect { subject }.to change { async_index_creation.reload.attempts }.by(1)
+ end
+
+ it 'saves error details' do
+ subject
+
+ expect(async_index_creation.reload.last_error).to eq("Oups\nthis\nthat")
+ end
end
end
diff --git a/spec/lib/gitlab/database/async_indexes_spec.rb b/spec/lib/gitlab/database/async_indexes_spec.rb
index 8a5509f892f..c6991bf4e06 100644
--- a/spec/lib/gitlab/database/async_indexes_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::AsyncIndexes do
+RSpec.describe Gitlab::Database::AsyncIndexes, feature_category: :database do
describe '.create_pending_indexes!' do
subject { described_class.create_pending_indexes! }
@@ -11,9 +11,9 @@ RSpec.describe Gitlab::Database::AsyncIndexes do
end
it 'takes 2 pending indexes and creates those' do
- Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.to_create.order(:id).limit(2).each do |index|
- creator = double('index creator')
- expect(Gitlab::Database::AsyncIndexes::IndexCreator).to receive(:new).with(index).and_return(creator)
+ indexes = described_class::PostgresAsyncIndex.to_create.order(:id).limit(2).to_a
+
+ expect_next_instances_of(described_class::IndexCreator, 2, indexes) do |creator|
expect(creator).to receive(:perform)
end
@@ -29,13 +29,56 @@ RSpec.describe Gitlab::Database::AsyncIndexes do
end
it 'takes 2 pending indexes and destroys those' do
- Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.to_drop.order(:id).limit(2).each do |index|
- destructor = double('index destructor')
- expect(Gitlab::Database::AsyncIndexes::IndexDestructor).to receive(:new).with(index).and_return(destructor)
+ indexes = described_class::PostgresAsyncIndex.to_drop.order(:id).limit(2).to_a
+
+ expect_next_instances_of(described_class::IndexDestructor, 2, indexes) do |destructor|
expect(destructor).to receive(:perform)
end
subject
end
end
+
+ describe '.execute_pending_actions!' do
+ subject { described_class.execute_pending_actions!(how_many: how_many) }
+
+ let_it_be(:failed_creation_entry) { create(:postgres_async_index, attempts: 5) }
+ let_it_be(:failed_removal_entry) { create(:postgres_async_index, :with_drop, attempts: 1) }
+ let_it_be(:creation_entry) { create(:postgres_async_index) }
+ let_it_be(:removal_entry) { create(:postgres_async_index, :with_drop) }
+
+ context 'with one entry' do
+ let(:how_many) { 1 }
+
+ it 'executes instructions ordered by attempts and ids' do
+ expect { subject }
+ .to change { queued_entries_exist?(creation_entry) }.to(false)
+ .and change { described_class::PostgresAsyncIndex.count }.by(-how_many)
+ end
+ end
+
+ context 'with two entries' do
+ let(:how_many) { 2 }
+
+ it 'executes instructions ordered by attempts' do
+ expect { subject }
+ .to change { queued_entries_exist?(creation_entry, removal_entry) }.to(false)
+ .and change { described_class::PostgresAsyncIndex.count }.by(-how_many)
+ end
+ end
+
+ context 'when the budget allows more instructions' do
+ let(:how_many) { 3 }
+
+ it 'retries failed attempts' do
+ expect { subject }
+ .to change { queued_entries_exist?(creation_entry, removal_entry, failed_removal_entry) }.to(false)
+ .and change { described_class::PostgresAsyncIndex.count }.by(-how_many)
+ end
+ end
+
+ def queued_entries_exist?(*records)
+ described_class::PostgresAsyncIndex.where(id: records).exists?
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index 1e316c55786..ff31a5cd6cb 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -11,304 +11,319 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
Gitlab::Database::LoadBalancing::Session.clear_session
end
- describe '#stick_or_unstick_request' do
- it 'sticks or unsticks a single object and updates the Rack environment' do
- expect(sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:user, 42)
-
- env = {}
-
- sticking.stick_or_unstick_request(env, :user, 42)
-
- expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a)
- .to eq([[sticking, :user, 42]])
+ shared_examples 'sticking' do
+ before do
+ allow(ActiveRecord::Base.load_balancer)
+ .to receive(:primary_write_location)
+ .and_return('foo')
end
- it 'sticks or unsticks multiple objects and updates the Rack environment' do
- expect(sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:user, 42)
- .ordered
+ it 'sticks an entity to the primary', :aggregate_failures do
+ allow(ActiveRecord::Base.load_balancer)
+ .to receive(:primary_only?)
+ .and_return(false)
- expect(sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:runner, '123456789')
- .ordered
+ ids.each do |id|
+ expect(sticking)
+ .to receive(:set_write_location_for)
+ .with(:user, id, 'foo')
+ end
- env = {}
+ expect(Gitlab::Database::LoadBalancing::Session.current)
+ .to receive(:use_primary!)
- sticking.stick_or_unstick_request(env, :user, 42)
- sticking.stick_or_unstick_request(env, :runner, '123456789')
+ subject
+ end
- expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq(
- [
- [sticking, :user, 42],
- [sticking, :runner,
- '123456789']
- ])
+ it 'does not update the write location when no replicas are used' do
+ expect(sticking).not_to receive(:set_write_location_for)
+
+ subject
end
end
- describe '#stick_if_necessary' do
- it 'does not stick if no write was performed' do
- allow(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:performed_write?)
- .and_return(false)
+ shared_examples 'tracking status in redis' do
+ describe '#stick_or_unstick_request' do
+ it 'sticks or unsticks a single object and updates the Rack environment' do
+ expect(sticking)
+ .to receive(:unstick_or_continue_sticking)
+ .with(:user, 42)
- expect(sticking).not_to receive(:stick)
+ env = {}
- sticking.stick_if_necessary(:user, 42)
- end
+ sticking.stick_or_unstick_request(env, :user, 42)
- it 'sticks to the primary if a write was performed' do
- allow(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:performed_write?)
- .and_return(true)
+ expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a)
+ .to eq([[sticking, :user, 42]])
+ end
- expect(sticking)
- .to receive(:stick)
- .with(:user, 42)
+ it 'sticks or unsticks multiple objects and updates the Rack environment' do
+ expect(sticking)
+ .to receive(:unstick_or_continue_sticking)
+ .with(:user, 42)
+ .ordered
- sticking.stick_if_necessary(:user, 42)
- end
- end
+ expect(sticking)
+ .to receive(:unstick_or_continue_sticking)
+ .with(:runner, '123456789')
+ .ordered
- describe '#all_caught_up?' do
- let(:lb) { ActiveRecord::Base.load_balancer }
- let(:last_write_location) { 'foo' }
+ env = {}
- before do
- allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original
+ sticking.stick_or_unstick_request(env, :user, 42)
+ sticking.stick_or_unstick_request(env, :runner, '123456789')
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return(last_write_location)
+ expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq(
+ [
+ [sticking, :user, 42],
+ [sticking, :runner,
+ '123456789']
+ ])
+ end
end
- context 'when no write location could be found' do
- let(:last_write_location) { nil }
+ describe '#stick_if_necessary' do
+ it 'does not stick if no write was performed' do
+ allow(Gitlab::Database::LoadBalancing::Session.current)
+ .to receive(:performed_write?)
+ .and_return(false)
- it 'returns true' do
- expect(lb).not_to receive(:select_up_to_date_host)
+ expect(sticking).not_to receive(:stick)
- expect(sticking.all_caught_up?(:user, 42)).to eq(true)
+ sticking.stick_if_necessary(:user, 42)
end
- end
- context 'when all secondaries have caught up' do
- before do
- allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
- end
+ it 'sticks to the primary if a write was performed' do
+ allow(Gitlab::Database::LoadBalancing::Session.current)
+ .to receive(:performed_write?)
+ .and_return(true)
- it 'returns true, and unsticks' do
expect(sticking)
- .to receive(:unstick)
+ .to receive(:stick)
.with(:user, 42)
- expect(sticking.all_caught_up?(:user, 42)).to eq(true)
- end
-
- it 'notifies with the proper event payload' do
- expect(ActiveSupport::Notifications)
- .to receive(:instrument)
- .with('caught_up_replica_pick.load_balancing', { result: true })
- .and_call_original
-
- sticking.all_caught_up?(:user, 42)
+ sticking.stick_if_necessary(:user, 42)
end
end
- context 'when the secondaries have not yet caught up' do
+ describe '#all_caught_up?' do
+ let(:lb) { ActiveRecord::Base.load_balancer }
+ let(:last_write_location) { 'foo' }
+
before do
- allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
- end
+ allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original
- it 'returns false' do
- expect(sticking.all_caught_up?(:user, 42)).to eq(false)
+ allow(sticking)
+ .to receive(:last_write_location_for)
+ .with(:user, 42)
+ .and_return(last_write_location)
end
- it 'notifies with the proper event payload' do
- expect(ActiveSupport::Notifications)
- .to receive(:instrument)
- .with('caught_up_replica_pick.load_balancing', { result: false })
- .and_call_original
+ context 'when no write location could be found' do
+ let(:last_write_location) { nil }
+
+ it 'returns true' do
+ expect(lb).not_to receive(:select_up_to_date_host)
- sticking.all_caught_up?(:user, 42)
+ expect(sticking.all_caught_up?(:user, 42)).to eq(true)
+ end
end
- end
- end
- describe '#unstick_or_continue_sticking' do
- let(:lb) { ActiveRecord::Base.load_balancer }
+ context 'when all secondaries have caught up' do
+ before do
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
+ end
- it 'simply returns if no write location could be found' do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return(nil)
+ it 'returns true, and unsticks' do
+ expect(sticking)
+ .to receive(:unstick)
+ .with(:user, 42)
- expect(lb).not_to receive(:select_up_to_date_host)
+ expect(sticking.all_caught_up?(:user, 42)).to eq(true)
+ end
- sticking.unstick_or_continue_sticking(:user, 42)
- end
+ it 'notifies with the proper event payload' do
+ expect(ActiveSupport::Notifications)
+ .to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: true })
+ .and_call_original
- it 'unsticks if all secondaries have caught up' do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return('foo')
+ sticking.all_caught_up?(:user, 42)
+ end
+ end
- allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
+ context 'when the secondaries have not yet caught up' do
+ before do
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
+ end
- expect(sticking)
- .to receive(:unstick)
- .with(:user, 42)
+ it 'returns false' do
+ expect(sticking.all_caught_up?(:user, 42)).to eq(false)
+ end
- sticking.unstick_or_continue_sticking(:user, 42)
+ it 'notifies with the proper event payload' do
+ expect(ActiveSupport::Notifications)
+ .to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: false })
+ .and_call_original
+
+ sticking.all_caught_up?(:user, 42)
+ end
+ end
end
- it 'continues using the primary if the secondaries have not yet caught up' do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return('foo')
+ describe '#unstick_or_continue_sticking' do
+ let(:lb) { ActiveRecord::Base.load_balancer }
- allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
+ it 'simply returns if no write location could be found' do
+ allow(sticking)
+ .to receive(:last_write_location_for)
+ .with(:user, 42)
+ .and_return(nil)
- expect(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:use_primary!)
+ expect(lb).not_to receive(:select_up_to_date_host)
- sticking.unstick_or_continue_sticking(:user, 42)
- end
- end
+ sticking.unstick_or_continue_sticking(:user, 42)
+ end
- RSpec.shared_examples 'sticking' do
- before do
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:primary_write_location)
- .and_return('foo')
- end
+ it 'unsticks if all secondaries have caught up' do
+ allow(sticking)
+ .to receive(:last_write_location_for)
+ .with(:user, 42)
+ .and_return('foo')
- it 'sticks an entity to the primary', :aggregate_failures do
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:primary_only?)
- .and_return(false)
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
- ids.each do |id|
expect(sticking)
- .to receive(:set_write_location_for)
- .with(:user, id, 'foo')
+ .to receive(:unstick)
+ .with(:user, 42)
+
+ sticking.unstick_or_continue_sticking(:user, 42)
end
- expect(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:use_primary!)
+ it 'continues using the primary if the secondaries have not yet caught up' do
+ allow(sticking)
+ .to receive(:last_write_location_for)
+ .with(:user, 42)
+ .and_return('foo')
- subject
- end
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
- it 'does not update the write location when no replicas are used' do
- expect(sticking).not_to receive(:set_write_location_for)
+ expect(Gitlab::Database::LoadBalancing::Session.current)
+ .to receive(:use_primary!)
- subject
+ sticking.unstick_or_continue_sticking(:user, 42)
+ end
end
- end
- describe '#stick' do
- it_behaves_like 'sticking' do
- let(:ids) { [42] }
- subject { sticking.stick(:user, ids.first) }
+ describe '#stick' do
+ it_behaves_like 'sticking' do
+ let(:ids) { [42] }
+ subject { sticking.stick(:user, ids.first) }
+ end
end
- end
- describe '#bulk_stick' do
- it_behaves_like 'sticking' do
- let(:ids) { [42, 43] }
- subject { sticking.bulk_stick(:user, ids) }
+ describe '#bulk_stick' do
+ it_behaves_like 'sticking' do
+ let(:ids) { [42, 43] }
+ subject { sticking.bulk_stick(:user, ids) }
+ end
end
- end
- describe '#mark_primary_write_location' do
- it 'updates the write location with the load balancer' do
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:primary_write_location)
- .and_return('foo')
+ describe '#mark_primary_write_location' do
+ it 'updates the write location with the load balancer' do
+ allow(ActiveRecord::Base.load_balancer)
+ .to receive(:primary_write_location)
+ .and_return('foo')
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:primary_only?)
- .and_return(false)
+ allow(ActiveRecord::Base.load_balancer)
+ .to receive(:primary_only?)
+ .and_return(false)
+
+ expect(sticking)
+ .to receive(:set_write_location_for)
+ .with(:user, 42, 'foo')
+
+ sticking.mark_primary_write_location(:user, 42)
+ end
- expect(sticking)
- .to receive(:set_write_location_for)
- .with(:user, 42, 'foo')
+ it 'does nothing when no replicas are used' do
+ expect(sticking).not_to receive(:set_write_location_for)
- sticking.mark_primary_write_location(:user, 42)
+ sticking.mark_primary_write_location(:user, 42)
+ end
end
- it 'does nothing when no replicas are used' do
- expect(sticking).not_to receive(:set_write_location_for)
+ describe '#unstick' do
+ it 'removes the sticking data from Redis' do
+ sticking.set_write_location_for(:user, 4, 'foo')
+ sticking.unstick(:user, 4)
- sticking.mark_primary_write_location(:user, 42)
+ expect(sticking.last_write_location_for(:user, 4)).to be_nil
+ end
end
- end
- describe '#unstick' do
- it 'removes the sticking data from Redis' do
- sticking.set_write_location_for(:user, 4, 'foo')
- sticking.unstick(:user, 4)
+ describe '#last_write_location_for' do
+ it 'returns the last WAL write location for a user' do
+ sticking.set_write_location_for(:user, 4, 'foo')
- expect(sticking.last_write_location_for(:user, 4)).to be_nil
+ expect(sticking.last_write_location_for(:user, 4)).to eq('foo')
+ end
end
- end
- describe '#last_write_location_for' do
- it 'returns the last WAL write location for a user' do
- sticking.set_write_location_for(:user, 4, 'foo')
+ describe '#select_caught_up_replicas' do
+ let(:lb) { ActiveRecord::Base.load_balancer }
+
+ context 'with no write location' do
+ before do
+ allow(sticking)
+ .to receive(:last_write_location_for)
+ .with(:project, 42)
+ .and_return(nil)
+ end
+
+ it 'returns false and does not try to find caught up hosts' do
+ expect(lb).not_to receive(:select_up_to_date_host)
+ expect(sticking.select_caught_up_replicas(:project, 42)).to be false
+ end
+ end
- expect(sticking.last_write_location_for(:user, 4)).to eq('foo')
+ context 'with write location' do
+ before do
+ allow(sticking)
+ .to receive(:last_write_location_for)
+ .with(:project, 42)
+ .and_return('foo')
+ end
+
+ it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
+ expect(lb).to receive(:select_up_to_date_host).and_return(true)
+ expect(sticking)
+ .to receive(:unstick)
+ .with(:project, 42)
+ expect(sticking.select_caught_up_replicas(:project, 42)).to be true
+ end
+ end
end
end
- describe '#redis_key_for' do
- it 'returns a String' do
- expect(sticking.redis_key_for(:user, 42))
- .to eq('database-load-balancing/write-location/main/user/42')
- end
+ context 'with multi-store feature flags turned on' do
+ it_behaves_like 'tracking status in redis'
end
- describe '#select_caught_up_replicas' do
- let(:lb) { ActiveRecord::Base.load_balancer }
-
- context 'with no write location' do
- before do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:project, 42)
- .and_return(nil)
- end
-
- it 'returns false and does not try to find caught up hosts' do
- expect(lb).not_to receive(:select_up_to_date_host)
- expect(sticking.select_caught_up_replicas(:project, 42)).to be false
- end
+ context 'when both multi-store feature flags are off' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_db_load_balancing: false)
+ stub_feature_flags(use_primary_store_as_default_for_db_load_balancing: false)
end
- context 'with write location' do
- before do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:project, 42)
- .and_return('foo')
- end
+ it_behaves_like 'tracking status in redis'
+ end
- it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
- expect(lb).to receive(:select_up_to_date_host).and_return(true)
- expect(sticking)
- .to receive(:unstick)
- .with(:project, 42)
- expect(sticking.select_caught_up_replicas(:project, 42)).to be true
- end
+ describe '#redis_key_for' do
+ it 'returns a String' do
+ expect(sticking.redis_key_for(:user, 42))
+ .to eq('database-load-balancing/write-location/main/user/42')
end
end
end
diff --git a/spec/lib/gitlab/redis/db_load_balancing_spec.rb b/spec/lib/gitlab/redis/db_load_balancing_spec.rb
new file mode 100644
index 00000000000..d633413ddec
--- /dev/null
+++ b/spec/lib/gitlab/redis/db_load_balancing_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::DbLoadBalancing, feature_category: :scalability do
+ include_examples "redis_new_instance_shared_examples", 'db_load_balancing', Gitlab::Redis::SharedState
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ subject { described_class.pool }
+
+ before do
+ allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
+
+ # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
+ allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(mktmpdir)
+ allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
+
+ expect(redis_instance.instance_name).to eq('DbLoadBalancing')
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_db_load_balancing,
+ :use_primary_store_as_default_for_db_load_balancing
+ end
+
+ describe '#raw_config_hash' do
+ it 'has a legacy default URL' do
+ expect(subject).to receive(:fetch_config).and_return(false)
+
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382')
+ end
+ end
+end
diff --git a/spec/migrations/20210603222333_remove_builds_email_service_from_services_spec.rb b/spec/migrations/20210603222333_remove_builds_email_service_from_services_spec.rb
deleted file mode 100644
index 706e0b14492..00000000000
--- a/spec/migrations/20210603222333_remove_builds_email_service_from_services_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveBuildsEmailServiceFromServices, feature_category: :navigation do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:services) { table(:services) }
- let(:namespace) { namespaces.create!(name: 'foo', path: 'bar') }
- let(:project) { projects.create!(namespace_id: namespace.id) }
-
- it 'correctly deletes `BuildsEmailService` services' do
- services.create!(project_id: project.id, type: 'BuildsEmailService')
- services.create!(project_id: project.id, type: 'OtherService')
-
- expect(services.all.pluck(:type)).to match_array %w[BuildsEmailService OtherService]
-
- migrate!
-
- expect(services.all.pluck(:type)).to eq %w[OtherService]
- end
-end
diff --git a/spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb b/spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb
deleted file mode 100644
index 300c43b9133..00000000000
--- a/spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe DeleteLegacyOperationsFeatureFlags, feature_category: :feature_flags do
- let(:namespace) { table(:namespaces).create!(name: 'foo', path: 'bar') }
- let(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let(:issue) { table(:issues).create!(id: 123, project_id: project.id) }
- let(:operations_feature_flags) { table(:operations_feature_flags) }
- let(:operations_feature_flag_scopes) { table(:operations_feature_flag_scopes) }
- let(:operations_strategies) { table(:operations_strategies) }
- let(:operations_scopes) { table(:operations_scopes) }
- let(:operations_feature_flags_issues) { table(:operations_feature_flags_issues) }
-
- it 'correctly deletes legacy feature flags' do
- # Legacy version of a feature flag - dropped support in GitLab 14.0.
- legacy_flag = operations_feature_flags.create!(project_id: project.id, version: 1, name: 'flag_a', active: true, iid: 1)
- operations_feature_flag_scopes.create!(feature_flag_id: legacy_flag.id, active: true)
- operations_feature_flags_issues.create!(feature_flag_id: legacy_flag.id, issue_id: issue.id)
- # New version of a feature flag.
- new_flag = operations_feature_flags.create!(project_id: project.id, version: 2, name: 'flag_b', active: true, iid: 2)
- new_strategy = operations_strategies.create!(feature_flag_id: new_flag.id, name: 'default')
- operations_scopes.create!(strategy_id: new_strategy.id, environment_scope: '*')
- operations_feature_flags_issues.create!(feature_flag_id: new_flag.id, issue_id: issue.id)
-
- expect(operations_feature_flags.all.pluck(:version)).to contain_exactly(1, 2)
- expect(operations_feature_flag_scopes.count).to eq(1)
- expect(operations_strategies.count).to eq(1)
- expect(operations_scopes.count).to eq(1)
- expect(operations_feature_flags_issues.all.pluck(:feature_flag_id)).to contain_exactly(legacy_flag.id, new_flag.id)
-
- migrate!
-
- # Legacy flag is deleted.
- expect(operations_feature_flags.all.pluck(:version)).to contain_exactly(2)
- # The associated entries of the legacy flag are deleted too.
- expect(operations_feature_flag_scopes.count).to eq(0)
- # The associated entries of the new flag stay instact.
- expect(operations_strategies.count).to eq(1)
- expect(operations_scopes.count).to eq(1)
- expect(operations_feature_flags_issues.all.pluck(:feature_flag_id)).to contain_exactly(new_flag.id)
- end
-end
diff --git a/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb b/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb
deleted file mode 100644
index baa5fd7efbd..00000000000
--- a/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe CascadeDeleteFreezePeriods, :suppress_gitlab_schemas_validate_connection, feature_category: :continuous_delivery do
- let(:namespace) { table(:namespaces).create!(name: 'deploy_freeze', path: 'deploy_freeze') }
- let(:project) { table(:projects).create!(id: 1, namespace_id: namespace.id) }
- let(:freeze_periods) { table(:ci_freeze_periods) }
-
- describe "#up" do
- it 'allows for a project to be deleted' do
- freeze_periods.create!(id: 1, project_id: project.id, freeze_start: '5 * * * *', freeze_end: '6 * * * *', cron_timezone: 'UTC')
- migrate!
-
- project.delete
-
- expect(freeze_periods.where(project_id: project.id).count).to be_zero
- end
- end
-end
diff --git a/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb b/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb
deleted file mode 100644
index 0f202129e82..00000000000
--- a/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RescheduleMergeRequestDiffUsersBackgroundMigration,
- :migration, feature_category: :code_review_workflow do
- let(:migration) { described_class.new }
-
- describe '#up' do
- before do
- allow(described_class::MergeRequestDiff)
- .to receive(:minimum)
- .with(:id)
- .and_return(42)
-
- allow(described_class::MergeRequestDiff)
- .to receive(:maximum)
- .with(:id)
- .and_return(85_123)
- end
-
- it 'deletes existing background migration job records' do
- args = [150_000, 300_000]
-
- Gitlab::Database::BackgroundMigrationJob
- .create!(class_name: described_class::MIGRATION_NAME, arguments: args)
-
- migration.up
-
- found = Gitlab::Database::BackgroundMigrationJob
- .where(class_name: described_class::MIGRATION_NAME, arguments: args)
- .count
-
- expect(found).to eq(0)
- end
-
- it 'schedules the migrations in batches' do
- expect(migration)
- .to receive(:migrate_in)
- .ordered
- .with(2.minutes.to_i, described_class::MIGRATION_NAME, [42, 40_042])
-
- expect(migration)
- .to receive(:migrate_in)
- .ordered
- .with(4.minutes.to_i, described_class::MIGRATION_NAME, [40_042, 80_042])
-
- expect(migration)
- .to receive(:migrate_in)
- .ordered
- .with(6.minutes.to_i, described_class::MIGRATION_NAME, [80_042, 120_042])
-
- migration.up
- end
-
- it 'creates rows to track the background migration jobs' do
- expect(Gitlab::Database::BackgroundMigrationJob)
- .to receive(:create!)
- .ordered
- .with(class_name: described_class::MIGRATION_NAME, arguments: [42, 40_042])
-
- expect(Gitlab::Database::BackgroundMigrationJob)
- .to receive(:create!)
- .ordered
- .with(class_name: described_class::MIGRATION_NAME, arguments: [40_042, 80_042])
-
- expect(Gitlab::Database::BackgroundMigrationJob)
- .to receive(:create!)
- .ordered
- .with(class_name: described_class::MIGRATION_NAME, arguments: [80_042, 120_042])
-
- migration.up
- end
- end
-end
diff --git a/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb b/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb
deleted file mode 100644
index 6761b69aed5..00000000000
--- a/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixCiSourcesPipelinesIndexNames, :migration, feature_category: :continuous_integration do
- def validate_foreign_keys_and_index!
- aggregate_failures do
- expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_be5624bf37')).to be_truthy
- expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_pipelines, column: :pipeline_id, name: 'fk_e1bad85861')).to be_truthy
- expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_pipelines, column: :source_pipeline_id, name: 'fk_d4e29af7d7')).to be_truthy
- expect(subject.foreign_key_exists?(:ci_sources_pipelines, :projects, column: :source_project_id, name: 'fk_acd9737679')).to be_truthy
- expect(subject.foreign_key_exists?(:ci_sources_pipelines, :projects, name: 'fk_1e53c97c0a')).to be_truthy
- expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id_convert_to_bigint, name: 'fk_be5624bf37_tmp')).to be_falsey
-
- expect(subject.index_exists_by_name?(:ci_sources_pipelines, described_class::NEW_INDEX_NAME)).to be_truthy
- expect(subject.index_exists_by_name?(:ci_sources_pipelines, described_class::OLD_INDEX_NAME)).to be_falsey
- end
- end
-
- it 'existing foreign keys and indexes are untouched' do
- validate_foreign_keys_and_index!
-
- migrate!
-
- validate_foreign_keys_and_index!
- end
-
- context 'with a legacy (pre-GitLab 10.0) foreign key' do
- let(:old_foreign_keys) { described_class::OLD_TO_NEW_FOREIGN_KEY_DEFS.keys }
- let(:new_foreign_keys) { described_class::OLD_TO_NEW_FOREIGN_KEY_DEFS.values.map { |entry| entry[:name] } }
-
- before do
- new_foreign_keys.each { |name| subject.remove_foreign_key_if_exists(:ci_sources_pipelines, name: name) }
-
- # GitLab 9.5.4: https://gitlab.com/gitlab-org/gitlab/-/blob/v9.5.4-ee/db/schema.rb#L2026-2030
- subject.add_foreign_key(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_3f0c88d7dc', on_delete: :cascade)
- subject.add_foreign_key(:ci_sources_pipelines, :ci_pipelines, column: :pipeline_id, name: "fk_b8c0fac459", on_delete: :cascade)
- subject.add_foreign_key(:ci_sources_pipelines, :ci_pipelines, column: :source_pipeline_id, name: "fk_3a3e3cb83a", on_delete: :cascade)
- subject.add_foreign_key(:ci_sources_pipelines, :projects, column: :source_project_id, name: "fk_8868d0f3e4", on_delete: :cascade)
- subject.add_foreign_key(:ci_sources_pipelines, :projects, name: "fk_83b4346e48", on_delete: :cascade)
-
- # https://gitlab.com/gitlab-org/gitlab/-/blob/v9.5.4-ee/db/schema.rb#L443
- subject.add_index "ci_sources_pipelines", ["source_job_id"], name: described_class::OLD_INDEX_NAME, using: :btree
- end
-
- context 'when new index already exists' do
- it 'corrects foreign key constraints and drops old index' do
- expect { migrate! }.to change { subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_3f0c88d7dc') }.from(true).to(false)
-
- validate_foreign_keys_and_index!
- end
- end
-
- context 'when new index does not exist' do
- before do
- subject.remove_index("ci_sources_pipelines", name: described_class::NEW_INDEX_NAME)
- end
-
- it 'drops the old index' do
- expect { migrate! }.to change { subject.index_exists_by_name?(:ci_sources_pipelines, described_class::OLD_INDEX_NAME) }.from(true).to(false)
-
- validate_foreign_keys_and_index!
- end
- end
- end
-end
diff --git a/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb b/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb
deleted file mode 100644
index 5674efbf187..00000000000
--- a/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateIssuableSlasWhereIssueClosed, :migration, feature_category: :team_planning do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:issues) { table(:issues) }
- let(:issuable_slas) { table(:issuable_slas) }
- let(:issue_params) { { title: 'title', project_id: project.id } }
- let(:issue_closed_state) { 2 }
-
- let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
- let!(:project) { projects.create!(namespace_id: namespace.id) }
- let!(:issue_open) { issues.create!(issue_params) }
- let!(:issue_closed) { issues.create!(issue_params.merge(state_id: issue_closed_state)) }
-
- let!(:issuable_sla_open_issue) { issuable_slas.create!(issue_id: issue_open.id, due_at: Time.now) }
- let!(:issuable_sla_closed_issue) { issuable_slas.create!(issue_id: issue_closed.id, due_at: Time.now) }
-
- it 'sets the issuable_closed attribute to false' do
- expect(issuable_sla_open_issue.issuable_closed).to eq(false)
- expect(issuable_sla_closed_issue.issuable_closed).to eq(false)
-
- migrate!
-
- expect(issuable_sla_open_issue.reload.issuable_closed).to eq(false)
- expect(issuable_sla_closed_issue.reload.issuable_closed).to eq(true)
- end
-end
diff --git a/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb b/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb
deleted file mode 100644
index 098dd647b27..00000000000
--- a/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe OperationsFeatureFlagsCorrectFlexibleRolloutValues, :migration, feature_category: :feature_flags do
- let!(:strategies) { table(:operations_strategies) }
-
- let(:namespace) { table(:namespaces).create!(name: 'feature_flag', path: 'feature_flag') }
- let(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let(:feature_flag) { table(:operations_feature_flags).create!(project_id: project.id, active: true, name: 'foo', iid: 1) }
-
- describe "#up" do
- described_class::STICKINESS.each do |old, new|
- it "corrects parameters for flexible rollout stickiness #{old}" do
- reversible_migration do |migration|
- parameters = { groupId: "default", rollout: "100", stickiness: old }
- strategy = create_strategy(parameters)
-
- migration.before -> {
- expect(strategy.reload.parameters).to eq({ "groupId" => "default", "rollout" => "100", "stickiness" => old })
- }
-
- migration.after -> {
- expect(strategy.reload.parameters).to eq({ "groupId" => "default", "rollout" => "100", "stickiness" => new })
- }
- end
- end
- end
-
- it 'ignores other strategies' do
- reversible_migration do |migration|
- parameters = { "groupId" => "default", "rollout" => "100", "stickiness" => "USERID" }
- strategy = create_strategy(parameters, name: 'default')
-
- migration.before -> {
- expect(strategy.reload.parameters).to eq(parameters)
- }
-
- migration.after -> {
- expect(strategy.reload.parameters).to eq(parameters)
- }
- end
- end
-
- it 'ignores other stickiness' do
- reversible_migration do |migration|
- parameters = { "groupId" => "default", "rollout" => "100", "stickiness" => "FOO" }
- strategy = create_strategy(parameters)
-
- migration.before -> {
- expect(strategy.reload.parameters).to eq(parameters)
- }
-
- migration.after -> {
- expect(strategy.reload.parameters).to eq(parameters)
- }
- end
- end
- end
-
- def create_strategy(params, name: 'flexibleRollout')
- strategies.create!(name: name, parameters: params, feature_flag_id: feature_flag.id)
- end
-end
diff --git a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
deleted file mode 100644
index e7f76eb0ae0..00000000000
--- a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CreateBaseWorkItemTypes, :migration, feature_category: :team_planning do
- include MigrationHelpers::WorkItemTypesHelper
-
- let!(:work_item_types) { table(:work_item_types) }
-
- let(:base_types) do
- {
- issue: 0,
- incident: 1,
- test_case: 2,
- requirement: 3
- }
- end
-
- # We use append_after to make sure this runs after the schema was reset to its latest state
- append_after(:all) do
- # Make sure base types are recreated after running the migration
- # because migration specs are not run in a transaction
- reset_work_item_types
- end
-
- it 'creates default data' do
- # Need to delete all as base types are seeded before entire test suite
- work_item_types.delete_all
-
- reversible_migration do |migration|
- migration.before -> {
- # Depending on whether the migration has been run before,
- # the size could be 4, or 0, so we don't set any expectations
- }
-
- migration.after -> {
- expect(work_item_types.count).to eq(4)
- expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
- }
- end
- end
-end
diff --git a/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb b/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb
deleted file mode 100644
index d18673db757..00000000000
--- a/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb
+++ /dev/null
@@ -1,137 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateTrialPlansCiDailyPipelineScheduleTriggers, :migration, feature_category: :purchase do
- let!(:plans) { table(:plans) }
- let!(:plan_limits) { table(:plan_limits) }
- let!(:premium_trial_plan) { plans.create!(name: 'premium_trial', title: 'Premium Trial') }
- let!(:ultimate_trial_plan) { plans.create!(name: 'ultimate_trial', title: 'Ultimate Trial') }
-
- describe '#up' do
- let!(:premium_trial_plan_limits) { plan_limits.create!(plan_id: premium_trial_plan.id, ci_daily_pipeline_schedule_triggers: 0) }
- let!(:ultimate_trial_plan_limits) { plan_limits.create!(plan_id: ultimate_trial_plan.id, ci_daily_pipeline_schedule_triggers: 0) }
-
- context 'when the environment is dev or com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'sets the trial plan limits for ci_daily_pipeline_schedule_triggers' do
- disable_migrations_output { migrate! }
-
- expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
- expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
- end
-
- it 'does not change the plan limits if the ultimate trial plan is missing' do
- ultimate_trial_plan.destroy!
-
- expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count }
- expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
- end
-
- it 'does not change the plan limits if the ultimate trial plan limits is missing' do
- ultimate_trial_plan_limits.destroy!
-
- expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count }
- expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
- end
-
- it 'does not change the plan limits if the premium trial plan is missing' do
- premium_trial_plan.destroy!
-
- expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count }
- expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
- end
-
- it 'does not change the plan limits if the premium trial plan limits is missing' do
- premium_trial_plan_limits.destroy!
-
- expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count }
- expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
- end
- end
-
- context 'when the environment is anything other than dev or com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'does not update the plan limits' do
- disable_migrations_output { migrate! }
-
- expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
- expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
- end
- end
- end
-
- describe '#down' do
- let!(:premium_trial_plan_limits) { plan_limits.create!(plan_id: premium_trial_plan.id, ci_daily_pipeline_schedule_triggers: 288) }
- let!(:ultimate_trial_plan_limits) { plan_limits.create!(plan_id: ultimate_trial_plan.id, ci_daily_pipeline_schedule_triggers: 288) }
-
- context 'when the environment is dev or com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'sets the trial plan limits ci_daily_pipeline_schedule_triggers to zero' do
- migrate_down!
-
- expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
- expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0)
- end
-
- it 'does not change the plan limits if the ultimate trial plan is missing' do
- ultimate_trial_plan.destroy!
-
- expect { migrate_down! }.not_to change { plan_limits.count }
- expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
- end
-
- it 'does not change the plan limits if the ultimate trial plan limits is missing' do
- ultimate_trial_plan_limits.destroy!
-
- expect { migrate_down! }.not_to change { plan_limits.count }
- expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
- end
-
- it 'does not change the plan limits if the premium trial plan is missing' do
- premium_trial_plan.destroy!
-
- expect { migrate_down! }.not_to change { plan_limits.count }
- expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
- end
-
- it 'does not change the plan limits if the premium trial plan limits is missing' do
- premium_trial_plan_limits.destroy!
-
- expect { migrate_down! }.not_to change { plan_limits.count }
- expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
- end
- end
-
- context 'when the environment is anything other than dev or com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'does not change the ultimate trial plan limits' do
- migrate_down!
-
- expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
- expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288)
- end
- end
- end
-
- def migrate_down!
- disable_migrations_output do
- migrate!
- described_class.new.down
- end
- end
-end
diff --git a/spec/migrations/20210811122206_update_external_project_bots_spec.rb b/spec/migrations/20210811122206_update_external_project_bots_spec.rb
deleted file mode 100644
index b18239f4fd5..00000000000
--- a/spec/migrations/20210811122206_update_external_project_bots_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateExternalProjectBots, :migration, feature_category: :user_profile do
- def create_user(**extra_options)
- defaults = { projects_limit: 0, email: "#{extra_options[:username]}@example.com" }
-
- table(:users).create!(defaults.merge(extra_options))
- end
-
- it 'sets bot users as external if were created by external users' do
- internal_user = create_user(username: 'foo')
- external_user = create_user(username: 'bar', external: true)
-
- internal_project_bot = create_user(username: 'foo2', user_type: 6, created_by_id: internal_user.id, external: false)
- external_project_bot = create_user(username: 'bar2', user_type: 6, created_by_id: external_user.id, external: false)
-
- migrate!
-
- expect(table(:users).find(internal_project_bot.id).external).to eq false
- expect(table(:users).find(external_project_bot.id).external).to eq true
- end
-end
diff --git a/spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb b/spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb
deleted file mode 100644
index fcc2e1657d0..00000000000
--- a/spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('remove_duplicate_project_authorizations')
-
-RSpec.describe RemoveDuplicateProjectAuthorizations, :migration, feature_category: :authentication_and_authorization do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:project_authorizations) { table(:project_authorizations) }
-
- let!(:user_1) { users.create! email: 'user1@example.com', projects_limit: 0 }
- let!(:user_2) { users.create! email: 'user2@example.com', projects_limit: 0 }
- let!(:namespace_1) { namespaces.create! name: 'namespace 1', path: 'namespace1' }
- let!(:namespace_2) { namespaces.create! name: 'namespace 2', path: 'namespace2' }
- let!(:project_1) { projects.create! namespace_id: namespace_1.id }
- let!(:project_2) { projects.create! namespace_id: namespace_2.id }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- describe '#up' do
- subject { migrate! }
-
- context 'User with multiple projects' do
- before do
- project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::DEVELOPER
- project_authorizations.create! project_id: project_2.id, user_id: user_1.id, access_level: Gitlab::Access::DEVELOPER
- end
-
- it { expect { subject }.not_to change { ProjectAuthorization.count } }
- end
-
- context 'Project with multiple users' do
- before do
- project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::DEVELOPER
- project_authorizations.create! project_id: project_1.id, user_id: user_2.id, access_level: Gitlab::Access::DEVELOPER
- end
-
- it { expect { subject }.not_to change { ProjectAuthorization.count } }
- end
-
- context 'Same project and user but different access level' do
- before do
- project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::DEVELOPER
- project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::MAINTAINER
- project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::REPORTER
- end
-
- it { expect { subject }.to change { ProjectAuthorization.count }.from(3).to(1) }
-
- it 'retains the highest access level' do
- subject
-
- all_records = ProjectAuthorization.all.to_a
- expect(all_records.count).to eq 1
- expect(all_records.first.access_level).to eq Gitlab::Access::MAINTAINER
- end
- end
- end
-end
diff --git a/spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb b/spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb
deleted file mode 100644
index e48f933ad5f..00000000000
--- a/spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe DropTemporaryColumnsAndTriggersForCiBuildsRunnerSession, :migration, feature_category: :runner do
- let(:ci_builds_runner_session_table) { table(:ci_builds_runner_session) }
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(ci_builds_runner_session_table.column_names).to include('build_id_convert_to_bigint')
- }
-
- migration.after -> {
- ci_builds_runner_session_table.reset_column_information
- expect(ci_builds_runner_session_table.column_names).not_to include('build_id_convert_to_bigint')
- }
- end
- end
-end
diff --git a/spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb b/spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb
deleted file mode 100644
index a6c892db131..00000000000
--- a/spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddDefaultProjectApprovalRulesVulnAllowed, feature_category: :source_code_management do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
- let(:project) { projects.create!(name: 'project', path: 'project', namespace_id: namespace.id) }
- let(:approval_project_rules) { table(:approval_project_rules) }
-
- it 'updates records when vulnerabilities_allowed is nil' do
- records_to_migrate = 10
-
- records_to_migrate.times do |i|
- approval_project_rules.create!(name: "rule #{i}", project_id: project.id)
- end
-
- expect { migrate! }
- .to change { approval_project_rules.where(vulnerabilities_allowed: nil).count }
- .from(records_to_migrate)
- .to(0)
- end
-
- it 'defaults vulnerabilities_allowed to 0' do
- approval_project_rule = approval_project_rules.create!(name: "new rule", project_id: project.id)
-
- expect(approval_project_rule.vulnerabilities_allowed).to be_nil
-
- migrate!
-
- expect(approval_project_rule.reload.vulnerabilities_allowed).to eq(0)
- end
-end
diff --git a/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb b/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb
deleted file mode 100644
index 670541128a0..00000000000
--- a/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddPremiumAndUltimatePlanLimits, :migration, feature_category: :purchase do
- shared_examples_for 'a migration that does not alter plans or plan limits' do
- it do
- expect { migrate! }.not_to change {
- [
- AddPremiumAndUltimatePlanLimits::Plan.count,
- AddPremiumAndUltimatePlanLimits::PlanLimits.count
- ]
- }
- end
- end
-
- describe '#up' do
- context 'when not .com?' do
- before do
- allow(Gitlab).to receive(:com?).and_return false
- end
-
- it_behaves_like 'a migration that does not alter plans or plan limits'
- end
-
- context 'when .com?' do
- before do
- allow(Gitlab).to receive(:com?).and_return true
- end
-
- context 'when source plan does not exist' do
- it_behaves_like 'a migration that does not alter plans or plan limits'
- end
-
- context 'when target plan does not exist' do
- before do
- table(:plans).create!(name: 'silver', title: 'Silver')
- table(:plans).create!(name: 'gold', title: 'Gold')
- end
-
- it_behaves_like 'a migration that does not alter plans or plan limits'
- end
-
- context 'when source and target plans exist' do
- let!(:silver) { table(:plans).create!(name: 'silver', title: 'Silver') }
- let!(:gold) { table(:plans).create!(name: 'gold', title: 'Gold') }
- let!(:premium) { table(:plans).create!(name: 'premium', title: 'Premium') }
- let!(:ultimate) { table(:plans).create!(name: 'ultimate', title: 'Ultimate') }
-
- let!(:silver_limits) { table(:plan_limits).create!(plan_id: silver.id, storage_size_limit: 111) }
- let!(:gold_limits) { table(:plan_limits).create!(plan_id: gold.id, storage_size_limit: 222) }
-
- context 'when target has plan limits' do
- before do
- table(:plan_limits).create!(plan_id: premium.id, storage_size_limit: 999)
- table(:plan_limits).create!(plan_id: ultimate.id, storage_size_limit: 999)
- end
-
- it 'does not overwrite the limits' do
- expect { migrate! }.not_to change {
- [
- AddPremiumAndUltimatePlanLimits::Plan.count,
- AddPremiumAndUltimatePlanLimits::PlanLimits.pluck(:id, :storage_size_limit).sort
- ]
- }
- end
- end
-
- context 'when target has no plan limits' do
- it 'creates plan limits from the source plan' do
- migrate!
-
- expect(AddPremiumAndUltimatePlanLimits::PlanLimits.pluck(:plan_id, :storage_size_limit))
- .to match_array(
- [
- [silver.id, silver_limits.storage_size_limit],
- [gold.id, gold_limits.storage_size_limit],
- [premium.id, silver_limits.storage_size_limit],
- [ultimate.id, gold_limits.storage_size_limit]
- ])
- end
- end
- end
- end
- end
-end
diff --git a/spec/migrations/add_triggers_to_integrations_type_new_spec.rb b/spec/migrations/add_triggers_to_integrations_type_new_spec.rb
deleted file mode 100644
index 4fa5fe31d2b..00000000000
--- a/spec/migrations/add_triggers_to_integrations_type_new_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddTriggersToIntegrationsTypeNew, feature_category: :purchase do
- let(:migration) { described_class.new }
- let(:integrations) { table(:integrations) }
-
- # This matches Gitlab::Integrations::StiType at the time the trigger was added
- let(:namespaced_integrations) do
- %w[
- Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
- Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Irker Jenkins Jira Mattermost
- MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
- Prometheus Pushover Redmine Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack
-
- Github GitlabSlackApplication
- ]
- end
-
- describe '#up' do
- before do
- migrate!
- end
-
- describe 'INSERT trigger' do
- it 'sets `type_new` to the transformed `type` class name' do
- namespaced_integrations.each do |type|
- integration = integrations.create!(type: "#{type}Service")
-
- expect(integration.reload).to have_attributes(
- type: "#{type}Service",
- type_new: "Integrations::#{type}"
- )
- end
- end
-
- it 'ignores types that are not namespaced' do
- # We don't actually have any integrations without namespaces,
- # but we can abuse one of the integration base classes.
- integration = integrations.create!(type: 'BaseIssueTracker')
-
- expect(integration.reload).to have_attributes(
- type: 'BaseIssueTracker',
- type_new: nil
- )
- end
-
- it 'ignores types that are unknown' do
- integration = integrations.create!(type: 'FooBar')
-
- expect(integration.reload).to have_attributes(
- type: 'FooBar',
- type_new: nil
- )
- end
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- it 'drops the INSERT trigger' do
- integration = integrations.create!(type: 'JiraService')
-
- expect(integration.reload).to have_attributes(
- type: 'JiraService',
- type_new: nil
- )
- end
- end
-end
diff --git a/spec/migrations/add_upvotes_count_index_to_issues_spec.rb b/spec/migrations/add_upvotes_count_index_to_issues_spec.rb
deleted file mode 100644
index 0012b8a0b96..00000000000
--- a/spec/migrations/add_upvotes_count_index_to_issues_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddUpvotesCountIndexToIssues, feature_category: :team_planning do
- let(:migration_instance) { described_class.new }
-
- describe '#up' do
- it 'adds index' do
- expect { migrate! }.to change { migration_instance.index_exists?(:issues, [:project_id, :upvotes_count], name: described_class::INDEX_NAME) }.from(false).to(true)
- end
- end
-
- describe '#down' do
- it 'removes index' do
- migrate!
-
- expect { schema_migrate_down! }.to change { migration_instance.index_exists?(:issues, [:project_id, :upvotes_count], name: described_class::INDEX_NAME) }.from(true).to(false)
- end
- end
-end
diff --git a/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb b/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb
deleted file mode 100644
index 67d215c781b..00000000000
--- a/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AssociateExistingDastBuildsWithVariables, feature_category: :dynamic_application_security_testing do
- it 'is a no-op' do
- migrate!
- end
-end
diff --git a/spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb b/spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb
deleted file mode 100644
index a9500b9f942..00000000000
--- a/spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb
+++ /dev/null
@@ -1,108 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillCadenceIdForBoardsScopedToIteration, :migration, feature_category: :team_planning do
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:iterations_cadences) { table(:iterations_cadences) }
- let(:boards) { table(:boards) }
-
- let!(:group) { namespaces.create!(name: 'group1', path: 'group1', type: 'Group') }
- let!(:cadence) { iterations_cadences.create!(title: 'group cadence', group_id: group.id, start_date: Time.current) }
- let!(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
- let!(:project_board1) { boards.create!(name: 'Project Dev1', project_id: project.id) }
- let!(:project_board2) { boards.create!(name: 'Project Dev2', project_id: project.id, iteration_id: -4) }
- let!(:project_board3) { boards.create!(name: 'Project Dev3', project_id: project.id, iteration_id: -4) }
- let!(:project_board4) { boards.create!(name: 'Project Dev4', project_id: project.id, iteration_id: -4) }
-
- let!(:group_board1) { boards.create!(name: 'Group Dev1', group_id: group.id) }
- let!(:group_board2) { boards.create!(name: 'Group Dev2', group_id: group.id, iteration_id: -4) }
- let!(:group_board3) { boards.create!(name: 'Group Dev3', group_id: group.id, iteration_id: -4) }
- let!(:group_board4) { boards.create!(name: 'Group Dev4', group_id: group.id, iteration_id: -4) }
-
- describe '#up' do
- it 'schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- described_class.new.up
-
- migration = described_class::MIGRATION
-
- expect(migration).to be_scheduled_delayed_migration(2.minutes, 'group', 'up', group_board2.id, group_board4.id)
- expect(migration).to be_scheduled_delayed_migration(2.minutes, 'project', 'up', project_board2.id, project_board4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq 2
- end
- end
- end
-
- context 'in batches' do
- before do
- stub_const('BackfillCadenceIdForBoardsScopedToIteration::BATCH_SIZE', 2)
- end
-
- it 'schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- described_class.new.up
-
- migration = described_class::MIGRATION
-
- expect(migration).to be_scheduled_delayed_migration(2.minutes, 'group', 'up', group_board2.id, group_board3.id)
- expect(migration).to be_scheduled_delayed_migration(4.minutes, 'group', 'up', group_board4.id, group_board4.id)
- expect(migration).to be_scheduled_delayed_migration(2.minutes, 'project', 'up', project_board2.id, project_board3.id)
- expect(migration).to be_scheduled_delayed_migration(4.minutes, 'project', 'up', project_board4.id, project_board4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq 4
- end
- end
- end
- end
- end
-
- describe '#down' do
- let!(:project_board1) { boards.create!(name: 'Project Dev1', project_id: project.id) }
- let!(:project_board2) { boards.create!(name: 'Project Dev2', project_id: project.id, iteration_cadence_id: cadence.id) }
- let!(:project_board3) { boards.create!(name: 'Project Dev3', project_id: project.id, iteration_id: -4, iteration_cadence_id: cadence.id) }
- let!(:project_board4) { boards.create!(name: 'Project Dev4', project_id: project.id, iteration_id: -4, iteration_cadence_id: cadence.id) }
-
- let!(:group_board1) { boards.create!(name: 'Group Dev1', group_id: group.id) }
- let!(:group_board2) { boards.create!(name: 'Group Dev2', group_id: group.id, iteration_cadence_id: cadence.id) }
- let!(:group_board3) { boards.create!(name: 'Group Dev3', group_id: group.id, iteration_id: -4, iteration_cadence_id: cadence.id) }
- let!(:group_board4) { boards.create!(name: 'Group Dev4', group_id: group.id, iteration_id: -4, iteration_cadence_id: cadence.id) }
-
- it 'schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- described_class.new.down
-
- migration = described_class::MIGRATION
-
- expect(migration).to be_scheduled_delayed_migration(2.minutes, 'none', 'down', project_board2.id, group_board4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq 1
- end
- end
- end
-
- context 'in batches' do
- before do
- stub_const('BackfillCadenceIdForBoardsScopedToIteration::BATCH_SIZE', 2)
- end
-
- it 'schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- described_class.new.down
-
- migration = described_class::MIGRATION
-
- expect(migration).to be_scheduled_delayed_migration(2.minutes, 'none', 'down', project_board2.id, project_board3.id)
- expect(migration).to be_scheduled_delayed_migration(4.minutes, 'none', 'down', project_board4.id, group_board2.id)
- expect(migration).to be_scheduled_delayed_migration(6.minutes, 'none', 'down', group_board3.id, group_board4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq 3
- end
- end
- end
- end
- end
-end
diff --git a/spec/migrations/backfill_integrations_type_new_spec.rb b/spec/migrations/backfill_integrations_type_new_spec.rb
deleted file mode 100644
index 79519c4439a..00000000000
--- a/spec/migrations/backfill_integrations_type_new_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillIntegrationsTypeNew, feature_category: :integrations do
- let!(:migration) { described_class::MIGRATION }
- let!(:integrations) { table(:integrations) }
-
- before do
- integrations.create!(id: 1)
- integrations.create!(id: 2)
- integrations.create!(id: 3)
- integrations.create!(id: 4)
- integrations.create!(id: 5)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of integrations' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :integrations,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/backfill_issues_upvotes_count_spec.rb b/spec/migrations/backfill_issues_upvotes_count_spec.rb
deleted file mode 100644
index b8687595b35..00000000000
--- a/spec/migrations/backfill_issues_upvotes_count_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillIssuesUpvotesCount, feature_category: :team_planning do
- let(:migration) { described_class.new }
- let(:issues) { table(:issues) }
- let(:award_emoji) { table(:award_emoji) }
-
- let!(:issue1) { issues.create! }
- let!(:issue2) { issues.create! }
- let!(:issue3) { issues.create! }
- let!(:issue4) { issues.create! }
- let!(:issue4_without_thumbsup) { issues.create! }
-
- let!(:award_emoji1) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue1.id) }
- let!(:award_emoji2) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue2.id) }
- let!(:award_emoji3) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue3.id) }
- let!(:award_emoji4) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue4.id) }
-
- it 'correctly schedules background migrations', :aggregate_failures do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_migration(issue1.id, issue2.id)
- expect(described_class::MIGRATION).to be_scheduled_migration(issue3.id, issue4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/backfill_stage_event_hash_spec.rb b/spec/migrations/backfill_stage_event_hash_spec.rb
deleted file mode 100644
index 399a9c4dfde..00000000000
--- a/spec/migrations/backfill_stage_event_hash_spec.rb
+++ /dev/null
@@ -1,103 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe BackfillStageEventHash, schema: 20210730103808, feature_category: :value_stream_management do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:labels) { table(:labels) }
- let(:group_stages) { table(:analytics_cycle_analytics_group_stages) }
- let(:project_stages) { table(:analytics_cycle_analytics_project_stages) }
- let(:group_value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
- let(:project_value_streams) { table(:analytics_cycle_analytics_project_value_streams) }
- let(:stage_event_hashes) { table(:analytics_cycle_analytics_stage_event_hashes) }
-
- let(:issue_created) { 1 }
- let(:issue_closed) { 3 }
- let(:issue_label_removed) { 9 }
- let(:unknown_stage_event) { -1 }
-
- let(:namespace) { namespaces.create!(name: 'ns', path: 'ns', type: 'Group') }
- let(:project) { projects.create!(name: 'project', path: 'project', namespace_id: namespace.id) }
- let(:group_label) { labels.create!(title: 'label', type: 'GroupLabel', group_id: namespace.id) }
- let(:group_value_stream) { group_value_streams.create!(name: 'group vs', group_id: namespace.id) }
- let(:project_value_stream) { project_value_streams.create!(name: 'project vs', project_id: project.id) }
-
- let(:group_stage_1) do
- group_stages.create!(
- name: 'stage 1',
- group_id: namespace.id,
- start_event_identifier: issue_created,
- end_event_identifier: issue_closed,
- group_value_stream_id: group_value_stream.id
- )
- end
-
- let(:group_stage_2) do
- group_stages.create!(
- name: 'stage 2',
- group_id: namespace.id,
- start_event_identifier: issue_created,
- end_event_identifier: issue_label_removed,
- end_event_label_id: group_label.id,
- group_value_stream_id: group_value_stream.id
- )
- end
-
- let(:project_stage_1) do
- project_stages.create!(
- name: 'stage 1',
- project_id: project.id,
- start_event_identifier: issue_created,
- end_event_identifier: issue_closed,
- project_value_stream_id: project_value_stream.id
- )
- end
-
- let(:invalid_group_stage) do
- group_stages.create!(
- name: 'stage 3',
- group_id: namespace.id,
- start_event_identifier: issue_created,
- end_event_identifier: unknown_stage_event,
- group_value_stream_id: group_value_stream.id
- )
- end
-
- describe '#up' do
- it 'populates stage_event_hash_id column' do
- group_stage_1
- group_stage_2
- project_stage_1
-
- migrate!
-
- group_stage_1.reload
- group_stage_2.reload
- project_stage_1.reload
-
- expect(group_stage_1.stage_event_hash_id).not_to be_nil
- expect(group_stage_2.stage_event_hash_id).not_to be_nil
- expect(project_stage_1.stage_event_hash_id).not_to be_nil
-
- expect(stage_event_hashes.count).to eq(2) # group_stage_1 and project_stage_1 has the same hash
- end
-
- it 'runs without problem without stages' do
- expect { migrate! }.not_to raise_error
- end
-
- context 'when invalid event identifier is discovered' do
- it 'removes the stage' do
- group_stage_1
- invalid_group_stage
-
- expect { migrate! }.not_to change { group_stage_1 }
-
- expect(group_stages.find_by_id(invalid_group_stage.id)).to eq(nil)
- end
- end
- end
-end
diff --git a/spec/migrations/cleanup_remaining_orphan_invites_spec.rb b/spec/migrations/cleanup_remaining_orphan_invites_spec.rb
deleted file mode 100644
index 598030c99a0..00000000000
--- a/spec/migrations/cleanup_remaining_orphan_invites_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupRemainingOrphanInvites, :migration, feature_category: :subgroups do
- def create_member(**extra_attributes)
- defaults = {
- access_level: 10,
- source_id: 1,
- source_type: "Project",
- notification_level: 0,
- type: 'ProjectMember'
- }
-
- table(:members).create!(defaults.merge(extra_attributes))
- end
-
- def create_user(**extra_attributes)
- defaults = { projects_limit: 0 }
- table(:users).create!(defaults.merge(extra_attributes))
- end
-
- describe '#up', :aggregate_failures do
- it 'removes invite tokens for accepted records' do
- record1 = create_member(invite_token: 'foo', user_id: nil)
- record2 = create_member(invite_token: 'foo2', user_id: create_user(username: 'foo', email: 'foo@example.com').id)
- record3 = create_member(invite_token: nil, user_id: create_user(username: 'bar', email: 'bar@example.com').id)
-
- migrate!
-
- expect(table(:members).find(record1.id).invite_token).to eq 'foo'
- expect(table(:members).find(record2.id).invite_token).to eq nil
- expect(table(:members).find(record3.id).invite_token).to eq nil
- end
- end
-end
diff --git a/spec/migrations/confirm_security_bot_spec.rb b/spec/migrations/confirm_security_bot_spec.rb
deleted file mode 100644
index 55053c29233..00000000000
--- a/spec/migrations/confirm_security_bot_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ConfirmSecurityBot, :migration, feature_category: :user_profile do
- let(:users) { table(:users) }
-
- let(:user_type) { 8 }
-
- context 'when bot is not created' do
- it 'skips migration' do
- migrate!
-
- bot = users.find_by(user_type: user_type)
-
- expect(bot).to be_nil
- end
- end
-
- context 'when bot is confirmed' do
- let(:bot) { table(:users).create!(user_type: user_type, confirmed_at: Time.current, projects_limit: 1) }
-
- it 'skips migration' do
- expect { migrate! }.not_to change { bot.reload.confirmed_at }
- end
- end
-
- context 'when bot is not confirmed' do
- let(:bot) { table(:users).create!(user_type: user_type, projects_limit: 1) }
-
- it 'update confirmed_at' do
- freeze_time do
- expect { migrate! }.to change { bot.reload.confirmed_at }.from(nil).to(Time.current)
- end
- end
- end
-end
diff --git a/spec/migrations/disable_expiration_policies_linked_to_no_container_images_spec.rb b/spec/migrations/disable_expiration_policies_linked_to_no_container_images_spec.rb
deleted file mode 100644
index 1d948257fcc..00000000000
--- a/spec/migrations/disable_expiration_policies_linked_to_no_container_images_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe DisableExpirationPoliciesLinkedToNoContainerImages, feature_category: :container_registry do
- let(:projects) { table(:projects) }
- let(:container_expiration_policies) { table(:container_expiration_policies) }
- let(:container_repositories) { table(:container_repositories) }
- let(:namespaces) { table(:namespaces) }
-
- let!(:namespace) { namespaces.create!(name: 'test', path: 'test') }
- let!(:project) { projects.create!(id: 1, namespace_id: namespace.id, name: 'gitlab1') }
- let!(:container_expiration_policy) { container_expiration_policies.create!(project_id: project.id, enabled: true) }
-
- before do
- projects.create!(id: 2, namespace_id: namespace.id, name: 'gitlab2')
- container_expiration_policies.create!(project_id: 2, enabled: true)
- container_repositories.create!(id: 1, project_id: 2, name: 'image2')
-
- projects.create!(id: 3, namespace_id: namespace.id, name: 'gitlab3')
- container_expiration_policies.create!(project_id: 3, enabled: false)
- container_repositories.create!(id: 2, project_id: 3, name: 'image3')
- end
-
- it 'correctly disable expiration policies linked to no container images' do
- expect(enabled_policies.count).to eq 2
- expect(disabled_policies.count).to eq 1
- expect(container_expiration_policy.enabled).to eq true
-
- migrate!
-
- expect(enabled_policies.count).to eq 1
- expect(disabled_policies.count).to eq 2
- expect(container_expiration_policy.reload.enabled).to eq false
- end
-
- def enabled_policies
- container_expiration_policies.where(enabled: true)
- end
-
- def disabled_policies
- container_expiration_policies.where(enabled: false)
- end
-end
diff --git a/spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb b/spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb
deleted file mode 100644
index 7c03ed2c870..00000000000
--- a/spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-# rubocop:disable Style/WordArray
-RSpec.describe FixBatchedMigrationsOldFormatJobArguments, feature_category: :user_profile do
- let(:batched_background_migrations) { table(:batched_background_migrations) }
-
- context 'when migrations with legacy job arguments exists' do
- it 'updates job arguments to current format' do
- legacy_events_migration = create_batched_migration('events', 'id', ['id', 'id_convert_to_bigint'])
- legacy_push_event_payloads_migration = create_batched_migration('push_event_payloads', 'event_id', ['event_id', 'event_id_convert_to_bigint'])
-
- migrate!
-
- expect(legacy_events_migration.reload.job_arguments).to eq([['id'], ['id_convert_to_bigint']])
- expect(legacy_push_event_payloads_migration.reload.job_arguments).to eq([['event_id'], ['event_id_convert_to_bigint']])
- end
- end
-
- context 'when only migrations with current job arguments exists' do
- it 'updates nothing' do
- events_migration = create_batched_migration('events', 'id', [['id'], ['id_convert_to_bigint']])
- push_event_payloads_migration = create_batched_migration('push_event_payloads', 'event_id', [['event_id'], ['event_id_convert_to_bigint']])
-
- migrate!
-
- expect(events_migration.reload.job_arguments).to eq([['id'], ['id_convert_to_bigint']])
- expect(push_event_payloads_migration.reload.job_arguments).to eq([['event_id'], ['event_id_convert_to_bigint']])
- end
- end
-
- context 'when migrations with both legacy and current job arguments exist' do
- it 'updates nothing' do
- legacy_events_migration = create_batched_migration('events', 'id', ['id', 'id_convert_to_bigint'])
- events_migration = create_batched_migration('events', 'id', [['id'], ['id_convert_to_bigint']])
- legacy_push_event_payloads_migration = create_batched_migration('push_event_payloads', 'event_id', ['event_id', 'event_id_convert_to_bigint'])
- push_event_payloads_migration = create_batched_migration('push_event_payloads', 'event_id', [['event_id'], ['event_id_convert_to_bigint']])
-
- migrate!
-
- expect(legacy_events_migration.reload.job_arguments).to eq(['id', 'id_convert_to_bigint'])
- expect(events_migration.reload.job_arguments).to eq([['id'], ['id_convert_to_bigint']])
- expect(legacy_push_event_payloads_migration.reload.job_arguments).to eq(['event_id', 'event_id_convert_to_bigint'])
- expect(push_event_payloads_migration.reload.job_arguments).to eq([['event_id'], ['event_id_convert_to_bigint']])
- end
- end
-
- def create_batched_migration(table_name, column_name, job_arguments)
- batched_background_migrations.create!(
- max_value: 10,
- batch_size: 10,
- sub_batch_size: 10,
- interval: 1,
- job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
- table_name: table_name,
- column_name: column_name,
- job_arguments: job_arguments
- )
- end
-end
-# rubocop:enable Style/WordArray
diff --git a/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb b/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb
deleted file mode 100644
index 1385b67b607..00000000000
--- a/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe GenerateCustomersDotJwtSigningKey, feature_category: :customersdot_application do
- let(:application_settings) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'application_settings'
-
- attr_encrypted :customers_dot_jwt_signing_key, {
- mode: :per_attribute_iv,
- key: Gitlab::Utils.ensure_utf8_size(Rails.application.secrets.db_key_base, bytes: 32.bytes),
- algorithm: 'aes-256-gcm',
- encode: true
- }
- end
- end
-
- it 'generates JWT signing key' do
- application_settings.create!
-
- reversible_migration do |migration|
- migration.before -> {
- settings = application_settings.first
-
- expect(settings.customers_dot_jwt_signing_key).to be_nil
- expect(settings.encrypted_customers_dot_jwt_signing_key).to be_nil
- expect(settings.encrypted_customers_dot_jwt_signing_key_iv).to be_nil
- }
-
- migration.after -> {
- settings = application_settings.first
-
- expect(settings.encrypted_customers_dot_jwt_signing_key).to be_present
- expect(settings.encrypted_customers_dot_jwt_signing_key_iv).to be_present
- expect { OpenSSL::PKey::RSA.new(settings.customers_dot_jwt_signing_key) }.not_to raise_error
- }
- end
- end
-end
diff --git a/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb b/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb
deleted file mode 100644
index 2f62147da9d..00000000000
--- a/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateProtectedAttributeToPendingBuilds, :suppress_gitlab_schemas_validate_connection,
-feature_category: :continuous_integration do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:queue) { table(:ci_pending_builds) }
- let(:builds) { table(:ci_builds) }
-
- before do
- namespaces.create!(id: 123, name: 'sample', path: 'sample')
- projects.create!(id: 123, name: 'sample', path: 'sample', namespace_id: 123)
-
- builds.create!(id: 1, project_id: 123, status: 'pending', protected: false, type: 'Ci::Build')
- builds.create!(id: 2, project_id: 123, status: 'pending', protected: true, type: 'Ci::Build')
- builds.create!(id: 3, project_id: 123, status: 'pending', protected: false, type: 'Ci::Build')
- builds.create!(id: 4, project_id: 123, status: 'pending', protected: true, type: 'Ci::Bridge')
- builds.create!(id: 5, project_id: 123, status: 'success', protected: true, type: 'Ci::Build')
-
- queue.create!(id: 1, project_id: 123, build_id: 1)
- queue.create!(id: 2, project_id: 123, build_id: 2)
- queue.create!(id: 3, project_id: 123, build_id: 3)
- end
-
- it 'updates entries that should be protected' do
- migrate!
-
- expect(queue.where(protected: true).count).to eq 1
- expect(queue.find_by(protected: true).id).to eq 2
- end
-end
diff --git a/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb b/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb
deleted file mode 100644
index 56f47fca864..00000000000
--- a/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe OrphanedInviteTokensCleanup, :migration, feature_category: :subgroups do
- def create_member(**extra_attributes)
- defaults = {
- access_level: 10,
- source_id: 1,
- source_type: "Project",
- notification_level: 0,
- type: 'ProjectMember'
- }
-
- table(:members).create!(defaults.merge(extra_attributes))
- end
-
- shared_examples 'removes orphaned invite tokens' do
- it 'removes invite tokens for accepted records with invite_accepted_at < created_at' do
- record1 = create_member(invite_token: 'foo', invite_accepted_at: 1.day.ago, created_at: 1.hour.ago)
- record2 = create_member(invite_token: 'foo2', invite_accepted_at: nil, created_at: 1.hour.ago)
- record3 = create_member(invite_token: 'foo3', invite_accepted_at: 1.day.ago, created_at: 1.year.ago)
-
- migrate!
-
- expect(table(:members).find(record1.id).invite_token).to eq nil
- expect(table(:members).find(record2.id).invite_token).to eq 'foo2'
- expect(table(:members).find(record3.id).invite_token).to eq 'foo3'
- end
- end
-
- describe '#up', :aggregate_failures do
- it_behaves_like 'removes orphaned invite tokens'
- end
-
- context 'when there is a mix of timestamptz and timestamp types' do
- around do |example|
- ActiveRecord::Base.connection.execute "ALTER TABLE members alter created_at type timestamp with time zone"
-
- example.run
-
- ActiveRecord::Base.connection.execute "ALTER TABLE members alter created_at type timestamp without time zone"
- end
-
- describe '#up', :aggregate_failures do
- it_behaves_like 'removes orphaned invite tokens'
- end
- end
-end
diff --git a/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb b/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb
deleted file mode 100644
index 5ebe6787f15..00000000000
--- a/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ReScheduleLatestPipelineIdPopulationWithAllSecurityRelatedArtifactTypes,
- :suppress_gitlab_schemas_validate_connection, feature_category: :vulnerability_management do
- let(:namespaces) { table(:namespaces) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:projects) { table(:projects) }
- let(:project_settings) { table(:project_settings) }
- let(:vulnerability_statistics) { table(:vulnerability_statistics) }
-
- let(:letter_grade_a) { 0 }
-
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project_1) { projects.create!(namespace_id: namespace.id, name: 'Foo 1') }
- let(:project_2) { projects.create!(namespace_id: namespace.id, name: 'Foo 2') }
- let(:project_3) { projects.create!(namespace_id: namespace.id, name: 'Foo 3') }
- let(:project_4) { projects.create!(namespace_id: namespace.id, name: 'Foo 4') }
-
- before do
- project_settings.create!(project_id: project_1.id, has_vulnerabilities: true)
- project_settings.create!(project_id: project_2.id, has_vulnerabilities: true)
- project_settings.create!(project_id: project_3.id)
- project_settings.create!(project_id: project_4.id, has_vulnerabilities: true)
-
- pipeline = pipelines.create!(project_id: project_2.id, ref: 'master', sha: 'adf43c3a')
-
- vulnerability_statistics.create!(project_id: project_2.id, letter_grade: letter_grade_a, latest_pipeline_id: pipeline.id)
- vulnerability_statistics.create!(project_id: project_4.id, letter_grade: letter_grade_a)
-
- allow(Gitlab).to receive(:ee?).and_return(is_ee?)
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
- end
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'when the installation is FOSS' do
- let(:is_ee?) { false }
-
- it 'does not schedule any background job' do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to be(0)
- end
- end
-
- context 'when the installation is EE' do
- let(:is_ee?) { true }
-
- it 'schedules the background jobs' do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to be(2)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(described_class::DELAY_INTERVAL, project_1.id, project_1.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2 * described_class::DELAY_INTERVAL, project_4.id, project_4.id)
- end
- end
-end
diff --git a/spec/migrations/remove_duplicate_dast_site_tokens_spec.rb b/spec/migrations/remove_duplicate_dast_site_tokens_spec.rb
deleted file mode 100644
index 2b21dc3b67f..00000000000
--- a/spec/migrations/remove_duplicate_dast_site_tokens_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveDuplicateDastSiteTokens, feature_category: :dynamic_application_security_testing do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:dast_site_tokens) { table(:dast_site_tokens) }
- let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
- let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id, path: 'project1') }
- # create non duplicate dast site token
- let!(:dast_site_token1) { dast_site_tokens.create!(project_id: project1.id, url: 'https://gitlab.com', token: SecureRandom.uuid) }
-
- context 'when duplicate dast site tokens exists' do
- # create duplicate dast site token
- let!(:duplicate_url) { 'https://about.gitlab.com' }
-
- let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id, path: 'project2') }
- let!(:dast_site_token2) { dast_site_tokens.create!(project_id: project2.id, url: duplicate_url, token: SecureRandom.uuid) }
- let!(:dast_site_token3) { dast_site_tokens.create!(project_id: project2.id, url: 'https://temp_url.com', token: SecureRandom.uuid) }
- let!(:dast_site_token4) { dast_site_tokens.create!(project_id: project2.id, url: 'https://other_temp_url.com', token: SecureRandom.uuid) }
-
- before 'update URL to bypass uniqueness validation' do
- dast_site_tokens.where(project_id: 2).update_all(url: duplicate_url)
- end
-
- describe 'migration up' do
- it 'does remove duplicated dast site tokens' do
- expect(dast_site_tokens.count).to eq(4)
- expect(dast_site_tokens.where(project_id: 2, url: duplicate_url).size).to eq(3)
-
- migrate!
-
- expect(dast_site_tokens.count).to eq(2)
- expect(dast_site_tokens.where(project_id: 2, url: duplicate_url).size).to eq(1)
- end
- end
- end
-
- context 'when duplicate dast site tokens does not exists' do
- before do
- dast_site_tokens.create!(project_id: 1, url: 'https://about.gitlab.com/handbook', token: SecureRandom.uuid)
- end
-
- describe 'migration up' do
- it 'does remove duplicated dast site tokens' do
- expect { migrate! }.not_to change(dast_site_tokens, :count)
- end
- end
- end
-end
diff --git a/spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb b/spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb
deleted file mode 100644
index 6cc25b74d02..00000000000
--- a/spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveDuplicateDastSiteTokensWithSameToken, feature_category: :dynamic_application_security_testing do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:dast_site_tokens) { table(:dast_site_tokens) }
- let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
- let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id, path: 'project1') }
- # create non duplicate dast site token
- let!(:dast_site_token1) { dast_site_tokens.create!(project_id: project1.id, url: 'https://gitlab.com', token: SecureRandom.uuid) }
-
- context 'when duplicate dast site tokens exists' do
- # create duplicate dast site token
- let!(:duplicate_token) { 'duplicate_token' }
- let!(:other_duplicate_token) { 'other_duplicate_token' }
-
- let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id, path: 'project2') }
- let!(:dast_site_token2) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab2.com', token: duplicate_token) }
- let!(:dast_site_token3) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab3.com', token: duplicate_token) }
- let!(:dast_site_token4) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab4.com', token: duplicate_token) }
-
- let!(:project3) { projects.create!(id: 3, namespace_id: namespace.id, path: 'project3') }
- let!(:dast_site_token5) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab2.com', token: other_duplicate_token) }
- let!(:dast_site_token6) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab3.com', token: other_duplicate_token) }
- let!(:dast_site_token7) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab4.com', token: other_duplicate_token) }
-
- describe 'migration up' do
- it 'does remove duplicated dast site tokens with the same token' do
- expect(dast_site_tokens.count).to eq(7)
- expect(dast_site_tokens.where(token: duplicate_token).size).to eq(3)
-
- migrate!
-
- expect(dast_site_tokens.count).to eq(3)
- expect(dast_site_tokens.where(token: duplicate_token).size).to eq(1)
- end
- end
- end
-
- context 'when duplicate dast site tokens do not exist' do
- let!(:dast_site_token5) { dast_site_tokens.create!(project_id: 1, url: 'https://gitlab5.com', token: SecureRandom.uuid) }
-
- describe 'migration up' do
- it 'does not remove any dast site tokens' do
- expect { migrate! }.not_to change(dast_site_tokens, :count)
- end
- end
- end
-end
diff --git a/spec/migrations/rename_services_to_integrations_spec.rb b/spec/migrations/rename_services_to_integrations_spec.rb
deleted file mode 100644
index a90b0bfabd2..00000000000
--- a/spec/migrations/rename_services_to_integrations_spec.rb
+++ /dev/null
@@ -1,255 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RenameServicesToIntegrations, feature_category: :integrations do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:integrations) { table(:integrations) }
- let(:services) { table(:services) }
-
- before do
- @namespace = namespaces.create!(name: 'foo', path: 'foo')
- @project = projects.create!(namespace_id: @namespace.id)
- end
-
- RSpec.shared_examples 'a table (or view) with triggers' do
- describe 'INSERT tracker trigger' do
- it 'sets `has_external_issue_tracker` to true when active `issue_tracker` is inserted' do
- expect do
- subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
- end.to change { @project.reload.has_external_issue_tracker }.to(true)
- end
-
- it 'does not set `has_external_issue_tracker` to true when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
-
- expect do
- subject.create!(category: 'issue_tracker', active: true, project_id: different_project.id)
- end.not_to change { @project.reload.has_external_issue_tracker }
- end
-
- it 'does not set `has_external_issue_tracker` to true when inactive `issue_tracker` is inserted' do
- expect do
- subject.create!(category: 'issue_tracker', active: false, project_id: @project.id)
- end.not_to change { @project.reload.has_external_issue_tracker }
- end
-
- it 'does not set `has_external_issue_tracker` to true when a non-`issue tracker` active integration is inserted' do
- expect do
- subject.create!(category: 'my_type', active: true, project_id: @project.id)
- end.not_to change { @project.reload.has_external_issue_tracker }
- end
- end
-
- describe 'UPDATE tracker trigger' do
- it 'sets `has_external_issue_tracker` to true when `issue_tracker` is made active' do
- integration = subject.create!(category: 'issue_tracker', active: false, project_id: @project.id)
-
- expect do
- integration.update!(active: true)
- end.to change { @project.reload.has_external_issue_tracker }.to(true)
- end
-
- it 'sets `has_external_issue_tracker` to false when `issue_tracker` is made inactive' do
- integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
-
- expect do
- integration.update!(active: false)
- end.to change { @project.reload.has_external_issue_tracker }.to(false)
- end
-
- it 'sets `has_external_issue_tracker` to false when `issue_tracker` is made inactive, and an inactive `issue_tracker` exists' do
- subject.create!(category: 'issue_tracker', active: false, project_id: @project.id)
- integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
-
- expect do
- integration.update!(active: false)
- end.to change { @project.reload.has_external_issue_tracker }.to(false)
- end
-
- it 'does not change `has_external_issue_tracker` when `issue_tracker` is made inactive, if an active `issue_tracker` exists' do
- subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
- integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
-
- expect do
- integration.update!(active: false)
- end.not_to change { @project.reload.has_external_issue_tracker }
- end
-
- it 'does not change `has_external_issue_tracker` when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
- integration = subject.create!(category: 'issue_tracker', active: false, project_id: different_project.id)
-
- expect do
- integration.update!(active: true)
- end.not_to change { @project.reload.has_external_issue_tracker }
- end
- end
-
- describe 'DELETE tracker trigger' do
- it 'sets `has_external_issue_tracker` to false when `issue_tracker` is deleted' do
- integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
-
- expect do
- integration.delete
- end.to change { @project.reload.has_external_issue_tracker }.to(false)
- end
-
- it 'sets `has_external_issue_tracker` to false when `issue_tracker` is deleted, if an inactive `issue_tracker` still exists' do
- subject.create!(category: 'issue_tracker', active: false, project_id: @project.id)
- integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
-
- expect do
- integration.delete
- end.to change { @project.reload.has_external_issue_tracker }.to(false)
- end
-
- it 'does not change `has_external_issue_tracker` when `issue_tracker` is deleted, if an active `issue_tracker` still exists' do
- subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
- integration = subject.create!(category: 'issue_tracker', active: true, project_id: @project.id)
-
- expect do
- integration.delete
- end.not_to change { @project.reload.has_external_issue_tracker }
- end
-
- it 'does not change `has_external_issue_tracker` when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
- integration = subject.create!(category: 'issue_tracker', active: true, project_id: different_project.id)
-
- expect do
- integration.delete
- end.not_to change { @project.reload.has_external_issue_tracker }
- end
- end
-
- describe 'INSERT wiki trigger' do
- it 'sets `has_external_wiki` to true when active `ExternalWikiService` is inserted' do
- expect do
- subject.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
- end.to change { @project.reload.has_external_wiki }.to(true)
- end
-
- it 'does not set `has_external_wiki` to true when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
-
- expect do
- subject.create!(type: 'ExternalWikiService', active: true, project_id: different_project.id)
- end.not_to change { @project.reload.has_external_wiki }
- end
-
- it 'does not set `has_external_wiki` to true when inactive `ExternalWikiService` is inserted' do
- expect do
- subject.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
- end.not_to change { @project.reload.has_external_wiki }
- end
-
- it 'does not set `has_external_wiki` to true when active other integration is inserted' do
- expect do
- subject.create!(type: 'MyService', active: true, project_id: @project.id)
- end.not_to change { @project.reload.has_external_wiki }
- end
- end
-
- describe 'UPDATE wiki trigger' do
- it 'sets `has_external_wiki` to true when `ExternalWikiService` is made active' do
- integration = subject.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
-
- expect do
- integration.update!(active: true)
- end.to change { @project.reload.has_external_wiki }.to(true)
- end
-
- it 'sets `has_external_wiki` to false when `ExternalWikiService` is made inactive' do
- integration = subject.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
-
- expect do
- integration.update!(active: false)
- end.to change { @project.reload.has_external_wiki }.to(false)
- end
-
- it 'does not change `has_external_wiki` when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
- integration = subject.create!(type: 'ExternalWikiService', active: false, project_id: different_project.id)
-
- expect do
- integration.update!(active: true)
- end.not_to change { @project.reload.has_external_wiki }
- end
- end
-
- describe 'DELETE wiki trigger' do
- it 'sets `has_external_wiki` to false when `ExternalWikiService` is deleted' do
- integration = subject.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
-
- expect do
- integration.delete
- end.to change { @project.reload.has_external_wiki }.to(false)
- end
-
- it 'does not change `has_external_wiki` when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
- integration = subject.create!(type: 'ExternalWikiService', active: true, project_id: different_project.id)
-
- expect do
- integration.delete
- end.not_to change { @project.reload.has_external_wiki }
- end
- end
- end
-
- RSpec.shared_examples 'a table (or view) without triggers' do
- specify do
- number_of_triggers = ActiveRecord::Base.connection
- .execute("SELECT count(*) FROM information_schema.triggers WHERE event_object_table = '#{subject.table_name}'")
- .first['count']
-
- expect(number_of_triggers).to eq(0)
- end
- end
-
- describe '#up' do
- before do
- # LOCK TABLE statements must be in a transaction
- ActiveRecord::Base.transaction { migrate! }
- end
-
- context 'the integrations table' do
- subject { integrations }
-
- it_behaves_like 'a table (or view) with triggers'
- end
-
- context 'the services table' do
- subject { services }
-
- it_behaves_like 'a table (or view) without triggers'
- end
- end
-
- describe '#down' do
- before do
- # LOCK TABLE statements must be in a transaction
- ActiveRecord::Base.transaction do
- migration.up
- migration.down
- end
- end
-
- context 'the services table' do
- subject { services }
-
- it_behaves_like 'a table (or view) with triggers'
- end
-
- context 'the integrations table' do
- subject { integrations }
-
- it_behaves_like 'a table (or view) without triggers'
- end
- end
-end
diff --git a/spec/migrations/replace_external_wiki_triggers_spec.rb b/spec/migrations/replace_external_wiki_triggers_spec.rb
deleted file mode 100644
index c2bc5c44c77..00000000000
--- a/spec/migrations/replace_external_wiki_triggers_spec.rb
+++ /dev/null
@@ -1,132 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ReplaceExternalWikiTriggers, feature_category: :integrations do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:integrations) { table(:integrations) }
-
- before do
- @namespace = namespaces.create!(name: 'foo', path: 'foo')
- @project = projects.create!(namespace_id: @namespace.id)
- end
-
- def create_external_wiki_integration(**attrs)
- attrs.merge!(type_info)
-
- integrations.create!(**attrs)
- end
-
- def has_external_wiki
- !!@project.reload.has_external_wiki
- end
-
- shared_examples 'external wiki triggers' do
- describe 'INSERT trigger' do
- it 'sets `has_external_wiki` to true when active external wiki integration is inserted' do
- expect do
- create_external_wiki_integration(active: true, project_id: @project.id)
- end.to change { has_external_wiki }.to(true)
- end
-
- it 'does not set `has_external_wiki` to true when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
-
- expect do
- create_external_wiki_integration(active: true, project_id: different_project.id)
- end.not_to change { has_external_wiki }
- end
-
- it 'does not set `has_external_wiki` to true when inactive external wiki integration is inserted' do
- expect do
- create_external_wiki_integration(active: false, project_id: @project.id)
- end.not_to change { has_external_wiki }
- end
-
- it 'does not set `has_external_wiki` to true when active other service is inserted' do
- expect do
- integrations.create!(type_new: 'Integrations::MyService', type: 'MyService', active: true, project_id: @project.id)
- end.not_to change { has_external_wiki }
- end
- end
-
- describe 'UPDATE trigger' do
- it 'sets `has_external_wiki` to true when `ExternalWikiService` is made active' do
- service = create_external_wiki_integration(active: false, project_id: @project.id)
-
- expect do
- service.update!(active: true)
- end.to change { has_external_wiki }.to(true)
- end
-
- it 'sets `has_external_wiki` to false when integration is made inactive' do
- service = create_external_wiki_integration(active: true, project_id: @project.id)
-
- expect do
- service.update!(active: false)
- end.to change { has_external_wiki }.to(false)
- end
-
- it 'does not change `has_external_wiki` when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
- service = create_external_wiki_integration(active: false, project_id: different_project.id)
-
- expect do
- service.update!(active: true)
- end.not_to change { has_external_wiki }
- end
- end
-
- describe 'DELETE trigger' do
- it 'sets `has_external_wiki` to false when integration is deleted' do
- service = create_external_wiki_integration(active: true, project_id: @project.id)
-
- expect do
- service.delete
- end.to change { has_external_wiki }.to(false)
- end
-
- it 'does not change `has_external_wiki` when integration is for a different project' do
- different_project = projects.create!(namespace_id: @namespace.id)
- service = create_external_wiki_integration(active: true, project_id: different_project.id)
-
- expect do
- service.delete
- end.not_to change { has_external_wiki }
- end
- end
- end
-
- describe '#up' do
- before do
- migrate!
- end
-
- context 'when integrations are created with the new STI value' do
- let(:type_info) { { type_new: 'Integrations::ExternalWiki' } }
-
- it_behaves_like 'external wiki triggers'
- end
-
- context 'when integrations are created with the old STI value' do
- let(:type_info) { { type: 'ExternalWikiService' } }
-
- it_behaves_like 'external wiki triggers'
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- let(:type_info) { { type: 'ExternalWikiService' } }
-
- it_behaves_like 'external wiki triggers'
- end
-end
diff --git a/spec/migrations/reschedule_delete_orphaned_deployments_spec.rb b/spec/migrations/reschedule_delete_orphaned_deployments_spec.rb
deleted file mode 100644
index bbc4494837a..00000000000
--- a/spec/migrations/reschedule_delete_orphaned_deployments_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RescheduleDeleteOrphanedDeployments, :sidekiq, schema: 20210617161348,
- feature_category: :continuous_delivery do
- let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) }
- let(:background_migration_jobs) { table(:background_migration_jobs) }
-
- before do
- create_deployment!(environment.id, project.id)
- create_deployment!(environment.id, project.id)
- create_deployment!(environment.id, project.id)
- create_deployment!(non_existing_record_id, project.id)
- create_deployment!(non_existing_record_id, project.id)
- create_deployment!(non_existing_record_id, project.id)
- create_deployment!(non_existing_record_id, project.id)
-
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- it 'steal existing background migration jobs' do
- expect(Gitlab::BackgroundMigration).to receive(:steal).with('DeleteOrphanedDeployments')
-
- migrate!
- end
-
- it 'cleans up background migration jobs tracking records' do
- old_successful_job = background_migration_jobs.create!(
- class_name: 'DeleteOrphanedDeployments',
- status: Gitlab::Database::BackgroundMigrationJob.statuses[:succeeded],
- arguments: [table(:deployments).minimum(:id), table(:deployments).minimum(:id)]
- )
-
- old_pending_job = background_migration_jobs.create!(
- class_name: 'DeleteOrphanedDeployments',
- status: Gitlab::Database::BackgroundMigrationJob.statuses[:pending],
- arguments: [table(:deployments).maximum(:id), table(:deployments).maximum(:id)]
- )
-
- migrate!
-
- expect { old_successful_job.reload }.to raise_error(ActiveRecord::RecordNotFound)
- expect { old_pending_job.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
-
- it 'schedules DeleteOrphanedDeployments background jobs' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(7)
- table(:deployments).find_each do |deployment|
- expect(described_class::MIGRATION).to be_scheduled_migration(deployment.id, deployment.id)
- end
- end
- end
- end
-
- def create_deployment!(environment_id, project_id)
- table(:deployments).create!(
- environment_id: environment_id,
- project_id: project_id,
- ref: 'master',
- tag: false,
- sha: 'x',
- status: 1,
- iid: table(:deployments).count + 1)
- end
-end
diff --git a/spec/migrations/reset_job_token_scope_enabled_again_spec.rb b/spec/migrations/reset_job_token_scope_enabled_again_spec.rb
deleted file mode 100644
index 9f1180b6e24..00000000000
--- a/spec/migrations/reset_job_token_scope_enabled_again_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ResetJobTokenScopeEnabledAgain, feature_category: :continuous_integration do
- let(:settings) { table(:project_ci_cd_settings) }
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project_1) { projects.create!(name: 'proj-1', path: 'gitlab-org', namespace_id: namespace.id) }
- let(:project_2) { projects.create!(name: 'proj-2', path: 'gitlab-org', namespace_id: namespace.id) }
-
- before do
- settings.create!(id: 1, project_id: project_1.id, job_token_scope_enabled: true)
- settings.create!(id: 2, project_id: project_2.id, job_token_scope_enabled: false)
- end
-
- it 'migrates job_token_scope_enabled to be always false' do
- expect { migrate! }
- .to change { settings.where(job_token_scope_enabled: false).count }
- .from(1).to(2)
- end
-end
diff --git a/spec/migrations/reset_job_token_scope_enabled_spec.rb b/spec/migrations/reset_job_token_scope_enabled_spec.rb
deleted file mode 100644
index 4ce9078246a..00000000000
--- a/spec/migrations/reset_job_token_scope_enabled_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ResetJobTokenScopeEnabled, feature_category: :continuous_integration do
- let(:settings) { table(:project_ci_cd_settings) }
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project_1) { projects.create!(name: 'proj-1', path: 'gitlab-org', namespace_id: namespace.id) }
- let(:project_2) { projects.create!(name: 'proj-2', path: 'gitlab-org', namespace_id: namespace.id) }
-
- before do
- settings.create!(id: 1, project_id: project_1.id, job_token_scope_enabled: true)
- settings.create!(id: 2, project_id: project_2.id, job_token_scope_enabled: false)
- end
-
- it 'migrates job_token_scope_enabled to be always false' do
- expect { migrate! }
- .to change { settings.where(job_token_scope_enabled: false).count }
- .from(1).to(2)
- end
-end
diff --git a/spec/migrations/reset_severity_levels_to_new_default_spec.rb b/spec/migrations/reset_severity_levels_to_new_default_spec.rb
deleted file mode 100644
index 83e57b852a0..00000000000
--- a/spec/migrations/reset_severity_levels_to_new_default_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ResetSeverityLevelsToNewDefault, feature_category: :source_code_management do
- let(:approval_project_rules) { table(:approval_project_rules) }
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
- let(:project) { projects.create!(name: 'project', path: 'project', namespace_id: namespace.id) }
- let(:approval_project_rule) { approval_project_rules.create!(name: 'rule', project_id: project.id, severity_levels: severity_levels) }
-
- context 'without having all severity levels selected' do
- let(:severity_levels) { ['high'] }
-
- it 'does not change severity_levels' do
- expect(approval_project_rule.severity_levels).to eq(severity_levels)
- expect { migrate! }.not_to change { approval_project_rule.reload.severity_levels }
- end
- end
-
- context 'with all scanners selected' do
- let(:severity_levels) { ::Enums::Vulnerability::SEVERITY_LEVELS.keys }
- let(:default_levels) { %w(unknown high critical) }
-
- it 'changes severity_levels to the default value' do
- expect(approval_project_rule.severity_levels).to eq(severity_levels)
- expect { migrate! }.to change { approval_project_rule.reload.severity_levels }.from(severity_levels).to(default_levels)
- end
- end
-end
diff --git a/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb b/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb
deleted file mode 100644
index 63678a094a7..00000000000
--- a/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleCopyCiBuildsColumnsToSecurityScans2, feature_category: :dependency_scanning do
- it 'is a no-op' do
- migrate!
- end
-end
diff --git a/spec/migrations/schedule_security_setting_creation_spec.rb b/spec/migrations/schedule_security_setting_creation_spec.rb
deleted file mode 100644
index edabb2a2299..00000000000
--- a/spec/migrations/schedule_security_setting_creation_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleSecuritySettingCreation, :sidekiq, feature_category: :projects do
- describe '#up' do
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
-
- context 'for EE version' do
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- allow(Gitlab).to receive(:ee?).and_return(true)
- end
-
- it 'schedules background migration job' do
- namespace = namespaces.create!(name: 'test', path: 'test')
- projects.create!(id: 12, namespace_id: namespace.id, name: 'red', path: 'red')
- projects.create!(id: 13, namespace_id: namespace.id, name: 'green', path: 'green')
- projects.create!(id: 14, namespace_id: namespace.id, name: 'blue', path: 'blue')
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(5.minutes, 12, 13)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(10.minutes, 14, 14)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
- end
-
- context 'for FOSS version' do
- before do
- allow(Gitlab).to receive(:ee?).and_return(false)
- end
-
- it 'does not schedule any jobs' do
- namespace = namespaces.create!(name: 'test', path: 'test')
- projects.create!(id: 12, namespace_id: namespace.id, name: 'red', path: 'red')
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(0)
- end
- end
- end
- end
- end
-end
diff --git a/spec/migrations/set_default_job_token_scope_true_spec.rb b/spec/migrations/set_default_job_token_scope_true_spec.rb
deleted file mode 100644
index 25f4f07e15a..00000000000
--- a/spec/migrations/set_default_job_token_scope_true_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SetDefaultJobTokenScopeTrue, schema: 20210819153805, feature_category: :continuous_integration do
- let(:ci_cd_settings) { table(:project_ci_cd_settings) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- let(:namespace) { namespaces.create!(name: 'test', path: 'path', type: 'Group') }
- let(:project) { projects.create!(namespace_id: namespace.id) }
-
- describe '#up' do
- it 'sets the job_token_scope_enabled default to true' do
- described_class.new.up
-
- settings = ci_cd_settings.create!(project_id: project.id)
-
- expect(settings.job_token_scope_enabled).to be_truthy
- end
- end
-
- describe '#down' do
- it 'sets the job_token_scope_enabled default to false' do
- described_class.new.down
-
- settings = ci_cd_settings.create!(project_id: project.id)
-
- expect(settings.job_token_scope_enabled).to be_falsey
- end
- end
-end
diff --git a/spec/migrations/steal_merge_request_diff_commit_users_migration_spec.rb b/spec/migrations/steal_merge_request_diff_commit_users_migration_spec.rb
deleted file mode 100644
index d2cd7a6980d..00000000000
--- a/spec/migrations/steal_merge_request_diff_commit_users_migration_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe StealMergeRequestDiffCommitUsersMigration, :migration, feature_category: :source_code_management do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'schedules a job if there are pending jobs' do
- Gitlab::Database::BackgroundMigrationJob.create!(
- class_name: 'MigrateMergeRequestDiffCommitUsers',
- arguments: [10, 20]
- )
-
- expect(migration)
- .to receive(:migrate_in)
- .with(1.hour, 'StealMigrateMergeRequestDiffCommitUsers', [10, 20])
-
- migration.up
- end
-
- it 'does not schedule any jobs when all jobs have been completed' do
- expect(migration).not_to receive(:migrate_in)
-
- migration.up
- end
- end
-end
diff --git a/spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb b/spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb
deleted file mode 100644
index efc051d9a68..00000000000
--- a/spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb
+++ /dev/null
@@ -1,102 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateIntegrationsTriggerTypeNewOnInsert, feature_category: :integrations do
- let(:migration) { described_class.new }
- let(:integrations) { table(:integrations) }
-
- shared_examples 'transforms known types' do
- # This matches Gitlab::Integrations::StiType at the time the original trigger
- # was added in db/migrate/20210721135638_add_triggers_to_integrations_type_new.rb
- let(:namespaced_integrations) do
- %w[
- Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
- Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Irker Jenkins Jira Mattermost
- MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
- Prometheus Pushover Redmine Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack
-
- Github GitlabSlackApplication
- ]
- end
-
- it 'sets `type_new` to the transformed `type` class name' do
- namespaced_integrations.each do |type|
- integration = integrations.create!(type: "#{type}Service")
-
- expect(integration.reload).to have_attributes(
- type: "#{type}Service",
- type_new: "Integrations::#{type}"
- )
- end
- end
- end
-
- describe '#up' do
- before do
- migrate!
- end
-
- describe 'INSERT trigger with dynamic mapping' do
- it_behaves_like 'transforms known types'
-
- it 'transforms unknown types if it ends in "Service"' do
- integration = integrations.create!(type: 'AcmeService')
-
- expect(integration.reload).to have_attributes(
- type: 'AcmeService',
- type_new: 'Integrations::Acme'
- )
- end
-
- it 'ignores "Service" occurring elsewhere in the type' do
- integration = integrations.create!(type: 'ServiceAcmeService')
-
- expect(integration.reload).to have_attributes(
- type: 'ServiceAcmeService',
- type_new: 'Integrations::ServiceAcme'
- )
- end
-
- it 'copies unknown types if it does not end with "Service"' do
- integration = integrations.create!(type: 'Integrations::Acme')
-
- expect(integration.reload).to have_attributes(
- type: 'Integrations::Acme',
- type_new: 'Integrations::Acme'
- )
- end
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- describe 'INSERT trigger with static mapping' do
- it_behaves_like 'transforms known types'
-
- it 'ignores types that are already namespaced' do
- integration = integrations.create!(type: 'Integrations::Asana')
-
- expect(integration.reload).to have_attributes(
- type: 'Integrations::Asana',
- type_new: nil
- )
- end
-
- it 'ignores types that are unknown' do
- integration = integrations.create!(type: 'FooBar')
-
- expect(integration.reload).to have_attributes(
- type: 'FooBar',
- type_new: nil
- )
- end
- end
- end
-end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 2b0791323ff..9faa4a98c07 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -2873,7 +2873,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
before do
allow_next_instance_of(Gitlab::Ci::Variables::Builder) do |builder|
+ pipeline_variables_builder = double(
+ ::Gitlab::Ci::Variables::Builder::Pipeline,
+ predefined_variables: [pipeline_pre_var]
+ )
+
allow(builder).to receive(:predefined_variables) { [build_pre_var] }
+ allow(builder).to receive(:pipeline_variables_builder) { pipeline_variables_builder }
end
allow(build).to receive(:yaml_variables) { [build_yaml_var] }
@@ -2886,9 +2892,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
.to receive(:predefined_variables) { [project_pre_var] }
project.variables.create!(key: 'secret', value: 'value')
-
- allow(build.pipeline)
- .to receive(:predefined_variables).and_return([pipeline_pre_var])
end
it 'returns variables in order depending on resource hierarchy' do
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 4a59f8d8efc..61422978df7 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -1070,296 +1070,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
- describe '#predefined_variables' do
- subject { pipeline.predefined_variables }
-
- let(:pipeline) { build(:ci_empty_pipeline, :created) }
-
- it 'includes all predefined variables in a valid order' do
- keys = subject.map { |variable| variable[:key] }
-
- expect(keys).to eq %w[
- CI_PIPELINE_IID
- CI_PIPELINE_SOURCE
- CI_PIPELINE_CREATED_AT
- CI_COMMIT_SHA
- CI_COMMIT_SHORT_SHA
- CI_COMMIT_BEFORE_SHA
- CI_COMMIT_REF_NAME
- CI_COMMIT_REF_SLUG
- CI_COMMIT_BRANCH
- CI_COMMIT_MESSAGE
- CI_COMMIT_TITLE
- CI_COMMIT_DESCRIPTION
- CI_COMMIT_REF_PROTECTED
- CI_COMMIT_TIMESTAMP
- CI_COMMIT_AUTHOR
- CI_BUILD_REF
- CI_BUILD_BEFORE_SHA
- CI_BUILD_REF_NAME
- CI_BUILD_REF_SLUG
- ]
- end
-
- context 'when merge request is present' do
- let_it_be(:assignees) { create_list(:user, 2) }
- let_it_be(:milestone) { create(:milestone, project: project) }
- let_it_be(:labels) { create_list(:label, 2) }
-
- let(:merge_request) do
- create(:merge_request, :simple,
- source_project: project,
- target_project: project,
- assignees: assignees,
- milestone: milestone,
- labels: labels)
- end
-
- context 'when pipeline for merge request is created' do
- let(:pipeline) do
- create(:ci_pipeline, :detached_merge_request_pipeline,
- ci_ref_presence: false,
- user: user,
- merge_request: merge_request)
- end
-
- before do
- project.add_developer(user)
- end
-
- it 'exposes merge request pipeline variables' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
- 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
- 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
- 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
- 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
- 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
- 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
- 'CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED' => ProtectedBranch.protected?(merge_request.target_project, merge_request.target_branch).to_s,
- 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => '',
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
- 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
- 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => '',
- 'CI_MERGE_REQUEST_TITLE' => merge_request.title,
- 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
- 'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
- 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
- 'CI_MERGE_REQUEST_EVENT_TYPE' => 'detached',
- 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true))
- end
-
- it 'exposes diff variables' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_DIFF_ID' => merge_request.merge_request_diff.id.to_s,
- 'CI_MERGE_REQUEST_DIFF_BASE_SHA' => merge_request.merge_request_diff.base_commit_sha)
- end
-
- context 'without assignee' do
- let(:assignees) { [] }
-
- it 'does not expose assignee variable' do
- expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_ASSIGNEES')
- end
- end
-
- context 'without milestone' do
- let(:milestone) { nil }
-
- it 'does not expose milestone variable' do
- expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_MILESTONE')
- end
- end
-
- context 'without labels' do
- let(:labels) { [] }
-
- it 'does not expose labels variable' do
- expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_LABELS')
- end
- end
- end
-
- context 'when pipeline on branch is created' do
- let(:pipeline) do
- create(:ci_pipeline, project: project, user: user, ref: 'feature')
- end
-
- context 'when a merge request is created' do
- before do
- merge_request
- end
-
- context 'when user has access to project' do
- before do
- project.add_developer(user)
- end
-
- it 'merge request references are returned matching the pipeline' do
- expect(subject.to_hash).to include(
- 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true))
- end
- end
-
- context 'when user does not have access to project' do
- it 'CI_OPEN_MERGE_REQUESTS is not returned' do
- expect(subject.to_hash).not_to have_key('CI_OPEN_MERGE_REQUESTS')
- end
- end
- end
-
- context 'when no a merge request is created' do
- it 'CI_OPEN_MERGE_REQUESTS is not returned' do
- expect(subject.to_hash).not_to have_key('CI_OPEN_MERGE_REQUESTS')
- end
- end
- end
-
- context 'with merged results' do
- let(:pipeline) do
- create(:ci_pipeline, :merged_result_pipeline, merge_request: merge_request)
- end
-
- it 'exposes merge request pipeline variables' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
- 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
- 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
- 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
- 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
- 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
- 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
- 'CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED' => ProtectedBranch.protected?(merge_request.target_project, merge_request.target_branch).to_s,
- 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => merge_request.target_branch_sha,
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
- 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
- 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
- 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => merge_request.source_branch_sha,
- 'CI_MERGE_REQUEST_TITLE' => merge_request.title,
- 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
- 'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
- 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
- 'CI_MERGE_REQUEST_EVENT_TYPE' => 'merged_result')
- end
-
- it 'exposes diff variables' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_DIFF_ID' => merge_request.merge_request_diff.id.to_s,
- 'CI_MERGE_REQUEST_DIFF_BASE_SHA' => merge_request.merge_request_diff.base_commit_sha)
- end
- end
- end
-
- context 'when source is external pull request' do
- let(:pipeline) do
- create(:ci_pipeline, source: :external_pull_request_event, external_pull_request: pull_request)
- end
-
- let(:pull_request) { create(:external_pull_request, project: project) }
-
- it 'exposes external pull request pipeline variables' do
- expect(subject.to_hash)
- .to include(
- 'CI_EXTERNAL_PULL_REQUEST_IID' => pull_request.pull_request_iid.to_s,
- 'CI_EXTERNAL_PULL_REQUEST_SOURCE_REPOSITORY' => pull_request.source_repository,
- 'CI_EXTERNAL_PULL_REQUEST_TARGET_REPOSITORY' => pull_request.target_repository,
- 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_SHA' => pull_request.source_sha,
- 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA' => pull_request.target_sha,
- 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME' => pull_request.source_branch,
- 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_NAME' => pull_request.target_branch
- )
- end
- end
-
- describe 'variable CI_KUBERNETES_ACTIVE' do
- context 'when pipeline.has_kubernetes_active? is true' do
- before do
- allow(pipeline).to receive(:has_kubernetes_active?).and_return(true)
- end
-
- it "is included with value 'true'" do
- expect(subject.to_hash).to include('CI_KUBERNETES_ACTIVE' => 'true')
- end
- end
-
- context 'when pipeline.has_kubernetes_active? is false' do
- before do
- allow(pipeline).to receive(:has_kubernetes_active?).and_return(false)
- end
-
- it 'is not included' do
- expect(subject.to_hash).not_to have_key('CI_KUBERNETES_ACTIVE')
- end
- end
- end
-
- describe 'variable CI_GITLAB_FIPS_MODE' do
- context 'when FIPS flag is enabled' do
- before do
- allow(Gitlab::FIPS).to receive(:enabled?).and_return(true)
- end
-
- it "is included with value 'true'" do
- expect(subject.to_hash).to include('CI_GITLAB_FIPS_MODE' => 'true')
- end
- end
-
- context 'when FIPS flag is disabled' do
- before do
- allow(Gitlab::FIPS).to receive(:enabled?).and_return(false)
- end
-
- it 'is not included' do
- expect(subject.to_hash).not_to have_key('CI_GITLAB_FIPS_MODE')
- end
- end
- end
-
- context 'when tag is not found' do
- let(:pipeline) do
- create(:ci_pipeline, project: project, ref: 'not_found_tag', tag: true)
- end
-
- it 'does not expose tag variables' do
- expect(subject.to_hash.keys)
- .not_to include(
- 'CI_COMMIT_TAG',
- 'CI_COMMIT_TAG_MESSAGE',
- 'CI_BUILD_TAG'
- )
- end
- end
-
- context 'without a commit' do
- let(:pipeline) { build(:ci_empty_pipeline, :created, sha: nil) }
-
- it 'does not expose commit variables' do
- expect(subject.to_hash.keys)
- .not_to include(
- 'CI_COMMIT_SHA',
- 'CI_COMMIT_SHORT_SHA',
- 'CI_COMMIT_BEFORE_SHA',
- 'CI_COMMIT_REF_NAME',
- 'CI_COMMIT_REF_SLUG',
- 'CI_COMMIT_BRANCH',
- 'CI_COMMIT_TAG',
- 'CI_COMMIT_MESSAGE',
- 'CI_COMMIT_TITLE',
- 'CI_COMMIT_DESCRIPTION',
- 'CI_COMMIT_REF_PROTECTED',
- 'CI_COMMIT_TIMESTAMP',
- 'CI_COMMIT_AUTHOR')
- end
- end
- end
-
describe '#protected_ref?' do
let(:pipeline) { build(:ci_empty_pipeline, :created) }
@@ -5664,6 +5374,34 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
+ describe '#merge_request_diff' do
+ context 'when the pipeline has no merge request' do
+ it 'is nil' do
+ pipeline = build(:ci_empty_pipeline)
+
+ expect(pipeline.merge_request_diff).to be_nil
+ end
+ end
+
+ context 'when the pipeline has a merge request' do
+ context 'when the pipeline is a merged result pipeline' do
+ it 'returns the diff for the source sha' do
+ pipeline = create(:ci_pipeline, :merged_result_pipeline)
+
+ expect(pipeline.merge_request_diff.head_commit_sha).to eq(pipeline.source_sha)
+ end
+ end
+
+ context 'when the pipeline is not a merged result pipeline' do
+ it 'returns the diff for the pipeline sha' do
+ pipeline = create(:ci_pipeline, merge_request: create(:merge_request))
+
+ expect(pipeline.merge_request_diff.head_commit_sha).to eq(pipeline.sha)
+ end
+ end
+ end
+ end
+
describe 'partitioning' do
let(:pipeline) { build(:ci_pipeline, partition_id: nil) }
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index d15bc9d2dd5..c11381fe448 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :runner do
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :continuous_integration do
include StubGitlabCalls
include RedisHelpers
include WorkhorseHelpers
@@ -119,6 +119,32 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end
+ context 'when system_id parameter is specified' do
+ subject(:request) { request_job(**args) }
+
+ context 'when ci_runner_machines with same machine_xid does not exist' do
+ let(:args) { { system_id: 's_some_system_id' } }
+
+ it 'creates respective ci_runner_machines record', :freeze_time do
+ expect { request }.to change { runner.runner_machines.reload.count }.from(0).to(1)
+
+ machine = runner.runner_machines.last
+ expect(machine.machine_xid).to eq args[:system_id]
+ expect(machine.runner).to eq runner
+ expect(machine.contacted_at).to eq Time.current
+ end
+ end
+
+ context 'when ci_runner_machines with same machine_xid already exists' do
+ let(:args) { { system_id: 's_existing_system_id' } }
+ let!(:runner_machine) { create(:ci_runner_machine, runner: runner, machine_xid: args[:system_id]) }
+
+ it 'does not create new ci_runner_machines record' do
+ expect { request }.not_to change { Ci::RunnerMachine.count }
+ end
+ end
+ end
+
context 'when jobs are finished' do
before do
job.success
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb
index 4c0635c8640..9161304cfc3 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_schedule_play_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'PipelineSchedulePlay', feature_category: :continuous_integration
it do
expect(RunPipelineScheduleWorker)
.to receive(:perform_async)
- .with(pipeline_schedule.id, user.id, next_run_scheduled: true).and_return(nil)
+ .with(pipeline_schedule.id, user.id).and_return(nil)
post_graphql_mutation(mutation, current_user: user)
@@ -102,7 +102,7 @@ RSpec.describe 'PipelineSchedulePlay', feature_category: :continuous_integration
it do
expect(RunPipelineScheduleWorker)
.to receive(:perform_async)
- .with(pipeline_schedule.id, user.id, next_run_scheduled: true).and_return(nil)
+ .with(pipeline_schedule.id, user.id).and_return(nil)
post_graphql_mutation(mutation, current_user: user)
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 3fdb737ea5c..66cca8fbe56 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -90,7 +90,6 @@ ci_cd_settings:
unexposed_attributes:
- id
- project_id
- - group_runners_enabled
- merge_trains_enabled
- merge_pipelines_enabled
- auto_rollback_enabled
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 1c8aec878ea..b86aa04439e 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -2442,6 +2442,7 @@ RSpec.describe API::Projects, feature_category: :projects do
expect(json_response['created_at']).to be_present
expect(json_response['last_activity_at']).to be_present
expect(json_response['shared_runners_enabled']).to be_present
+ expect(json_response['group_runners_enabled']).to be_present
expect(json_response['creator_id']).to be_present
expect(json_response['namespace']).to be_present
expect(json_response['avatar_url']).to be_nil
@@ -2546,6 +2547,7 @@ RSpec.describe API::Projects, feature_category: :projects do
expect(json_response['created_at']).to be_present
expect(json_response['last_activity_at']).to be_present
expect(json_response['shared_runners_enabled']).to be_present
+ expect(json_response['group_runners_enabled']).to be_present
expect(json_response['creator_id']).to be_present
expect(json_response['namespace']).to be_present
expect(json_response['import_status']).to be_present
diff --git a/spec/serializers/integrations/field_entity_spec.rb b/spec/serializers/integrations/field_entity_spec.rb
index 4212a1ee6a2..1ca1545c11a 100644
--- a/spec/serializers/integrations/field_entity_spec.rb
+++ b/spec/serializers/integrations/field_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::FieldEntity do
+RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
let(:request) { EntityRequest.new(integration: integration) }
subject { described_class.new(field, request: request, integration: integration).as_json }
@@ -23,9 +23,9 @@ RSpec.describe Integrations::FieldEntity do
section: 'connection',
type: 'text',
name: 'username',
- title: 'Username or Email',
+ title: 'Username or email',
placeholder: nil,
- help: 'Use a username for server version and an email for cloud version.',
+ help: 'Username for the server version or an email for the cloud version',
required: true,
choices: nil,
value: 'jira_username',
diff --git a/spec/services/ci/pipeline_schedule_service_spec.rb b/spec/services/ci/pipeline_schedule_service_spec.rb
index 4f20a4389ca..8896d8ace30 100644
--- a/spec/services/ci/pipeline_schedule_service_spec.rb
+++ b/spec/services/ci/pipeline_schedule_service_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Ci::PipelineScheduleService, feature_category: :continuous_integr
it 'runs RunPipelineScheduleWorker' do
expect(RunPipelineScheduleWorker)
- .to receive(:perform_async).with(schedule.id, schedule.owner.id, next_run_scheduled: true)
+ .to receive(:perform_async).with(schedule.id, schedule.owner.id)
subject
end
@@ -43,7 +43,7 @@ RSpec.describe Ci::PipelineScheduleService, feature_category: :continuous_integr
it 'does not run RunPipelineScheduleWorker' do
expect(RunPipelineScheduleWorker)
- .not_to receive(:perform_async).with(schedule.id, schedule.owner.id, next_run_scheduled: true)
+ .not_to receive(:perform_async).with(schedule.id, schedule.owner.id)
subject
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index f834bc2674b..9183df359b4 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -3,795 +3,830 @@
require 'spec_helper'
module Ci
- RSpec.describe RegisterJobService do
+ RSpec.describe RegisterJobService, feature_category: :continuous_integration do
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:project) { create(:project, group: group, shared_runners_enabled: false, group_runners_enabled: false) }
let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:shared_runner) { create(:ci_runner, :instance) }
+ let_it_be(:shared_runner) { create(:ci_runner, :instance) }
let!(:project_runner) { create(:ci_runner, :project, projects: [project]) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
describe '#execute' do
- subject { described_class.new(shared_runner).execute }
+ subject(:execute) { described_class.new(runner, runner_machine).execute }
+
+ context 'with runner_machine specified' do
+ let(:runner) { project_runner }
+ let!(:runner_machine) { create(:ci_runner_machine, runner: project_runner) }
- context 'checks database loadbalancing stickiness' do
before do
- project.update!(shared_runners_enabled: false)
+ pending_job.update!(tag_list: ["linux"])
+ pending_job.reload
+ pending_job.create_queuing_entry!
+ project_runner.update!(tag_list: ["linux"])
end
- it 'result is valid if replica did caught-up', :aggregate_failures do
- expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
- .with(:runner, shared_runner.id) { true }
+ it 'sets runner_machine on job' do
+ expect { execute }.to change { pending_job.reload.runner_machine }.from(nil).to(runner_machine)
- expect(subject).to be_valid
- expect(subject.build).to be_nil
- expect(subject.build_json).to be_nil
+ expect(execute.build).to eq(pending_job)
end
+ end
- it 'result is invalid if replica did not caught-up', :aggregate_failures do
- expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
- .with(:runner, shared_runner.id) { false }
+ context 'with no runner machine' do
+ let(:runner_machine) { nil }
- expect(subject).not_to be_valid
- expect(subject.build).to be_nil
- expect(subject.build_json).to be_nil
- end
- end
+ context 'checks database loadbalancing stickiness' do
+ let(:runner) { shared_runner }
- shared_examples 'handles runner assignment' do
- context 'runner follow tag list' do
- it "picks build with the same tag" do
- pending_job.update!(tag_list: ["linux"])
- pending_job.reload
- pending_job.create_queuing_entry!
- project_runner.update!(tag_list: ["linux"])
- expect(execute(project_runner)).to eq(pending_job)
+ before do
+ project.update!(shared_runners_enabled: false)
end
- it "does not pick build with different tag" do
- pending_job.update!(tag_list: ["linux"])
- pending_job.reload
- pending_job.create_queuing_entry!
- project_runner.update!(tag_list: ["win32"])
- expect(execute(project_runner)).to be_falsey
- end
+ it 'result is valid if replica did caught-up', :aggregate_failures do
+ expect(ApplicationRecord.sticking).to receive(:all_caught_up?).with(:runner, runner.id) { true }
- it "picks build without tag" do
- expect(execute(project_runner)).to eq(pending_job)
+ expect(execute).to be_valid
+ expect(execute.build).to be_nil
+ expect(execute.build_json).to be_nil
end
- it "does not pick build with tag" do
- pending_job.update!(tag_list: ["linux"])
- pending_job.reload
- pending_job.create_queuing_entry!
- expect(execute(project_runner)).to be_falsey
- end
+ it 'result is invalid if replica did not caught-up', :aggregate_failures do
+ expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
+ .with(:runner, shared_runner.id) { false }
- it "pick build without tag" do
- project_runner.update!(tag_list: ["win32"])
- expect(execute(project_runner)).to eq(pending_job)
+ expect(subject).not_to be_valid
+ expect(subject.build).to be_nil
+ expect(subject.build_json).to be_nil
end
end
- context 'deleted projects' do
- before do
- project.update!(pending_delete: true)
- end
+ shared_examples 'handles runner assignment' do
+ context 'runner follow tag list' do
+ it "picks build with the same tag" do
+ pending_job.update!(tag_list: ["linux"])
+ pending_job.reload
+ pending_job.create_queuing_entry!
+ project_runner.update!(tag_list: ["linux"])
+ expect(build_on(project_runner)).to eq(pending_job)
+ end
- context 'for shared runners' do
- before do
- project.update!(shared_runners_enabled: true)
+ it "does not pick build with different tag" do
+ pending_job.update!(tag_list: ["linux"])
+ pending_job.reload
+ pending_job.create_queuing_entry!
+ project_runner.update!(tag_list: ["win32"])
+ expect(build_on(project_runner)).to be_falsey
end
- it 'does not pick a build' do
- expect(execute(shared_runner)).to be_nil
+ it "picks build without tag" do
+ expect(build_on(project_runner)).to eq(pending_job)
end
- end
- context 'for project runner' do
- it 'does not pick a build' do
- expect(execute(project_runner)).to be_nil
- expect(pending_job.reload).to be_failed
- expect(pending_job.queuing_entry).to be_nil
+ it "does not pick build with tag" do
+ pending_job.update!(tag_list: ["linux"])
+ pending_job.reload
+ pending_job.create_queuing_entry!
+ expect(build_on(project_runner)).to be_falsey
end
- end
- end
- context 'allow shared runners' do
- before do
- project.update!(shared_runners_enabled: true)
- pipeline.reload
- pending_job.reload
- pending_job.create_queuing_entry!
+ it "pick build without tag" do
+ project_runner.update!(tag_list: ["win32"])
+ expect(build_on(project_runner)).to eq(pending_job)
+ end
end
- context 'when build owner has been blocked' do
- let(:user) { create(:user, :blocked) }
-
+ context 'deleted projects' do
before do
- pending_job.update!(user: user)
+ project.update!(pending_delete: true)
end
- it 'does not pick the build and drops the build' do
- expect(execute(shared_runner)).to be_falsey
+ context 'for shared runners' do
+ before do
+ project.update!(shared_runners_enabled: true)
+ end
- expect(pending_job.reload).to be_user_blocked
+ it 'does not pick a build' do
+ expect(build_on(shared_runner)).to be_nil
+ end
+ end
+
+ context 'for project runner' do
+ it 'does not pick a build' do
+ expect(build_on(project_runner)).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job.queuing_entry).to be_nil
+ end
end
end
- context 'for multiple builds' do
- let!(:project2) { create :project, shared_runners_enabled: true }
- let!(:pipeline2) { create :ci_pipeline, project: project2 }
- let!(:project3) { create :project, shared_runners_enabled: true }
- let!(:pipeline3) { create :ci_pipeline, project: project3 }
- let!(:build1_project1) { pending_job }
- let!(:build2_project1) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
- let!(:build3_project1) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
- let!(:build1_project2) { create(:ci_build, :pending, :queued, pipeline: pipeline2) }
- let!(:build2_project2) { create(:ci_build, :pending, :queued, pipeline: pipeline2) }
- let!(:build1_project3) { create(:ci_build, :pending, :queued, pipeline: pipeline3) }
-
- it 'picks builds one-by-one' do
- expect(Ci::Build).to receive(:find).with(pending_job.id).and_call_original
-
- expect(execute(shared_runner)).to eq(build1_project1)
- end
-
- context 'when using fair scheduling' do
- context 'when all builds are pending' do
- it 'prefers projects without builds first' do
- # it gets for one build from each of the projects
- expect(execute(shared_runner)).to eq(build1_project1)
- expect(execute(shared_runner)).to eq(build1_project2)
- expect(execute(shared_runner)).to eq(build1_project3)
-
- # then it gets a second build from each of the projects
- expect(execute(shared_runner)).to eq(build2_project1)
- expect(execute(shared_runner)).to eq(build2_project2)
-
- # in the end the third build
- expect(execute(shared_runner)).to eq(build3_project1)
- end
+ context 'allow shared runners' do
+ before do
+ project.update!(shared_runners_enabled: true)
+ pipeline.reload
+ pending_job.reload
+ pending_job.create_queuing_entry!
+ end
+
+ context 'when build owner has been blocked' do
+ let(:user) { create(:user, :blocked) }
+
+ before do
+ pending_job.update!(user: user)
end
- context 'when some builds transition to success' do
- it 'equalises number of running builds' do
- # after finishing the first build for project 1, get a second build from the same project
- expect(execute(shared_runner)).to eq(build1_project1)
- build1_project1.reload.success
- expect(execute(shared_runner)).to eq(build2_project1)
+ it 'does not pick the build and drops the build' do
+ expect(build_on(shared_runner)).to be_falsey
- expect(execute(shared_runner)).to eq(build1_project2)
- build1_project2.reload.success
- expect(execute(shared_runner)).to eq(build2_project2)
- expect(execute(shared_runner)).to eq(build1_project3)
- expect(execute(shared_runner)).to eq(build3_project1)
- end
+ expect(pending_job.reload).to be_user_blocked
end
end
- context 'when using DEFCON mode that disables fair scheduling' do
- before do
- stub_feature_flags(ci_queueing_disaster_recovery_disable_fair_scheduling: true)
- end
-
- context 'when all builds are pending' do
- it 'returns builds in order of creation (FIFO)' do
- # it gets for one build from each of the projects
- expect(execute(shared_runner)).to eq(build1_project1)
- expect(execute(shared_runner)).to eq(build2_project1)
- expect(execute(shared_runner)).to eq(build3_project1)
- expect(execute(shared_runner)).to eq(build1_project2)
- expect(execute(shared_runner)).to eq(build2_project2)
- expect(execute(shared_runner)).to eq(build1_project3)
+ context 'for multiple builds' do
+ let!(:project2) { create :project, shared_runners_enabled: true }
+ let!(:pipeline2) { create :ci_pipeline, project: project2 }
+ let!(:project3) { create :project, shared_runners_enabled: true }
+ let!(:pipeline3) { create :ci_pipeline, project: project3 }
+ let!(:build1_project1) { pending_job }
+ let!(:build2_project1) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ let!(:build3_project1) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ let!(:build1_project2) { create(:ci_build, :pending, :queued, pipeline: pipeline2) }
+ let!(:build2_project2) { create(:ci_build, :pending, :queued, pipeline: pipeline2) }
+ let!(:build1_project3) { create(:ci_build, :pending, :queued, pipeline: pipeline3) }
+
+ it 'picks builds one-by-one' do
+ expect(Ci::Build).to receive(:find).with(pending_job.id).and_call_original
+
+ expect(build_on(shared_runner)).to eq(build1_project1)
+ end
+
+ context 'when using fair scheduling' do
+ context 'when all builds are pending' do
+ it 'prefers projects without builds first' do
+ # it gets for one build from each of the projects
+ expect(build_on(shared_runner)).to eq(build1_project1)
+ expect(build_on(shared_runner)).to eq(build1_project2)
+ expect(build_on(shared_runner)).to eq(build1_project3)
+
+ # then it gets a second build from each of the projects
+ expect(build_on(shared_runner)).to eq(build2_project1)
+ expect(build_on(shared_runner)).to eq(build2_project2)
+
+ # in the end the third build
+ expect(build_on(shared_runner)).to eq(build3_project1)
+ end
+ end
+
+ context 'when some builds transition to success' do
+ it 'equalises number of running builds' do
+ # after finishing the first build for project 1, get a second build from the same project
+ expect(build_on(shared_runner)).to eq(build1_project1)
+ build1_project1.reload.success
+ expect(build_on(shared_runner)).to eq(build2_project1)
+
+ expect(build_on(shared_runner)).to eq(build1_project2)
+ build1_project2.reload.success
+ expect(build_on(shared_runner)).to eq(build2_project2)
+ expect(build_on(shared_runner)).to eq(build1_project3)
+ expect(build_on(shared_runner)).to eq(build3_project1)
+ end
end
end
- context 'when some builds transition to success' do
- it 'returns builds in order of creation (FIFO)' do
- expect(execute(shared_runner)).to eq(build1_project1)
- build1_project1.reload.success
- expect(execute(shared_runner)).to eq(build2_project1)
+ context 'when using DEFCON mode that disables fair scheduling' do
+ before do
+ stub_feature_flags(ci_queueing_disaster_recovery_disable_fair_scheduling: true)
+ end
+
+ context 'when all builds are pending' do
+ it 'returns builds in order of creation (FIFO)' do
+ # it gets for one build from each of the projects
+ expect(build_on(shared_runner)).to eq(build1_project1)
+ expect(build_on(shared_runner)).to eq(build2_project1)
+ expect(build_on(shared_runner)).to eq(build3_project1)
+ expect(build_on(shared_runner)).to eq(build1_project2)
+ expect(build_on(shared_runner)).to eq(build2_project2)
+ expect(build_on(shared_runner)).to eq(build1_project3)
+ end
+ end
- expect(execute(shared_runner)).to eq(build3_project1)
- build2_project1.reload.success
- expect(execute(shared_runner)).to eq(build1_project2)
- expect(execute(shared_runner)).to eq(build2_project2)
- expect(execute(shared_runner)).to eq(build1_project3)
+ context 'when some builds transition to success' do
+ it 'returns builds in order of creation (FIFO)' do
+ expect(build_on(shared_runner)).to eq(build1_project1)
+ build1_project1.reload.success
+ expect(build_on(shared_runner)).to eq(build2_project1)
+
+ expect(build_on(shared_runner)).to eq(build3_project1)
+ build2_project1.reload.success
+ expect(build_on(shared_runner)).to eq(build1_project2)
+ expect(build_on(shared_runner)).to eq(build2_project2)
+ expect(build_on(shared_runner)).to eq(build1_project3)
+ end
end
end
end
- end
- context 'shared runner' do
- let(:response) { described_class.new(shared_runner).execute }
- let(:build) { response.build }
+ context 'shared runner' do
+ let(:response) { described_class.new(shared_runner, nil).execute }
+ let(:build) { response.build }
- it { expect(build).to be_kind_of(Build) }
- it { expect(build).to be_valid }
- it { expect(build).to be_running }
- it { expect(build.runner).to eq(shared_runner) }
- it { expect(Gitlab::Json.parse(response.build_json)['id']).to eq(build.id) }
- end
+ it { expect(build).to be_kind_of(Build) }
+ it { expect(build).to be_valid }
+ it { expect(build).to be_running }
+ it { expect(build.runner).to eq(shared_runner) }
+ it { expect(Gitlab::Json.parse(response.build_json)['id']).to eq(build.id) }
+ end
- context 'project runner' do
- let(:build) { execute(project_runner) }
+ context 'project runner' do
+ let(:build) { build_on(project_runner) }
- it { expect(build).to be_kind_of(Build) }
- it { expect(build).to be_valid }
- it { expect(build).to be_running }
- it { expect(build.runner).to eq(project_runner) }
+ it { expect(build).to be_kind_of(Build) }
+ it { expect(build).to be_valid }
+ it { expect(build).to be_running }
+ it { expect(build.runner).to eq(project_runner) }
+ end
end
- end
- context 'disallow shared runners' do
- before do
- project.update!(shared_runners_enabled: false)
- end
+ context 'disallow shared runners' do
+ before do
+ project.update!(shared_runners_enabled: false)
+ end
- context 'shared runner' do
- let(:build) { execute(shared_runner) }
+ context 'shared runner' do
+ let(:build) { build_on(shared_runner) }
- it { expect(build).to be_nil }
- end
+ it { expect(build).to be_nil }
+ end
- context 'project runner' do
- let(:build) { execute(project_runner) }
+ context 'project runner' do
+ let(:build) { build_on(project_runner) }
- it { expect(build).to be_kind_of(Build) }
- it { expect(build).to be_valid }
- it { expect(build).to be_running }
- it { expect(build.runner).to eq(project_runner) }
+ it { expect(build).to be_kind_of(Build) }
+ it { expect(build).to be_valid }
+ it { expect(build).to be_running }
+ it { expect(build.runner).to eq(project_runner) }
+ end
end
- end
- context 'disallow when builds are disabled' do
- before do
- project.update!(shared_runners_enabled: true, group_runners_enabled: true)
- project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
+ context 'disallow when builds are disabled' do
+ before do
+ project.update!(shared_runners_enabled: true, group_runners_enabled: true)
+ project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
- pending_job.reload.create_queuing_entry!
- end
+ pending_job.reload.create_queuing_entry!
+ end
- context 'and uses shared runner' do
- let(:build) { execute(shared_runner) }
+ context 'and uses shared runner' do
+ let(:build) { build_on(shared_runner) }
- it { expect(build).to be_nil }
- end
+ it { expect(build).to be_nil }
+ end
- context 'and uses group runner' do
- let(:build) { execute(group_runner) }
+ context 'and uses group runner' do
+ let(:build) { build_on(group_runner) }
- it { expect(build).to be_nil }
- end
+ it { expect(build).to be_nil }
+ end
- context 'and uses project runner' do
- let(:build) { execute(project_runner) }
+ context 'and uses project runner' do
+ let(:build) { build_on(project_runner) }
- it 'does not pick a build' do
- expect(build).to be_nil
- expect(pending_job.reload).to be_failed
- expect(pending_job.queuing_entry).to be_nil
+ it 'does not pick a build' do
+ expect(build).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job.queuing_entry).to be_nil
+ end
end
end
- end
- context 'allow group runners' do
- before do
- project.update!(group_runners_enabled: true)
- end
+ context 'allow group runners' do
+ before do
+ project.update!(group_runners_enabled: true)
+ end
- context 'for multiple builds' do
- let!(:project2) { create(:project, group_runners_enabled: true, group: group) }
- let!(:pipeline2) { create(:ci_pipeline, project: project2) }
- let!(:project3) { create(:project, group_runners_enabled: true, group: group) }
- let!(:pipeline3) { create(:ci_pipeline, project: project3) }
+ context 'for multiple builds' do
+ let!(:project2) { create(:project, group_runners_enabled: true, group: group) }
+ let!(:pipeline2) { create(:ci_pipeline, project: project2) }
+ let!(:project3) { create(:project, group_runners_enabled: true, group: group) }
+ let!(:pipeline3) { create(:ci_pipeline, project: project3) }
- let!(:build1_project1) { pending_job }
- let!(:build2_project1) { create(:ci_build, :queued, pipeline: pipeline) }
- let!(:build3_project1) { create(:ci_build, :queued, pipeline: pipeline) }
- let!(:build1_project2) { create(:ci_build, :queued, pipeline: pipeline2) }
- let!(:build2_project2) { create(:ci_build, :queued, pipeline: pipeline2) }
- let!(:build1_project3) { create(:ci_build, :queued, pipeline: pipeline3) }
+ let!(:build1_project1) { pending_job }
+ let!(:build2_project1) { create(:ci_build, :queued, pipeline: pipeline) }
+ let!(:build3_project1) { create(:ci_build, :queued, pipeline: pipeline) }
+ let!(:build1_project2) { create(:ci_build, :queued, pipeline: pipeline2) }
+ let!(:build2_project2) { create(:ci_build, :queued, pipeline: pipeline2) }
+ let!(:build1_project3) { create(:ci_build, :queued, pipeline: pipeline3) }
- # these shouldn't influence the scheduling
- let!(:unrelated_group) { create(:group) }
- let!(:unrelated_project) { create(:project, group_runners_enabled: true, group: unrelated_group) }
- let!(:unrelated_pipeline) { create(:ci_pipeline, project: unrelated_project) }
- let!(:build1_unrelated_project) { create(:ci_build, :pending, :queued, pipeline: unrelated_pipeline) }
- let!(:unrelated_group_runner) { create(:ci_runner, :group, groups: [unrelated_group]) }
+ # these shouldn't influence the scheduling
+ let!(:unrelated_group) { create(:group) }
+ let!(:unrelated_project) { create(:project, group_runners_enabled: true, group: unrelated_group) }
+ let!(:unrelated_pipeline) { create(:ci_pipeline, project: unrelated_project) }
+ let!(:build1_unrelated_project) { create(:ci_build, :pending, :queued, pipeline: unrelated_pipeline) }
+ let!(:unrelated_group_runner) { create(:ci_runner, :group, groups: [unrelated_group]) }
- it 'does not consider builds from other group runners' do
- queue = ::Ci::Queue::BuildQueueService.new(group_runner)
+ it 'does not consider builds from other group runners' do
+ queue = ::Ci::Queue::BuildQueueService.new(group_runner)
- expect(queue.builds_for_group_runner.size).to eq 6
- execute(group_runner)
+ expect(queue.builds_for_group_runner.size).to eq 6
+ build_on(group_runner)
- expect(queue.builds_for_group_runner.size).to eq 5
- execute(group_runner)
+ expect(queue.builds_for_group_runner.size).to eq 5
+ build_on(group_runner)
- expect(queue.builds_for_group_runner.size).to eq 4
- execute(group_runner)
+ expect(queue.builds_for_group_runner.size).to eq 4
+ build_on(group_runner)
- expect(queue.builds_for_group_runner.size).to eq 3
- execute(group_runner)
+ expect(queue.builds_for_group_runner.size).to eq 3
+ build_on(group_runner)
- expect(queue.builds_for_group_runner.size).to eq 2
- execute(group_runner)
+ expect(queue.builds_for_group_runner.size).to eq 2
+ build_on(group_runner)
- expect(queue.builds_for_group_runner.size).to eq 1
- execute(group_runner)
+ expect(queue.builds_for_group_runner.size).to eq 1
+ build_on(group_runner)
- expect(queue.builds_for_group_runner.size).to eq 0
- expect(execute(group_runner)).to be_nil
+ expect(queue.builds_for_group_runner.size).to eq 0
+ expect(build_on(group_runner)).to be_nil
+ end
end
- end
- context 'group runner' do
- let(:build) { execute(group_runner) }
+ context 'group runner' do
+ let(:build) { build_on(group_runner) }
- it { expect(build).to be_kind_of(Build) }
- it { expect(build).to be_valid }
- it { expect(build).to be_running }
- it { expect(build.runner).to eq(group_runner) }
+ it { expect(build).to be_kind_of(Build) }
+ it { expect(build).to be_valid }
+ it { expect(build).to be_running }
+ it { expect(build.runner).to eq(group_runner) }
+ end
end
- end
- context 'disallow group runners' do
- before do
- project.update!(group_runners_enabled: false)
+ context 'disallow group runners' do
+ before do
+ project.update!(group_runners_enabled: false)
- pending_job.reload.create_queuing_entry!
- end
+ pending_job.reload.create_queuing_entry!
+ end
- context 'group runner' do
- let(:build) { execute(group_runner) }
+ context 'group runner' do
+ let(:build) { build_on(group_runner) }
- it { expect(build).to be_nil }
+ it { expect(build).to be_nil }
+ end
end
- end
- context 'when first build is stalled' do
- before do
- allow_any_instance_of(Ci::RegisterJobService).to receive(:assign_runner!).and_call_original
- allow_any_instance_of(Ci::RegisterJobService).to receive(:assign_runner!)
- .with(pending_job, anything).and_raise(ActiveRecord::StaleObjectError)
- end
+ context 'when first build is stalled' do
+ before do
+ allow_any_instance_of(Ci::RegisterJobService).to receive(:assign_runner!).and_call_original
+ allow_any_instance_of(Ci::RegisterJobService).to receive(:assign_runner!)
+ .with(pending_job, anything).and_raise(ActiveRecord::StaleObjectError)
+ end
- subject { described_class.new(project_runner).execute }
+ subject { described_class.new(project_runner, nil).execute }
- context 'with multiple builds are in queue' do
- let!(:other_build) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ context 'with multiple builds are in queue' do
+ let!(:other_build) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
- before do
- allow_any_instance_of(::Ci::Queue::BuildQueueService)
- .to receive(:execute)
- .and_return(Ci::Build.where(id: [pending_job, other_build]).pluck(:id))
- end
+ before do
+ allow_any_instance_of(::Ci::Queue::BuildQueueService)
+ .to receive(:execute)
+ .and_return(Ci::Build.where(id: [pending_job, other_build]).pluck(:id))
+ end
- it "receives second build from the queue" do
- expect(subject).to be_valid
- expect(subject.build).to eq(other_build)
+ it "receives second build from the queue" do
+ expect(subject).to be_valid
+ expect(subject.build).to eq(other_build)
+ end
end
- end
- context 'when single build is in queue' do
- before do
- allow_any_instance_of(::Ci::Queue::BuildQueueService)
- .to receive(:execute)
- .and_return(Ci::Build.where(id: pending_job).pluck(:id))
- end
+ context 'when single build is in queue' do
+ before do
+ allow_any_instance_of(::Ci::Queue::BuildQueueService)
+ .to receive(:execute)
+ .and_return(Ci::Build.where(id: pending_job).pluck(:id))
+ end
- it "does not receive any valid result" do
- expect(subject).not_to be_valid
+ it "does not receive any valid result" do
+ expect(subject).not_to be_valid
+ end
end
- end
- context 'when there is no build in queue' do
- before do
- allow_any_instance_of(::Ci::Queue::BuildQueueService)
- .to receive(:execute)
- .and_return([])
- end
+ context 'when there is no build in queue' do
+ before do
+ allow_any_instance_of(::Ci::Queue::BuildQueueService)
+ .to receive(:execute)
+ .and_return([])
+ end
- it "does not receive builds but result is valid" do
- expect(subject).to be_valid
- expect(subject.build).to be_nil
+ it "does not receive builds but result is valid" do
+ expect(subject).to be_valid
+ expect(subject.build).to be_nil
+ end
end
end
- end
- context 'when access_level of runner is not_protected' do
- let!(:project_runner) { create(:ci_runner, :project, projects: [project]) }
+ context 'when access_level of runner is not_protected' do
+ let!(:project_runner) { create(:ci_runner, :project, projects: [project]) }
- context 'when a job is protected' do
- let!(:pending_job) { create(:ci_build, :pending, :queued, :protected, pipeline: pipeline) }
+ context 'when a job is protected' do
+ let!(:pending_job) { create(:ci_build, :pending, :queued, :protected, pipeline: pipeline) }
- it 'picks the job' do
- expect(execute(project_runner)).to eq(pending_job)
+ it 'picks the job' do
+ expect(build_on(project_runner)).to eq(pending_job)
+ end
end
- end
- context 'when a job is unprotected' do
- let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ context 'when a job is unprotected' do
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
- it 'picks the job' do
- expect(execute(project_runner)).to eq(pending_job)
+ it 'picks the job' do
+ expect(build_on(project_runner)).to eq(pending_job)
+ end
end
- end
- context 'when protected attribute of a job is nil' do
- let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ context 'when protected attribute of a job is nil' do
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
- before do
- pending_job.update_attribute(:protected, nil)
- end
+ before do
+ pending_job.update_attribute(:protected, nil)
+ end
- it 'picks the job' do
- expect(execute(project_runner)).to eq(pending_job)
+ it 'picks the job' do
+ expect(build_on(project_runner)).to eq(pending_job)
+ end
end
end
- end
- context 'when access_level of runner is ref_protected' do
- let!(:project_runner) { create(:ci_runner, :project, :ref_protected, projects: [project]) }
+ context 'when access_level of runner is ref_protected' do
+ let!(:project_runner) { create(:ci_runner, :project, :ref_protected, projects: [project]) }
- context 'when a job is protected' do
- let!(:pending_job) { create(:ci_build, :pending, :queued, :protected, pipeline: pipeline) }
+ context 'when a job is protected' do
+ let!(:pending_job) { create(:ci_build, :pending, :queued, :protected, pipeline: pipeline) }
- it 'picks the job' do
- expect(execute(project_runner)).to eq(pending_job)
+ it 'picks the job' do
+ expect(build_on(project_runner)).to eq(pending_job)
+ end
end
- end
- context 'when a job is unprotected' do
- let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ context 'when a job is unprotected' do
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
- it 'does not pick the job' do
- expect(execute(project_runner)).to be_nil
+ it 'does not pick the job' do
+ expect(build_on(project_runner)).to be_nil
+ end
end
- end
- context 'when protected attribute of a job is nil' do
- let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ context 'when protected attribute of a job is nil' do
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
- before do
- pending_job.update_attribute(:protected, nil)
- end
+ before do
+ pending_job.update_attribute(:protected, nil)
+ end
- it 'does not pick the job' do
- expect(execute(project_runner)).to be_nil
+ it 'does not pick the job' do
+ expect(build_on(project_runner)).to be_nil
+ end
end
end
- end
- context 'runner feature set is verified' do
- let(:options) { { artifacts: { reports: { junit: "junit.xml" } } } }
- let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline, options: options) }
+ context 'runner feature set is verified' do
+ let(:options) { { artifacts: { reports: { junit: "junit.xml" } } } }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline, options: options) }
- subject { execute(project_runner, params) }
+ subject { build_on(project_runner, params: params) }
- context 'when feature is missing by runner' do
- let(:params) { {} }
+ context 'when feature is missing by runner' do
+ let(:params) { {} }
- it 'does not pick the build and drops the build' do
- expect(subject).to be_nil
- expect(pending_job.reload).to be_failed
- expect(pending_job).to be_runner_unsupported
+ it 'does not pick the build and drops the build' do
+ expect(subject).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job).to be_runner_unsupported
+ end
end
- end
- context 'when feature is supported by runner' do
- let(:params) do
- { info: { features: { upload_multiple_artifacts: true } } }
- end
+ context 'when feature is supported by runner' do
+ let(:params) do
+ { info: { features: { upload_multiple_artifacts: true } } }
+ end
- it 'does pick job' do
- expect(subject).not_to be_nil
+ it 'does pick job' do
+ expect(subject).not_to be_nil
+ end
end
end
- end
-
- context 'when "dependencies" keyword is specified' do
- let!(:pre_stage_job) do
- create(:ci_build, :success, :artifacts, pipeline: pipeline, name: 'test', stage_idx: 0)
- end
- let!(:pending_job) do
- create(:ci_build, :pending, :queued,
- pipeline: pipeline, stage_idx: 1,
- options: { script: ["bash"], dependencies: dependencies })
- end
+ context 'when "dependencies" keyword is specified' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :success, :artifacts, pipeline: pipeline, name: 'test', stage_idx: 0)
+ end
- let(:dependencies) { %w[test] }
+ let!(:pending_job) do
+ create(:ci_build, :pending, :queued,
+ pipeline: pipeline, stage_idx: 1,
+ options: { script: ["bash"], dependencies: dependencies })
+ end
- subject { execute(project_runner) }
+ let(:dependencies) { %w[test] }
- it 'picks a build with a dependency' do
- picked_build = execute(project_runner)
+ subject { build_on(project_runner) }
- expect(picked_build).to be_present
- end
+ it 'picks a build with a dependency' do
+ picked_build = build_on(project_runner)
- context 'when there are multiple dependencies with artifacts' do
- let!(:pre_stage_job_second) do
- create(:ci_build, :success, :artifacts, pipeline: pipeline, name: 'deploy', stage_idx: 0)
+ expect(picked_build).to be_present
end
- let(:dependencies) { %w[test deploy] }
-
- it 'logs build artifacts size' do
- execute(project_runner)
-
- artifacts_size = [pre_stage_job, pre_stage_job_second].sum do |job|
- job.job_artifacts_archive.size
+ context 'when there are multiple dependencies with artifacts' do
+ let!(:pre_stage_job_second) do
+ create(:ci_build, :success, :artifacts, pipeline: pipeline, name: 'deploy', stage_idx: 0)
end
- expect(artifacts_size).to eq 107464 * 2
- expect(Gitlab::ApplicationContext.current).to include({
- 'meta.artifacts_dependencies_size' => artifacts_size,
- 'meta.artifacts_dependencies_count' => 2
- })
- end
- end
+ let(:dependencies) { %w[test deploy] }
- shared_examples 'not pick' do
- it 'does not pick the build and drops the build' do
- expect(subject).to be_nil
- expect(pending_job.reload).to be_failed
- expect(pending_job).to be_missing_dependency_failure
- end
- end
+ it 'logs build artifacts size' do
+ build_on(project_runner)
- shared_examples 'validation is active' do
- context 'when depended job has not been completed yet' do
- let!(:pre_stage_job) { create(:ci_build, :pending, :queued, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ artifacts_size = [pre_stage_job, pre_stage_job_second].sum do |job|
+ job.job_artifacts_archive.size
+ end
- it { is_expected.to eq(pending_job) }
+ expect(artifacts_size).to eq 107464 * 2
+ expect(Gitlab::ApplicationContext.current).to include({
+ 'meta.artifacts_dependencies_size' => artifacts_size,
+ 'meta.artifacts_dependencies_count' => 2
+ })
+ end
end
- context 'when artifacts of depended job has been expired' do
- let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ shared_examples 'not pick' do
+ it 'does not pick the build and drops the build' do
+ expect(subject).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job).to be_missing_dependency_failure
+ end
+ end
- context 'when the pipeline is locked' do
- before do
- pipeline.artifacts_locked!
+ shared_examples 'validation is active' do
+ context 'when depended job has not been completed yet' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :pending, :queued, :manual, pipeline: pipeline, name: 'test', stage_idx: 0)
end
it { is_expected.to eq(pending_job) }
end
- context 'when the pipeline is unlocked' do
- before do
- pipeline.unlocked!
+ context 'when artifacts of depended job has been expired' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0)
end
- it_behaves_like 'not pick'
+ context 'when the pipeline is locked' do
+ before do
+ pipeline.artifacts_locked!
+ end
+
+ it { is_expected.to eq(pending_job) }
+ end
+
+ context 'when the pipeline is unlocked' do
+ before do
+ pipeline.unlocked!
+ end
+
+ it_behaves_like 'not pick'
+ end
end
- end
- context 'when artifacts of depended job has been erased' do
- let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+ context 'when artifacts of depended job has been erased' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago)
+ end
- it_behaves_like 'not pick'
- end
+ it_behaves_like 'not pick'
+ end
- context 'when job object is staled' do
- let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ context 'when job object is staled' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0)
+ end
- before do
- pipeline.unlocked!
+ before do
+ pipeline.unlocked!
- allow_next_instance_of(Ci::Build) do |build|
- expect(build).to receive(:drop!)
- .and_raise(ActiveRecord::StaleObjectError.new(pending_job, :drop!))
+ allow_next_instance_of(Ci::Build) do |build|
+ expect(build).to receive(:drop!)
+ .and_raise(ActiveRecord::StaleObjectError.new(pending_job, :drop!))
+ end
end
- end
- it 'does not drop nor pick' do
- expect(subject).to be_nil
+ it 'does not drop nor pick' do
+ expect(subject).to be_nil
+ end
end
end
- end
- shared_examples 'validation is not active' do
- context 'when depended job has not been completed yet' do
- let!(:pre_stage_job) { create(:ci_build, :pending, :queued, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ shared_examples 'validation is not active' do
+ context 'when depended job has not been completed yet' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :pending, :queued, :manual, pipeline: pipeline, name: 'test', stage_idx: 0)
+ end
- it { expect(subject).to eq(pending_job) }
- end
+ it { expect(subject).to eq(pending_job) }
+ end
- context 'when artifacts of depended job has been expired' do
- let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ context 'when artifacts of depended job has been expired' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0)
+ end
- it { expect(subject).to eq(pending_job) }
- end
+ it { expect(subject).to eq(pending_job) }
+ end
- context 'when artifacts of depended job has been erased' do
- let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+ context 'when artifacts of depended job has been erased' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago)
+ end
- it { expect(subject).to eq(pending_job) }
+ it { expect(subject).to eq(pending_job) }
+ end
end
- end
- it_behaves_like 'validation is active'
- end
+ it_behaves_like 'validation is active'
+ end
- context 'when build is degenerated' do
- let!(:pending_job) { create(:ci_build, :pending, :queued, :degenerated, pipeline: pipeline) }
+ context 'when build is degenerated' do
+ let!(:pending_job) { create(:ci_build, :pending, :queued, :degenerated, pipeline: pipeline) }
- subject { execute(project_runner, {}) }
+ subject { build_on(project_runner) }
- it 'does not pick the build and drops the build' do
- expect(subject).to be_nil
+ it 'does not pick the build and drops the build' do
+ expect(subject).to be_nil
- pending_job.reload
- expect(pending_job).to be_failed
- expect(pending_job).to be_archived_failure
+ pending_job.reload
+ expect(pending_job).to be_failed
+ expect(pending_job).to be_archived_failure
+ end
end
- end
- context 'when build has data integrity problem' do
- let!(:pending_job) do
- create(:ci_build, :pending, :queued, pipeline: pipeline)
- end
+ context 'when build has data integrity problem' do
+ let!(:pending_job) do
+ create(:ci_build, :pending, :queued, pipeline: pipeline)
+ end
- before do
- pending_job.update_columns(options: "string")
- end
+ before do
+ pending_job.update_columns(options: "string")
+ end
- subject { execute(project_runner, {}) }
+ subject { build_on(project_runner) }
- it 'does drop the build and logs both failures' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(anything, a_hash_including(build_id: pending_job.id))
- .twice
- .and_call_original
+ it 'does drop the build and logs both failures' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: pending_job.id))
+ .twice
+ .and_call_original
- expect(subject).to be_nil
+ expect(subject).to be_nil
- pending_job.reload
- expect(pending_job).to be_failed
- expect(pending_job).to be_data_integrity_failure
+ pending_job.reload
+ expect(pending_job).to be_failed
+ expect(pending_job).to be_data_integrity_failure
+ end
end
- end
- context 'when build fails to be run!' do
- let!(:pending_job) do
- create(:ci_build, :pending, :queued, pipeline: pipeline)
- end
+ context 'when build fails to be run!' do
+ let!(:pending_job) do
+ create(:ci_build, :pending, :queued, pipeline: pipeline)
+ end
- before do
- expect_any_instance_of(Ci::Build).to receive(:run!)
- .and_raise(RuntimeError, 'scheduler error')
- end
+ before do
+ expect_any_instance_of(Ci::Build).to receive(:run!)
+ .and_raise(RuntimeError, 'scheduler error')
+ end
- subject { execute(project_runner, {}) }
+ subject { build_on(project_runner) }
- it 'does drop the build and logs failure' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(anything, a_hash_including(build_id: pending_job.id))
- .once
- .and_call_original
+ it 'does drop the build and logs failure' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: pending_job.id))
+ .once
+ .and_call_original
- expect(subject).to be_nil
+ expect(subject).to be_nil
- pending_job.reload
- expect(pending_job).to be_failed
- expect(pending_job).to be_scheduler_failure
+ pending_job.reload
+ expect(pending_job).to be_failed
+ expect(pending_job).to be_scheduler_failure
+ end
end
- end
- context 'when an exception is raised during a persistent ref creation' do
- before do
- allow_any_instance_of(Ci::PersistentRef).to receive(:exist?) { false }
- allow_any_instance_of(Ci::PersistentRef).to receive(:create_ref) { raise ArgumentError }
- end
+ context 'when an exception is raised during a persistent ref creation' do
+ before do
+ allow_any_instance_of(Ci::PersistentRef).to receive(:exist?) { false }
+ allow_any_instance_of(Ci::PersistentRef).to receive(:create_ref) { raise ArgumentError }
+ end
- subject { execute(project_runner, {}) }
+ subject { build_on(project_runner) }
- it 'picks the build' do
- expect(subject).to eq(pending_job)
+ it 'picks the build' do
+ expect(subject).to eq(pending_job)
- pending_job.reload
- expect(pending_job).to be_running
- end
- end
-
- context 'when only some builds can be matched by runner' do
- let!(:project_runner) { create(:ci_runner, :project, projects: [project], tag_list: %w[matching]) }
- let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline, tag_list: %w[matching]) }
-
- before do
- # create additional matching and non-matching jobs
- create_list(:ci_build, 2, :pending, :queued, pipeline: pipeline, tag_list: %w[matching])
- create(:ci_build, :pending, :queued, pipeline: pipeline, tag_list: %w[non-matching])
+ pending_job.reload
+ expect(pending_job).to be_running
+ end
end
- it 'observes queue size of only matching jobs' do
- # pending_job + 2 x matching ones
- expect(Gitlab::Ci::Queue::Metrics.queue_size_total).to receive(:observe)
- .with({ runner_type: project_runner.runner_type }, 3)
+ context 'when only some builds can be matched by runner' do
+ let!(:project_runner) { create(:ci_runner, :project, projects: [project], tag_list: %w[matching]) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline, tag_list: %w[matching]) }
- expect(execute(project_runner)).to eq(pending_job)
- end
+ before do
+ # create additional matching and non-matching jobs
+ create_list(:ci_build, 2, :pending, :queued, pipeline: pipeline, tag_list: %w[matching])
+ create(:ci_build, :pending, :queued, pipeline: pipeline, tag_list: %w[non-matching])
+ end
- it 'observes queue processing time by the runner type' do
- expect(Gitlab::Ci::Queue::Metrics.queue_iteration_duration_seconds)
- .to receive(:observe)
- .with({ runner_type: project_runner.runner_type }, anything)
+ it 'observes queue size of only matching jobs' do
+ # pending_job + 2 x matching ones
+ expect(Gitlab::Ci::Queue::Metrics.queue_size_total).to receive(:observe)
+ .with({ runner_type: project_runner.runner_type }, 3)
- expect(Gitlab::Ci::Queue::Metrics.queue_retrieval_duration_seconds)
- .to receive(:observe)
- .with({ runner_type: project_runner.runner_type }, anything)
+ expect(build_on(project_runner)).to eq(pending_job)
+ end
- expect(execute(project_runner)).to eq(pending_job)
- end
- end
+ it 'observes queue processing time by the runner type' do
+ expect(Gitlab::Ci::Queue::Metrics.queue_iteration_duration_seconds)
+ .to receive(:observe)
+ .with({ runner_type: project_runner.runner_type }, anything)
- context 'when ci_register_job_temporary_lock is enabled' do
- before do
- stub_feature_flags(ci_register_job_temporary_lock: true)
+ expect(Gitlab::Ci::Queue::Metrics.queue_retrieval_duration_seconds)
+ .to receive(:observe)
+ .with({ runner_type: project_runner.runner_type }, anything)
- allow(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
+ expect(build_on(project_runner)).to eq(pending_job)
+ end
end
- context 'when a build is temporarily locked' do
- let(:service) { described_class.new(project_runner) }
-
+ context 'when ci_register_job_temporary_lock is enabled' do
before do
- service.send(:acquire_temporary_lock, pending_job.id)
- end
-
- it 'skips this build and marks queue as invalid' do
- expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
- .with(operation: :queue_iteration)
- expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
- .with(operation: :build_temporary_locked)
+ stub_feature_flags(ci_register_job_temporary_lock: true)
- expect(service.execute).not_to be_valid
+ allow(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
end
- context 'when there is another build in queue' do
- let!(:next_pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ context 'when a build is temporarily locked' do
+ let(:service) { described_class.new(project_runner, nil) }
- it 'skips this build and picks another build' do
+ before do
+ service.send(:acquire_temporary_lock, pending_job.id)
+ end
+
+ it 'skips this build and marks queue as invalid' do
expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
- .with(operation: :queue_iteration).twice
+ .with(operation: :queue_iteration)
expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
.with(operation: :build_temporary_locked)
- result = service.execute
+ expect(service.execute).not_to be_valid
+ end
+
+ context 'when there is another build in queue' do
+ let!(:next_pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+
+ it 'skips this build and picks another build' do
+ expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
+ .with(operation: :queue_iteration).twice
+ expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
+ .with(operation: :build_temporary_locked)
- expect(result.build).to eq(next_pending_job)
- expect(result).to be_valid
+ result = service.execute
+
+ expect(result.build).to eq(next_pending_job)
+ expect(result).to be_valid
+ end
end
end
end
end
- end
-
- context 'when using pending builds table' do
- include_examples 'handles runner assignment'
- context 'when a conflicting data is stored in denormalized table' do
- let!(:project_runner) { create(:ci_runner, :project, projects: [project], tag_list: %w[conflict]) }
- let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline, tag_list: %w[conflict]) }
+ context 'when using pending builds table' do
+ include_examples 'handles runner assignment'
- before do
- pending_job.update_column(:status, :running)
- end
+ context 'when a conflicting data is stored in denormalized table' do
+ let!(:runner) { create(:ci_runner, :project, projects: [project], tag_list: %w[conflict]) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline, tag_list: %w[conflict]) }
- it 'removes queuing entry upon build assignment attempt' do
- expect(pending_job.reload).to be_running
- expect(pending_job.queuing_entry).to be_present
+ before do
+ pending_job.update_column(:status, :running)
+ end
- result = described_class.new(project_runner).execute
+ it 'removes queuing entry upon build assignment attempt' do
+ expect(pending_job.reload).to be_running
+ expect(pending_job.queuing_entry).to be_present
- expect(result).not_to be_valid
- expect(pending_job.reload.queuing_entry).not_to be_present
+ expect(execute).not_to be_valid
+ expect(pending_job.reload.queuing_entry).not_to be_present
+ end
end
end
end
@@ -807,11 +842,11 @@ module Ci
# Stub tested metrics
allow(Gitlab::Ci::Queue::Metrics)
.to receive(:attempt_counter)
- .and_return(attempt_counter)
+ .and_return(attempt_counter)
allow(Gitlab::Ci::Queue::Metrics)
.to receive(:job_queue_duration_seconds)
- .and_return(job_queue_duration_seconds)
+ .and_return(job_queue_duration_seconds)
project.update!(shared_runners_enabled: true)
pending_job.update!(created_at: current_time - 3600, queued_at: current_time - 1800)
@@ -822,7 +857,7 @@ module Ci
allow(job_queue_duration_seconds).to receive(:observe)
expect(attempt_counter).to receive(:increment)
- execute(runner)
+ build_on(runner)
end
end
@@ -834,7 +869,7 @@ module Ci
jobs_running_for_project: expected_jobs_running_for_project_first_job,
shard: expected_shard }, 1800)
- execute(runner)
+ build_on(runner)
end
context 'when project already has running jobs' do
@@ -854,7 +889,7 @@ module Ci
jobs_running_for_project: expected_jobs_running_for_project_third_job,
shard: expected_shard }, 1800)
- execute(runner)
+ build_on(runner)
end
end
end
@@ -913,7 +948,7 @@ module Ci
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).not_to receive(:observe)
- execute(runner)
+ build_on(runner)
end
end
end
@@ -933,12 +968,12 @@ module Ci
it 'present sets runner session configuration in the build' do
runner_session_params = { session: { 'url' => 'https://example.com' } }
- expect(execute(project_runner, runner_session_params).runner_session.attributes)
+ expect(build_on(project_runner, params: runner_session_params).runner_session.attributes)
.to include(runner_session_params[:session])
end
it 'not present it does not configure the runner session' do
- expect(execute(project_runner).runner_session).to be_nil
+ expect(build_on(project_runner).runner_session).to be_nil
end
end
@@ -954,7 +989,7 @@ module Ci
it 'returns 409 conflict' do
expect(Ci::Build.pending.unstarted.count).to eq 3
- result = described_class.new(project_runner).execute
+ result = described_class.new(project_runner, nil).execute
expect(result).not_to be_valid
expect(result.build).to be_nil
@@ -962,8 +997,8 @@ module Ci
end
end
- def execute(runner, params = {})
- described_class.new(runner).execute(params).build
+ def build_on(runner, runner_machine: nil, params: {})
+ described_class.new(runner, runner_machine).execute(params).build
end
end
end
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 16e20cad662..748f062948f 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -724,6 +724,80 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
end
end
+ describe 'execute_async_index_operations' do
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ it 'delegates ci task to Gitlab::Database::AsyncIndexes' do
+ expect(Gitlab::Database::AsyncIndexes).to receive(:execute_pending_actions!).with(how_many: 2)
+
+ run_rake_task('gitlab:db:execute_async_index_operations:ci')
+ end
+
+ it 'delegates ci task to Gitlab::Database::AsyncIndexes with specified argument' do
+ expect(Gitlab::Database::AsyncIndexes).to receive(:execute_pending_actions!).with(how_many: 5)
+
+ run_rake_task('gitlab:db:execute_async_index_operations:ci', '[5]')
+ end
+
+ it 'delegates main task to Gitlab::Database::AsyncIndexes' do
+ expect(Gitlab::Database::AsyncIndexes).to receive(:execute_pending_actions!).with(how_many: 2)
+
+ run_rake_task('gitlab:db:execute_async_index_operations:main')
+ end
+
+ it 'delegates main task to Gitlab::Database::AsyncIndexes with specified argument' do
+ expect(Gitlab::Database::AsyncIndexes).to receive(:execute_pending_actions!).with(how_many: 7)
+
+ run_rake_task('gitlab:db:execute_async_index_operations:main', '[7]')
+ end
+
+ it 'delegates all task to every database with higher default for dev' do
+ expect(Rake::Task['gitlab:db:execute_async_index_operations:ci']).to receive(:invoke).with(1000)
+ expect(Rake::Task['gitlab:db:execute_async_index_operations:main']).to receive(:invoke).with(1000)
+
+ run_rake_task('gitlab:db:execute_async_index_operations:all')
+ end
+
+ it 'delegates all task to every database with lower default for prod' do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+
+ expect(Rake::Task['gitlab:db:execute_async_index_operations:ci']).to receive(:invoke).with(2)
+ expect(Rake::Task['gitlab:db:execute_async_index_operations:main']).to receive(:invoke).with(2)
+
+ run_rake_task('gitlab:db:execute_async_index_operations:all')
+ end
+
+ it 'delegates all task to every database with specified argument' do
+ expect(Rake::Task['gitlab:db:execute_async_index_operations:ci']).to receive(:invoke).with('50')
+ expect(Rake::Task['gitlab:db:execute_async_index_operations:main']).to receive(:invoke).with('50')
+
+ run_rake_task('gitlab:db:execute_async_index_operations:all', '[50]')
+ end
+
+ context 'when feature is not enabled' do
+ it 'is a no-op' do
+ stub_feature_flags(database_async_index_operations: false)
+
+ expect(Gitlab::Database::AsyncIndexes).not_to receive(:execute_pending_actions!)
+
+ expect { run_rake_task('gitlab:db:execute_async_index_operations:main') }.to raise_error(SystemExit)
+ end
+ end
+
+ context 'with geo configured' do
+ before do
+ skip_unless_geo_configured
+ end
+
+ it 'does not create a task for the geo database' do
+ expect { run_rake_task('gitlab:db:execute_async_index_operations:geo') }
+ .to raise_error(/Don't know how to build task 'gitlab:db:execute_async_index_operations:geo'/)
+ end
+ end
+ end
+
describe 'active' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/views/notify/user_deactivated_email.html.haml_spec.rb b/spec/views/notify/user_deactivated_email.html.haml_spec.rb
new file mode 100644
index 00000000000..25d18e37cb9
--- /dev/null
+++ b/spec/views/notify/user_deactivated_email.html.haml_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'notify/user_deactivated_email.html.haml', feature_category: :user_management do
+ let(:name) { 'John Smith' }
+
+ before do
+ assign(:name, name)
+ end
+
+ it "displays the user's name" do
+ render
+
+ expect(rendered).to have_content(/^Hello John Smith,/)
+ end
+
+ context 'when additional text setting is set' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:deactivation_email_additional_text)
+ .and_return('So long and thanks for all the fish!')
+ end
+
+ context 'when additional text feature flag is enabled' do
+ it 'displays the additional text' do
+ render
+
+ expect(rendered).to have_content(/So long and thanks for all the fish!$/)
+ end
+ end
+
+ context 'when additional text feature flag is disabled' do
+ before do
+ stub_feature_flags(deactivation_email_additional_text: false)
+ end
+
+ it 'does not display the additional text' do
+ render
+
+ expect(rendered).to have_content(/Please contact your GitLab administrator if you think this is an error\.$/)
+ end
+ end
+ end
+
+ context 'when additional text setting is not set' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:deactivation_email_additional_text).and_return('')
+ end
+
+ it 'does not display any additional text' do
+ render
+
+ expect(rendered).to have_content(/Please contact your GitLab administrator if you think this is an error\.$/)
+ end
+ end
+end
diff --git a/spec/views/notify/user_deactivated_email.text.erb_spec.rb b/spec/views/notify/user_deactivated_email.text.erb_spec.rb
new file mode 100644
index 00000000000..8cf56816b92
--- /dev/null
+++ b/spec/views/notify/user_deactivated_email.text.erb_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'notify/user_deactivated_email.text.erb', feature_category: :user_management do
+ let(:name) { 'John Smith' }
+
+ before do
+ assign(:name, name)
+ end
+
+ it_behaves_like 'renders plain text email correctly'
+
+ it "displays the user's name" do
+ render
+
+ expect(rendered).to have_content(/^Hello John Smith,/)
+ end
+
+ context 'when additional text setting is set' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:deactivation_email_additional_text)
+ .and_return('So long and thanks for all the fish!')
+ end
+
+ context 'when additional text feature flag is enabled' do
+ it 'displays the additional text' do
+ render
+
+ expect(rendered).to have_content(/So long and thanks for all the fish!$/)
+ end
+ end
+
+ context 'when additional text feature flag is disabled' do
+ before do
+ stub_feature_flags(deactivation_email_additional_text: false)
+ end
+
+ it 'does not display the additional text' do
+ render
+
+ expect(rendered).to have_content(/Please contact your GitLab administrator if you think this is an error\.$/)
+ end
+ end
+ end
+
+ context 'when additional text setting is not set' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:deactivation_email_additional_text).and_return('')
+ end
+
+ it 'does not display any additional text' do
+ render
+
+ expect(rendered).to have_content(/Please contact your GitLab administrator if you think this is an error\.$/)
+ end
+ end
+end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 7ad15e1923d..377333b39e8 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -55,6 +55,7 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat
describe "#run_pipeline_schedule" do
let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService, execute: service_response) }
let(:service_response) { instance_double(ServiceResponse, payload: pipeline, error?: false) }
+ let(:pipeline) { instance_double(Ci::Pipeline, persisted?: true) }
context 'when pipeline can be created' do
before do
@@ -64,8 +65,6 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat
end
context "when pipeline is persisted" do
- let(:pipeline) { instance_double(Ci::Pipeline, persisted?: true) }
-
it "returns the service response" do
expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
end
@@ -76,8 +75,8 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat
expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
end
- it "changes the next_run_at" do
- expect { worker.perform(pipeline_schedule.id, user.id) }.to change { pipeline_schedule.reload.next_run_at }.by(1.day)
+ it "does not change the next_run_at" do
+ expect { worker.perform(pipeline_schedule.id, user.id) }.not_to change { pipeline_schedule.reload.next_run_at }
end
context 'when feature flag ci_use_run_pipeline_schedule_worker is disabled' do
@@ -90,19 +89,19 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat
end
end
- context 'when next_run_scheduled option is given as true' do
+ context 'when scheduling option is given as true' do
it "returns the service response" do
- expect(worker.perform(pipeline_schedule.id, user.id, 'next_run_scheduled' => true)).to eq(service_response)
+ expect(worker.perform(pipeline_schedule.id, user.id, scheduling: true)).to eq(service_response)
end
it "does not log errors" do
expect(worker).not_to receive(:log_extra_metadata_on_done)
- expect(worker.perform(pipeline_schedule.id, user.id, 'next_run_scheduled' => true)).to eq(service_response)
+ expect(worker.perform(pipeline_schedule.id, user.id, scheduling: true)).to eq(service_response)
end
- it "does not change the next_run_at" do
- expect { worker.perform(pipeline_schedule.id, user.id, 'next_run_scheduled' => true) }.not_to change { pipeline_schedule.reload.next_run_at }
+ it "changes the next_run_at" do
+ expect { worker.perform(pipeline_schedule.id, user.id, scheduling: true) }.to change { pipeline_schedule.reload.next_run_at }.by(1.day)
end
context 'when feature flag ci_use_run_pipeline_schedule_worker is disabled' do
@@ -111,7 +110,7 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat
end
it 'does not change the next_run_at' do
- expect { worker.perform(pipeline_schedule.id, user.id, 'next_run_scheduled' => true) }.not_to change { pipeline_schedule.reload.next_run_at }
+ expect { worker.perform(pipeline_schedule.id, user.id, scheduling: true) }.not_to change { pipeline_schedule.reload.next_run_at }
end
end
end
@@ -143,8 +142,9 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat
expect { worker.perform(pipeline_schedule.id, user.id) }.to not_change { pipeline_schedule.reload.next_run_at }
end
- it 'does not create a pipeline' do
- expect(Ci::CreatePipelineService).not_to receive(:new)
+ it 'creates a pipeline' do
+ expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: pipeline_schedule.ref).and_return(create_pipeline_service)
+ expect(create_pipeline_service).to receive(:execute).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule).and_return(service_response)
worker.perform(pipeline_schedule.id, user.id)
end