From b76ae638462ab0f673e5915986070518dd3f9ad3 Mon Sep 17 00:00:00 2001
From: GitLab Bot
Date: Thu, 19 Aug 2021 09:08:42 +0000
Subject: Add latest changes from gitlab-org/gitlab@14-2-stable-ee

---
 ...update_issuable_slas_where_issue_closed_spec.rb |  31 +++++
 ...e_flags_correct_flexible_rollout_values_spec.rb |  66 ++++++++++
 ...10804150320_create_base_work_item_types_spec.rb |  22 ++++
 ...ans_ci_daily_pipeline_schedule_triggers_spec.rb | 137 +++++++++++++++++++++
 .../add_triggers_to_integrations_type_new_spec.rb  |  65 ++++++++++
 ...ate_existing_dast_builds_with_variables_spec.rb |  76 ++++++++++++
 .../backfill_integrations_type_new_spec.rb         |  38 ++++++
 .../backfill_issues_upvotes_count_spec.rb          |   2 +-
 spec/migrations/confirm_security_bot_spec.rb       |  38 ++++++
 .../generate_customers_dot_jwt_signing_key_spec.rb |  42 +++++++
 .../orphaned_invite_tokens_cleanup_spec.rb         |  32 +++++
 ..._schedule_latest_pipeline_id_population_spec.rb |  61 ---------
 ...ith_all_security_related_artifact_types_spec.rb |  61 +++++++++
 .../reschedule_delete_orphaned_deployments_spec.rb |  73 +++++++++++
 .../reset_job_token_scope_enabled_again_spec.rb    |  25 ++++
 ...backfill_draft_status_on_merge_requests_spec.rb |  59 ---------
 ...py_ci_builds_columns_to_security_scans2_spec.rb |  52 ++++++++
 .../schedule_delete_orphaned_deployments_spec.rb   |  48 --------
 ...te_uuid_on_vulnerabilities_occurrences3_spec.rb | 127 +++++++++++++++
 .../schedule_security_setting_creation_spec.rb     |  58 +++++++++
 20 files changed, 944 insertions(+), 169 deletions(-)
 create mode 100644 spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb
 create mode 100644 spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb
 create mode 100644 spec/migrations/20210804150320_create_base_work_item_types_spec.rb
 create mode 100644 spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb
 create mode 100644 spec/migrations/add_triggers_to_integrations_type_new_spec.rb
 create mode 100644 spec/migrations/associate_existing_dast_builds_with_variables_spec.rb
 create mode 100644 spec/migrations/backfill_integrations_type_new_spec.rb
 create mode 100644 spec/migrations/confirm_security_bot_spec.rb
 create mode 100644 spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb
 create mode 100644 spec/migrations/orphaned_invite_tokens_cleanup_spec.rb
 delete mode 100644 spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb
 create mode 100644 spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb
 create mode 100644 spec/migrations/reschedule_delete_orphaned_deployments_spec.rb
 create mode 100644 spec/migrations/reset_job_token_scope_enabled_again_spec.rb
 delete mode 100644 spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb
 create mode 100644 spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb
 delete mode 100644 spec/migrations/schedule_delete_orphaned_deployments_spec.rb
 create mode 100644 spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb
 create mode 100644 spec/migrations/schedule_security_setting_creation_spec.rb

(limited to 'spec/migrations')

diff --git a/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb b/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb
new file mode 100644
index 00000000000..a0aae00776d
--- /dev/null
+++ 
b/spec/migrations/20210722042939_update_issuable_slas_where_issue_closed_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration!('update_issuable_slas_where_issue_closed') + +RSpec.describe UpdateIssuableSlasWhereIssueClosed, :migration do + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + let(:issues) { table(:issues) } + let(:issuable_slas) { table(:issuable_slas) } + let(:issue_params) { { title: 'title', project_id: project.id } } + let(:issue_closed_state) { 2 } + + let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } + let!(:project) { projects.create!(namespace_id: namespace.id) } + let!(:issue_open) { issues.create!(issue_params) } + let!(:issue_closed) { issues.create!(issue_params.merge(state_id: issue_closed_state)) } + + let!(:issuable_sla_open_issue) { issuable_slas.create!(issue_id: issue_open.id, due_at: Time.now) } + let!(:issuable_sla_closed_issue) { issuable_slas.create!(issue_id: issue_closed.id, due_at: Time.now) } + + it 'sets the issuable_closed attribute to false' do + expect(issuable_sla_open_issue.issuable_closed).to eq(false) + expect(issuable_sla_closed_issue.issuable_closed).to eq(false) + + migrate! + + expect(issuable_sla_open_issue.reload.issuable_closed).to eq(false) + expect(issuable_sla_closed_issue.reload.issuable_closed).to eq(true) + end +end diff --git a/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb b/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb new file mode 100644 index 00000000000..130ad45ffc1 --- /dev/null +++ b/spec/migrations/20210722150102_operations_feature_flags_correct_flexible_rollout_values_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration!('operations_feature_flags_correct_flexible_rollout_values') + +RSpec.describe OperationsFeatureFlagsCorrectFlexibleRolloutValues, :migration do + let_it_be(:strategies) { table(:operations_strategies) } + + let(:namespace) { table(:namespaces).create!(name: 'feature_flag', path: 'feature_flag') } + let(:project) { table(:projects).create!(namespace_id: namespace.id) } + let(:feature_flag) { table(:operations_feature_flags).create!(project_id: project.id, active: true, name: 'foo', iid: 1) } + + describe "#up" do + described_class::STICKINESS.each do |old, new| + it "corrects parameters for flexible rollout stickiness #{old}" do + reversible_migration do |migration| + parameters = { groupId: "default", rollout: "100", stickiness: old } + strategy = create_strategy(parameters) + + migration.before -> { + expect(strategy.reload.parameters).to eq({ "groupId" => "default", "rollout" => "100", "stickiness" => old }) + } + + migration.after -> { + expect(strategy.reload.parameters).to eq({ "groupId" => "default", "rollout" => "100", "stickiness" => new }) + } + end + end + end + + it 'ignores other strategies' do + reversible_migration do |migration| + parameters = { "groupId" => "default", "rollout" => "100", "stickiness" => "USERID" } + strategy = create_strategy(parameters, name: 'default') + + migration.before -> { + expect(strategy.reload.parameters).to eq(parameters) + } + + migration.after -> { + expect(strategy.reload.parameters).to eq(parameters) + } + end + end + + it 'ignores other stickiness' do + reversible_migration do |migration| + parameters = { "groupId" => "default", "rollout" => "100", "stickiness" => "FOO" } + strategy = create_strategy(parameters) + + 
migration.before -> { + expect(strategy.reload.parameters).to eq(parameters) + } + + migration.after -> { + expect(strategy.reload.parameters).to eq(parameters) + } + end + end + end + + def create_strategy(params, name: 'flexibleRollout') + strategies.create!(name: name, parameters: params, feature_flag_id: feature_flag.id) + end +end diff --git a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb new file mode 100644 index 00000000000..535472f5931 --- /dev/null +++ b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration!('create_base_work_item_types') + +RSpec.describe CreateBaseWorkItemTypes, :migration do + let!(:work_item_types) { table(:work_item_types) } + + it 'creates default data' do + reversible_migration do |migration| + migration.before -> { + # Depending on whether the migration has been run before, + # the size could be 4, or 0, so we don't set any expectations + } + + migration.after -> { + expect(work_item_types.count).to eq 4 + expect(work_item_types.all.pluck(:base_type)).to match_array WorkItem::Type.base_types.values + } + end + end +end diff --git a/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb b/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb new file mode 100644 index 00000000000..819120d43ef --- /dev/null +++ b/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb @@ -0,0 +1,137 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration!('update_trial_plans_ci_daily_pipeline_schedule_triggers') + +RSpec.describe UpdateTrialPlansCiDailyPipelineScheduleTriggers, :migration do + let!(:plans) { table(:plans) } + let!(:plan_limits) { table(:plan_limits) } + let!(:premium_trial_plan) { plans.create!(name: 'premium_trial', title: 'Premium Trial') } + let!(:ultimate_trial_plan) { plans.create!(name: 'ultimate_trial', title: 'Ultimate Trial') } + + describe '#up' do + let!(:premium_trial_plan_limits) { plan_limits.create!(plan_id: premium_trial_plan.id, ci_daily_pipeline_schedule_triggers: 0) } + let!(:ultimate_trial_plan_limits) { plan_limits.create!(plan_id: ultimate_trial_plan.id, ci_daily_pipeline_schedule_triggers: 0) } + + context 'when the environment is dev or com' do + before do + allow(Gitlab).to receive(:dev_env_or_com?).and_return(true) + end + + it 'sets the trial plan limits for ci_daily_pipeline_schedule_triggers' do + disable_migrations_output { migrate! } + + expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288) + expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288) + end + + it 'does not change the plan limits if the ultimate trial plan is missing' do + ultimate_trial_plan.destroy! + + expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count } + expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0) + end + + it 'does not change the plan limits if the ultimate trial plan limits is missing' do + ultimate_trial_plan_limits.destroy! + + expect { disable_migrations_output { migrate! 
} }.not_to change { plan_limits.count } + expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0) + end + + it 'does not change the plan limits if the premium trial plan is missing' do + premium_trial_plan.destroy! + + expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count } + expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0) + end + + it 'does not change the plan limits if the premium trial plan limits is missing' do + premium_trial_plan_limits.destroy! + + expect { disable_migrations_output { migrate! } }.not_to change { plan_limits.count } + expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0) + end + end + + context 'when the environment is anything other than dev or com' do + before do + allow(Gitlab).to receive(:dev_env_or_com?).and_return(false) + end + + it 'does not update the plan limits' do + disable_migrations_output { migrate! } + + expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0) + expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0) + end + end + end + + describe '#down' do + let!(:premium_trial_plan_limits) { plan_limits.create!(plan_id: premium_trial_plan.id, ci_daily_pipeline_schedule_triggers: 288) } + let!(:ultimate_trial_plan_limits) { plan_limits.create!(plan_id: ultimate_trial_plan.id, ci_daily_pipeline_schedule_triggers: 288) } + + context 'when the environment is dev or com' do + before do + allow(Gitlab).to receive(:dev_env_or_com?).and_return(true) + end + + it 'sets the trial plan limits ci_daily_pipeline_schedule_triggers to zero' do + migrate_down! + + expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0) + expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(0) + end + + it 'does not change the plan limits if the ultimate trial plan is missing' do + ultimate_trial_plan.destroy! + + expect { migrate_down! }.not_to change { plan_limits.count } + expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288) + end + + it 'does not change the plan limits if the ultimate trial plan limits is missing' do + ultimate_trial_plan_limits.destroy! + + expect { migrate_down! }.not_to change { plan_limits.count } + expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288) + end + + it 'does not change the plan limits if the premium trial plan is missing' do + premium_trial_plan.destroy! + + expect { migrate_down! }.not_to change { plan_limits.count } + expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288) + end + + it 'does not change the plan limits if the premium trial plan limits is missing' do + premium_trial_plan_limits.destroy! + + expect { migrate_down! }.not_to change { plan_limits.count } + expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288) + end + end + + context 'when the environment is anything other than dev or com' do + before do + allow(Gitlab).to receive(:dev_env_or_com?).and_return(false) + end + + it 'does not change the ultimate trial plan limits' do + migrate_down! + + expect(ultimate_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288) + expect(premium_trial_plan_limits.reload.ci_daily_pipeline_schedule_triggers).to eq(288) + end + end + end + + def migrate_down! + disable_migrations_output do + migrate! 
+ described_class.new.down + end + end +end diff --git a/spec/migrations/add_triggers_to_integrations_type_new_spec.rb b/spec/migrations/add_triggers_to_integrations_type_new_spec.rb new file mode 100644 index 00000000000..07845715a52 --- /dev/null +++ b/spec/migrations/add_triggers_to_integrations_type_new_spec.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! + +RSpec.describe AddTriggersToIntegrationsTypeNew do + let(:migration) { described_class.new } + let(:integrations) { table(:integrations) } + + describe '#up' do + before do + migrate! + end + + describe 'INSERT trigger' do + it 'sets `type_new` to the transformed `type` class name' do + Gitlab::Integrations::StiType.namespaced_integrations.each do |type| + integration = integrations.create!(type: "#{type}Service") + + expect(integration.reload).to have_attributes( + type: "#{type}Service", + type_new: "Integrations::#{type}" + ) + end + end + + it 'ignores types that are not namespaced' do + # We don't actually have any integrations without namespaces, + # but we can abuse one of the integration base classes. + integration = integrations.create!(type: 'BaseIssueTracker') + + expect(integration.reload).to have_attributes( + type: 'BaseIssueTracker', + type_new: nil + ) + end + + it 'ignores types that are unknown' do + integration = integrations.create!(type: 'FooBar') + + expect(integration.reload).to have_attributes( + type: 'FooBar', + type_new: nil + ) + end + end + end + + describe '#down' do + before do + migration.up + migration.down + end + + it 'drops the INSERT trigger' do + integration = integrations.create!(type: 'JiraService') + + expect(integration.reload).to have_attributes( + type: 'JiraService', + type_new: nil + ) + end + end +end diff --git a/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb b/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb new file mode 100644 index 00000000000..ce0ab4223e8 --- /dev/null +++ b/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb @@ -0,0 +1,76 @@ +# frozen_string_literal: true + +require 'spec_helper' +require Rails.root.join('db', 'migrate', '20210629031900_associate_existing_dast_builds_with_variables.rb') + +RSpec.describe AssociateExistingDastBuildsWithVariables do + subject(:migration) { described_class.new } + + let_it_be(:namespaces_table) { table(:namespaces) } + let_it_be(:projects_table) { table(:projects) } + let_it_be(:ci_pipelines_table) { table(:ci_pipelines) } + let_it_be(:ci_builds_table) { table(:ci_builds) } + let_it_be(:dast_sites_table) { table(:dast_sites) } + let_it_be(:dast_site_profiles_table) { table(:dast_site_profiles) } + let_it_be(:dast_scanner_profiles_table) { table(:dast_scanner_profiles) } + let_it_be(:dast_site_profiles_builds_table) { table(:dast_site_profiles_builds) } + let_it_be(:dast_profiles_table) { table(:dast_profiles) } + let_it_be(:dast_profiles_pipelines_table) { table(:dast_profiles_pipelines) } + + let!(:group) { namespaces_table.create!(type: 'Group', name: 'group', path: 'group') } + let!(:project) { projects_table.create!(name: 'project', path: 'project', namespace_id: group.id) } + + let!(:pipeline_0) { ci_pipelines_table.create!(project_id: project.id, source: 13) } + let!(:pipeline_1) { ci_pipelines_table.create!(project_id: project.id, source: 13) } + let!(:build_0) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_0.id, name: :dast, stage: :dast) } + let!(:build_1) { 
ci_builds_table.create!(project_id: project.id, commit_id: pipeline_0.id, name: :dast, stage: :dast) } + let!(:build_2) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, name: :dast, stage: :dast) } + let!(:build_3) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, name: :dast) } + let!(:build_4) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, stage: :dast) } + + let!(:dast_site) { dast_sites_table.create!(project_id: project.id, url: generate(:url)) } + let!(:dast_site_profile) { dast_site_profiles_table.create!(project_id: project.id, dast_site_id: dast_site.id, name: SecureRandom.hex) } + let!(:dast_scanner_profile) { dast_scanner_profiles_table.create!(project_id: project.id, name: SecureRandom.hex) } + + let!(:dast_profile) do + dast_profiles_table.create!( + project_id: project.id, + dast_site_profile_id: dast_site_profile.id, + dast_scanner_profile_id: dast_scanner_profile.id, + name: SecureRandom.hex, + description: SecureRandom.hex + ) + end + + let!(:dast_profiles_pipeline_0) { dast_profiles_pipelines_table.create!(dast_profile_id: dast_profile.id, ci_pipeline_id: pipeline_0.id) } + let!(:dast_profiles_pipeline_1) { dast_profiles_pipelines_table.create!(dast_profile_id: dast_profile.id, ci_pipeline_id: pipeline_1.id) } + + context 'when there are ci_pipelines with associated dast_profiles' do + describe 'migration up' do + it 'adds association of dast_site_profiles to ci_builds', :aggregate_failures do + expect(dast_site_profiles_builds_table.all).to be_empty + + migration.up + + expected_results = [ + [dast_site_profile.id, build_0.id], + [dast_site_profile.id, build_1.id], + [dast_site_profile.id, build_2.id] + ] + + expect(dast_site_profiles_builds_table.all.map { |assoc| [assoc.dast_site_profile_id, assoc.ci_build_id] }).to contain_exactly(*expected_results) + end + end + end + + describe 'migration down' do + it 'deletes all records in the dast_site_profiles_builds table', :aggregate_failures do + expect(dast_site_profiles_builds_table.all).to be_empty + + migration.up + migration.down + + expect(dast_site_profiles_builds_table.all).to be_empty + end + end +end diff --git a/spec/migrations/backfill_integrations_type_new_spec.rb b/spec/migrations/backfill_integrations_type_new_spec.rb new file mode 100644 index 00000000000..5b8fbf6f555 --- /dev/null +++ b/spec/migrations/backfill_integrations_type_new_spec.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe BackfillIntegrationsTypeNew do + let_it_be(:migration) { described_class::MIGRATION } + let_it_be(:integrations) { table(:integrations) } + + before do + integrations.create!(id: 1) + integrations.create!(id: 2) + integrations.create!(id: 3) + integrations.create!(id: 4) + integrations.create!(id: 5) + end + + describe '#up' do + it 'schedules background jobs for each batch of integrations' do + migrate! + + expect(migration).to have_scheduled_batched_migration( + table_name: :integrations, + column_name: :id, + interval: described_class::INTERVAL + ) + end + end + + describe '#down' do + it 'deletes all batched migration records' do + migrate! + schema_migrate_down! 
+ + expect(migration).not_to have_scheduled_batched_migration + end + end +end diff --git a/spec/migrations/backfill_issues_upvotes_count_spec.rb b/spec/migrations/backfill_issues_upvotes_count_spec.rb index f2bea0edea0..94cfa29ae89 100644 --- a/spec/migrations/backfill_issues_upvotes_count_spec.rb +++ b/spec/migrations/backfill_issues_upvotes_count_spec.rb @@ -19,7 +19,7 @@ RSpec.describe BackfillIssuesUpvotesCount do let!(:award_emoji3) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue3.id) } let!(:award_emoji4) { award_emoji.create!( name: 'thumbsup', awardable_type: 'Issue', awardable_id: issue4.id) } - it 'correctly schedules background migrations' do + it 'correctly schedules background migrations', :aggregate_failures do stub_const("#{described_class.name}::BATCH_SIZE", 2) Sidekiq::Testing.fake! do diff --git a/spec/migrations/confirm_security_bot_spec.rb b/spec/migrations/confirm_security_bot_spec.rb new file mode 100644 index 00000000000..19ca81f92f3 --- /dev/null +++ b/spec/migrations/confirm_security_bot_spec.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe ConfirmSecurityBot, :migration do + let(:users) { table(:users) } + + let(:user_type) { 8 } + + context 'when bot is not created' do + it 'skips migration' do + migrate! + + bot = users.find_by(user_type: user_type) + + expect(bot).to be_nil + end + end + + context 'when bot is confirmed' do + let(:bot) { table(:users).create!(user_type: user_type, confirmed_at: Time.current, projects_limit: 1) } + + it 'skips migration' do + expect { migrate! }.not_to change { bot.reload.confirmed_at } + end + end + + context 'when bot is not confirmed' do + let(:bot) { table(:users).create!(user_type: user_type, projects_limit: 1) } + + it 'update confirmed_at' do + freeze_time do + expect { migrate! }.to change { bot.reload.confirmed_at }.from(nil).to(Time.current) + end + end + end +end diff --git a/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb b/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb new file mode 100644 index 00000000000..b7a91abf5d7 --- /dev/null +++ b/spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! + +RSpec.describe GenerateCustomersDotJwtSigningKey do + let(:application_settings) do + Class.new(ActiveRecord::Base) do + self.table_name = 'application_settings' + + attr_encrypted :customers_dot_jwt_signing_key, { + mode: :per_attribute_iv, + key: Gitlab::Utils.ensure_utf8_size(Rails.application.secrets.db_key_base, bytes: 32.bytes), + algorithm: 'aes-256-gcm', + encode: true + } + end + end + + it 'generates JWT signing key' do + application_settings.create! 
+ + reversible_migration do |migration| + migration.before -> { + settings = application_settings.first + + expect(settings.customers_dot_jwt_signing_key).to be_nil + expect(settings.encrypted_customers_dot_jwt_signing_key).to be_nil + expect(settings.encrypted_customers_dot_jwt_signing_key_iv).to be_nil + } + + migration.after -> { + settings = application_settings.first + + expect(settings.encrypted_customers_dot_jwt_signing_key).to be_present + expect(settings.encrypted_customers_dot_jwt_signing_key_iv).to be_present + expect { OpenSSL::PKey::RSA.new(settings.customers_dot_jwt_signing_key) }.not_to raise_error + } + end + end +end diff --git a/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb b/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb new file mode 100644 index 00000000000..889c04700c7 --- /dev/null +++ b/spec/migrations/orphaned_invite_tokens_cleanup_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 'orphaned_invite_tokens_cleanup' + +RSpec.describe OrphanedInviteTokensCleanup, :migration do + def create_member(**extra_attributes) + defaults = { + access_level: 10, + source_id: 1, + source_type: "Project", + notification_level: 0, + type: 'ProjectMember' + } + + table(:members).create!(defaults.merge(extra_attributes)) + end + + describe '#up', :aggregate_failures do + it 'removes invite tokens for accepted records with invite_accepted_at < created_at' do + record1 = create_member(invite_token: 'foo', invite_accepted_at: 1.day.ago, created_at: 1.hour.ago) + record2 = create_member(invite_token: 'foo2', invite_accepted_at: nil, created_at: 1.hour.ago) + record3 = create_member(invite_token: 'foo3', invite_accepted_at: 1.day.ago, created_at: 1.year.ago) + + migrate! + + expect(table(:members).find(record1.id).invite_token).to eq nil + expect(table(:members).find(record2.id).invite_token).to eq 'foo2' + expect(table(:members).find(record3.id).invite_token).to eq 'foo3' + end + end +end diff --git a/spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb b/spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb deleted file mode 100644 index 354a0896ac9..00000000000 --- a/spec/migrations/re_schedule_latest_pipeline_id_population_spec.rb +++ /dev/null @@ -1,61 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe ReScheduleLatestPipelineIdPopulation do - let(:namespaces) { table(:namespaces) } - let(:pipelines) { table(:ci_pipelines) } - let(:projects) { table(:projects) } - let(:project_settings) { table(:project_settings) } - let(:vulnerability_statistics) { table(:vulnerability_statistics) } - - let(:letter_grade_a) { 0 } - - let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') } - let(:project_1) { projects.create!(namespace_id: namespace.id, name: 'Foo 1') } - let(:project_2) { projects.create!(namespace_id: namespace.id, name: 'Foo 2') } - let(:project_3) { projects.create!(namespace_id: namespace.id, name: 'Foo 3') } - let(:project_4) { projects.create!(namespace_id: namespace.id, name: 'Foo 4') } - - before do - project_settings.create!(project_id: project_1.id, has_vulnerabilities: true) - project_settings.create!(project_id: project_2.id, has_vulnerabilities: true) - project_settings.create!(project_id: project_3.id) - project_settings.create!(project_id: project_4.id, has_vulnerabilities: true) - - pipeline = pipelines.create!(project_id: project_2.id, ref: 'master', sha: 'adf43c3a') - - vulnerability_statistics.create!(project_id: project_2.id, letter_grade: letter_grade_a, latest_pipeline_id: pipeline.id) - vulnerability_statistics.create!(project_id: project_4.id, letter_grade: letter_grade_a) - - allow(Gitlab).to receive(:ee?).and_return(is_ee?) - stub_const("#{described_class.name}::BATCH_SIZE", 1) - end - - around do |example| - freeze_time { example.run } - end - - context 'when the installation is FOSS' do - let(:is_ee?) { false } - - it 'does not schedule any background job' do - migrate! - - expect(BackgroundMigrationWorker.jobs.size).to be(0) - end - end - - context 'when the installation is EE' do - let(:is_ee?) { true } - - it 'schedules the background jobs' do - migrate! - - expect(BackgroundMigrationWorker.jobs.size).to be(2) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(described_class::DELAY_INTERVAL, project_1.id, project_1.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2 * described_class::DELAY_INTERVAL, project_4.id, project_4.id) - end - end -end diff --git a/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb b/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb new file mode 100644 index 00000000000..8a9b993b869 --- /dev/null +++ b/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe ReScheduleLatestPipelineIdPopulationWithAllSecurityRelatedArtifactTypes do + let(:namespaces) { table(:namespaces) } + let(:pipelines) { table(:ci_pipelines) } + let(:projects) { table(:projects) } + let(:project_settings) { table(:project_settings) } + let(:vulnerability_statistics) { table(:vulnerability_statistics) } + + let(:letter_grade_a) { 0 } + + let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') } + let(:project_1) { projects.create!(namespace_id: namespace.id, name: 'Foo 1') } + let(:project_2) { projects.create!(namespace_id: namespace.id, name: 'Foo 2') } + let(:project_3) { projects.create!(namespace_id: namespace.id, name: 'Foo 3') } + let(:project_4) { projects.create!(namespace_id: namespace.id, name: 'Foo 4') } + + before do + project_settings.create!(project_id: project_1.id, has_vulnerabilities: true) + project_settings.create!(project_id: project_2.id, has_vulnerabilities: true) + project_settings.create!(project_id: project_3.id) + project_settings.create!(project_id: project_4.id, has_vulnerabilities: true) + + pipeline = pipelines.create!(project_id: project_2.id, ref: 'master', sha: 'adf43c3a') + + vulnerability_statistics.create!(project_id: project_2.id, letter_grade: letter_grade_a, latest_pipeline_id: pipeline.id) + vulnerability_statistics.create!(project_id: project_4.id, letter_grade: letter_grade_a) + + allow(Gitlab).to receive(:ee?).and_return(is_ee?) + stub_const("#{described_class.name}::BATCH_SIZE", 1) + end + + around do |example| + freeze_time { example.run } + end + + context 'when the installation is FOSS' do + let(:is_ee?) { false } + + it 'does not schedule any background job' do + migrate! + + expect(BackgroundMigrationWorker.jobs.size).to be(0) + end + end + + context 'when the installation is EE' do + let(:is_ee?) { true } + + it 'schedules the background jobs' do + migrate! + + expect(BackgroundMigrationWorker.jobs.size).to be(2) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(described_class::DELAY_INTERVAL, project_1.id, project_1.id) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2 * described_class::DELAY_INTERVAL, project_4.id, project_4.id) + end + end +end diff --git a/spec/migrations/reschedule_delete_orphaned_deployments_spec.rb b/spec/migrations/reschedule_delete_orphaned_deployments_spec.rb new file mode 100644 index 00000000000..eb91602388c --- /dev/null +++ b/spec/migrations/reschedule_delete_orphaned_deployments_spec.rb @@ -0,0 +1,73 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! 
+ +RSpec.describe RescheduleDeleteOrphanedDeployments, :sidekiq, schema: 20210617161348 do + let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } + let!(:project) { table(:projects).create!(namespace_id: namespace.id) } + let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) } + let(:background_migration_jobs) { table(:background_migration_jobs) } + + before do + create_deployment!(environment.id, project.id) + create_deployment!(environment.id, project.id) + create_deployment!(environment.id, project.id) + create_deployment!(non_existing_record_id, project.id) + create_deployment!(non_existing_record_id, project.id) + create_deployment!(non_existing_record_id, project.id) + create_deployment!(non_existing_record_id, project.id) + + stub_const("#{described_class}::BATCH_SIZE", 1) + end + + it 'steal existing background migration jobs' do + expect(Gitlab::BackgroundMigration).to receive(:steal).with('DeleteOrphanedDeployments') + + migrate! + end + + it 'cleans up background migration jobs tracking records' do + old_successful_job = background_migration_jobs.create!( + class_name: 'DeleteOrphanedDeployments', + status: Gitlab::Database::BackgroundMigrationJob.statuses[:succeeded], + arguments: [table(:deployments).minimum(:id), table(:deployments).minimum(:id)] + ) + + old_pending_job = background_migration_jobs.create!( + class_name: 'DeleteOrphanedDeployments', + status: Gitlab::Database::BackgroundMigrationJob.statuses[:pending], + arguments: [table(:deployments).maximum(:id), table(:deployments).maximum(:id)] + ) + + migrate! + + expect { old_successful_job.reload }.to raise_error(ActiveRecord::RecordNotFound) + expect { old_pending_job.reload }.to raise_error(ActiveRecord::RecordNotFound) + end + + it 'schedules DeleteOrphanedDeployments background jobs' do + Sidekiq::Testing.fake! do + freeze_time do + migrate! + + expect(BackgroundMigrationWorker.jobs.size).to eq(7) + table(:deployments).find_each do |deployment| + expect(described_class::MIGRATION).to be_scheduled_migration(deployment.id, deployment.id) + end + end + end + end + + def create_deployment!(environment_id, project_id) + table(:deployments).create!( + environment_id: environment_id, + project_id: project_id, + ref: 'master', + tag: false, + sha: 'x', + status: 1, + iid: table(:deployments).count + 1) + end +end diff --git a/spec/migrations/reset_job_token_scope_enabled_again_spec.rb b/spec/migrations/reset_job_token_scope_enabled_again_spec.rb new file mode 100644 index 00000000000..da6817f6f21 --- /dev/null +++ b/spec/migrations/reset_job_token_scope_enabled_again_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! + +RSpec.describe ResetJobTokenScopeEnabledAgain do + let(:settings) { table(:project_ci_cd_settings) } + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') } + let(:project_1) { projects.create!(name: 'proj-1', path: 'gitlab-org', namespace_id: namespace.id)} + let(:project_2) { projects.create!(name: 'proj-2', path: 'gitlab-org', namespace_id: namespace.id)} + + before do + settings.create!(id: 1, project_id: project_1.id, job_token_scope_enabled: true) + settings.create!(id: 2, project_id: project_2.id, job_token_scope_enabled: false) + end + + it 'migrates job_token_scope_enabled to be always false' do + expect { migrate! 
} + .to change { settings.where(job_token_scope_enabled: false).count } + .from(1).to(2) + end +end diff --git a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb deleted file mode 100644 index 5a1c07d810f..00000000000 --- a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_spec.rb +++ /dev/null @@ -1,59 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe ScheduleBackfillDraftStatusOnMergeRequests, :sidekiq do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:merge_requests) { table(:merge_requests) } - - let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') } - let(:project) { projects.create!(namespace_id: group.id) } - - let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] } - - def create_merge_request(params) - common_params = { - target_project_id: project.id, - target_branch: 'feature1', - source_branch: 'master' - } - - merge_requests.create!(common_params.merge(params)) - end - - before do - draft_prefixes.each do |prefix| - (1..4).each do |n| - create_merge_request( - title: "#{prefix} This is a title", - draft: false, - state_id: n - ) - end - end - - stub_const("#{described_class}::BATCH_SIZE", 1) - end - - it 'schedules BackfillDraftStatusOnMergeRequests background jobs' do - Sidekiq::Testing.fake! do - draft_mrs = Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests::MergeRequest.eligible - - first_mr_id = draft_mrs.first.id - second_mr_id = draft_mrs.second.id - - freeze_time do - migrate! - - expect(BackgroundMigrationWorker.jobs.size).to eq(7) - expect(described_class::MIGRATION) - .to be_scheduled_delayed_migration(2.minutes, first_mr_id, first_mr_id) - expect(described_class::MIGRATION) - .to be_scheduled_delayed_migration(4.minutes, second_mr_id, second_mr_id) - end - end - end -end diff --git a/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb b/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb new file mode 100644 index 00000000000..012c7d065fc --- /dev/null +++ b/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe ScheduleCopyCiBuildsColumnsToSecurityScans2 do + let_it_be(:namespaces) { table(:namespaces) } + let_it_be(:projects) { table(:projects) } + let_it_be(:ci_pipelines) { table(:ci_pipelines) } + let_it_be(:ci_builds) { table(:ci_builds) } + let_it_be(:security_scans) { table(:security_scans) } + let_it_be(:background_migration_jobs) { table(:background_migration_jobs) } + + let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') } + let!(:project) { projects.create!(namespace_id: namespace.id) } + let!(:pipeline) { ci_pipelines.create!(status: "success")} + + let!(:build1) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) } + let!(:build2) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) } + let!(:build3) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) } + + let!(:scan1) { security_scans.create!(build_id: build1.id, scan_type: 1) } + let!(:scan2) { security_scans.create!(build_id: build2.id, scan_type: 1) } + let!(:scan3) { security_scans.create!(build_id: build3.id, scan_type: 1) } + + let!(:job_class_name) { described_class::MIGRATION } + let!(:tracked_pending_job) { background_migration_jobs.create!(class_name: job_class_name, status: 0, arguments: [1]) } + let!(:tracked_successful_job) { background_migration_jobs.create!(class_name: job_class_name, status: 1, arguments: [2]) } + let(:jobs) { Gitlab::Database::BackgroundMigrationJob.where(id: [tracked_pending_job.id, tracked_successful_job.id] ).for_migration_class(job_class_name) } + + before do + stub_const("#{described_class}::BATCH_SIZE", 2) + allow_next_instance_of(Gitlab::BackgroundMigration::CopyCiBuildsColumnsToSecurityScans) do |instance| + allow(instance).to receive(:mark_job_as_succeeded) + end + end + + around do |example| + freeze_time { Sidekiq::Testing.fake! { example.run } } + end + + it 'schedules background migrations', :aggregate_failures do + expect(jobs).not_to be_empty + + migrate! + + expect(jobs).to be_empty + expect(BackgroundMigrationWorker.jobs.size).to eq(2) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, scan1.id, scan2.id) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, scan3.id, scan3.id) + end +end diff --git a/spec/migrations/schedule_delete_orphaned_deployments_spec.rb b/spec/migrations/schedule_delete_orphaned_deployments_spec.rb deleted file mode 100644 index 618958a3d90..00000000000 --- a/spec/migrations/schedule_delete_orphaned_deployments_spec.rb +++ /dev/null @@ -1,48 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! 
- -RSpec.describe ScheduleDeleteOrphanedDeployments, :sidekiq, schema: 20210617161348 do - let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let!(:project) { table(:projects).create!(namespace_id: namespace.id) } - let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) } - let(:background_migration_jobs) { table(:background_migration_jobs) } - - before do - create_deployment!(environment.id, project.id) - create_deployment!(environment.id, project.id) - create_deployment!(environment.id, project.id) - create_deployment!(non_existing_record_id, project.id) - create_deployment!(non_existing_record_id, project.id) - create_deployment!(non_existing_record_id, project.id) - create_deployment!(non_existing_record_id, project.id) - - stub_const("#{described_class}::BATCH_SIZE", 1) - end - - it 'schedules DeleteOrphanedDeployments background jobs' do - Sidekiq::Testing.fake! do - freeze_time do - migrate! - - expect(BackgroundMigrationWorker.jobs.size).to eq(7) - table(:deployments).find_each do |deployment| - expect(described_class::MIGRATION).to be_scheduled_migration(deployment.id, deployment.id) - end - end - end - end - - def create_deployment!(environment_id, project_id) - table(:deployments).create!( - environment_id: environment_id, - project_id: project_id, - ref: 'master', - tag: false, - sha: 'x', - status: 1, - iid: table(:deployments).count + 1) - end -end diff --git a/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb b/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb new file mode 100644 index 00000000000..77f298b5ecb --- /dev/null +++ b/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb @@ -0,0 +1,127 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe ScheduleRecalculateUuidOnVulnerabilitiesOccurrences3 do + let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } + let(:users) { table(:users) } + let(:user) { create_user! 
} + let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } + let(:scanners) { table(:vulnerability_scanners) } + let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } + let(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } + let(:vulnerabilities) { table(:vulnerabilities) } + let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } + let(:vulnerability_identifiers) { table(:vulnerability_identifiers) } + let(:vulnerability_identifier) do + vulnerability_identifiers.create!( + project_id: project.id, + external_type: 'uuid-v5', + external_id: 'uuid-v5', + fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', + name: 'Identifier for UUIDv5') + end + + let(:different_vulnerability_identifier) do + vulnerability_identifiers.create!( + project_id: project.id, + external_type: 'uuid-v4', + external_id: 'uuid-v4', + fingerprint: '772da93d34a1ba010bcb5efa9fb6f8e01bafcc89', + name: 'Identifier for UUIDv4') + end + + let(:vulnerability_for_uuidv4) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let(:vulnerability_for_uuidv5) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let!(:finding1) do + create_finding!( + vulnerability_id: vulnerability_for_uuidv4.id, + project_id: project.id, + scanner_id: different_scanner.id, + primary_identifier_id: different_vulnerability_identifier.id, + location_fingerprint: 'fa18f432f1d56675f4098d318739c3cd5b14eb3e', + uuid: 'b3cc2518-5446-4dea-871c-89d5e999c1ac' + ) + end + + let!(:finding2) do + create_finding!( + vulnerability_id: vulnerability_for_uuidv5.id, + project_id: project.id, + scanner_id: scanner.id, + primary_identifier_id: vulnerability_identifier.id, + location_fingerprint: '838574be0210968bf6b9f569df9c2576242cbf0a', + uuid: '77211ed6-7dff-5f6b-8c9a-da89ad0a9b60' + ) + end + + before do + stub_const("#{described_class}::BATCH_SIZE", 1) + end + + around do |example| + freeze_time { Sidekiq::Testing.fake! { example.run } } + end + + it 'schedules background migrations', :aggregate_failures do + migrate! 
+ + expect(BackgroundMigrationWorker.jobs.size).to eq(2) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, finding1.id, finding1.id) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, finding2.id, finding2.id) + end + + private + + def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) + vulnerabilities.create!( + project_id: project_id, + author_id: author_id, + title: title, + severity: severity, + confidence: confidence, + report_type: report_type + ) + end + + def create_finding!( + vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, location_fingerprint:, uuid:) + vulnerabilities_findings.create!( + vulnerability_id: vulnerability_id, + project_id: project_id, + name: 'test', + severity: 7, + confidence: 7, + report_type: 0, + project_fingerprint: '123qweasdzxc', + scanner_id: scanner_id, + primary_identifier_id: primary_identifier_id, + location_fingerprint: location_fingerprint, + metadata_version: 'test', + raw_metadata: 'test', + uuid: uuid + ) + end + + def create_user!(name: "Example User", email: "user@example.com", user_type: nil) + users.create!( + name: name, + email: email, + username: name, + projects_limit: 0 + ) + end +end diff --git a/spec/migrations/schedule_security_setting_creation_spec.rb b/spec/migrations/schedule_security_setting_creation_spec.rb new file mode 100644 index 00000000000..e1b7b540d7f --- /dev/null +++ b/spec/migrations/schedule_security_setting_creation_spec.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe ScheduleSecuritySettingCreation, :sidekiq do + describe '#up' do + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + + context 'for EE version' do + before do + stub_const("#{described_class.name}::BATCH_SIZE", 2) + allow(Gitlab).to receive(:ee?).and_return(true) + end + + it 'schedules background migration job' do + namespace = namespaces.create!(name: 'test', path: 'test') + projects.create!(id: 12, namespace_id: namespace.id, name: 'red', path: 'red') + projects.create!(id: 13, namespace_id: namespace.id, name: 'green', path: 'green') + projects.create!(id: 14, namespace_id: namespace.id, name: 'blue', path: 'blue') + + Sidekiq::Testing.fake! do + freeze_time do + migrate! + + expect(described_class::MIGRATION) + .to be_scheduled_delayed_migration(5.minutes, 12, 13) + + expect(described_class::MIGRATION) + .to be_scheduled_delayed_migration(10.minutes, 14, 14) + + expect(BackgroundMigrationWorker.jobs.size).to eq(2) + end + end + end + end + + context 'for FOSS version' do + before do + allow(Gitlab).to receive(:ee?).and_return(false) + end + + it 'does not schedule any jobs' do + namespace = namespaces.create!(name: 'test', path: 'test') + projects.create!(id: 12, namespace_id: namespace.id, name: 'red', path: 'red') + + Sidekiq::Testing.fake! do + freeze_time do + migrate! + + expect(BackgroundMigrationWorker.jobs.size).to eq(0) + end + end + end + end + end +end -- cgit v1.2.3