Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary refs log tree commit diff
diff options
context:
space:
mode:
Diffstat (limited to 'spec/migrations')
-rw-r--r--spec/migrations/20210804150320_create_base_work_item_types_spec.rb10
-rw-r--r--spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb71
-rw-r--r--spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb21
-rw-r--r--spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb54
-rw-r--r--spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb21
-rw-r--r--spec/migrations/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks_spec.rb21
-rw-r--r--spec/migrations/active_record/schema_spec.rb2
-rw-r--r--spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb35
-rw-r--r--spec/migrations/add_triggers_to_integrations_type_new_spec.rb14
-rw-r--r--spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb109
-rw-r--r--spec/migrations/backfill_stage_event_hash_spec.rb103
-rw-r--r--spec/migrations/cleanup_remaining_orphan_invites_spec.rb37
-rw-r--r--spec/migrations/disable_job_token_scope_when_unused_spec.rb44
-rw-r--r--spec/migrations/remove_duplicate_dast_site_tokens_spec.rb53
-rw-r--r--spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb53
-rw-r--r--spec/migrations/replace_external_wiki_triggers_spec.rb132
-rw-r--r--spec/migrations/set_default_job_token_scope_true_spec.rb33
-rw-r--r--spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb69
-rw-r--r--spec/migrations/steal_merge_request_diff_commit_users_migration_spec.rb29
-rw-r--r--spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb102
-rw-r--r--spec/migrations/update_minimum_password_length_spec.rb2
21 files changed, 1012 insertions(+), 3 deletions(-)
diff --git a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
index 535472f5931..9ba29637e00 100644
--- a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
+++ b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
@@ -6,7 +6,17 @@ require_migration!('create_base_work_item_types')
RSpec.describe CreateBaseWorkItemTypes, :migration do
let!(:work_item_types) { table(:work_item_types) }
+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ WorkItem::Type.delete_all
+ Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+ end
+
it 'creates default data' do
+ # Need to delete all as base types are seeded before entire test suite
+ WorkItem::Type.delete_all
+
reversible_migration do |migration|
migration.before -> {
# Depending on whether the migration has been run before,
diff --git a/spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb b/spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb
new file mode 100644
index 00000000000..d87f952b5da
--- /dev/null
+++ b/spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('backfill_projects_with_coverage')
+
+RSpec.describe BackfillProjectsWithCoverage do
+ let(:projects) { table(:projects) }
+ let(:ci_pipelines) { table(:ci_pipelines) }
+ let(:ci_daily_build_group_report_results) { table(:ci_daily_build_group_report_results) }
+ let(:group) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:project_1) { projects.create!(namespace_id: group.id) }
+ let(:project_2) { projects.create!(namespace_id: group.id) }
+ let(:pipeline_1) { ci_pipelines.create!(project_id: project_1.id) }
+ let(:pipeline_2) { ci_pipelines.create!(project_id: project_2.id) }
+ let(:pipeline_3) { ci_pipelines.create!(project_id: project_2.id) }
+
+ describe '#up' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ stub_const("#{described_class}::SUB_BATCH_SIZE", 1)
+
+ ci_daily_build_group_report_results.create!(
+ id: 1,
+ project_id: project_1.id,
+ date: 3.days.ago,
+ last_pipeline_id: pipeline_1.id,
+ ref_path: 'main',
+ group_name: 'rspec',
+ data: { coverage: 95.0 },
+ default_branch: true,
+ group_id: group.id
+ )
+
+ ci_daily_build_group_report_results.create!(
+ id: 2,
+ project_id: project_2.id,
+ date: 2.days.ago,
+ last_pipeline_id: pipeline_2.id,
+ ref_path: 'main',
+ group_name: 'rspec',
+ data: { coverage: 95.0 },
+ default_branch: true,
+ group_id: group.id
+ )
+
+ ci_daily_build_group_report_results.create!(
+ id: 3,
+ project_id: project_2.id,
+ date: 1.day.ago,
+ last_pipeline_id: pipeline_3.id,
+ ref_path: 'test_branch',
+ group_name: 'rspec',
+ data: { coverage: 95.0 },
+ default_branch: false,
+ group_id: group.id
+ )
+ end
+
+ it 'schedules BackfillProjectsWithCoverage background jobs', :aggregate_failures do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, 1, 2, 1)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 3, 3, 1)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb b/spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb
new file mode 100644
index 00000000000..b1751216732
--- /dev/null
+++ b/spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('drop_temporary_columns_and_triggers_for_ci_builds_runner_session')
+
+RSpec.describe DropTemporaryColumnsAndTriggersForCiBuildsRunnerSession, :migration do
+ let(:ci_builds_runner_session_table) { table(:ci_builds_runner_session) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ci_builds_runner_session_table.column_names).to include('build_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ ci_builds_runner_session_table.reset_column_information
+ expect(ci_builds_runner_session_table.column_names).not_to include('build_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb
new file mode 100644
index 00000000000..c23110750c3
--- /dev/null
+++ b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('upsert_base_work_item_types')
+
+RSpec.describe UpsertBaseWorkItemTypes, :migration do
+ let!(:work_item_types) { table(:work_item_types) }
+
+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ WorkItem::Type.delete_all
+ Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+ end
+
+ context 'when no default types exist' do
+ it 'creates default data' do
+ # Need to delete all as base types are seeded before entire test suite
+ WorkItem::Type.delete_all
+
+ expect(work_item_types.count).to eq(0)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ # Depending on whether the migration has been run before,
+ # the size could be 4, or 0, so we don't set any expectations
+ # as we don't delete base types on migration reverse
+ }
+
+ migration.after -> {
+ expect(work_item_types.count).to eq(4)
+ expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+ }
+ end
+ end
+ end
+
+ context 'when default types already exist' do
+ it 'does not create default types again' do
+ expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+ }
+
+ migration.after -> {
+ expect(work_item_types.count).to eq(4)
+ expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+ }
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb b/spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb
new file mode 100644
index 00000000000..1b35982c41d
--- /dev/null
+++ b/spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('drop_temporary_columns_and_triggers_for_ci_build_needs')
+
+RSpec.describe DropTemporaryColumnsAndTriggersForCiBuildNeeds do
+ let(:ci_build_needs_table) { table(:ci_build_needs) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ci_build_needs_table.column_names).to include('build_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ ci_build_needs_table.reset_column_information
+ expect(ci_build_needs_table.column_names).not_to include('build_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks_spec.rb b/spec/migrations/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks_spec.rb
new file mode 100644
index 00000000000..8d46ba7eb58
--- /dev/null
+++ b/spec/migrations/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('drop_temporary_columns_and_triggers_for_ci_build_trace_chunks')
+
+RSpec.describe DropTemporaryColumnsAndTriggersForCiBuildTraceChunks do
+ let(:ci_build_trace_chunks_table) { table(:ci_build_trace_chunks) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ci_build_trace_chunks_table.column_names).to include('build_id_convert_to_bigint')
+ }
+
+ migration.after -> {
+ ci_build_trace_chunks_table.reset_column_information
+ expect(ci_build_trace_chunks_table.column_names).not_to include('build_id_convert_to_bigint')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/active_record/schema_spec.rb b/spec/migrations/active_record/schema_spec.rb
index 4a505c51a16..042b5710dce 100644
--- a/spec/migrations/active_record/schema_spec.rb
+++ b/spec/migrations/active_record/schema_spec.rb
@@ -7,7 +7,7 @@ require 'spec_helper'
RSpec.describe ActiveRecord::Schema, schema: :latest do
let(:all_migrations) do
- migrations_directories = %w[db/migrate db/post_migrate].map { |path| Rails.root.join(path).to_s }
+ migrations_directories = Rails.application.paths["db/migrate"].paths.map(&:to_s)
migrations_paths = migrations_directories.map { |path| File.join(path, '*') }
migrations = Dir[*migrations_paths] - migrations_directories
diff --git a/spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb b/spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb
new file mode 100644
index 00000000000..057e95eb158
--- /dev/null
+++ b/spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddDefaultProjectApprovalRulesVulnAllowed do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
+ let(:project) { projects.create!(name: 'project', path: 'project', namespace_id: namespace.id) }
+ let(:approval_project_rules) { table(:approval_project_rules) }
+
+ it 'updates records when vulnerabilities_allowed is nil' do
+ records_to_migrate = 10
+
+ records_to_migrate.times do |i|
+ approval_project_rules.create!(name: "rule #{i}", project_id: project.id)
+ end
+
+ expect { migrate! }
+ .to change { approval_project_rules.where(vulnerabilities_allowed: nil).count }
+ .from(records_to_migrate)
+ .to(0)
+ end
+
+ it 'defaults vulnerabilities_allowed to 0' do
+ approval_project_rule = approval_project_rules.create!(name: "new rule", project_id: project.id)
+
+ expect(approval_project_rule.vulnerabilities_allowed).to be_nil
+
+ migrate!
+
+ expect(approval_project_rule.reload.vulnerabilities_allowed).to eq(0)
+ end
+end
diff --git a/spec/migrations/add_triggers_to_integrations_type_new_spec.rb b/spec/migrations/add_triggers_to_integrations_type_new_spec.rb
index 07845715a52..01af5884170 100644
--- a/spec/migrations/add_triggers_to_integrations_type_new_spec.rb
+++ b/spec/migrations/add_triggers_to_integrations_type_new_spec.rb
@@ -8,6 +8,18 @@ RSpec.describe AddTriggersToIntegrationsTypeNew do
let(:migration) { described_class.new }
let(:integrations) { table(:integrations) }
+ # This matches Gitlab::Integrations::StiType at the time the trigger was added
+ let(:namespaced_integrations) do
+ %w[
+ Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
+ Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Irker Jenkins Jira Mattermost
+ MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
+ Prometheus Pushover Redmine Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack
+
+ Github GitlabSlackApplication
+ ]
+ end
+
describe '#up' do
before do
migrate!
@@ -15,7 +27,7 @@ RSpec.describe AddTriggersToIntegrationsTypeNew do
describe 'INSERT trigger' do
it 'sets `type_new` to the transformed `type` class name' do
- Gitlab::Integrations::StiType.namespaced_integrations.each do |type|
+ namespaced_integrations.each do |type|
integration = integrations.create!(type: "#{type}Service")
expect(integration.reload).to have_attributes(
diff --git a/spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb b/spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb
new file mode 100644
index 00000000000..1a64de8d0db
--- /dev/null
+++ b/spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+# require Rails.root.join('db', 'post_migrate', '20210825193652_backfill_candence_id_for_boards_scoped_to_iteration.rb')
+
+RSpec.describe BackfillCadenceIdForBoardsScopedToIteration, :migration do
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:iterations_cadences) { table(:iterations_cadences) }
+ let(:boards) { table(:boards) }
+
+ let!(:group) { namespaces.create!(name: 'group1', path: 'group1', type: 'Group') }
+ let!(:cadence) { iterations_cadences.create!(title: 'group cadence', group_id: group.id, start_date: Time.current) }
+ let!(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
+ let!(:project_board1) { boards.create!(name: 'Project Dev1', project_id: project.id) }
+ let!(:project_board2) { boards.create!(name: 'Project Dev2', project_id: project.id, iteration_id: -4) }
+ let!(:project_board3) { boards.create!(name: 'Project Dev3', project_id: project.id, iteration_id: -4) }
+ let!(:project_board4) { boards.create!(name: 'Project Dev4', project_id: project.id, iteration_id: -4) }
+
+ let!(:group_board1) { boards.create!(name: 'Group Dev1', group_id: group.id) }
+ let!(:group_board2) { boards.create!(name: 'Group Dev2', group_id: group.id, iteration_id: -4) }
+ let!(:group_board3) { boards.create!(name: 'Group Dev3', group_id: group.id, iteration_id: -4) }
+ let!(:group_board4) { boards.create!(name: 'Group Dev4', group_id: group.id, iteration_id: -4) }
+
+ describe '#up' do
+ it 'schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ described_class.new.up
+
+ migration = described_class::MIGRATION
+
+ expect(migration).to be_scheduled_delayed_migration(2.minutes, 'group', 'up', group_board2.id, group_board4.id)
+ expect(migration).to be_scheduled_delayed_migration(2.minutes, 'project', 'up', project_board2.id, project_board4.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 2
+ end
+ end
+ end
+
+ context 'in batches' do
+ before do
+ stub_const('BackfillCadenceIdForBoardsScopedToIteration::BATCH_SIZE', 2)
+ end
+
+ it 'schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ described_class.new.up
+
+ migration = described_class::MIGRATION
+
+ expect(migration).to be_scheduled_delayed_migration(2.minutes, 'group', 'up', group_board2.id, group_board3.id)
+ expect(migration).to be_scheduled_delayed_migration(4.minutes, 'group', 'up', group_board4.id, group_board4.id)
+ expect(migration).to be_scheduled_delayed_migration(2.minutes, 'project', 'up', project_board2.id, project_board3.id)
+ expect(migration).to be_scheduled_delayed_migration(4.minutes, 'project', 'up', project_board4.id, project_board4.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 4
+ end
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ let!(:project_board1) { boards.create!(name: 'Project Dev1', project_id: project.id) }
+ let!(:project_board2) { boards.create!(name: 'Project Dev2', project_id: project.id, iteration_cadence_id: cadence.id) }
+ let!(:project_board3) { boards.create!(name: 'Project Dev3', project_id: project.id, iteration_id: -4, iteration_cadence_id: cadence.id) }
+ let!(:project_board4) { boards.create!(name: 'Project Dev4', project_id: project.id, iteration_id: -4, iteration_cadence_id: cadence.id) }
+
+ let!(:group_board1) { boards.create!(name: 'Group Dev1', group_id: group.id) }
+ let!(:group_board2) { boards.create!(name: 'Group Dev2', group_id: group.id, iteration_cadence_id: cadence.id) }
+ let!(:group_board3) { boards.create!(name: 'Group Dev3', group_id: group.id, iteration_id: -4, iteration_cadence_id: cadence.id) }
+ let!(:group_board4) { boards.create!(name: 'Group Dev4', group_id: group.id, iteration_id: -4, iteration_cadence_id: cadence.id) }
+
+ it 'schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ described_class.new.down
+
+ migration = described_class::MIGRATION
+
+ expect(migration).to be_scheduled_delayed_migration(2.minutes, 'none', 'down', project_board2.id, group_board4.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 1
+ end
+ end
+ end
+
+ context 'in batches' do
+ before do
+ stub_const('BackfillCadenceIdForBoardsScopedToIteration::BATCH_SIZE', 2)
+ end
+
+ it 'schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ described_class.new.down
+
+ migration = described_class::MIGRATION
+
+ expect(migration).to be_scheduled_delayed_migration(2.minutes, 'none', 'down', project_board2.id, project_board3.id)
+ expect(migration).to be_scheduled_delayed_migration(4.minutes, 'none', 'down', project_board4.id, group_board2.id)
+ expect(migration).to be_scheduled_delayed_migration(6.minutes, 'none', 'down', group_board3.id, group_board4.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 3
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/backfill_stage_event_hash_spec.rb b/spec/migrations/backfill_stage_event_hash_spec.rb
new file mode 100644
index 00000000000..cecaddcd3d4
--- /dev/null
+++ b/spec/migrations/backfill_stage_event_hash_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe BackfillStageEventHash, schema: 20210730103808 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:labels) { table(:labels) }
+ let(:group_stages) { table(:analytics_cycle_analytics_group_stages) }
+ let(:project_stages) { table(:analytics_cycle_analytics_project_stages) }
+ let(:group_value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
+ let(:project_value_streams) { table(:analytics_cycle_analytics_project_value_streams) }
+ let(:stage_event_hashes) { table(:analytics_cycle_analytics_stage_event_hashes) }
+
+ let(:issue_created) { 1 }
+ let(:issue_closed) { 3 }
+ let(:issue_label_removed) { 9 }
+ let(:unknown_stage_event) { -1 }
+
+ let(:namespace) { namespaces.create!(name: 'ns', path: 'ns', type: 'Group') }
+ let(:project) { projects.create!(name: 'project', path: 'project', namespace_id: namespace.id) }
+ let(:group_label) { labels.create!(title: 'label', type: 'GroupLabel', group_id: namespace.id) }
+ let(:group_value_stream) { group_value_streams.create!(name: 'group vs', group_id: namespace.id) }
+ let(:project_value_stream) { project_value_streams.create!(name: 'project vs', project_id: project.id) }
+
+ let(:group_stage_1) do
+ group_stages.create!(
+ name: 'stage 1',
+ group_id: namespace.id,
+ start_event_identifier: issue_created,
+ end_event_identifier: issue_closed,
+ group_value_stream_id: group_value_stream.id
+ )
+ end
+
+ let(:group_stage_2) do
+ group_stages.create!(
+ name: 'stage 2',
+ group_id: namespace.id,
+ start_event_identifier: issue_created,
+ end_event_identifier: issue_label_removed,
+ end_event_label_id: group_label.id,
+ group_value_stream_id: group_value_stream.id
+ )
+ end
+
+ let(:project_stage_1) do
+ project_stages.create!(
+ name: 'stage 1',
+ project_id: project.id,
+ start_event_identifier: issue_created,
+ end_event_identifier: issue_closed,
+ project_value_stream_id: project_value_stream.id
+ )
+ end
+
+ let(:invalid_group_stage) do
+ group_stages.create!(
+ name: 'stage 3',
+ group_id: namespace.id,
+ start_event_identifier: issue_created,
+ end_event_identifier: unknown_stage_event,
+ group_value_stream_id: group_value_stream.id
+ )
+ end
+
+ describe '#up' do
+ it 'populates stage_event_hash_id column' do
+ group_stage_1
+ group_stage_2
+ project_stage_1
+
+ migrate!
+
+ group_stage_1.reload
+ group_stage_2.reload
+ project_stage_1.reload
+
+ expect(group_stage_1.stage_event_hash_id).not_to be_nil
+ expect(group_stage_2.stage_event_hash_id).not_to be_nil
+ expect(project_stage_1.stage_event_hash_id).not_to be_nil
+
+ expect(stage_event_hashes.count).to eq(2) # group_stage_1 and project_stage_1 has the same hash
+ end
+
+ it 'runs without problem without stages' do
+ expect { migrate! }.not_to raise_error
+ end
+
+ context 'when invalid event identifier is discovered' do
+ it 'removes the stage' do
+ group_stage_1
+ invalid_group_stage
+
+ expect { migrate! }.not_to change { group_stage_1 }
+
+ expect(group_stages.find_by_id(invalid_group_stage.id)).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_remaining_orphan_invites_spec.rb b/spec/migrations/cleanup_remaining_orphan_invites_spec.rb
new file mode 100644
index 00000000000..0eb1f5a578a
--- /dev/null
+++ b/spec/migrations/cleanup_remaining_orphan_invites_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration! 'cleanup_remaining_orphan_invites'
+
+RSpec.describe CleanupRemainingOrphanInvites, :migration do
+ def create_member(**extra_attributes)
+ defaults = {
+ access_level: 10,
+ source_id: 1,
+ source_type: "Project",
+ notification_level: 0,
+ type: 'ProjectMember'
+ }
+
+ table(:members).create!(defaults.merge(extra_attributes))
+ end
+
+ def create_user(**extra_attributes)
+ defaults = { projects_limit: 0 }
+ table(:users).create!(defaults.merge(extra_attributes))
+ end
+
+ describe '#up', :aggregate_failures do
+ it 'removes invite tokens for accepted records' do
+ record1 = create_member(invite_token: 'foo', user_id: nil)
+ record2 = create_member(invite_token: 'foo2', user_id: create_user(username: 'foo', email: 'foo@example.com').id)
+ record3 = create_member(invite_token: nil, user_id: create_user(username: 'bar', email: 'bar@example.com').id)
+
+ migrate!
+
+ expect(table(:members).find(record1.id).invite_token).to eq 'foo'
+ expect(table(:members).find(record2.id).invite_token).to eq nil
+ expect(table(:members).find(record3.id).invite_token).to eq nil
+ end
+ end
+end
diff --git a/spec/migrations/disable_job_token_scope_when_unused_spec.rb b/spec/migrations/disable_job_token_scope_when_unused_spec.rb
new file mode 100644
index 00000000000..d969c98aa0f
--- /dev/null
+++ b/spec/migrations/disable_job_token_scope_when_unused_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DisableJobTokenScopeWhenUnused do
+ let(:ci_cd_settings) { table(:project_ci_cd_settings) }
+ let(:links) { table(:ci_job_token_project_scope_links) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ let(:namespace) { namespaces.create!(name: 'test', path: 'path', type: 'Group') }
+
+ let(:project_with_used_scope) { projects.create!(namespace_id: namespace.id) }
+ let!(:used_scope_settings) { ci_cd_settings.create!(project_id: project_with_used_scope.id, job_token_scope_enabled: true) }
+ let(:target_project) { projects.create!(namespace_id: namespace.id) }
+ let!(:link) { links.create!(source_project_id: project_with_used_scope.id, target_project_id: target_project.id) }
+
+ let(:project_with_unused_scope) { projects.create!(namespace_id: namespace.id) }
+ let!(:unused_scope_settings) { ci_cd_settings.create!(project_id: project_with_unused_scope.id, job_token_scope_enabled: true) }
+
+ let(:project_with_disabled_scope) { projects.create!(namespace_id: namespace.id) }
+ let!(:disabled_scope_settings) { ci_cd_settings.create!(project_id: project_with_disabled_scope.id, job_token_scope_enabled: false) }
+
+ describe '#up' do
+ it 'sets job_token_scope_enabled to false for projects not having job token scope configured' do
+ migrate!
+
+ expect(unused_scope_settings.reload.job_token_scope_enabled).to be_falsey
+ end
+
+ it 'keeps the scope enabled for projects that are using it' do
+ migrate!
+
+ expect(used_scope_settings.reload.job_token_scope_enabled).to be_truthy
+ end
+
+ it 'keeps the scope disabled for projects having it disabled' do
+ migrate!
+
+ expect(disabled_scope_settings.reload.job_token_scope_enabled).to be_falsey
+ end
+ end
+end
diff --git a/spec/migrations/remove_duplicate_dast_site_tokens_spec.rb b/spec/migrations/remove_duplicate_dast_site_tokens_spec.rb
new file mode 100644
index 00000000000..fed9941b2a4
--- /dev/null
+++ b/spec/migrations/remove_duplicate_dast_site_tokens_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RemoveDuplicateDastSiteTokens do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:dast_site_tokens) { table(:dast_site_tokens) }
+ let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
+ let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id, path: 'project1') }
+ # create non duplicate dast site token
+ let!(:dast_site_token1) { dast_site_tokens.create!(project_id: project1.id, url: 'https://gitlab.com', token: SecureRandom.uuid) }
+
+ context 'when duplicate dast site tokens exists' do
+ # create duplicate dast site token
+ let_it_be(:duplicate_url) { 'https://about.gitlab.com' }
+
+ let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id, path: 'project2') }
+ let!(:dast_site_token2) { dast_site_tokens.create!(project_id: project2.id, url: duplicate_url, token: SecureRandom.uuid) }
+ let!(:dast_site_token3) { dast_site_tokens.create!(project_id: project2.id, url: 'https://temp_url.com', token: SecureRandom.uuid) }
+ let!(:dast_site_token4) { dast_site_tokens.create!(project_id: project2.id, url: 'https://other_temp_url.com', token: SecureRandom.uuid) }
+
+ before 'update URL to bypass uniqueness validation' do
+ dast_site_tokens.where(project_id: 2).update_all(url: duplicate_url)
+ end
+
+ describe 'migration up' do
+ it 'does remove duplicated dast site tokens' do
+ expect(dast_site_tokens.count).to eq(4)
+ expect(dast_site_tokens.where(project_id: 2, url: duplicate_url).size).to eq(3)
+
+ migrate!
+
+ expect(dast_site_tokens.count).to eq(2)
+ expect(dast_site_tokens.where(project_id: 2, url: duplicate_url).size).to eq(1)
+ end
+ end
+ end
+
+ context 'when duplicate dast site tokens does not exists' do
+ before do
+ dast_site_tokens.create!(project_id: 1, url: 'https://about.gitlab.com/handbook', token: SecureRandom.uuid)
+ end
+
+ describe 'migration up' do
+ it 'does remove duplicated dast site tokens' do
+ expect { migrate! }.not_to change(dast_site_tokens, :count)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb b/spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb
new file mode 100644
index 00000000000..57d677af5cf
--- /dev/null
+++ b/spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RemoveDuplicateDastSiteTokensWithSameToken do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:dast_site_tokens) { table(:dast_site_tokens) }
+ let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
+ let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id, path: 'project1') }
+ # create non duplicate dast site token
+ let!(:dast_site_token1) { dast_site_tokens.create!(project_id: project1.id, url: 'https://gitlab.com', token: SecureRandom.uuid) }
+
+ context 'when duplicate dast site tokens exists' do
+ # create duplicate dast site token
+ let_it_be(:duplicate_token) { 'duplicate_token' }
+ let_it_be(:other_duplicate_token) { 'other_duplicate_token' }
+
+ let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id, path: 'project2') }
+ let!(:dast_site_token2) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab2.com', token: duplicate_token) }
+ let!(:dast_site_token3) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab3.com', token: duplicate_token) }
+ let!(:dast_site_token4) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab4.com', token: duplicate_token) }
+
+ let!(:project3) { projects.create!(id: 3, namespace_id: namespace.id, path: 'project3') }
+ let!(:dast_site_token5) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab2.com', token: other_duplicate_token) }
+ let!(:dast_site_token6) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab3.com', token: other_duplicate_token) }
+ let!(:dast_site_token7) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab4.com', token: other_duplicate_token) }
+
+ describe 'migration up' do
+ it 'does remove duplicated dast site tokens with the same token' do
+ expect(dast_site_tokens.count).to eq(7)
+ expect(dast_site_tokens.where(token: duplicate_token).size).to eq(3)
+
+ migrate!
+
+ expect(dast_site_tokens.count).to eq(3)
+ expect(dast_site_tokens.where(token: duplicate_token).size).to eq(1)
+ end
+ end
+ end
+
+ context 'when duplicate dast site tokens do not exist' do
+ let!(:dast_site_token5) { dast_site_tokens.create!(project_id: 1, url: 'https://gitlab5.com', token: SecureRandom.uuid) }
+
+ describe 'migration up' do
+ it 'does not remove any dast site tokens' do
+ expect { migrate! }.not_to change(dast_site_tokens, :count)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/replace_external_wiki_triggers_spec.rb b/spec/migrations/replace_external_wiki_triggers_spec.rb
new file mode 100644
index 00000000000..392ef76c5ba
--- /dev/null
+++ b/spec/migrations/replace_external_wiki_triggers_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+# Specs for the ReplaceExternalWikiTriggers migration, which replaces the DB
+# triggers keeping `projects.has_external_wiki` in sync with rows in the
+# `integrations` table. The INSERT/UPDATE/DELETE trigger behavior is exercised
+# both after `#up` (new `type_new` STI column) and after `#down` (legacy
+# `type` column) via the shared examples below.
+RSpec.describe ReplaceExternalWikiTriggers do
+  let(:migration) { described_class.new }
+  let(:namespaces) { table(:namespaces) }
+  let(:projects) { table(:projects) }
+  let(:integrations) { table(:integrations) }
+
+  before do
+    @namespace = namespaces.create!(name: 'foo', path: 'foo')
+    @project = projects.create!(namespace_id: @namespace.id)
+  end
+
+  # Creates an external wiki integration row. `type_info` is supplied by the
+  # including context and selects whether the row is written with the old
+  # (`type`) or new (`type_new`) STI column.
+  def create_external_wiki_integration(**attrs)
+    attrs.merge!(type_info)
+
+    integrations.create!(**attrs)
+  end
+
+  # Current value of the trigger-maintained flag, coerced to a strict boolean.
+  def has_external_wiki
+    !!@project.reload.has_external_wiki
+  end
+
+  shared_examples 'external wiki triggers' do
+    describe 'INSERT trigger' do
+      it 'sets `has_external_wiki` to true when active external wiki integration is inserted' do
+        expect do
+          create_external_wiki_integration(active: true, project_id: @project.id)
+        end.to change { has_external_wiki }.to(true)
+      end
+
+      it 'does not set `has_external_wiki` to true when integration is for a different project' do
+        different_project = projects.create!(namespace_id: @namespace.id)
+
+        expect do
+          create_external_wiki_integration(active: true, project_id: different_project.id)
+        end.not_to change { has_external_wiki }
+      end
+
+      it 'does not set `has_external_wiki` to true when inactive external wiki integration is inserted' do
+        expect do
+          create_external_wiki_integration(active: false, project_id: @project.id)
+        end.not_to change { has_external_wiki }
+      end
+
+      it 'does not set `has_external_wiki` to true when active other service is inserted' do
+        expect do
+          integrations.create!(type_new: 'Integrations::MyService', type: 'MyService', active: true, project_id: @project.id)
+        end.not_to change { has_external_wiki }
+      end
+    end
+
+    describe 'UPDATE trigger' do
+      it 'sets `has_external_wiki` to true when `ExternalWikiService` is made active' do
+        service = create_external_wiki_integration(active: false, project_id: @project.id)
+
+        expect do
+          service.update!(active: true)
+        end.to change { has_external_wiki }.to(true)
+      end
+
+      it 'sets `has_external_wiki` to false when integration is made inactive' do
+        service = create_external_wiki_integration(active: true, project_id: @project.id)
+
+        expect do
+          service.update!(active: false)
+        end.to change { has_external_wiki }.to(false)
+      end
+
+      it 'does not change `has_external_wiki` when integration is for a different project' do
+        different_project = projects.create!(namespace_id: @namespace.id)
+        service = create_external_wiki_integration(active: false, project_id: different_project.id)
+
+        expect do
+          service.update!(active: true)
+        end.not_to change { has_external_wiki }
+      end
+    end
+
+    describe 'DELETE trigger' do
+      it 'sets `has_external_wiki` to false when integration is deleted' do
+        service = create_external_wiki_integration(active: true, project_id: @project.id)
+
+        expect do
+          service.delete
+        end.to change { has_external_wiki }.to(false)
+      end
+
+      it 'does not change `has_external_wiki` when integration is for a different project' do
+        different_project = projects.create!(namespace_id: @namespace.id)
+        service = create_external_wiki_integration(active: true, project_id: different_project.id)
+
+        expect do
+          service.delete
+        end.not_to change { has_external_wiki }
+      end
+    end
+  end
+
+  # The replacement triggers must react to integrations written with either
+  # STI column, so both representations are exercised after migrating up.
+  describe '#up' do
+    before do
+      migrate!
+    end
+
+    context 'when integrations are created with the new STI value' do
+      let(:type_info) { { type_new: 'Integrations::ExternalWiki' } }
+
+      it_behaves_like 'external wiki triggers'
+    end
+
+    context 'when integrations are created with the old STI value' do
+      let(:type_info) { { type: 'ExternalWikiService' } }
+
+      it_behaves_like 'external wiki triggers'
+    end
+  end
+
+  # Rolling back must restore the original triggers, which match on the
+  # legacy `type` column only.
+  describe '#down' do
+    before do
+      migration.up
+      migration.down
+    end
+
+    let(:type_info) { { type: 'ExternalWikiService' } }
+
+    it_behaves_like 'external wiki triggers'
+  end
+end
diff --git a/spec/migrations/set_default_job_token_scope_true_spec.rb b/spec/migrations/set_default_job_token_scope_true_spec.rb
new file mode 100644
index 00000000000..e7c77357318
--- /dev/null
+++ b/spec/migrations/set_default_job_token_scope_true_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+# Verifies that the migration flips the column default of
+# `project_ci_cd_settings.job_token_scope_enabled`: true after #up and back to
+# false after #down. Rows are created without an explicit value so the
+# observed value is the column default itself.
+RSpec.describe SetDefaultJobTokenScopeTrue, schema: 20210819153805 do
+  let(:ci_cd_settings) { table(:project_ci_cd_settings) }
+  let(:namespaces) { table(:namespaces) }
+  let(:projects) { table(:projects) }
+
+  let(:namespace) { namespaces.create!(name: 'test', path: 'path', type: 'Group') }
+  let(:project) { projects.create!(namespace_id: namespace.id) }
+
+  describe '#up' do
+    it 'sets the job_token_scope_enabled default to true' do
+      described_class.new.up
+
+      settings = ci_cd_settings.create!(project_id: project.id)
+
+      expect(settings.job_token_scope_enabled).to be_truthy
+    end
+  end
+
+  describe '#down' do
+    it 'sets the job_token_scope_enabled default to false' do
+      described_class.new.down
+
+      settings = ci_cd_settings.create!(project_id: project.id)
+
+      expect(settings.job_token_scope_enabled).to be_falsey
+    end
+  end
+end
diff --git a/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb b/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb
new file mode 100644
index 00000000000..1fd19ee42b4
--- /dev/null
+++ b/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration! 'slice_merge_request_diff_commit_migrations'
+
+RSpec.describe SliceMergeRequestDiffCommitMigrations, :migration do
+ let(:migration) { described_class.new }
+
+ describe '#up' do
+ context 'when there are no jobs to process' do
+ it 'does nothing' do
+ expect(migration).not_to receive(:migrate_in)
+ expect(Gitlab::Database::BackgroundMigrationJob).not_to receive(:create!)
+
+ migration.up
+ end
+ end
+
+ context 'when there are pending jobs' do
+ let!(:job1) do
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: described_class::MIGRATION_CLASS,
+ arguments: [1, 10_001]
+ )
+ end
+
+ let!(:job2) do
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: described_class::MIGRATION_CLASS,
+ arguments: [10_001, 20_001]
+ )
+ end
+
+ it 'marks the old jobs as finished' do
+ migration.up
+
+ job1.reload
+ job2.reload
+
+ expect(job1).to be_succeeded
+ expect(job2).to be_succeeded
+ end
+
+ it 'the jobs are slices into smaller ranges' do
+ migration.up
+
+ new_jobs = Gitlab::Database::BackgroundMigrationJob
+ .for_migration_class(described_class::MIGRATION_CLASS)
+ .pending
+ .to_a
+
+ expect(new_jobs.map(&:arguments)).to eq([
+ [1, 5_001],
+ [5_001, 10_001],
+ [10_001, 15_001],
+ [15_001, 20_001]
+ ])
+ end
+
+ it 'schedules a background migration for the first job' do
+ expect(migration)
+ .to receive(:migrate_in)
+ .with(1.hour, described_class::STEAL_MIGRATION_CLASS, [1, 5_001])
+
+ migration.up
+ end
+ end
+ end
+end
diff --git a/spec/migrations/steal_merge_request_diff_commit_users_migration_spec.rb b/spec/migrations/steal_merge_request_diff_commit_users_migration_spec.rb
new file mode 100644
index 00000000000..3ad0b5a93c2
--- /dev/null
+++ b/spec/migrations/steal_merge_request_diff_commit_users_migration_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration! 'steal_merge_request_diff_commit_users_migration'
+
+# Verifies that the migration schedules the steal job only when unfinished
+# MigrateMergeRequestDiffCommitUsers background-migration jobs remain.
+RSpec.describe StealMergeRequestDiffCommitUsersMigration, :migration do
+  let(:migration) { described_class.new }
+
+  describe '#up' do
+    it 'schedules a job if there are pending jobs' do
+      Gitlab::Database::BackgroundMigrationJob.create!(
+        class_name: 'MigrateMergeRequestDiffCommitUsers',
+        arguments: [10, 20]
+      )
+
+      # The steal job is scheduled with the same arguments as the pending job.
+      expect(migration)
+        .to receive(:migrate_in)
+        .with(1.hour, 'StealMigrateMergeRequestDiffCommitUsers', [10, 20])
+
+      migration.up
+    end
+
+    it 'does not schedule any jobs when all jobs have been completed' do
+      expect(migration).not_to receive(:migrate_in)
+
+      migration.up
+    end
+  end
+end
diff --git a/spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb b/spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb
new file mode 100644
index 00000000000..41cf35b40f4
--- /dev/null
+++ b/spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+# Exercises the INSERT trigger that populates `integrations.type_new` from
+# `integrations.type`: after #up the trigger uses a dynamic suffix-based
+# mapping ("FooService" -> "Integrations::Foo"), while after #down the
+# original static per-class mapping is restored.
+RSpec.describe UpdateIntegrationsTriggerTypeNewOnInsert do
+  let(:migration) { described_class.new }
+  let(:integrations) { table(:integrations) }
+
+  shared_examples 'transforms known types' do
+    # This matches Gitlab::Integrations::StiType at the time the original trigger
+    # was added in db/migrate/20210721135638_add_triggers_to_integrations_type_new.rb
+    let(:namespaced_integrations) do
+      %w[
+        Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
+        Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Irker Jenkins Jira Mattermost
+        MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
+        Prometheus Pushover Redmine Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack
+
+        Github GitlabSlackApplication
+      ]
+    end
+
+    it 'sets `type_new` to the transformed `type` class name' do
+      namespaced_integrations.each do |type|
+        integration = integrations.create!(type: "#{type}Service")
+
+        expect(integration.reload).to have_attributes(
+          type: "#{type}Service",
+          type_new: "Integrations::#{type}"
+        )
+      end
+    end
+  end
+
+  describe '#up' do
+    before do
+      migrate!
+    end
+
+    describe 'INSERT trigger with dynamic mapping' do
+      it_behaves_like 'transforms known types'
+
+      # The dynamic trigger strips only a trailing "Service" suffix, so it
+      # also handles classes unknown at the time the trigger was written.
+      it 'transforms unknown types if it ends in "Service"' do
+        integration = integrations.create!(type: 'AcmeService')
+
+        expect(integration.reload).to have_attributes(
+          type: 'AcmeService',
+          type_new: 'Integrations::Acme'
+        )
+      end
+
+      it 'ignores "Service" occurring elsewhere in the type' do
+        integration = integrations.create!(type: 'ServiceAcmeService')
+
+        expect(integration.reload).to have_attributes(
+          type: 'ServiceAcmeService',
+          type_new: 'Integrations::ServiceAcme'
+        )
+      end
+
+      it 'copies unknown types if it does not end with "Service"' do
+        integration = integrations.create!(type: 'Integrations::Acme')
+
+        expect(integration.reload).to have_attributes(
+          type: 'Integrations::Acme',
+          type_new: 'Integrations::Acme'
+        )
+      end
+    end
+  end
+
+  describe '#down' do
+    before do
+      migration.up
+      migration.down
+    end
+
+    # The restored static trigger only maps the explicit class list above;
+    # anything else leaves `type_new` as NULL.
+    describe 'INSERT trigger with static mapping' do
+      it_behaves_like 'transforms known types'
+
+      it 'ignores types that are already namespaced' do
+        integration = integrations.create!(type: 'Integrations::Asana')
+
+        expect(integration.reload).to have_attributes(
+          type: 'Integrations::Asana',
+          type_new: nil
+        )
+      end
+
+      it 'ignores types that are unknown' do
+        integration = integrations.create!(type: 'FooBar')
+
+        expect(integration.reload).to have_attributes(
+          type: 'FooBar',
+          type_new: nil
+        )
+      end
+    end
+  end
+end
diff --git a/spec/migrations/update_minimum_password_length_spec.rb b/spec/migrations/update_minimum_password_length_spec.rb
index 02254ba1343..e40d090fd77 100644
--- a/spec/migrations/update_minimum_password_length_spec.rb
+++ b/spec/migrations/update_minimum_password_length_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe UpdateMinimumPasswordLength do
before do
stub_const('ApplicationSetting::DEFAULT_MINIMUM_PASSWORD_LENGTH', 10)
- allow(Devise.password_length).to receive(:min).and_return(12)
+ allow(Devise).to receive(:password_length).and_return(12..20)
end
it 'correctly migrates minimum_password_length' do