Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-12-20 16:37:47 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-12-20 16:37:47 +0300
commitaee0a117a889461ce8ced6fcf73207fe017f1d99 (patch)
tree891d9ef189227a8445d83f35c1b0fc99573f4380 /spec/migrations
parent8d46af3258650d305f53b819eabf7ab18d22f59e (diff)
Add latest changes from gitlab-org/gitlab@14-6-stable-eev14.6.0-rc42
Diffstat (limited to 'spec/migrations')
-rw-r--r--spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb67
-rw-r--r--spec/migrations/20210804150320_create_base_work_item_types_spec.rb22
-rw-r--r--spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb31
-rw-r--r--spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb21
-rw-r--r--spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb23
-rw-r--r--spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb71
-rw-r--r--spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb5
-rw-r--r--spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb191
-rw-r--r--spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb43
-rw-r--r--spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb56
-rw-r--r--spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb54
-rw-r--r--spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb42
-rw-r--r--spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb18
-rw-r--r--spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb88
-rw-r--r--spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb44
15 files changed, 762 insertions, 14 deletions
diff --git a/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb b/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb
new file mode 100644
index 00000000000..adec1e05533
--- /dev/null
+++ b/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe FixCiSourcesPipelinesIndexNames, :migration do
+ def validate_foreign_keys_and_index!
+ aggregate_failures do
+ expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_be5624bf37')).to be_truthy
+ expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_pipelines, column: :pipeline_id, name: 'fk_e1bad85861')).to be_truthy
+ expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_pipelines, column: :source_pipeline_id, name: 'fk_d4e29af7d7')).to be_truthy
+ expect(subject.foreign_key_exists?(:ci_sources_pipelines, :projects, column: :source_project_id, name: 'fk_acd9737679')).to be_truthy
+ expect(subject.foreign_key_exists?(:ci_sources_pipelines, :projects, name: 'fk_1e53c97c0a')).to be_truthy
+ expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id_convert_to_bigint, name: 'fk_be5624bf37_tmp')).to be_falsey
+
+ expect(subject.index_exists_by_name?(:ci_sources_pipelines, described_class::NEW_INDEX_NAME)).to be_truthy
+ expect(subject.index_exists_by_name?(:ci_sources_pipelines, described_class::OLD_INDEX_NAME)).to be_falsey
+ end
+ end
+
+ it 'existing foreign keys and indexes are untouched' do
+ validate_foreign_keys_and_index!
+
+ migrate!
+
+ validate_foreign_keys_and_index!
+ end
+
+ context 'with a legacy (pre-GitLab 10.0) foreign key' do
+ let(:old_foreign_keys) { described_class::OLD_TO_NEW_FOREIGN_KEY_DEFS.keys }
+ let(:new_foreign_keys) { described_class::OLD_TO_NEW_FOREIGN_KEY_DEFS.values.map { |entry| entry[:name] } }
+
+ before do
+ new_foreign_keys.each { |name| subject.remove_foreign_key_if_exists(:ci_sources_pipelines, name: name) }
+
+ # GitLab 9.5.4: https://gitlab.com/gitlab-org/gitlab/-/blob/v9.5.4-ee/db/schema.rb#L2026-2030
+ subject.add_foreign_key(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_3f0c88d7dc', on_delete: :cascade)
+ subject.add_foreign_key(:ci_sources_pipelines, :ci_pipelines, column: :pipeline_id, name: "fk_b8c0fac459", on_delete: :cascade)
+ subject.add_foreign_key(:ci_sources_pipelines, :ci_pipelines, column: :source_pipeline_id, name: "fk_3a3e3cb83a", on_delete: :cascade)
+ subject.add_foreign_key(:ci_sources_pipelines, :projects, column: :source_project_id, name: "fk_8868d0f3e4", on_delete: :cascade)
+ subject.add_foreign_key(:ci_sources_pipelines, :projects, name: "fk_83b4346e48", on_delete: :cascade)
+
+ # https://gitlab.com/gitlab-org/gitlab/-/blob/v9.5.4-ee/db/schema.rb#L443
+ subject.add_index "ci_sources_pipelines", ["source_job_id"], name: described_class::OLD_INDEX_NAME, using: :btree
+ end
+
+ context 'when new index already exists' do
+ it 'corrects foreign key constraints and drops old index' do
+ expect { migrate! }.to change { subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_3f0c88d7dc') }.from(true).to(false)
+
+ validate_foreign_keys_and_index!
+ end
+ end
+
+ context 'when new index does not exist' do
+ before do
+ subject.remove_index("ci_sources_pipelines", name: described_class::NEW_INDEX_NAME)
+ end
+
+ it 'drops the old index' do
+ expect { migrate! }.to change { subject.index_exists_by_name?(:ci_sources_pipelines, described_class::OLD_INDEX_NAME) }.from(true).to(false)
+
+ validate_foreign_keys_and_index!
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
index 34ea7f53f51..6df8e1b2ebf 100644
--- a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
+++ b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
@@ -4,18 +4,28 @@ require 'spec_helper'
require_migration!
RSpec.describe CreateBaseWorkItemTypes, :migration do
- let!(:work_item_types) { table(:work_item_types) }
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let_it_be(:work_item_types) { table(:work_item_types) }
+
+ let(:base_types) do
+ {
+ issue: 0,
+ incident: 1,
+ test_case: 2,
+ requirement: 3
+ }
+ end
after(:all) do
# Make sure base types are recreated after running the migration
# because migration specs are not run in a transaction
- WorkItem::Type.delete_all
- Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+ reset_work_item_types
end
it 'creates default data' do
# Need to delete all as base types are seeded before entire test suite
- WorkItem::Type.delete_all
+ work_item_types.delete_all
reversible_migration do |migration|
migration.before -> {
@@ -24,8 +34,8 @@ RSpec.describe CreateBaseWorkItemTypes, :migration do
}
migration.after -> {
- expect(work_item_types.count).to eq 4
- expect(work_item_types.all.pluck(:base_type)).to match_array WorkItem::Type.base_types.values
+ expect(work_item_types.count).to eq(4)
+ expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
}
end
end
diff --git a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb
index 3c8c55ccb80..1957a973ee1 100644
--- a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb
+++ b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb
@@ -4,19 +4,29 @@ require 'spec_helper'
require_migration!
RSpec.describe UpsertBaseWorkItemTypes, :migration do
- let!(:work_item_types) { table(:work_item_types) }
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let_it_be(:work_item_types) { table(:work_item_types) }
+
+ let(:base_types) do
+ {
+ issue: 0,
+ incident: 1,
+ test_case: 2,
+ requirement: 3
+ }
+ end
after(:all) do
# Make sure base types are recreated after running the migration
# because migration specs are not run in a transaction
- WorkItem::Type.delete_all
- Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+ reset_work_item_types
end
context 'when no default types exist' do
it 'creates default data' do
# Need to delete all as base types are seeded before entire test suite
- WorkItem::Type.delete_all
+ work_item_types.delete_all
expect(work_item_types.count).to eq(0)
@@ -29,7 +39,7 @@ RSpec.describe UpsertBaseWorkItemTypes, :migration do
migration.after -> {
expect(work_item_types.count).to eq(4)
- expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+ expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
}
end
end
@@ -37,16 +47,21 @@ RSpec.describe UpsertBaseWorkItemTypes, :migration do
context 'when default types already exist' do
it 'does not create default types again' do
- expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+ # Database needs to be in a similar state as when this migration was created
+ work_item_types.delete_all
+ work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue')
+ work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident')
+ work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case')
+ work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements')
reversible_migration do |migration|
migration.before -> {
- expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+ expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
}
migration.after -> {
expect(work_item_types.count).to eq(4)
- expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)
+ expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values)
}
end
end
diff --git a/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb b/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb
new file mode 100644
index 00000000000..d78ecc26ebf
--- /dev/null
+++ b/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ConsumeRemainingUserNamespaceJobs do
+ let(:namespaces) { table(:namespaces) }
+ let!(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org', type: nil) }
+
+ context 'when Namespaces with nil `type` still exist' do
+ it 'steals sidekiq jobs from BackfillUserNamespace background migration' do
+ expect(Gitlab::BackgroundMigration).to receive(:steal).with('BackfillUserNamespace')
+
+ migrate!
+ end
+
+ it 'migrates namespaces without type' do
+ expect { migrate! }.to change { namespaces.where(type: 'User').count }.from(0).to(1)
+ end
+ end
+end
diff --git a/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb b/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb
new file mode 100644
index 00000000000..946fbf7f568
--- /dev/null
+++ b/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddNotNullConstraintToSecurityFindingsUuid do
+ let_it_be(:security_findings) { table(:security_findings) }
+ let_it_be(:migration) { described_class.new }
+
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ allow(migration).to receive(:with_lock_retries).and_yield
+ end
+
+ it 'adds a check constraint' do
+ constraint = security_findings.connection.check_constraints(:security_findings).find { |constraint| constraint.expression == "uuid IS NOT NULL" }
+ expect(constraint).to be_nil
+
+ migration.up
+
+ constraint = security_findings.connection.check_constraints(:security_findings).find { |constraint| constraint.expression == "uuid IS NOT NULL" }
+ expect(constraint).to be_a(ActiveRecord::ConnectionAdapters::CheckConstraintDefinition)
+ end
+end
diff --git a/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb b/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb
new file mode 100644
index 00000000000..b35cf5cbf4c
--- /dev/null
+++ b/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleDropInvalidSecurityFindings, :migration, schema: 20211108211434 do
+ let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
+
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) }
+ let_it_be(:project) { table(:projects).create!(namespace_id: namespace.id) }
+
+ let_it_be(:pipelines) { table(:ci_pipelines) }
+ let_it_be(:pipeline) { pipelines.create!(project_id: project.id) }
+
+ let_it_be(:ci_builds) { table(:ci_builds) }
+ let_it_be(:ci_build) { ci_builds.create! }
+
+ let_it_be(:security_scans) { table(:security_scans) }
+ let_it_be(:security_scan) do
+ security_scans.create!(
+ scan_type: 1,
+ status: 1,
+ build_id: ci_build.id,
+ project_id: project.id,
+ pipeline_id: pipeline.id
+ )
+ end
+
+ let_it_be(:vulnerability_scanners) { table(:vulnerability_scanners) }
+ let_it_be(:vulnerability_scanner) { vulnerability_scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+
+ let_it_be(:security_findings) { table(:security_findings) }
+ let_it_be(:security_finding_without_uuid) do
+ security_findings.create!(
+ severity: 1,
+ confidence: 1,
+ scan_id: security_scan.id,
+ scanner_id: vulnerability_scanner.id,
+ uuid: nil
+ )
+ end
+
+ let_it_be(:security_finding_with_uuid) do
+ security_findings.create!(
+ severity: 1,
+ confidence: 1,
+ scan_id: security_scan.id,
+ scanner_id: vulnerability_scanner.id,
+ uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e'
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ stub_const("#{described_class}::SUB_BATCH_SIZE", 1)
+ end
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'schedules background migrations' do
+ migrate!
+
+ expect(background_migration_jobs.count).to eq(1)
+ expect(background_migration_jobs.first.arguments).to match_array([security_finding_without_uuid.id, security_finding_without_uuid.id, described_class::SUB_BATCH_SIZE])
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, security_finding_without_uuid.id, security_finding_without_uuid.id, described_class::SUB_BATCH_SIZE)
+ end
+end
diff --git a/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb b/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb
new file mode 100644
index 00000000000..deba6f9b87c
--- /dev/null
+++ b/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+# With https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73495, we no longer allow
+# a Namespace type to be nil. There is nothing left to test for this migration,
+# but we'll keep this file here as a tombstone.
diff --git a/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
new file mode 100644
index 00000000000..c1d96f50dc8
--- /dev/null
+++ b/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
@@ -0,0 +1,191 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe ScheduleRemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, :migration do
+ let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let_it_be(:users) { table(:users) }
+ let_it_be(:user) { create_user! }
+ let_it_be(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) }
+ let_it_be(:pipelines) { table(:ci_pipelines) }
+ let_it_be(:scanners) { table(:vulnerability_scanners) }
+ let_it_be(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let_it_be(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+ let_it_be(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') }
+ let_it_be(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') }
+ let_it_be(:vulnerabilities) { table(:vulnerabilities) }
+ let_it_be(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let_it_be(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) }
+ let_it_be(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let_it_be(:vulnerability_identifier) do
+ vulnerability_identifiers.create!(
+ id: 1244459,
+ project_id: project.id,
+ external_type: 'vulnerability-identifier',
+ external_id: 'vulnerability-identifier',
+ fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45',
+ name: 'vulnerability identifier')
+ end
+
+ let_it_be(:vulnerability_for_first_duplicate) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:first_finding_duplicate) do
+ create_finding!(
+ id: 5606961,
+ uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e",
+ vulnerability_id: vulnerability_for_first_duplicate.id,
+ report_type: 0,
+ location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner1.id,
+ project_id: project.id
+ )
+ end
+
+ let_it_be(:vulnerability_for_second_duplicate) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:second_finding_duplicate) do
+ create_finding!(
+ id: 8765432,
+ uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5",
+ vulnerability_id: vulnerability_for_second_duplicate.id,
+ report_type: 0,
+ location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner2.id,
+ project_id: project.id
+ )
+ end
+
+ let_it_be(:vulnerability_for_third_duplicate) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:third_finding_duplicate) do
+ create_finding!(
+ id: 8832995,
+ uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4",
+ vulnerability_id: vulnerability_for_third_duplicate.id,
+ report_type: 0,
+ location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner3.id,
+ project_id: project.id
+ )
+ end
+
+ let_it_be(:unrelated_finding) do
+ create_finding!(
+ id: 9999999,
+ uuid: "unreleated_finding",
+ vulnerability_id: nil,
+ report_type: 1,
+ location_fingerprint: 'random_location_fingerprint',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: unrelated_scanner.id,
+ project_id: project.id
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ 4.times do
+ create_finding_pipeline!(project_id: project.id, finding_id: first_finding_duplicate.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: second_finding_duplicate.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: third_finding_duplicate.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: unrelated_finding.id)
+ end
+ end
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'schedules background migrations' do
+ migrate!
+
+ expect(background_migration_jobs.count).to eq(4)
+ expect(background_migration_jobs.first.arguments).to match_array([first_finding_duplicate.id, first_finding_duplicate.id])
+ expect(background_migration_jobs.second.arguments).to match_array([second_finding_duplicate.id, second_finding_duplicate.id])
+ expect(background_migration_jobs.third.arguments).to match_array([third_finding_duplicate.id, third_finding_duplicate.id])
+ expect(background_migration_jobs.fourth.arguments).to match_array([unrelated_finding.id, unrelated_finding.id])
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(4)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, first_finding_duplicate.id, first_finding_duplicate.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, second_finding_duplicate.id, second_finding_duplicate.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(6.minutes, third_finding_duplicate.id, third_finding_duplicate.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(8.minutes, unrelated_finding.id, unrelated_finding.id)
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ id: nil,
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ params = {
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: vulnerability_identifier.id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ }
+ params[:id] = id unless id.nil?
+ vulnerability_findings.create!(params)
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: confirmed_at
+ )
+ end
+
+ def create_finding_pipeline!(project_id:, finding_id:)
+ pipeline = pipelines.create!(project_id: project_id)
+ vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id)
+ end
+end
diff --git a/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb b/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb
new file mode 100644
index 00000000000..6a82ed016af
--- /dev/null
+++ b/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe MigrateRemainingU2fRegistrations, :migration do
+ let(:u2f_registrations) { table(:u2f_registrations) }
+ let(:webauthn_registrations) { table(:webauthn_registrations) }
+ let(:users) { table(:users) }
+
+ let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) }
+
+ before do
+ create_u2f_registration(1, 'reg1')
+ create_u2f_registration(2, 'reg2')
+ create_u2f_registration(3, '')
+ create_u2f_registration(4, nil)
+ webauthn_registrations.create!({ name: 'reg1', u2f_registration_id: 1, credential_xid: '', public_key: '', user_id: user.id })
+ end
+
+ it 'correctly migrates u2f registrations previously not migrated' do
+ expect { migrate! }.to change { webauthn_registrations.count }.from(1).to(4)
+ end
+
+ it 'migrates all valid u2f registrations depite errors' do
+ create_u2f_registration(5, 'reg3', 'invalid!')
+ create_u2f_registration(6, 'reg4')
+
+ expect { migrate! }.to change { webauthn_registrations.count }.from(1).to(5)
+ end
+
+ def create_u2f_registration(id, name, public_key = nil)
+ device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5), { key_handle: SecureRandom.random_bytes(255) })
+ public_key ||= Base64.strict_encode64(device.origin_public_key_raw)
+ u2f_registrations.create!({ id: id,
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: public_key,
+ counter: 5,
+ name: name,
+ user_id: user.id })
+ end
+end
diff --git a/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb b/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb
new file mode 100644
index 00000000000..bc8b7c56676
--- /dev/null
+++ b/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe EncryptStaticObjectsExternalStorageAuthToken, :migration do
+ let(:application_settings) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'application_settings'
+ end
+ end
+
+ context 'when static_objects_external_storage_auth_token is not set' do
+ it 'does nothing' do
+ application_settings.create!
+
+ reversible_migration do |migration|
+ migration.before -> {
+ settings = application_settings.first
+
+ expect(settings.static_objects_external_storage_auth_token).to be_nil
+ expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil
+ }
+
+ migration.after -> {
+ settings = application_settings.first
+
+ expect(settings.static_objects_external_storage_auth_token).to be_nil
+ expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil
+ }
+ end
+ end
+ end
+
+ context 'when static_objects_external_storage_auth_token is set' do
+ it 'encrypts static_objects_external_storage_auth_token' do
+ settings = application_settings.create!
+ settings.update_column(:static_objects_external_storage_auth_token, 'Test')
+
+ reversible_migration do |migration|
+ migration.before -> {
+ settings = application_settings.first
+
+ expect(settings.static_objects_external_storage_auth_token).to eq('Test')
+ expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil
+ }
+ migration.after -> {
+ settings = application_settings.first
+
+ expect(settings.static_objects_external_storage_auth_token).to eq('Test')
+ expect(settings.static_objects_external_storage_auth_token_encrypted).to be_present
+ }
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb b/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb
new file mode 100644
index 00000000000..b80e4703f07
--- /dev/null
+++ b/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddTaskToWorkItemTypes, :migration do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let_it_be(:work_item_types) { table(:work_item_types) }
+
+ let(:base_types) do
+ {
+ issue: 0,
+ incident: 1,
+ test_case: 2,
+ requirement: 3,
+ task: 4
+ }
+ end
+
+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ reset_work_item_types
+ end
+
+ it 'skips creating the record if it already exists' do
+ reset_db_state_prior_to_migration
+ work_item_types.find_or_create_by!(name: 'Task', namespace_id: nil, base_type: base_types[:task], icon_name: 'issue-type-task')
+
+ expect do
+ migrate!
+ end.to not_change(work_item_types, :count)
+ end
+
+ it 'adds task to base work item types' do
+ reset_db_state_prior_to_migration
+
+ expect do
+ migrate!
+ end.to change(work_item_types, :count).from(4).to(5)
+
+ expect(work_item_types.all.pluck(:base_type)).to include(base_types[:task])
+ end
+
+ def reset_db_state_prior_to_migration
+ # Database needs to be in a similar state as when this migration was created
+ work_item_types.delete_all
+ work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue')
+ work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident')
+ work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case')
+ work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements')
+ end
+end
diff --git a/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb b/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb
new file mode 100644
index 00000000000..809ee53462f
--- /dev/null
+++ b/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe BackfillSequenceColumnForSprintsTable, :migration, schema: 20211126042235 do
+ let(:migration) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+ let(:sprints) { table(:sprints) }
+ let(:iterations_cadences) { table(:iterations_cadences) }
+
+ let!(:group) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:cadence_1) { iterations_cadences.create!(group_id: group.id, title: "cadence 1") }
+ let!(:cadence_2) { iterations_cadences.create!(group_id: group.id, title: "cadence 2") }
+ let!(:iteration_1) { sprints.create!(id: 1, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2021, 11, 1), due_date: Date.new(2021, 11, 5), iid: 1, title: 'a' ) }
+ let!(:iteration_2) { sprints.create!(id: 2, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 2, title: 'b') }
+ let!(:iteration_3) { sprints.create!(id: 3, group_id: group.id, iterations_cadence_id: cadence_2.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 4, title: 'd') }
+ let!(:iteration_4) { sprints.create!(id: 4, group_id: group.id, iterations_cadence_id: nil, start_date: Date.new(2021, 11, 15), due_date: Date.new(2021, 11, 20), iid: 3, title: 'c') }
+
+ describe '#up' do
+ it "correctly sets the sequence attribute with idempotency" do
+ migration.up
+
+ expect(iteration_1.reload.sequence).to be 1
+ expect(iteration_2.reload.sequence).to be 2
+ expect(iteration_3.reload.sequence).to be 1
+ expect(iteration_4.reload.sequence).to be nil
+
+ iteration_5 = sprints.create!(id: 5, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2022, 1, 1), due_date: Date.new(2022, 1, 5), iid: 1, title: 'e' )
+
+ migration.down
+ migration.up
+
+ expect(iteration_1.reload.sequence).to be 1
+ expect(iteration_2.reload.sequence).to be 2
+ expect(iteration_5.reload.sequence).to be 3
+ expect(iteration_3.reload.sequence).to be 1
+ expect(iteration_4.reload.sequence).to be nil
+ end
+ end
+end
diff --git a/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb b/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb
new file mode 100644
index 00000000000..2e1289c58f7
--- /dev/null
+++ b/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddIndexToProjectsOnMarkedForDeletionAt do
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).not_to include('index_projects_not_aimed_for_deletion')
+ }
+
+ migration.after -> {
+ expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).to include('index_projects_not_aimed_for_deletion')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb b/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb
new file mode 100644
index 00000000000..2545bb4a66c
--- /dev/null
+++ b/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleRecalculateVulnerabilityFindingSignaturesForFindings, :migration do
+ before do
+ allow(Gitlab).to receive(:ee?).and_return(ee?)
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ context 'when the Gitlab instance is FOSS' do
+ let(:ee?) { false }
+
+ it 'does not run the migration' do
+ expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size }
+ end
+ end
+
+ context 'when the Gitlab instance is EE' do
+ let(:ee?) { true }
+
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:findings) { table(:vulnerability_occurrences) }
+ let_it_be(:scanners) { table(:vulnerability_scanners) }
+ let_it_be(:identifiers) { table(:vulnerability_identifiers) }
+ let_it_be(:vulnerability_finding_signatures) { table(:vulnerability_finding_signatures) }
+
+ let_it_be(:namespace) { namespaces.create!(name: 'test', path: 'test') }
+ let_it_be(:project) { projects.create!(namespace_id: namespace.id, name: 'gitlab', path: 'gitlab') }
+
+ let_it_be(:scanner) do
+ scanners.create!(project_id: project.id, external_id: 'trivy', name: 'Security Scanner')
+ end
+
+ let_it_be(:identifier) do
+ identifiers.create!(project_id: project.id,
+ fingerprint: 'd432c2ad2953e8bd587a3a43b3ce309b5b0154c123',
+ external_type: 'SECURITY_ID',
+ external_id: 'SECURITY_0',
+ name: 'SECURITY_IDENTIFIER 0')
+ end
+
+ let_it_be(:finding1) { findings.create!(finding_params) }
+ let_it_be(:signature1) { vulnerability_finding_signatures.create!(finding_id: finding1.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) }
+
+ let_it_be(:finding2) { findings.create!(finding_params) }
+ let_it_be(:signature2) { vulnerability_finding_signatures.create!(finding_id: finding2.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) }
+
+ let_it_be(:finding3) { findings.create!(finding_params) }
+ let_it_be(:signature3) { vulnerability_finding_signatures.create!(finding_id: finding3.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) }
+
+    it 'schedules the background jobs', :aggregate_failures do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_migration_with_multiple_args(signature1.id, signature2.id)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_migration_with_multiple_args(signature3.id, signature3.id)
+ end
+ end
+ end
+
+ def finding_params
+ uuid = SecureRandom.uuid
+
+ {
+ severity: 0,
+ confidence: 5,
+ report_type: 2,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: identifier.id,
+ location: nil,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)),
+ uuid: uuid,
+ name: "Vulnerability Finding #{uuid}",
+ metadata_version: '1.3',
+ raw_metadata: '{}'
+ }
+ end
+ end
+end
diff --git a/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb b/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb
new file mode 100644
index 00000000000..a81059518e6
--- /dev/null
+++ b/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleUpdateTimelogsNullSpentAt do
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
+ let_it_be(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let_it_be(:issue) { table(:issues).create!(project_id: project.id) }
+ let_it_be(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') }
+ let_it_be(:timelog1) { create_timelog!(merge_request_id: merge_request.id) }
+ let_it_be(:timelog2) { create_timelog!(merge_request_id: merge_request.id) }
+ let_it_be(:timelog3) { create_timelog!(merge_request_id: merge_request.id) }
+ let_it_be(:timelog4) { create_timelog!(issue_id: issue.id) }
+ let_it_be(:timelog5) { create_timelog!(issue_id: issue.id) }
+
+ before_all do
+ table(:timelogs).where.not(id: timelog3.id).update_all(spent_at: nil)
+ end
+
+ it 'correctly schedules background migrations' do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, timelog1.id, timelog2.id)
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, timelog4.id, timelog5.id)
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+
+ private
+
+ def create_timelog!(**args)
+ table(:timelogs).create!(**args, time_spent: 1)
+ end
+end