gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/lib/gitlab/background_migration')
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb      |  78
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb | 225
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb    |  94
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb   |  67
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb      |  73
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb    |  73
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb             |  84
-rw-r--r--  spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb       | 124
8 files changed, 818 insertions(+), 0 deletions(-)
diff --git a/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb
new file mode 100644
index 00000000000..ea5c7086ac2
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchDataNamespaceId,
+ schema: 20240105144908, feature_category: :team_planning do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:issue_search_data) { table(:issue_search_data) }
+ let(:issue_type) { table(:work_item_types).find_by!(namespace_id: nil, base_type: 0) }
+
+ let(:namespace_1) { namespaces.create!(name: 'namespace1', type: 'Group', path: 'namespace1') }
+ let(:namespace_2) { namespaces.create!(name: 'namespace2', type: 'Group', path: 'namespace2') }
+
+ let(:proj_ns_1) { namespaces.create!(name: 'pn1', path: 'pn1', type: 'Project', parent_id: namespace_1.id) }
+ let(:proj_ns_2) { namespaces.create!(name: 'pn2', path: 'pn2', type: 'Project', parent_id: namespace_2.id) }
+
+ let(:proj_1) do
+ projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace_1.id, project_namespace_id: proj_ns_1.id)
+ end
+
+ let(:proj_2) do
+ projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace_2.id, project_namespace_id: proj_ns_2.id)
+ end
+
+ let(:proj_1_issue_1) do
+ issues.create!(title: 'issue1', project_id: proj_1.id, namespace_id: proj_ns_1.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_1_issue_2) do
+ issues.create!(title: 'issue2', project_id: proj_1.id, namespace_id: proj_ns_1.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_2_issue_1) do
+ issues.create!(title: 'issue1', project_id: proj_2.id, namespace_id: proj_ns_2.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_2_issue_2) do
+ issues.create!(title: 'issue2', project_id: proj_2.id, namespace_id: proj_ns_2.id, work_item_type_id: issue_type.id)
+ end
+
+ let!(:proj_1_issue_1_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_1.id, issue_id: proj_1_issue_1.id)
+ end
+
+ let!(:proj_1_issue_2_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_1.id, issue_id: proj_1_issue_2.id)
+ end
+
+ let!(:proj_2_issue_1_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_2.id, issue_id: proj_2_issue_1.id)
+ end
+
+ let!(:proj_2_issue_2_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_2.id, issue_id: proj_2_issue_2.id)
+ end
+
+ let(:migration) do
+ described_class.new(
+ start_id: proj_1_issue_1.id,
+ end_id: proj_2_issue_2.id,
+ batch_table: :issues,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 2,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ it 'backfills namespace_id for the specified records' do
+ migration.perform
+
+ [proj_1_issue_1, proj_1_issue_2, proj_2_issue_1, proj_2_issue_2].each do |issue|
+ expect(issue_search_data.find_by_issue_id(issue.id).namespace_id).to eq(issue.namespace_id)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..1462848845e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb
@@ -0,0 +1,225 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillOwaspTopTenOfVulnerabilityReads,
+ feature_category: :vulnerability_management do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_reads) { table(:vulnerability_reads) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_occurrence_identifiers) { table(:vulnerability_occurrence_identifiers) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+
+ let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
+ let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
+
+ shared_context 'with vulnerability data' do
+ let(:external_id) { '' }
+ let(:external_type) { '' }
+ let(:identifier_name) { '' }
+
+ let(:vulnerability_1) { create_vulnerability(title: 'vulnerability 1') }
+ let(:vulnerability_2) { create_vulnerability(title: 'vulnerability 2') }
+ let(:vulnerability_3) { create_vulnerability(title: 'vulnerability 3') }
+
+ let(:vuln_identifier) do
+ create_identifier(external_id: external_id, external_type: external_type, name: identifier_name)
+ end
+
+ let(:vuln_finding) do
+ create_finding(vulnerability_id: vulnerability_1.id, primary_identifier_id: vuln_identifier.id)
+ end
+
+ let!(:vulnerability_read_1) { create_vulnerability_read(vulnerability_id: vulnerability_1.id) }
+ let!(:vulnerability_read_2) { create_vulnerability_read(vulnerability_id: vulnerability_2.id) }
+ let!(:vulnerability_read_3) { create_vulnerability_read(vulnerability_id: vulnerability_3.id) }
+
+ before do
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding.id, identifier_id: vuln_identifier.id)
+ end
+ end
+
+ describe '#perform' do
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: vulnerability_reads.first.vulnerability_id,
+ end_id: vulnerability_reads.last.vulnerability_id,
+ batch_table: :vulnerability_reads,
+ batch_column: :vulnerability_id,
+ sub_batch_size: vulnerability_reads.count,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ context 'with owasp top 10 data' do
+ include_context 'with vulnerability data' do
+ let(:external_id) { 'A1:2017-Injection' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+ end
+
+ it 'updates vulnerability_reads' do
+ expect { perform_migration }.to change { vulnerability_read_1.reload.owasp_top_10 }
+ .from(nil).to(1)
+ .and not_change { vulnerability_read_2.reload.owasp_top_10 }.from(nil)
+ end
+
+ it 'updates vulnerability_reads with correct mapping' do
+ vuln_identifier_2 = create_identifier(external_id: 'A1:2021', external_type: 'owasp', name: 'A1 2021')
+ vuln_identifier_3 = create_identifier
+ vuln_finding_2 = create_finding(vulnerability_id: vulnerability_2.id,
+ primary_identifier_id: vuln_identifier_2.id)
+ vuln_finding_3 = create_finding(vulnerability_id: vulnerability_3.id,
+ primary_identifier_id: vuln_identifier_3.id)
+
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding_2.id,
+ identifier_id: vuln_identifier_2.id)
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding_3.id,
+ identifier_id: vuln_identifier_3.id)
+
+ perform_migration
+
+ expect(vulnerability_read_1.reload.owasp_top_10).to eq(1)
+ expect(vulnerability_read_2.reload.owasp_top_10).to eq(11)
+ expect(vulnerability_read_3.reload.owasp_top_10).to be_nil
+ end
+ end
+
+ context 'with incorrect owasp top 10 data' do
+ include_context 'with vulnerability data'
+
+ shared_examples 'does not update vulnerability_reads' do
+ it do
+ perform_migration
+
+ expect(vulnerability_read_1.reload.owasp_top_10).to be_nil
+ expect(vulnerability_read_2.reload.owasp_top_10).to be_nil
+ expect(vulnerability_read_3.reload.owasp_top_10).to be_nil
+ end
+ end
+
+ context 'with incorrect long format external_id' do
+ let(:external_id) { 'A1:2015-Injection' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+
+ context 'with incorrect short format external_id' do
+ let(:external_id) { 'A1' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+
+ context 'with incorrect external_type' do
+ let(:external_id) { 'A1:2017' }
+ let(:external_type) { 'owasp2017' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+ end
+
+ context 'with no vulnerability identifiers match' do
+ include_context 'with vulnerability data' do
+ let(:external_id) { 'CVE-2018-1234' }
+ let(:external_type) { 'CVE' }
+ let(:identifier_name) { 'CVE-2018-1234' }
+ end
+
+ it 'does not update vulnerability_reads' do
+ perform_migration
+
+ expect(vulnerability_reads.where.not(owasp_top_10: nil).count).to eq(0)
+ end
+ end
+ end
+
+ private
+
+ def create_vulnerability(overrides = {})
+ attrs = {
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1
+ }.merge(overrides)
+
+ vulnerabilities.create!(attrs)
+ end
+
+ def create_vulnerability_read(overrides = {})
+ attrs = {
+ project_id: project.id,
+ vulnerability_id: 1,
+ scanner_id: scanner.id,
+ severity: 1,
+ report_type: 1,
+ state: 1,
+ uuid: SecureRandom.uuid
+ }.merge(overrides)
+
+ vulnerability_reads.create!(attrs)
+ end
+
+ def create_finding(overrides = {})
+ attrs = {
+ project_id: project.id,
+ scanner_id: scanner.id,
+ severity: 5, # medium
+ confidence: 2, # unknown
+ report_type: 99, # generic
+ primary_identifier_id: create_identifier.id,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: SecureRandom.hex(20),
+ uuid: SecureRandom.uuid,
+ name: "CVE-2018-1234",
+ raw_metadata: "{}",
+ metadata_version: "test:1.0"
+ }.merge(overrides)
+
+ vulnerability_findings.create!(attrs)
+ end
+
+ def create_identifier(overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "CVE-2018-1234",
+ external_type: "CVE",
+ name: "CVE-2018-1234",
+ fingerprint: SecureRandom.hex(20)
+ }.merge(overrides)
+
+ vulnerability_identifiers.create!(attrs)
+ end
+
+ def create_vulnerability_occurrence_identifier(overrides = {})
+ time = Time.now.utc
+
+ attrs = {
+ created_at: time,
+ updated_at: time,
+ occurrence_id: nil,
+ identifier_id: nil
+ }.merge(overrides)
+
+ vulnerability_occurrence_identifiers.create!(attrs)
+ end
+
+ def checksum(value)
+ sha = Digest::SHA256.hexdigest(value)
+ Gitlab::Database::ShaAttribute.new.serialize(sha)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb
new file mode 100644
index 00000000000..c466fdaa36a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineArtifact,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_artifacts_table) { table(:ci_pipeline_artifacts, database: :ci) }
+ let!(:pipeline_100) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_101) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_102) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_artifact_100) do
+ ci_pipeline_artifacts_table.create!(
+ id: 1,
+ pipeline_id: pipeline_100.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_artifact_101) do
+ ci_pipeline_artifacts_table.create!(
+ id: 2,
+ pipeline_id: pipeline_101.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_101.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_artifact) do
+ ci_pipeline_artifacts_table.create!(
+ id: 3,
+ pipeline_id: pipeline_102.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_artifacts_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_artifacts_table.maximum(:pipeline_id),
+ batch_table: :ci_pipeline_artifacts,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_artifact.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_artifact_100.reload.partition_id }
+ .and not_change { ci_pipeline_artifact_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_artifact.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb
new file mode 100644
index 00000000000..ad1900ab6a6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineChatData,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_chat_data_table) { table(:ci_pipeline_chat_data, database: :ci) }
+ let!(:pipeline1) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline2) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:invalid_ci_pipeline_chat_data) do
+ ci_pipeline_chat_data_table.create!(
+ id: 1,
+ pipeline_id: pipeline1.id,
+ chat_name_id: 1,
+ response_url: '',
+ partition_id: pipeline1.partition_id
+ )
+ end
+
+ let!(:valid_ci_pipeline_chat_data) do
+ ci_pipeline_chat_data_table.create!(
+ id: 2,
+ pipeline_id: pipeline2.id,
+ chat_name_id: 2,
+ response_url: '',
+ partition_id: pipeline2.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_chat_data_table.minimum(:id),
+ end_id: ci_pipeline_chat_data_table.maximum(:id),
+ batch_table: :ci_pipeline_chat_data,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_chat_data.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ pipeline1.update!(partition_id: 101)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to change { invalid_ci_pipeline_chat_data.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb
new file mode 100644
index 00000000000..fad3e277888
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineConfig,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_config_table) { table(:ci_pipelines_config, database: :ci) }
+ let!(:pipeline_1) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_2) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_3) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_config_100) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_1.id,
+ content: "content",
+ partition_id: pipeline_1.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_config_101) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_2.id,
+ content: "content",
+ partition_id: pipeline_2.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_config) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_3.id,
+ content: "content",
+ partition_id: pipeline_1.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_config_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_config_table.maximum(:pipeline_id),
+ batch_table: :ci_pipelines_config,
+ batch_column: :pipeline_id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ before do
+ pipeline_3.update!(partition_id: 100)
+ end
+
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_config.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_config_100.reload.partition_id }
+ .and not_change { ci_pipeline_config_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_config.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb
new file mode 100644
index 00000000000..d09d5016dcc
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineMetadata,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_metadata_table) { table(:ci_pipeline_metadata, database: :ci) }
+ let!(:pipeline_100) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_101) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_102) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_metadata_100) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_100.id,
+ project_id: 1,
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_metadata_101) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_101.id,
+ project_id: 1,
+ partition_id: pipeline_101.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_metadata) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_102.id,
+ project_id: 1,
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_metadata_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_metadata_table.maximum(:pipeline_id),
+ batch_table: :ci_pipeline_metadata,
+ batch_column: :pipeline_id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_metadata.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_metadata_100.reload.partition_id }
+ .and not_change { ci_pipeline_metadata_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_metadata.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb
new file mode 100644
index 00000000000..725cd7f4bca
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillVsCodeSettingsVersion, schema: 20231212135235, feature_category: :web_ide do
+ let(:vs_code_settings) { table(:vs_code_settings) }
+
+ let(:users) { table(:users) }
+
+ let(:user) do
+ users.create!(
+ email: "test1@example.com",
+ username: "test1",
+ notification_email: "test@example.com",
+ name: "test",
+ state: "active",
+ projects_limit: 10)
+ end
+
+ let(:persistent_settings) { VsCode::Settings::SETTINGS_TYPES.filter { |type| type != 'machines' } }
+
+ subject(:migration) do
+ described_class.new(
+ start_id: vs_code_settings.first.id,
+ end_id: vs_code_settings.last.id,
+ batch_table: :vs_code_settings,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ end
+
+ describe "#perform" do
+ context 'when it finds vs_code_setting rows with version that is nil or zero' do
+ let(:settings) do
+ persistent_settings.each_with_index.map do |type, index|
+ vs_code_settings.create!(user_id: user.id,
+ setting_type: type,
+ content: '{}',
+ uuid: SecureRandom.uuid,
+ version: index.odd? ? nil : 0)
+ end
+ end
+
+ it 'sets version field with default value for setting type' do
+ settings.each do |setting|
+ expect(setting.version).to eq(nil).or eq(0)
+ end
+
+ migration.perform
+
+ settings.each do |setting|
+ expect(setting.reload.version)
+ .to eq(described_class::VsCodeSetting::DEFAULT_SETTING_VERSIONS[setting.setting_type])
+ end
+ end
+ end
+
+ context 'when it finds vs_code_setting rows with version that is neither nil nor zero' do
+ let(:settings) do
+ persistent_settings.map do |type|
+ vs_code_settings.create!(user_id: user.id,
+ setting_type: type,
+ content: '{}',
+ uuid: SecureRandom.uuid,
+ version: 1)
+ end
+ end
+
+ it 'does not set version field' do
+ settings.each do |setting|
+ expect(setting.version).to eq(1)
+ end
+
+ migration.perform
+
+ settings.each do |setting|
+ expect(setting.reload.version).to eq(1)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb b/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb
new file mode 100644
index 00000000000..05817001395
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DropVulnerabilitiesWithoutFindingId, feature_category: :vulnerability_management do # rubocop:disable Layout/LineLength -- autogenerated
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:members) { table(:members) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_scanners) { table(:vulnerability_scanners) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let!(:user) { create_user(email: "test1@example.com", username: "test1") }
+ let!(:namespace) { namespaces.create!(name: "test-1", path: "test-1", owner_id: user.id) }
+ let!(:project) do
+ projects.create!(
+ id: 9999, namespace_id: namespace.id,
+ project_namespace_id: namespace.id,
+ creator_id: user.id
+ )
+ end
+
+ let!(:membership) do
+ members.create!(access_level: 50, source_id: project.id, source_type: "Project", user_id: user.id, state: 0,
+ notification_level: 3, type: "ProjectMember", member_namespace_id: namespace.id)
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: vulnerabilities.first.id,
+ end_id: vulnerabilities.last.id,
+ batch_table: :vulnerabilities,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ describe "#perform" do
+ subject(:background_migration) { described_class.new(**migration_attrs).perform }
+
+ let!(:vulnerability_without_finding_id) { create_vulnerability }
+
+ let!(:vulnerabilities_finding) { create_finding(project) }
+ let!(:vulnerability_with_finding_id) { create_vulnerability(finding_id: vulnerabilities_finding.id) }
+
+ it 'removes all Vulnerabilities without a finding_id' do
+ expect { background_migration }.to change { vulnerabilities.count }.from(2).to(1)
+ end
+ end
+
+ private
+
+ def create_scanner(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "test_vulnerability_scanner",
+ name: "Test Vulnerabilities::Scanner"
+ }.merge(overrides)
+
+ vulnerability_scanners.create!(attrs)
+ end
+
+ def create_identifier(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "CVE-2018-1234",
+ external_type: "CVE",
+ name: "CVE-2018-1234",
+ fingerprint: SecureRandom.hex(20)
+ }.merge(overrides)
+
+ vulnerability_identifiers.create!(attrs)
+ end
+
+ def create_finding(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ scanner_id: create_scanner(project).id,
+ severity: 5, # medium
+ confidence: 2, # unknown
+ report_type: 99, # generic
+ primary_identifier_id: create_identifier(project).id,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: SecureRandom.hex(20),
+ uuid: SecureRandom.uuid,
+ name: "CVE-2018-1234",
+ raw_metadata: "{}",
+ metadata_version: "test:1.0"
+ }.merge(overrides)
+
+ vulnerability_findings.create!(attrs)
+ end
+
+ def create_vulnerability(overrides = {})
+ attrs = {
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1,
+ state: 1,
+ detected_at: Time.zone.now
+ }.merge(overrides)
+
+ vulnerabilities.create!(attrs)
+ end
+
+ def create_user(overrides = {})
+ attrs = {
+ email: "test@example.com",
+ notification_email: "test@example.com",
+ name: "test",
+ username: "test",
+ state: "active",
+ projects_limit: 10
+ }.merge(overrides)
+
+ users.create!(attrs)
+ end
+end