Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRobert Speicher <rspeicher@gmail.com>2021-01-20 22:34:23 +0300
committerRobert Speicher <rspeicher@gmail.com>2021-01-20 22:34:23 +0300
commit6438df3a1e0fb944485cebf07976160184697d72 (patch)
tree00b09bfd170e77ae9391b1a2f5a93ef6839f2597 /spec/lib/gitlab/background_migration
parent42bcd54d971da7ef2854b896a7b34f4ef8601067 (diff)
Add latest changes from gitlab-org/gitlab@13-8-stable-eev13.8.0-rc42
Diffstat (limited to 'spec/lib/gitlab/background_migration')
-rw-r--r--spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb82
-rw-r--r--spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb91
-rw-r--r--spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb113
-rw-r--r--spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb121
4 files changed, 407 insertions, 0 deletions
diff --git a/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb b/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb
new file mode 100644
index 00000000000..49fa7b41916
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Spec for the background migration that backfills `expire_at` on old
+# ci_job_artifacts rows that have no expiry set. Pinned to the schema version
+# the migration was written against.
+RSpec.describe Gitlab::BackgroundMigration::BackfillArtifactExpiryDate, :migration, schema: 20201111152859 do
+ # The job is run over id range 1..99, so the artifact with id 100 below must
+ # be left untouched.
+ subject(:perform) { migration.perform(1, 99) }
+
+ let(:migration) { described_class.new }
+ # id 100 is deliberately outside the 1..99 range passed to #perform.
+ let(:artifact_outside_id_range) { create_artifact!(id: 100, created_at: 1.year.ago, expire_at: nil) }
+ # Created "now", i.e. too recent to be picked up by the migration's date filter.
+ let(:artifact_outside_date_range) { create_artifact!(id: 40, created_at: Time.current, expire_at: nil) }
+ let(:old_artifact) { create_artifact!(id: 10, created_at: 16.months.ago, expire_at: nil) }
+ let(:recent_artifact) { create_artifact!(id: 20, created_at: 1.year.ago, expire_at: nil) }
+ # Already has an expiry; the migration must not overwrite it.
+ let(:artifact_with_expiry) { create_artifact!(id: 30, created_at: 1.year.ago, expire_at: Time.current + 1.day) }
+
+ before do
+ # Minimal parent rows so the ci_job_artifacts foreign keys are satisfied.
+ table(:namespaces).create!(id: 1, name: 'the-namespace', path: 'the-path')
+ table(:projects).create!(id: 1, name: 'the-project', namespace_id: 1)
+ table(:ci_builds).create!(id: 1, allow_failure: false)
+ end
+
+ # The backfilled expiry always lands on the 22nd of some month; which month
+ # is chosen depends on whether "today" is before or after the 22nd, so both
+ # branches are exercised under frozen time.
+ context 'when current date is before the 22nd' do
+ before do
+ travel_to(Time.zone.local(2020, 1, 1, 0, 0, 0))
+ end
+
+ it 'backfills the expiry date for old artifacts' do
+ expect(old_artifact.reload.expire_at).to eq(nil)
+
+ perform
+
+ expect(old_artifact.reload.expire_at).to be_within(1.minute).of(Time.zone.local(2020, 4, 22, 0, 0, 0))
+ end
+
+ it 'backfills the expiry date for recent artifacts' do
+ expect(recent_artifact.reload.expire_at).to eq(nil)
+
+ perform
+
+ expect(recent_artifact.reload.expire_at).to be_within(1.minute).of(Time.zone.local(2021, 1, 22, 0, 0, 0))
+ end
+ end
+
+ context 'when current date is after the 22nd' do
+ before do
+ travel_to(Time.zone.local(2020, 1, 23, 0, 0, 0))
+ end
+
+ it 'backfills the expiry date for old artifacts' do
+ expect(old_artifact.reload.expire_at).to eq(nil)
+
+ perform
+
+ expect(old_artifact.reload.expire_at).to be_within(1.minute).of(Time.zone.local(2020, 5, 22, 0, 0, 0))
+ end
+
+ it 'backfills the expiry date for recent artifacts' do
+ expect(recent_artifact.reload.expire_at).to eq(nil)
+
+ perform
+
+ expect(recent_artifact.reload.expire_at).to be_within(1.minute).of(Time.zone.local(2021, 2, 22, 0, 0, 0))
+ end
+ end
+
+ it 'does not touch artifacts with expiry date' do
+ expect { perform }.not_to change { artifact_with_expiry.reload.expire_at }
+ end
+
+ it 'does not touch artifacts outside id range' do
+ expect { perform }.not_to change { artifact_outside_id_range.reload.expire_at }
+ end
+
+ it 'does not touch artifacts outside date range' do
+ expect { perform }.not_to change { artifact_outside_date_range.reload.expire_at }
+ end
+
+ private
+
+ # Inserts a ci_job_artifacts row with fixed project/job/file_type parents,
+ # merging in the attributes (id, created_at, expire_at) supplied by the caller.
+ def create_artifact!(**args)
+ table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 1)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
new file mode 100644
index 00000000000..110a1ff8a08
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Spec for the generic background migration job that copies the value of one
+# column into another over an id range (used e.g. for int -> bigint key
+# conversions), processing rows in sub-batches and recording completion.
+RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob do
+ let(:table_name) { :copy_primary_key_test }
+ let(:test_table) { table(table_name) }
+ let(:sub_batch_size) { 1000 }
+
+ before do
+ # The job works against an arbitrary table name, so a throwaway table is
+ # created directly; the *_convert_to_* columns mirror the naming used by
+ # the real column-conversion migrations and default to sentinel values
+ # (0 / 'no name') so un-copied rows are easy to spot in assertions.
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{table_name}
+ (
+ id integer NOT NULL,
+ name character varying,
+ fk integer NOT NULL,
+ id_convert_to_bigint bigint DEFAULT 0 NOT NULL,
+ fk_convert_to_bigint bigint DEFAULT 0 NOT NULL,
+ name_convert_to_text text DEFAULT 'no name'
+ );
+ SQL
+
+ # Insert some data, it doesn't make a difference
+ test_table.create!(id: 11, name: 'test1', fk: 1)
+ test_table.create!(id: 12, name: 'test2', fk: 2)
+ test_table.create!(id: 15, name: nil, fk: 3)
+ test_table.create!(id: 19, name: 'test4', fk: 4)
+ end
+
+ after do
+ # Make sure that the temp table we created is dropped (it is not removed by the database_cleaner)
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{table_name};
+ SQL
+ end
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ let(:migration_class) { described_class.name }
+ # Two tracking rows with different id ranges; only the one whose arguments
+ # match the processed range should be marked as succeeded.
+ let!(:job1) do
+ table(:background_migration_jobs).create!(
+ class_name: migration_class,
+ arguments: [1, 10, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size]
+ )
+ end
+
+ let!(:job2) do
+ table(:background_migration_jobs).create!(
+ class_name: migration_class,
+ arguments: [11, 20, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size]
+ )
+ end
+
+ it 'copies all primary keys in range' do
+ # Only ids 12 and 15 fall inside [12, 15]; the others keep the default 0.
+ subject.perform(12, 15, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size)
+
+ expect(test_table.where('id = id_convert_to_bigint').pluck(:id)).to contain_exactly(12, 15)
+ expect(test_table.where(id_convert_to_bigint: 0).pluck(:id)).to contain_exactly(11, 19)
+ expect(test_table.all.count).to eq(4)
+ end
+
+ it 'copies all foreign keys in range' do
+ subject.perform(10, 14, table_name, 'id', 'fk', 'fk_convert_to_bigint', sub_batch_size)
+
+ expect(test_table.where('fk = fk_convert_to_bigint').pluck(:id)).to contain_exactly(11, 12)
+ expect(test_table.where(fk_convert_to_bigint: 0).pluck(:id)).to contain_exactly(15, 19)
+ expect(test_table.all.count).to eq(4)
+ end
+
+ it 'copies columns with NULLs' do
+ # NULL source values must overwrite the 'no name' default, not be skipped.
+ expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(4)
+
+ subject.perform(10, 20, table_name, 'id', 'name', 'name_convert_to_text', sub_batch_size)
+
+ expect(test_table.where('name = name_convert_to_text').pluck(:id)).to contain_exactly(11, 12, 19)
+ expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
+ expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(0)
+ end
+
+ it 'tracks completion with BackgroundMigrationJob' do
+ expect do
+ subject.perform(11, 20, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size)
+ end.to change { Gitlab::Database::BackgroundMigrationJob.succeeded.count }.from(0).to(1)
+
+ # job2's arguments match the processed range, so it flips to succeeded
+ # (status 1) while job1 stays pending (status 0).
+ expect(job1.reload.status).to eq(0)
+ expect(job2.reload.status).to eq(1)
+ expect(test_table.where('id = id_convert_to_bigint').count).to eq(4)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
new file mode 100644
index 00000000000..8e74935e127
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Spec for the migration that backfills `vulnerability_feedback.finding_uuid`
+# with the UUIDv5 derived from the matching finding's report type, primary
+# identifier fingerprint, location fingerprint and project id (see the
+# `uuid_*_components` lets below).
+RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20201211090634 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:identifiers) { table(:vulnerability_identifiers) }
+ let(:findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_feedback) { table(:vulnerability_feedback) }
+
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
+ let(:user) { users.create!(username: 'john.doe', projects_limit: 5) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') }
+ let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'baz') }
+ # Numeric report_type enum values as stored in the DB (note value 2 is
+ # intentionally unused here).
+ let(:sast_report) { 0 }
+ let(:dependency_scanning_report) { 1 }
+ let(:dast_report) { 3 }
+ let(:secret_detection_report) { 4 }
+ let(:project_fingerprint) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
+ let(:location_fingerprint_1) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
+ let(:location_fingerprint_2) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
+ let(:location_fingerprint_3) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
+ let(:finding_1) { finding_creator.call(sast_report, location_fingerprint_1) }
+ let(:finding_2) { finding_creator.call(dast_report, location_fingerprint_2) }
+ let(:finding_3) { finding_creator.call(secret_detection_report, location_fingerprint_3) }
+ # The UUIDv5 name string: report type name, identifier fingerprint, location
+ # fingerprint and project id joined by '-'.
+ let(:uuid_1_components) { ['sast', identifier.fingerprint, location_fingerprint_1, project.id].join('-') }
+ let(:uuid_2_components) { ['dast', identifier.fingerprint, location_fingerprint_2, project.id].join('-') }
+ let(:uuid_3_components) { ['secret_detection', identifier.fingerprint, location_fingerprint_3, project.id].join('-') }
+ let(:expected_uuid_1) { Gitlab::UUID.v5(uuid_1_components) }
+ let(:expected_uuid_2) { Gitlab::UUID.v5(uuid_2_components) }
+ let(:expected_uuid_3) { Gitlab::UUID.v5(uuid_3_components) }
+ # Builds a vulnerability_occurrences row for the given report type and
+ # location fingerprint; fingerprints are serialized to binary sha columns.
+ let(:finding_creator) do
+ -> (report_type, location_fingerprint) do
+ findings.create!(
+ project_id: project.id,
+ primary_identifier_id: identifier.id,
+ scanner_id: scanner.id,
+ report_type: report_type,
+ uuid: SecureRandom.uuid,
+ name: 'Foo',
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize(location_fingerprint),
+ project_fingerprint: Gitlab::Database::ShaAttribute.serialize(project_fingerprint),
+ metadata_version: '1',
+ severity: 0,
+ confidence: 5,
+ raw_metadata: '{}'
+ )
+ end
+ end
+
+ # Builds a vulnerability_feedback row; `finding_uuid` starts out nil and is
+ # what the migration under test populates.
+ let(:feedback_creator) do
+ -> (category, project_fingerprint) do
+ vulnerability_feedback.create!(
+ project_id: project.id,
+ author_id: user.id,
+ feedback_type: 0,
+ category: category,
+ project_fingerprint: project_fingerprint
+ )
+ end
+ end
+
+ # feedback_1..3 each match one of the findings above; feedback_4 has a
+ # project_fingerprint ('foo') matching no finding, and feedback_5's category
+ # (dependency scanning) has no corresponding finding — neither should change.
+ let!(:feedback_1) { feedback_creator.call(finding_1.report_type, project_fingerprint) }
+ let!(:feedback_2) { feedback_creator.call(finding_2.report_type, project_fingerprint) }
+ let!(:feedback_3) { feedback_creator.call(finding_3.report_type, project_fingerprint) }
+ let!(:feedback_4) { feedback_creator.call(finding_1.report_type, 'foo') }
+ let!(:feedback_5) { feedback_creator.call(dependency_scanning_report, project_fingerprint) }
+
+ subject(:populate_finding_uuids) { described_class.new.perform(feedback_1.id, feedback_5.id) }
+
+ before do
+ # Stubbed so the migration's logging can be asserted (and kept quiet).
+ allow(Gitlab::BackgroundMigration::Logger).to receive(:info)
+ end
+
+ describe '#perform' do
+ it 'updates the `finding_uuid` attributes of the feedback records' do
+ expect { populate_finding_uuids }.to change { feedback_1.reload.finding_uuid }.from(nil).to(expected_uuid_1)
+ .and change { feedback_2.reload.finding_uuid }.from(nil).to(expected_uuid_2)
+ .and change { feedback_3.reload.finding_uuid }.from(nil).to(expected_uuid_3)
+ .and not_change { feedback_4.reload.finding_uuid }
+ .and not_change { feedback_5.reload.finding_uuid }
+
+ expect(Gitlab::BackgroundMigration::Logger).to have_received(:info).once
+ end
+
+ it 'preloads the finding and identifier records to prevent N+1 queries' do
+ # Load feedback records(1), load findings(2), load identifiers(3) and finally update feedback records one by one(6)
+ expect { populate_finding_uuids }.not_to exceed_query_limit(6)
+ end
+
+ context 'when setting the `finding_uuid` attribute of a feedback record fails' do
+ let(:expected_error) { RuntimeError.new }
+
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+
+ # Force every per-record update to fail so error handling is exercised.
+ allow_next_found_instance_of(described_class::VulnerabilityFeedback) do |feedback|
+ allow(feedback).to receive(:update_column).and_raise(expected_error)
+ end
+ end
+
+ it 'captures the errors and does not crash entirely' do
+ expect { populate_finding_uuids }.not_to raise_error
+
+ # One tracked error per feedback record that should have been updated.
+ expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception).with(expected_error).exactly(3).times
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb b/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb
new file mode 100644
index 00000000000..391b27b28e6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Spec for the migration that deletes duplicate per-project service rows —
+# keeping one row per (project_id, type) pair — together with their dependent
+# rows in the various service data tables.
+RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateServices, :migration, schema: 20201207165956 do
+ let_it_be(:users) { table(:users) }
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:services) { table(:services) }
+
+ # Tables holding rows that belong to a service; each is seeded with exactly
+ # two rows (one per duplicated service on project3) below.
+ let_it_be(:alerts_service_data) { table(:alerts_service_data) }
+ let_it_be(:chat_names) { table(:chat_names) }
+ let_it_be(:issue_tracker_data) { table(:issue_tracker_data) }
+ let_it_be(:jira_tracker_data) { table(:jira_tracker_data) }
+ let_it_be(:open_project_tracker_data) { table(:open_project_tracker_data) }
+ let_it_be(:slack_integrations) { table(:slack_integrations) }
+ let_it_be(:web_hooks) { table(:web_hooks) }
+
+ let_it_be(:data_tables) do
+ [alerts_service_data, chat_names, issue_tracker_data, jira_tracker_data, open_project_tracker_data, slack_integrations, web_hooks]
+ end
+
+ let!(:user) { users.create!(id: 1, projects_limit: 100) }
+ let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
+
+ # project without duplicate services
+ let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id) }
+ let!(:service1) { services.create!(id: 1, project_id: project1.id, type: 'AsanaService') }
+ let!(:service2) { services.create!(id: 2, project_id: project1.id, type: 'JiraService') }
+ let!(:service3) { services.create!(id: 3, project_id: project1.id, type: 'SlackService') }
+
+ # project with duplicate services
+ let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id) }
+ let!(:service4) { services.create!(id: 4, project_id: project2.id, type: 'AsanaService') }
+ let!(:service5) { services.create!(id: 5, project_id: project2.id, type: 'JiraService') }
+ let!(:service6) { services.create!(id: 6, project_id: project2.id, type: 'JiraService') }
+ let!(:service7) { services.create!(id: 7, project_id: project2.id, type: 'SlackService') }
+ let!(:service8) { services.create!(id: 8, project_id: project2.id, type: 'SlackService') }
+ let!(:service9) { services.create!(id: 9, project_id: project2.id, type: 'SlackService') }
+
+ # project with duplicate services and dependent records
+ let!(:project3) { projects.create!(id: 3, namespace_id: namespace.id) }
+ let!(:service10) { services.create!(id: 10, project_id: project3.id, type: 'AlertsService') }
+ let!(:service11) { services.create!(id: 11, project_id: project3.id, type: 'AlertsService') }
+ let!(:service12) { services.create!(id: 12, project_id: project3.id, type: 'SlashCommandsService') }
+ let!(:service13) { services.create!(id: 13, project_id: project3.id, type: 'SlashCommandsService') }
+ let!(:service14) { services.create!(id: 14, project_id: project3.id, type: 'IssueTrackerService') }
+ let!(:service15) { services.create!(id: 15, project_id: project3.id, type: 'IssueTrackerService') }
+ let!(:service16) { services.create!(id: 16, project_id: project3.id, type: 'JiraService') }
+ let!(:service17) { services.create!(id: 17, project_id: project3.id, type: 'JiraService') }
+ let!(:service18) { services.create!(id: 18, project_id: project3.id, type: 'OpenProjectService') }
+ let!(:service19) { services.create!(id: 19, project_id: project3.id, type: 'OpenProjectService') }
+ let!(:service20) { services.create!(id: 20, project_id: project3.id, type: 'SlackService') }
+ let!(:service21) { services.create!(id: 21, project_id: project3.id, type: 'SlackService') }
+ # One dependent row for each service of a duplicated pair on project3, so we
+ # can assert that deleting a duplicate service also removes its data rows.
+ let!(:dependant_records) do
+ alerts_service_data.create!(id: 1, service_id: service10.id)
+ alerts_service_data.create!(id: 2, service_id: service11.id)
+ chat_names.create!(id: 1, service_id: service12.id, user_id: user.id, team_id: 'team1', chat_id: 'chat1')
+ chat_names.create!(id: 2, service_id: service13.id, user_id: user.id, team_id: 'team2', chat_id: 'chat2')
+ issue_tracker_data.create!(id: 1, service_id: service14.id)
+ issue_tracker_data.create!(id: 2, service_id: service15.id)
+ jira_tracker_data.create!(id: 1, service_id: service16.id)
+ jira_tracker_data.create!(id: 2, service_id: service17.id)
+ open_project_tracker_data.create!(id: 1, service_id: service18.id)
+ open_project_tracker_data.create!(id: 2, service_id: service19.id)
+ slack_integrations.create!(id: 1, service_id: service20.id, user_id: user.id, team_id: 'team1', team_name: 'team1', alias: 'alias1')
+ slack_integrations.create!(id: 2, service_id: service21.id, user_id: user.id, team_id: 'team2', team_name: 'team2', alias: 'alias2')
+ web_hooks.create!(id: 1, service_id: service20.id)
+ web_hooks.create!(id: 2, service_id: service21.id)
+ end
+
+ # project without services
+ let!(:project4) { projects.create!(id: 4, namespace_id: namespace.id) }
+
+ it 'removes duplicate services and dependant records' do
+ # Determine which services we expect to keep
+ # NOTE(review): `take!` returns an arbitrary matching row, so this assumes
+ # the migration keeps the same row the query returns first — confirm this
+ # matches the migration's keep/delete ordering.
+ expected_services = projects.pluck(:id).each_with_object({}) do |project_id, map|
+ project_services = services.where(project_id: project_id)
+ types = project_services.distinct.pluck(:type)
+
+ map[project_id] = types.map { |type| project_services.where(type: type).take!.id }
+ end
+
+ # 9 duplicates across project2 (3) and project3 (6) should be removed.
+ expect do
+ subject.perform(project2.id, project3.id)
+ end.to change { services.count }.from(21).to(12)
+
+ services1 = services.where(project_id: project1.id)
+ expect(services1.count).to be(3)
+ expect(services1.pluck(:type)).to contain_exactly('AsanaService', 'JiraService', 'SlackService')
+ expect(services1.pluck(:id)).to contain_exactly(*expected_services[project1.id])
+
+ services2 = services.where(project_id: project2.id)
+ expect(services2.count).to be(3)
+ expect(services2.pluck(:type)).to contain_exactly('AsanaService', 'JiraService', 'SlackService')
+ expect(services2.pluck(:id)).to contain_exactly(*expected_services[project2.id])
+
+ services3 = services.where(project_id: project3.id)
+ expect(services3.count).to be(6)
+ expect(services3.pluck(:type)).to contain_exactly('AlertsService', 'SlashCommandsService', 'IssueTrackerService', 'JiraService', 'OpenProjectService', 'SlackService')
+ expect(services3.pluck(:id)).to contain_exactly(*expected_services[project3.id])
+
+ # Each data table started with two rows; only the one attached to the kept
+ # service should survive.
+ kept_services = expected_services.values.flatten
+ data_tables.each do |table|
+ expect(table.count).to be(1)
+ expect(kept_services).to include(table.pluck(:service_id).first)
+ end
+ end
+
+ it 'does not delete services without duplicates' do
+ expect do
+ subject.perform(project1.id, project4.id)
+ end.not_to change { services.count }
+ end
+
+ it 'only deletes duplicate services for the current batch' do
+ # Passing a single id limits the batch to project2, which has 3 duplicates.
+ expect do
+ subject.perform(project2.id)
+ end.to change { services.count }.by(-3)
+ end
+end