gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-11-19 11:27:35 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-11-19 11:27:35 +0300
commit    7e9c479f7de77702622631cff2628a9c8dcbc627 (patch)
tree      c8f718a08e110ad7e1894510980d2155a6549197 /spec/lib/gitlab/background_migration
parent    e852b0ae16db4052c1c567d9efa4facc81146e88 (diff)

Add latest changes from gitlab-org/gitlab@13-6-stable-ee (tag: v13.6.0-rc42)
Diffstat (limited to 'spec/lib/gitlab/background_migration')
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb                           69
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb                 65
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb               53
-rw-r--r--  spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb                                     98
-rw-r--r--  spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb                            63
-rw-r--r--  spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb    65
-rw-r--r--  spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb                                26

7 files changed, 359 insertions(+), 80 deletions(-)
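
Every file in this diffstat is a spec for a class under Gitlab::BackgroundMigration. These classes share one contract: a plain Ruby object whose #perform processes a single slice of rows, addressed either by an ID range or (for BackfillDesignInternalIds) by a pre-scoped relation, and the specs call #perform directly with hand-picked boundaries rather than going through Sidekiq. A minimal, purely illustrative sketch of that contract follows; the class, constant, and method names are hypothetical and not taken from this commit.

    # Illustrative only: a hypothetical class with the perform(start_id, stop_id)
    # contract that the specs in this diff exercise.
    module Gitlab
      module BackgroundMigration
        class ExampleBackfill
          BATCH_SIZE = 100

          # Each enqueued job handles one ID range; a separate scheduling
          # migration decides how the table is split into ranges.
          def perform(start_id, stop_id)
            (start_id..stop_id).each_slice(BATCH_SIZE) do |ids|
              backfill(ids)
            end
          end

          private

          def backfill(ids)
            # Placeholder: real migrations issue batched UPDATE statements here.
          end
        end
      end
    end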
diff --git a/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb b/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb
new file mode 100644
index 00000000000..4bf59a02a31
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillDesignInternalIds, :migration, schema: 20201030203854 do
+ subject { described_class.new(designs) }
+
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:designs) { table(:design_management_designs) }
+
+ let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:project_2) { projects.create!(namespace_id: namespace.id) }
+
+ def create_design!(proj = project)
+ designs.create!(project_id: proj.id, filename: generate(:filename))
+ end
+
+ def migrate!
+ relation = designs.where(project_id: [project.id, project_2.id]).select(:project_id).distinct
+
+ subject.perform(relation)
+ end
+
+ it 'backfills the iid for designs' do
+ 3.times { create_design! }
+
+ expect do
+ migrate!
+ end.to change { designs.pluck(:iid) }.from(contain_exactly(nil, nil, nil)).to(contain_exactly(1, 2, 3))
+ end
+
+ it 'scopes IIDs and handles range and starting-point correctly' do
+ create_design!.update!(iid: 10)
+ create_design!.update!(iid: 12)
+ create_design!(project_2).update!(iid: 7)
+ project_3 = projects.create!(namespace_id: namespace.id)
+
+ 2.times { create_design! }
+ 2.times { create_design!(project_2) }
+ 2.times { create_design!(project_3) }
+
+ migrate!
+
+ expect(designs.where(project_id: project.id).pluck(:iid)).to contain_exactly(10, 12, 13, 14)
+ expect(designs.where(project_id: project_2.id).pluck(:iid)).to contain_exactly(7, 8, 9)
+ expect(designs.where(project_id: project_3.id).pluck(:iid)).to contain_exactly(nil, nil)
+ end
+
+ it 'updates the internal ID records' do
+ design = create_design!
+ 2.times { create_design! }
+ design.update!(iid: 10)
+ scope = { project_id: project.id }
+ usage = :design_management_designs
+ init = ->(_d, _s) { 0 }
+
+ ::InternalId.track_greatest(design, scope, usage, 10, init)
+
+ migrate!
+
+ next_iid = ::InternalId.generate_next(design, scope, usage, init)
+
+ expect(designs.pluck(:iid)).to contain_exactly(10, 11, 12)
+ expect(design.reload.iid).to eq(10)
+ expect(next_iid).to eq(13)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
new file mode 100644
index 00000000000..7fe82420364
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20201028182809 do
+ let_it_be(:jira_service_temp) { described_class::JiraServiceTemp }
+ let_it_be(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp }
+ let_it_be(:atlassian_host) { 'https://api.atlassian.net' }
+ let_it_be(:mixedcase_host) { 'https://api.AtlassiaN.nEt' }
+ let_it_be(:server_host) { 'https://my.server.net' }
+
+ let(:jira_service) { jira_service_temp.create!(type: 'JiraService', active: true, category: 'issue_tracker') }
+
+ subject { described_class.new }
+
+ def create_tracker_data(options = {})
+ jira_tracker_data_temp.create!({ service_id: jira_service.id }.merge(options))
+ end
+
+ describe '#perform' do
+ context do
+ it 'ignores if deployment already set' do
+ tracker_data = create_tracker_data(url: atlassian_host, deployment_type: 'server')
+
+ expect(subject).not_to receive(:collect_deployment_type)
+
+ subject.perform(tracker_data.id, tracker_data.id)
+
+ expect(tracker_data.reload.deployment_type).to eq 'server'
+ end
+
+ it 'ignores if no url is set' do
+ tracker_data = create_tracker_data(deployment_type: 'unknown')
+
+ expect(subject).to receive(:collect_deployment_type)
+
+ subject.perform(tracker_data.id, tracker_data.id)
+
+ expect(tracker_data.reload.deployment_type).to eq 'unknown'
+ end
+ end
+
+ context 'when tracker is valid' do
+ let!(:tracker_1) { create_tracker_data(url: atlassian_host, deployment_type: 0) }
+ let!(:tracker_2) { create_tracker_data(url: mixedcase_host, deployment_type: 0) }
+ let!(:tracker_3) { create_tracker_data(url: server_host, deployment_type: 0) }
+ let!(:tracker_4) { create_tracker_data(api_url: server_host, deployment_type: 0) }
+ let!(:tracker_nextbatch) { create_tracker_data(api_url: atlassian_host, deployment_type: 0) }
+
+ it 'sets the proper deployment_type', :aggregate_failures do
+ subject.perform(tracker_1.id, tracker_4.id)
+
+ expect(tracker_1.reload.deployment_cloud?).to be_truthy
+ expect(tracker_2.reload.deployment_cloud?).to be_truthy
+ expect(tracker_3.reload.deployment_server?).to be_truthy
+ expect(tracker_4.reload.deployment_server?).to be_truthy
+ expect(tracker_nextbatch.reload.deployment_unknown?).to be_truthy
+ end
+ end
+
+ it_behaves_like 'marks background migration job records' do
+ let(:arguments) { [1, 4] }
+ end
+ end
+end
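
The it_behaves_like 'marks background migration job records' line above expects the migration to report completion for its range once #perform finishes. In GitLab migrations of this period that is usually a single call into the job-tracking table at the end of #perform; the sketch below is an assumed pattern and is not part of this diff, though the class name string does come from the spec.

    # Assumed shape of the tracking call made by migrations scheduled with
    # track_jobs: true; only the class name is taken from this diff.
    def perform(start_id, stop_id)
      # ... backfill deployment_type for trackers in the ID range ...

      ::Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
        'BackfillJiraTrackerDeploymentType2', [start_id, stop_id]
      )
    end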
diff --git a/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb b/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb
new file mode 100644
index 00000000000..c2daa35703d
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules, schema: 20201103110018 do
+ let(:merge_requests) { table(:merge_requests) }
+ let(:cleanup_schedules) { table(:merge_request_cleanup_schedules) }
+ let(:metrics) { table(:merge_request_metrics) }
+
+ let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id) }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ let!(:open_mr) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master') }
+
+ let!(:closed_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
+ let!(:closed_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
+ let!(:closed_mr_1_metrics) { metrics.create!(merge_request_id: closed_mr_1.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
+ let!(:closed_mr_2_metrics) { metrics.create!(merge_request_id: closed_mr_2.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
+ let!(:closed_mr_2_cleanup_schedule) { cleanup_schedules.create!(merge_request_id: closed_mr_2.id, scheduled_at: Time.current) }
+
+ let!(:merged_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3) }
+ let!(:merged_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3, updated_at: Time.current) }
+ let!(:merged_mr_1_metrics) { metrics.create!(merge_request_id: merged_mr_1.id, target_project_id: project.id, merged_at: Time.current, created_at: Time.current, updated_at: Time.current) }
+
+ let!(:closed_mr_3) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
+ let!(:closed_mr_3_metrics) { metrics.create!(merge_request_id: closed_mr_3.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
+
+ it 'creates records for all closed and merged merge requests in range' do
+ expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with(
+ message: 'Backfilled merge_request_cleanup_schedules records',
+ count: 3
+ )
+
+ subject.perform(open_mr.id, merged_mr_2.id)
+
+ aggregate_failures do
+ expect(cleanup_schedules.all.pluck(:merge_request_id))
+ .to contain_exactly(closed_mr_1.id, closed_mr_2.id, merged_mr_1.id, merged_mr_2.id)
+ expect(cleanup_schedules.find_by(merge_request_id: closed_mr_1.id).scheduled_at.to_s)
+ .to eq((closed_mr_1_metrics.latest_closed_at + 14.days).to_s)
+ expect(cleanup_schedules.find_by(merge_request_id: closed_mr_2.id).scheduled_at.to_s)
+ .to eq(closed_mr_2_cleanup_schedule.scheduled_at.to_s)
+ expect(cleanup_schedules.find_by(merge_request_id: merged_mr_1.id).scheduled_at.to_s)
+ .to eq((merged_mr_1_metrics.merged_at + 14.days).to_s)
+ expect(cleanup_schedules.find_by(merge_request_id: merged_mr_2.id).scheduled_at.to_s)
+ .to eq((merged_mr_2.updated_at + 14.days).to_s)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
index 1637589d272..934ab7e37f8 100644
--- a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
+++ b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
# rubocop: disable RSpec/FactoriesInMigrationSpecs
-RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
+RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover, :aggregate_failures do
let(:test_dir) { FileUploader.options['storage_path'] }
let(:filename) { 'image.png' }
@@ -67,27 +67,35 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
end
end
- shared_examples 'migrates the file correctly' do
- before do
+ shared_examples 'migrates the file correctly' do |remote|
+ it 'creates a new upload record correctly, updates the legacy upload note so that it references the file in the markdown, removes the attachment from the note model, removes the file, moves legacy uploads to the correct location, removes the upload record' do
+ expect(File.exist?(legacy_upload.absolute_path)).to be_truthy unless remote
+
described_class.new(legacy_upload).execute
- end
- it 'creates a new uplaod record correctly' do
expect(new_upload.secret).not_to be_nil
- expect(new_upload.path).to end_with("#{new_upload.secret}/image.png")
+ expect(new_upload.path).to end_with("#{new_upload.secret}/#{filename}")
expect(new_upload.model_id).to eq(project.id)
expect(new_upload.model_type).to eq('Project')
expect(new_upload.uploader).to eq('FileUploader')
- end
- it 'updates the legacy upload note so that it references the file in the markdown' do
- expected_path = File.join('/uploads', new_upload.secret, 'image.png')
+ expected_path = File.join('/uploads', new_upload.secret, filename)
expected_markdown = "some note \n ![image](#{expected_path})"
+
expect(note.reload.note).to eq(expected_markdown)
- end
+ expect(note.attachment.file).to be_nil
+
+ if remote
+ expect(bucket.files.get(remote_file[:key])).to be_nil
+ connection = ::Fog::Storage.new(FileUploader.object_store_credentials)
+ expect(connection.get_object('uploads', new_upload.path)[:status]).to eq(200)
+ else
+ expect(File.exist?(legacy_upload.absolute_path)).to be_falsey
+ expected_path = File.join(test_dir, 'uploads', project.disk_path, new_upload.secret, filename)
+ expect(File.exist?(expected_path)).to be_truthy
+ end
- it 'removes the attachment from the note model' do
- expect(note.reload.attachment.file).to be_nil
+ expect { legacy_upload.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
@@ -120,23 +128,6 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
end
context 'when the upload is in local storage' do
- shared_examples 'legacy local file' do
- it 'removes the file correctly' do
- expect(File.exist?(legacy_upload.absolute_path)).to be_truthy
-
- described_class.new(legacy_upload).execute
-
- expect(File.exist?(legacy_upload.absolute_path)).to be_falsey
- end
-
- it 'moves legacy uploads to the correct location' do
- described_class.new(legacy_upload).execute
-
- expected_path = File.join(test_dir, 'uploads', project.disk_path, new_upload.secret, filename)
- expect(File.exist?(expected_path)).to be_truthy
- end
- end
-
context 'when the upload file does not exist on the filesystem' do
let(:legacy_upload) { create_upload(note, filename, false) }
@@ -201,15 +192,11 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note, mount_point: nil)
end
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
context 'when the file can be handled correctly' do
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
end
@@ -217,17 +204,13 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
context 'when the file belongs to a legacy project' do
let(:project) { legacy_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
context 'when the file belongs to a hashed project' do
let(:project) { hashed_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
end
@@ -244,17 +227,13 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
context 'when the file belongs to a legacy project' do
let(:project) { legacy_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
context 'when the file belongs to a hashed project' do
let(:project) { hashed_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
end
end
@@ -272,23 +251,6 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
stub_uploads_object_storage(FileUploader)
end
- shared_examples 'legacy remote file' do
- it 'removes the file correctly' do
- # expect(bucket.files.get(remote_file[:key])).to be_nil
-
- described_class.new(legacy_upload).execute
-
- expect(bucket.files.get(remote_file[:key])).to be_nil
- end
-
- it 'moves legacy uploads to the correct remote location' do
- described_class.new(legacy_upload).execute
-
- connection = ::Fog::Storage.new(FileUploader.object_store_credentials)
- expect(connection.get_object('uploads', new_upload.path)[:status]).to eq(200)
- end
- end
-
context 'when the upload file does not exist on the filesystem' do
it_behaves_like 'legacy upload deletion'
end
@@ -300,9 +262,7 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
let(:project) { legacy_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy remote file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', true
end
context 'when the file belongs to a hashed project' do
@@ -312,9 +272,7 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
let(:project) { hashed_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy remote file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', true
end
end
end
diff --git a/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..c6385340ca3
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulateHasVulnerabilities, schema: 20201103192526 do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_settings) { table(:project_settings) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+
+ let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:vulnerability_base_params) { { title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, author_id: user.id } }
+
+ let!(:project_1) { projects.create!(namespace_id: namespace.id, name: 'foo_1') }
+ let!(:project_2) { projects.create!(namespace_id: namespace.id, name: 'foo_2') }
+ let!(:project_3) { projects.create!(namespace_id: namespace.id, name: 'foo_3') }
+
+ before do
+ project_settings.create!(project_id: project_1.id)
+ vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_1.id))
+ vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_3.id))
+
+ allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, error: true)
+ end
+
+ describe '#perform' do
+ it 'sets `has_vulnerabilities` attribute of project_settings' do
+ expect { subject.perform(project_1.id, project_3.id) }.to change { project_settings.count }.from(1).to(2)
+ .and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2)
+ end
+
+ it 'writes info log message' do
+ subject.perform(project_1.id, project_3.id)
+
+ expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
+ message: 'Projects has been processed to populate `has_vulnerabilities` information',
+ count: 2)
+ end
+
+ context 'when non-existing project_id is given' do
+ it 'populates only for the existing projects' do
+ expect { subject.perform(project_1.id, 0, project_3.id) }.to change { project_settings.count }.from(1).to(2)
+ .and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2)
+ end
+ end
+
+ context 'when an error happens' do
+ before do
+ allow(described_class::ProjectSetting).to receive(:upsert_for).and_raise('foo')
+ end
+
+ it 'writes error log message' do
+ subject.perform(project_1.id, project_3.id)
+
+ expect(::Gitlab::BackgroundMigration::Logger).to have_received(:error).with(migrator: described_class.name,
+ message: 'foo',
+ project_ids: [project_1.id, project_3.id])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb b/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb
new file mode 100644
index 00000000000..44c5f3d1381
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation, schema: 20201028160832 do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:findings) { table(:vulnerability_occurrences) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:identifiers) { table(:vulnerability_identifiers) }
+ let(:feedback) { table(:vulnerability_feedback) }
+
+ let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
+ let(:vulnerability_1) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
+ let(:vulnerability_2) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') }
+ let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'identifier') }
+
+ before do
+ feedback.create!(feedback_type: 0,
+ category: 'sast',
+ project_fingerprint: '418291a26024a1445b23fe64de9380cdcdfd1fa8',
+ project_id: project.id,
+ author_id: user.id,
+ created_at: Time.current)
+
+ findings.create!(name: 'Finding',
+ report_type: 'sast',
+ project_fingerprint: Gitlab::Database::ShaAttribute.new.serialize('418291a26024a1445b23fe64de9380cdcdfd1fa8'),
+ location_fingerprint: 'bar',
+ severity: 1,
+ confidence: 1,
+ metadata_version: 1,
+ raw_metadata: '',
+ uuid: SecureRandom.uuid,
+ project_id: project.id,
+ vulnerability_id: vulnerability_1.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: identifier.id)
+
+ allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, warn: true, error: true)
+ end
+
+ describe '#perform' do
+ it 'updates the missing dismissal information of the vulnerability' do
+ expect { subject.perform(vulnerability_1.id, vulnerability_2.id) }.to change { vulnerability_1.reload.dismissed_at }.from(nil)
+ .and change { vulnerability_1.reload.dismissed_by_id }.from(nil).to(user.id)
+ end
+
+ it 'writes log messages' do
+ subject.perform(vulnerability_1.id, vulnerability_2.id)
+
+ expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
+ message: 'Dismissal information has been copied',
+ count: 2)
+ expect(::Gitlab::BackgroundMigration::Logger).to have_received(:warn).with(migrator: described_class.name,
+ message: 'Could not update vulnerability!',
+ vulnerability_id: vulnerability_2.id)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
index fa4f2d1fd88..561a602fab9 100644
--- a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
+++ b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
@@ -9,28 +9,34 @@ RSpec.describe Gitlab::BackgroundMigration::ReplaceBlockedByLinks, schema: 20201
let(:issue2) { table(:issues).create!(project_id: project.id, title: 'b') }
let(:issue3) { table(:issues).create!(project_id: project.id, title: 'c') }
let(:issue_links) { table(:issue_links) }
- let!(:blocks_link) { issue_links.create!(source_id: issue1.id, target_id: issue2.id, link_type: 1) }
- let!(:bidirectional_link) { issue_links.create!(source_id: issue2.id, target_id: issue1.id, link_type: 2) }
- let!(:blocked_link) { issue_links.create!(source_id: issue1.id, target_id: issue3.id, link_type: 2) }
+ let!(:blocked_link1) { issue_links.create!(source_id: issue2.id, target_id: issue1.id, link_type: 2) }
+ let!(:opposite_link1) { issue_links.create!(source_id: issue1.id, target_id: issue2.id, link_type: 1) }
+ let!(:blocked_link2) { issue_links.create!(source_id: issue1.id, target_id: issue3.id, link_type: 2) }
+ let!(:opposite_link2) { issue_links.create!(source_id: issue3.id, target_id: issue1.id, link_type: 0) }
+ let!(:nochange_link) { issue_links.create!(source_id: issue2.id, target_id: issue3.id, link_type: 1) }
subject { described_class.new.perform(issue_links.minimum(:id), issue_links.maximum(:id)) }
- it 'deletes issue links where opposite relation already exists' do
- expect { subject }.to change { issue_links.count }.by(-1)
+ it 'deletes any opposite relations' do
+ subject
+
+ expect(issue_links.ids).to match_array([nochange_link.id, blocked_link1.id, blocked_link2.id])
end
it 'ignores issue links other than blocked_by' do
subject
- expect(blocks_link.reload.link_type).to eq(1)
+ expect(nochange_link.reload.link_type).to eq(1)
end
it 'updates blocked_by issue links' do
subject
- link = blocked_link.reload
- expect(link.link_type).to eq(1)
- expect(link.source_id).to eq(issue3.id)
- expect(link.target_id).to eq(issue1.id)
+ expect(blocked_link1.reload.link_type).to eq(1)
+ expect(blocked_link1.source_id).to eq(issue1.id)
+ expect(blocked_link1.target_id).to eq(issue2.id)
+ expect(blocked_link2.reload.link_type).to eq(1)
+ expect(blocked_link2.source_id).to eq(issue3.id)
+ expect(blocked_link2.target_id).to eq(issue1.id)
end
end
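
None of these specs cover how the jobs are enqueued; that happens in separate post-deployment migrations. For orientation, a hedged sketch of the usual scheduling shape follows: the helper names come from Gitlab::Database::MigrationHelpers as commonly used in this era, but the scheduling class name, interval, and batch size are assumptions, and only the string 'BackfillMergeRequestCleanupSchedules' appears in this diff.

    # Hypothetical post-deployment migration; the specifics are assumptions.
    class ScheduleBackfillMergeRequestCleanupSchedules < ActiveRecord::Migration[6.0]
      include Gitlab::Database::MigrationHelpers

      MIGRATION = 'BackfillMergeRequestCleanupSchedules'
      DELAY_INTERVAL = 2.minutes
      BATCH_SIZE = 10_000

      disable_ddl_transaction!

      class MergeRequest < ActiveRecord::Base
        include EachBatch

        self.table_name = 'merge_requests'
      end

      def up
        # Splits merge_requests into ID ranges and enqueues one tracked job
        # per range, spaced DELAY_INTERVAL apart.
        queue_background_migration_jobs_by_range_at_intervals(
          MergeRequest,
          MIGRATION,
          DELAY_INTERVAL,
          batch_size: BATCH_SIZE,
          track_jobs: true
        )
      end

      def down
        # The backfill is additive; nothing to undo on rollback.
      end
    end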