gitlab.com/gitlab-org/gitlab-foss.git

author     GitLab Bot <gitlab-bot@gitlab.com>  2021-04-21 02:50:22 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-04-21 02:50:22 +0300
commit     9dc93a4519d9d5d7be48ff274127136236a3adb3 (patch)
tree       70467ae3692a0e35e5ea56bcb803eb512a10bedb /spec/lib/gitlab/database
parent     4b0f34b6d759d6299322b3a54453e930c6121ff0 (diff)

Add latest changes from gitlab-org/gitlab@13-11-stable-ee (v13.11.0-rc43)
Diffstat (limited to 'spec/lib/gitlab/database')
 -rw-r--r--  spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb                         31
 -rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb             198
 -rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb                     51
 -rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb             94
 -rw-r--r--  spec/lib/gitlab/database/background_migration/scheduler_spec.rb                            182
 -rw-r--r--  spec/lib/gitlab/database/batch_count_spec.rb                                                 76
 -rw-r--r--  spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb                             71
 -rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb                                          241
 -rw-r--r--  spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb                     28
 -rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb     66
 -rw-r--r--  spec/lib/gitlab/database/pg_class_spec.rb                                                     37
 -rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb               3
 -rw-r--r--  spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb                                191
13 files changed, 999 insertions, 270 deletions
diff --git a/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb b/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb
new file mode 100644
index 00000000000..e96862fbc2d
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchMetrics do
+ let(:batch_metrics) { described_class.new }
+
+ describe '#time_operation' do
+ it 'tracks the duration of the operation using monotonic time' do
+ expect(batch_metrics.timings).to be_empty
+
+ expect(Gitlab::Metrics::System).to receive(:monotonic_time)
+ .exactly(6).times
+ .and_return(0.0, 111.0, 200.0, 290.0, 300.0, 410.0)
+
+ batch_metrics.time_operation(:my_label) do
+ # some operation
+ end
+
+ batch_metrics.time_operation(:my_other_label) do
+ # some operation
+ end
+
+ batch_metrics.time_operation(:my_label) do
+ # some operation
+ end
+
+ expect(batch_metrics.timings).to eq(my_label: [111.0, 110.0], my_other_label: [90.0])
+ end
+ end
+end
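
For orientation, a minimal implementation consistent with this spec could look like the sketch below: a hash of per-label timing arrays, with each block measured via Gitlab::Metrics::System.monotonic_time. This is an illustrative sketch, not the class added by this commit.

    # Illustrative sketch only; the real class ships elsewhere in this commit.
    module Gitlab
      module Database
        module BackgroundMigration
          class BatchMetrics
            attr_reader :timings

            def initialize
              @timings = {}
            end

            def time_operation(label)
              start_time = Gitlab::Metrics::System.monotonic_time

              yield
            ensure
              (timings[label] ||= []) << Gitlab::Metrics::System.monotonic_time - start_time
            end
          end
        end
      end
    end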
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
new file mode 100644
index 00000000000..7d0e10b62c6
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -0,0 +1,198 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
+ let(:migration_wrapper) { double('test wrapper') }
+ let(:runner) { described_class.new(migration_wrapper) }
+
+ describe '#run_migration_job' do
+ shared_examples_for 'it has completed the migration' do
+ it 'does not create and run a migration job' do
+ expect(migration_wrapper).not_to receive(:perform)
+
+ expect do
+ runner.run_migration_job(migration)
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedJob.count }
+ end
+
+ it 'marks the migration as finished' do
+ relation = Gitlab::Database::BackgroundMigration::BatchedMigration.finished.where(id: migration.id)
+
+ expect { runner.run_migration_job(migration) }.to change { relation.count }.by(1)
+ end
+ end
+
+ context 'when the migration has no previous jobs' do
+ let(:migration) { create(:batched_background_migration, :active, batch_size: 2) }
+
+ let(:job_relation) do
+ Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: migration.id)
+ end
+
+ context 'when the migration has batches to process' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ it 'runs the job for the first batch' do
+ migration.update!(min_value: event1.id, max_value: event2.id)
+
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.first)
+ end
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.first).to have_attributes(
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: migration.batch_size,
+ sub_batch_size: migration.sub_batch_size)
+ end
+ end
+
+ context 'when the batch maximum exceeds the migration maximum' do
+ let!(:events) { create_list(:event, 3) }
+ let(:event1) { events[0] }
+ let(:event2) { events[1] }
+
+ it 'clamps the batch maximum to the migration maximum' do
+ migration.update!(min_value: event1.id, max_value: event2.id, batch_size: 5)
+
+ expect(migration_wrapper).to receive(:perform)
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.first).to have_attributes(
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: migration.batch_size,
+ sub_batch_size: migration.sub_batch_size)
+ end
+ end
+
+ context 'when the migration has no batches to process' do
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+
+ context 'when the migration has previous jobs' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ let!(:migration) do
+ create(:batched_background_migration, :active, batch_size: 2, min_value: event1.id, max_value: event3.id)
+ end
+
+ let!(:previous_job) do
+ create(:batched_background_migration_job,
+ batched_migration: migration,
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: 2,
+ sub_batch_size: 1)
+ end
+
+ let(:job_relation) do
+ Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: migration.id)
+ end
+
+ context 'when the migration has batches to process' do
+ it 'runs the migration job for the next batch' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.last)
+ end
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.last).to have_attributes(
+ min_value: event3.id,
+ max_value: event3.id,
+ batch_size: migration.batch_size,
+ sub_batch_size: migration.sub_batch_size)
+ end
+
+ context 'when the batch minimum exceeds the migration maximum' do
+ before do
+ migration.update!(batch_size: 5, max_value: event2.id)
+ end
+
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+
+ context 'when the migration has no batches remaining' do
+ before do
+ create(:batched_background_migration_job,
+ batched_migration: migration,
+ min_value: event3.id,
+ max_value: event3.id,
+ batch_size: 2,
+ sub_batch_size: 1)
+ end
+
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+ end
+
+ describe '#run_entire_migration' do
+ context 'when not in a development or test environment' do
+ it 'raises an error' do
+ environment = double('environment', development?: false, test?: false)
+ migration = build(:batched_background_migration, :finished)
+
+ allow(Rails).to receive(:env).and_return(environment)
+
+ expect do
+ runner.run_entire_migration(migration)
+ end.to raise_error('this method is not intended for use in real environments')
+ end
+ end
+
+ context 'when the given migration is not active' do
+ it 'does not create and run migration jobs' do
+ migration = build(:batched_background_migration, :finished)
+
+ expect(migration_wrapper).not_to receive(:perform)
+
+ expect do
+ runner.run_entire_migration(migration)
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedJob.count }
+ end
+ end
+
+ context 'when the given migration is active' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ let!(:migration) do
+ create(:batched_background_migration, :active, batch_size: 2, min_value: event1.id, max_value: event3.id)
+ end
+
+ let(:job_relation) do
+ Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: migration.id)
+ end
+
+ it 'runs all jobs inline until finishing the migration' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.first)
+ end
+
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.last)
+ end
+
+ expect { runner.run_entire_migration(migration) }.to change { job_relation.count }.by(2)
+
+ expect(job_relation.first).to have_attributes(min_value: event1.id, max_value: event2.id)
+ expect(job_relation.last).to have_attributes(min_value: event3.id, max_value: event3.id)
+
+ expect(migration.reload).to be_finished
+ end
+ end
+ end
+end
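
In outline, the runner behavior these examples pin down is: build the next job from the previous job's upper bound (or the migration's min_value for the first job), clamp the batch to the migration's max_value, mark the migration finished when nothing remains, and only allow run_entire_migration in development or test. The sketch below is purely illustrative; in particular, the real runner derives batch bounds from rows in the target table rather than by integer arithmetic, and the helper find_next_batch_bounds is hypothetical.

    # Hedged sketch of the expected control flow (not the shipped implementation).
    class BatchedMigrationRunnerSketch
      def initialize(migration_wrapper)
        @migration_wrapper = migration_wrapper
      end

      def run_migration_job(migration)
        bounds = find_next_batch_bounds(migration)

        if bounds
          min, max = bounds
          job = migration.batched_jobs.create!(
            min_value: min,
            max_value: max,
            batch_size: migration.batch_size,
            sub_batch_size: migration.sub_batch_size)

          @migration_wrapper.perform(job)
        else
          migration.finished!
        end
      end

      def run_entire_migration(migration)
        unless Rails.env.development? || Rails.env.test?
          raise 'this method is not intended for use in real environments'
        end

        run_migration_job(migration) while migration.active?
      end

      private

      # Hypothetical helper: naive integer batching, clamped to the migration maximum.
      def find_next_batch_bounds(migration)
        next_min = (migration.batched_jobs.maximum(:max_value) || migration.min_value - 1) + 1
        return if next_min > migration.max_value

        [next_min, [next_min + migration.batch_size - 1, migration.max_value].min]
      end
    end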
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index f4a939e7c1f..261e23d0745 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -29,6 +29,16 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
+ describe '.active_migration' do
+ let!(:migration1) { create(:batched_background_migration, :finished) }
+ let!(:migration2) { create(:batched_background_migration, :active) }
+ let!(:migration3) { create(:batched_background_migration, :active) }
+
+ it 'returns the first active migration according to queue order' do
+ expect(described_class.active_migration).to eq(migration2)
+ end
+ end
+
describe '#interval_elapsed?' do
context 'when the migration has no last_job' do
let(:batched_migration) { build(:batched_background_migration) }
@@ -77,6 +87,34 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
end
+
+ context 'when an interval variance is given' do
+ let(:variance) { 2.seconds }
+
+ context 'when the last job is less than an interval with variance old' do
+ it 'returns false' do
+ freeze_time do
+ create(:batched_background_migration_job,
+ batched_migration: batched_migration,
+ created_at: Time.current - 1.minute - 57.seconds)
+
+ expect(batched_migration.interval_elapsed?(variance: variance)).to eq(false)
+ end
+ end
+ end
+
+ context 'when the last job is more than an interval with variance old' do
+ it 'returns true' do
+ freeze_time do
+ create(:batched_background_migration_job,
+ batched_migration: batched_migration,
+ created_at: Time.current - 1.minute - 58.seconds)
+
+ expect(batched_migration.interval_elapsed?(variance: variance)).to eq(true)
+ end
+ end
+ end
+ end
end
end
@@ -157,4 +195,17 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '#batch_class_name=' do
it_behaves_like 'an attr_writer that demodulizes assigned class names', :batch_class_name
end
+
+ describe '#prometheus_labels' do
+ let(:batched_migration) { create(:batched_background_migration, job_class_name: 'TestMigration', table_name: 'foo', column_name: 'bar') }
+
+ it 'returns a hash with labels for the migration' do
+ labels = {
+ migration_id: batched_migration.id,
+ migration_identifier: 'TestMigration/foo.bar'
+ }
+
+ expect(batched_migration.prometheus_labels).to eq(labels)
+ end
+ end
end
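
The two model additions exercised here can be pictured roughly as below. The sketch assumes the status enum and the job_class_name/table_name/column_name columns that already exist on the model, and that queue order means ascending id; only the 'JobClass/table.column' format of migration_identifier is pinned down by the spec.

    # Illustrative sketch of the new model pieces (not the shipped code).
    class BatchedMigrationSketch < ActiveRecord::Base
      self.table_name = :batched_background_migrations

      # Enum values here are illustrative; the real ones live in the application model.
      enum status: { paused: 0, active: 1, finished: 3, failed: 4 }

      scope :queue_order, -> { order(id: :asc) }

      def self.active_migration
        active.queue_order.first
      end

      def prometheus_labels
        @prometheus_labels ||= {
          migration_id: id,
          migration_identifier: "#{job_class_name}/#{table_name}.#{column_name}"
        }
      end
    end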
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
index 17cceb35ff7..00d13f23d36 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -3,43 +3,105 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '#perform' do
- let(:migration_wrapper) { described_class.new }
+ subject { described_class.new.perform(job_record) }
+
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
let_it_be(:active_migration) { create(:batched_background_migration, :active, job_arguments: [:id, :other_id]) }
let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) }
+ let(:job_instance) { double('job instance', batch_metrics: {}) }
+
+ before do
+ allow(job_class).to receive(:new).and_return(job_instance)
+ end
it 'runs the migration job' do
- expect_next_instance_of(job_class) do |job_instance|
- expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- end
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- migration_wrapper.perform(job_record)
+ subject
end
- it 'updates the the tracking record in the database' do
+ it 'updates the tracking record in the database' do
+ test_metrics = { 'my_metris' => 'some value' }
+
+ expect(job_instance).to receive(:perform)
+ expect(job_instance).to receive(:batch_metrics).and_return(test_metrics)
+
expect(job_record).to receive(:update!).with(hash_including(attempts: 1, status: :running)).and_call_original
freeze_time do
- migration_wrapper.perform(job_record)
+ subject
reloaded_job_record = job_record.reload
expect(reloaded_job_record).not_to be_pending
expect(reloaded_job_record.attempts).to eq(1)
expect(reloaded_job_record.started_at).to eq(Time.current)
+ expect(reloaded_job_record.metrics).to eq(test_metrics)
+ end
+ end
+
+ context 'reporting prometheus metrics' do
+ let(:labels) { job_record.batched_migration.prometheus_labels }
+
+ before do
+ allow(job_instance).to receive(:perform)
+ end
+
+ it 'reports batch_size' do
+ expect(described_class.metrics[:gauge_batch_size]).to receive(:set).with(labels, job_record.batch_size)
+
+ subject
+ end
+
+ it 'reports sub_batch_size' do
+ expect(described_class.metrics[:gauge_sub_batch_size]).to receive(:set).with(labels, job_record.sub_batch_size)
+
+ subject
+ end
+
+ it 'reports updated tuples (currently based on batch_size)' do
+ expect(described_class.metrics[:counter_updated_tuples]).to receive(:increment).with(labels, job_record.batch_size)
+
+ subject
+ end
+
+ it 'reports summary of query timings' do
+ metrics = { 'timings' => { 'update_all' => [1, 2, 3, 4, 5] } }
+
+ expect(job_instance).to receive(:batch_metrics).and_return(metrics)
+
+ metrics['timings'].each do |key, timings|
+ summary_labels = labels.merge(operation: key)
+ timings.each do |timing|
+ expect(described_class.metrics[:histogram_timings]).to receive(:observe).with(summary_labels, timing)
+ end
+ end
+
+ subject
+ end
+
+ it 'reports time efficiency' do
+ freeze_time do
+ expect(Time).to receive(:current).and_return(Time.zone.now - 5.seconds).ordered
+ expect(Time).to receive(:current).and_return(Time.zone.now).ordered
+
+ ratio = 5 / job_record.batched_migration.interval.to_f
+
+ expect(described_class.metrics[:histogram_time_efficiency]).to receive(:observe).with(labels, ratio)
+
+ subject
+ end
end
end
context 'when the migration job does not raise an error' do
it 'marks the tracking record as succeeded' do
- expect_next_instance_of(job_class) do |job_instance|
- expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- end
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
freeze_time do
- migration_wrapper.perform(job_record)
+ subject
reloaded_job_record = job_record.reload
@@ -51,14 +113,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
context 'when the migration job raises an error' do
it 'marks the tracking record as failed before raising the error' do
- expect_next_instance_of(job_class) do |job_instance|
- expect(job_instance).to receive(:perform)
- .with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- .and_raise(RuntimeError, 'Something broke!')
- end
+ expect(job_instance).to receive(:perform)
+ .with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ .and_raise(RuntimeError, 'Something broke!')
freeze_time do
- expect { migration_wrapper.perform(job_record) }.to raise_error(RuntimeError, 'Something broke!')
+ expect { subject }.to raise_error(RuntimeError, 'Something broke!')
reloaded_job_record = job_record.reload
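
The time-efficiency histogram asserted above records the ratio between how long a job actually took and the interval the migration is scheduled at, so values approaching 1 mean the jobs are barely keeping up with the schedule. A hedged sketch of that calculation (the method name is illustrative):

    # Illustrative: observe job duration relative to the migration's configured interval.
    def track_time_efficiency(job_record, started_at, finished_at)
      duration = finished_at - started_at
      ratio = duration / job_record.batched_migration.interval.to_f

      self.class.metrics[:histogram_time_efficiency]
        .observe(job_record.batched_migration.prometheus_labels, ratio)
    end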
diff --git a/spec/lib/gitlab/database/background_migration/scheduler_spec.rb b/spec/lib/gitlab/database/background_migration/scheduler_spec.rb
deleted file mode 100644
index ba745acdf8a..00000000000
--- a/spec/lib/gitlab/database/background_migration/scheduler_spec.rb
+++ /dev/null
@@ -1,182 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::BackgroundMigration::Scheduler, '#perform' do
- let(:scheduler) { described_class.new }
-
- shared_examples_for 'it has no jobs to run' do
- it 'does not create and run a migration job' do
- test_wrapper = double('test wrapper')
-
- expect(test_wrapper).not_to receive(:perform)
-
- expect do
- scheduler.perform(migration_wrapper: test_wrapper)
- end.not_to change { Gitlab::Database::BackgroundMigration::BatchedJob.count }
- end
- end
-
- context 'when there are no active migrations' do
- let!(:migration) { create(:batched_background_migration, :finished) }
-
- it_behaves_like 'it has no jobs to run'
- end
-
- shared_examples_for 'it has completed the migration' do
- it 'marks the migration as finished' do
- relation = Gitlab::Database::BackgroundMigration::BatchedMigration.finished.where(id: first_migration.id)
-
- expect { scheduler.perform }.to change { relation.count }.by(1)
- end
- end
-
- context 'when there are active migrations' do
- let!(:first_migration) { create(:batched_background_migration, :active, batch_size: 2) }
- let!(:last_migration) { create(:batched_background_migration, :active) }
-
- let(:job_relation) do
- Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: first_migration.id)
- end
-
- context 'when the migration interval has not elapsed' do
- before do
- expect_next_found_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigration) do |migration|
- expect(migration).to receive(:interval_elapsed?).and_return(false)
- end
- end
-
- it_behaves_like 'it has no jobs to run'
- end
-
- context 'when the interval has elapsed' do
- before do
- expect_next_found_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigration) do |migration|
- expect(migration).to receive(:interval_elapsed?).and_return(true)
- end
- end
-
- context 'when the first migration has no previous jobs' do
- context 'when the migration has batches to process' do
- let!(:event1) { create(:event) }
- let!(:event2) { create(:event) }
- let!(:event3) { create(:event) }
-
- it 'runs the job for the first batch' do
- first_migration.update!(min_value: event1.id, max_value: event3.id)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
- expect(wrapper).to receive(:perform).and_wrap_original do |_, job_record|
- expect(job_record).to eq(job_relation.first)
- end
- end
-
- expect { scheduler.perform }.to change { job_relation.count }.by(1)
-
- expect(job_relation.first).to have_attributes(
- min_value: event1.id,
- max_value: event2.id,
- batch_size: first_migration.batch_size,
- sub_batch_size: first_migration.sub_batch_size)
- end
- end
-
- context 'when the migration has no batches to process' do
- it_behaves_like 'it has no jobs to run'
- it_behaves_like 'it has completed the migration'
- end
- end
-
- context 'when the first migration has previous jobs' do
- let!(:event1) { create(:event) }
- let!(:event2) { create(:event) }
- let!(:event3) { create(:event) }
-
- let!(:previous_job) do
- create(:batched_background_migration_job,
- batched_migration: first_migration,
- min_value: event1.id,
- max_value: event2.id,
- batch_size: 2,
- sub_batch_size: 1)
- end
-
- context 'when the migration is ready to process another job' do
- it 'runs the migration job for the next batch' do
- first_migration.update!(min_value: event1.id, max_value: event3.id)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
- expect(wrapper).to receive(:perform).and_wrap_original do |_, job_record|
- expect(job_record).to eq(job_relation.last)
- end
- end
-
- expect { scheduler.perform }.to change { job_relation.count }.by(1)
-
- expect(job_relation.last).to have_attributes(
- min_value: event3.id,
- max_value: event3.id,
- batch_size: first_migration.batch_size,
- sub_batch_size: first_migration.sub_batch_size)
- end
- end
-
- context 'when the migration has no batches remaining' do
- let!(:final_job) do
- create(:batched_background_migration_job,
- batched_migration: first_migration,
- min_value: event3.id,
- max_value: event3.id,
- batch_size: 2,
- sub_batch_size: 1)
- end
-
- it_behaves_like 'it has no jobs to run'
- it_behaves_like 'it has completed the migration'
- end
- end
-
- context 'when the bounds of the next batch exceed the migration maximum value' do
- let!(:events) { create_list(:event, 3) }
- let(:event1) { events[0] }
- let(:event2) { events[1] }
-
- context 'when the batch maximum exceeds the migration maximum' do
- it 'clamps the batch maximum to the migration maximum' do
- first_migration.update!(batch_size: 5, min_value: event1.id, max_value: event2.id)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
- expect(wrapper).to receive(:perform)
- end
-
- expect { scheduler.perform }.to change { job_relation.count }.by(1)
-
- expect(job_relation.first).to have_attributes(
- min_value: event1.id,
- max_value: event2.id,
- batch_size: first_migration.batch_size,
- sub_batch_size: first_migration.sub_batch_size)
- end
- end
-
- context 'when the batch minimum exceeds the migration maximum' do
- let!(:previous_job) do
- create(:batched_background_migration_job,
- batched_migration: first_migration,
- min_value: event1.id,
- max_value: event2.id,
- batch_size: 5,
- sub_batch_size: 1)
- end
-
- before do
- first_migration.update!(batch_size: 5, min_value: 1, max_value: event2.id)
- end
-
- it_behaves_like 'it has no jobs to run'
- it_behaves_like 'it has completed the migration'
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 29688b18e94..da13bc425d1 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -270,6 +270,8 @@ RSpec.describe Gitlab::Database::BatchCount do
end
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE}" do
+ stub_feature_flags(loose_index_scan_for_distinct_values: false)
+
min_id = model.minimum(:id)
relation = instance_double(ActiveRecord::Relation)
allow(model).to receive_message_chain(:select, public_send: relation)
@@ -315,13 +317,85 @@ RSpec.describe Gitlab::Database::BatchCount do
end
end
- it_behaves_like 'when batch fetch query is canceled' do
+ context 'when the loose_index_scan_for_distinct_values feature flag is off' do
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :distinct }
+ let(:operation) { :count }
+ let(:operation_args) { nil }
+ let(:column) { nil }
+
+ subject { described_class.method(:batch_distinct_count) }
+
+ before do
+ stub_feature_flags(loose_index_scan_for_distinct_values: false)
+ end
+ end
+ end
+
+ context 'when the loose_index_scan_for_distinct_values feature flag is on' do
let(:mode) { :distinct }
let(:operation) { :count }
let(:operation_args) { nil }
let(:column) { nil }
+ let(:batch_size) { 10_000 }
+
subject { described_class.method(:batch_distinct_count) }
+
+ before do
+ stub_feature_flags(loose_index_scan_for_distinct_values: true)
+ end
+
+ it 'reduces batch size by half and retry fetch' do
+ too_big_batch_relation_mock = instance_double(ActiveRecord::Relation)
+
+ count_method = double(send: 1)
+
+ allow(too_big_batch_relation_mock).to receive(:send).and_raise(ActiveRecord::QueryCanceled)
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: 0, to: batch_size).and_return(too_big_batch_relation_mock)
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: 0, to: batch_size / 2).and_return(count_method)
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: batch_size / 2, to: batch_size).and_return(count_method)
+
+ subject.call(model, column, batch_size: batch_size, start: 0, finish: batch_size - 1)
+ end
+
+ context 'when all retries fail' do
+ let(:batch_count_query) { 'SELECT COUNT(id) FROM relation WHERE id BETWEEN 0 and 1' }
+
+ before do
+ relation = instance_double(ActiveRecord::Relation)
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).and_return(relation)
+ allow(relation).to receive(:send).and_raise(ActiveRecord::QueryCanceled.new('query timed out'))
+ allow(relation).to receive(:to_sql).and_return(batch_count_query)
+ end
+
+ it 'logs failing query' do
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(
+ event: 'batch_count',
+ relation: model.table_name,
+ operation: operation,
+ operation_args: operation_args,
+ start: 0,
+ mode: mode,
+ query: batch_count_query,
+ message: 'Query has been canceled with message: query timed out'
+ )
+ expect(subject.call(model, column, batch_size: batch_size, start: 0)).to eq(-1)
+ end
+ end
+
+ context 'when LooseIndexScanDistinctCount raises error' do
+ let(:column) { :creator_id }
+ let(:error_class) { Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError }
+
+ it 'rescues ColumnConfigurationError' do
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive(:new).and_raise(error_class.new('error message'))
+
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(a_hash_including(message: 'LooseIndexScanDistinctCount column error: error message'))
+
+ expect(subject.call(Project, column, batch_size: 10_000, start: 0)).to eq(-1)
+ end
+ end
end
end
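
The distinct-count path behind these examples degrades gracefully: a canceled batch query is retried as two half-sized batches, exhausted retries log the failing query and return -1, and a column misconfiguration in LooseIndexScanDistinctCount is rescued and also falls back to -1. The sketch below only illustrates that control flow; the retry bookkeeping in the real counter is more involved.

    # Illustrative fallback flow for batch_distinct_count with loose index scan enabled.
    FALLBACK = -1

    def distinct_count_batch(model, column, from:, to:)
      scan = Gitlab::Database::LooseIndexScanDistinctCount.new(model, column)

      begin
        scan.count(from: from, to: to)
      rescue ActiveRecord::QueryCanceled
        mid = from + ((to - from) / 2)

        begin
          # Retry the canceled range as two half-sized batches.
          scan.count(from: from, to: mid) + scan.count(from: mid, to: to)
        rescue ActiveRecord::QueryCanceled => error
          Gitlab::AppJsonLogger.error(
            event: 'batch_count',
            relation: model.table_name,
            message: "Query has been canceled with message: #{error.message}"
          )
          FALLBACK
        end
      end
    rescue Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError => error
      Gitlab::AppJsonLogger.error(message: "LooseIndexScanDistinctCount column error: #{error.message}")
      FALLBACK
    end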
diff --git a/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb b/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb
new file mode 100644
index 00000000000..e0eac26e4d9
--- /dev/null
+++ b/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::LooseIndexScanDistinctCount do
+ context 'counting distinct users' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ let(:column) { :creator_id }
+
+ before_all do
+ create_list(:project, 3, creator: user)
+ create_list(:project, 1, creator: other_user)
+ end
+
+ subject(:count) { described_class.new(Project, :creator_id).count(from: Project.minimum(:creator_id), to: Project.maximum(:creator_id) + 1) }
+
+ it { is_expected.to eq(2) }
+
+ context 'when STI model is queried' do
+ it 'does not raise error' do
+ expect { described_class.new(Group, :owner_id).count(from: 0, to: 1) }.not_to raise_error
+ end
+ end
+
+ context 'when model with default_scope is queried' do
+ it 'does not raise error' do
+ expect { described_class.new(GroupMember, :id).count(from: 0, to: 1) }.not_to raise_error
+ end
+ end
+
+ context 'when the fully qualified column is given' do
+ let(:column) { 'projects.creator_id' }
+
+ it { is_expected.to eq(2) }
+ end
+
+ context 'when AR attribute is given' do
+ let(:column) { Project.arel_table[:creator_id] }
+
+ it { is_expected.to eq(2) }
+ end
+
+ context 'when invalid value is given for the column' do
+ let(:column) { Class.new }
+
+ it { expect { described_class.new(Group, column) }.to raise_error(Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError) }
+ end
+
+ context 'when null values are present' do
+ before do
+ create_list(:project, 2).each { |p| p.update_column(:creator_id, nil) }
+ end
+
+ it { is_expected.to eq(2) }
+ end
+ end
+
+ context 'counting STI models' do
+ let!(:groups) { create_list(:group, 3) }
+ let!(:namespaces) { create_list(:namespace, 2) }
+
+ let(:max_id) { Namespace.maximum(:id) + 1 }
+
+ it 'counts groups' do
+ count = described_class.new(Group, :id).count(from: 0, to: max_id)
+ expect(count).to eq(3)
+ end
+ end
+end
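
For readers unfamiliar with the technique: a loose index scan counts distinct values by repeatedly asking the index for the smallest value greater than the last one seen, usually via a recursive CTE, instead of scanning every row. A hedged sketch of what such a query could look like for projects.creator_id (this is the general PostgreSQL pattern, not the exact SQL the class generates):

    # Hypothetical shape of a loose-index-scan distinct count over projects.creator_id.
    loose_index_scan_sql = <<~SQL
      WITH RECURSIVE distinct_values AS (
        (SELECT creator_id FROM projects WHERE creator_id >= 1 ORDER BY creator_id LIMIT 1)
        UNION ALL
        SELECT (
          SELECT p.creator_id
          FROM projects p
          WHERE p.creator_id > distinct_values.creator_id AND p.creator_id < 10000
          ORDER BY p.creator_id
          LIMIT 1
        )
        FROM distinct_values
        WHERE distinct_values.creator_id IS NOT NULL
      )
      SELECT COUNT(creator_id) FROM distinct_values
    SQL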
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 9178707a3d0..44293086e79 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -835,7 +835,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:check_trigger_permissions!).with(:users)
expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"new"')
+ .with(:users, :old, :new)
expect(model).to receive(:add_column)
.with(:users, :new, :integer,
@@ -860,14 +860,18 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'with existing records and type casting' do
let(:trigger_name) { model.rename_trigger_name(:users, :id, :new) }
let(:user) { create(:user) }
+ let(:copy_trigger) { double('copy trigger') }
+
+ before do
+ expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
+ .with(:users).and_return(copy_trigger)
+ end
it 'copies the value to the new column using the type_cast_function', :aggregate_failures do
expect(model).to receive(:copy_indexes).with(:users, :id, :new)
expect(model).to receive(:add_not_null_constraint).with(:users, :new)
expect(model).to receive(:execute).with("UPDATE \"users\" SET \"new\" = cast_to_jsonb_with_default(\"users\".\"id\") WHERE \"users\".\"id\" >= #{user.id}")
- expect(model).to receive(:execute).with("DROP TRIGGER IF EXISTS #{trigger_name}\nON \"users\"\n")
- expect(model).to receive(:execute).with("CREATE TRIGGER #{trigger_name}\nBEFORE INSERT OR UPDATE\nON \"users\"\nFOR EACH ROW\nEXECUTE FUNCTION #{trigger_name}()\n")
- expect(model).to receive(:execute).with("CREATE OR REPLACE FUNCTION #{trigger_name}()\nRETURNS trigger AS\n$BODY$\nBEGIN\n NEW.\"new\" := NEW.\"id\";\n RETURN NEW;\nEND;\n$BODY$\nLANGUAGE 'plpgsql'\nVOLATILE\n")
+ expect(copy_trigger).to receive(:create).with(:id, :new, trigger_name: nil)
model.rename_column_concurrently(:users, :id, :new, type_cast_function: 'cast_to_jsonb_with_default')
end
@@ -996,7 +1000,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:check_trigger_permissions!).with(:users)
expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"new"')
+ .with(:users, :old, :new)
expect(model).to receive(:add_column)
.with(:users, :old, :integer,
@@ -1156,7 +1160,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.with(:users, temp_undo_cleanup_column, :old)
expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"old_for_type_change"')
+ .with(:users, :old, 'old_for_type_change')
model.undo_cleanup_concurrent_column_type_change(:users, :old, :string)
end
@@ -1182,7 +1186,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.with(:users, temp_undo_cleanup_column, :old)
expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"old_for_type_change"')
+ .with(:users, :old, 'old_for_type_change')
model.undo_cleanup_concurrent_column_type_change(
:users,
@@ -1204,28 +1208,25 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#install_rename_triggers_for_postgresql' do
it 'installs the triggers for PostgreSQL' do
- expect(model).to receive(:execute)
- .with(/CREATE OR REPLACE FUNCTION foo()/m)
+ copy_trigger = double('copy trigger')
- expect(model).to receive(:execute)
- .with(/DROP TRIGGER IF EXISTS foo/m)
+ expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
+ .with(:users).and_return(copy_trigger)
- expect(model).to receive(:execute)
- .with(/CREATE TRIGGER foo/m)
+ expect(copy_trigger).to receive(:create).with(:old, :new, trigger_name: 'foo')
- model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
- end
-
- it 'does not fail if trigger already exists' do
- model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
- model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
+ model.install_rename_triggers_for_postgresql(:users, :old, :new, trigger_name: 'foo')
end
end
describe '#remove_rename_triggers_for_postgresql' do
it 'removes the function and trigger' do
- expect(model).to receive(:execute).with('DROP TRIGGER IF EXISTS foo ON bar')
- expect(model).to receive(:execute).with('DROP FUNCTION IF EXISTS foo()')
+ copy_trigger = double('copy trigger')
+
+ expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
+ .with('bar').and_return(copy_trigger)
+
+ expect(copy_trigger).to receive(:drop).with('foo')
model.remove_rename_triggers_for_postgresql('bar', 'foo')
end
@@ -1702,65 +1703,171 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
describe '#initialize_conversion_of_integer_to_bigint' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:issue) { create(:issue, project: project) }
- let!(:event) do
- create(:event, :created, project: project, target: issue, author: user)
+ let(:table) { :test_table }
+ let(:column) { :id }
+ let(:tmp_column) { "#{column}_convert_to_bigint" }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer :id, primary_key: true
+ t.integer :non_nullable_column, null: false
+ t.integer :nullable_column
+ t.timestamps
+ end
end
- context 'in a transaction' do
- it 'raises RuntimeError' do
- allow(model).to receive(:transaction_open?).and_return(true)
+ context 'when the target table does not exist' do
+ it 'raises an error' do
+ expect { model.initialize_conversion_of_integer_to_bigint(:this_table_is_not_real, column) }
+ .to raise_error('Table this_table_is_not_real does not exist')
+ end
+ end
- expect { model.initialize_conversion_of_integer_to_bigint(:events, :id) }
- .to raise_error(RuntimeError)
+ context 'when the primary key does not exist' do
+ it 'raises an error' do
+ expect { model.initialize_conversion_of_integer_to_bigint(table, column, primary_key: :foobar) }
+ .to raise_error("Column foobar does not exist on #{table}")
end
end
- context 'outside a transaction' do
- before do
- allow(model).to receive(:transaction_open?).and_return(false)
+ context 'when the column to convert does not exist' do
+ let(:column) { :foobar }
+
+ it 'raises an error' do
+ expect { model.initialize_conversion_of_integer_to_bigint(table, column) }
+ .to raise_error("Column #{column} does not exist on #{table}")
end
+ end
- it 'creates a bigint column and starts backfilling it' do
- expect(model)
- .to receive(:add_column)
- .with(
- :events,
- 'id_convert_to_bigint',
- :bigint,
- default: 0,
- null: false
- )
+ context 'when the column to convert is the primary key' do
+ it 'creates a not-null bigint column and installs triggers' do
+ expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false)
- expect(model)
- .to receive(:install_rename_triggers)
- .with(:events, :id, 'id_convert_to_bigint')
+ expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
- expect(model).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original
+ model.initialize_conversion_of_integer_to_bigint(table, column)
+ end
+ end
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in)
- .ordered
- .with(
- 2.minutes,
- 'CopyColumnUsingBackgroundMigrationJob',
- [event.id, event.id, :events, :id, 100, :id, 'id_convert_to_bigint']
- )
+ context 'when the column to convert is not the primary key, but non-nullable' do
+ let(:column) { :non_nullable_column }
+
+ it 'creates a not-null bigint column and installs triggers' do
+ expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false)
+
+ expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+
+ model.initialize_conversion_of_integer_to_bigint(table, column)
+ end
+ end
+
+ context 'when the column to convert is not the primary key, but nullable' do
+ let(:column) { :nullable_column }
+
+ it 'creates a nullable bigint column and installs triggers' do
+ expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: nil)
+
+ expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+
+ model.initialize_conversion_of_integer_to_bigint(table, column)
+ end
+ end
+ end
+
+ describe '#backfill_conversion_of_integer_to_bigint' do
+ let(:table) { :_test_backfill_table }
+ let(:column) { :id }
+ let(:tmp_column) { "#{column}_convert_to_bigint" }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer :id, primary_key: true
+ t.text :message, null: false
+ t.timestamps
+ end
- expect(Gitlab::BackgroundMigration)
- .to receive(:steal)
- .ordered
- .with('CopyColumnUsingBackgroundMigrationJob')
+ allow(model).to receive(:perform_background_migration_inline?).and_return(false)
+ end
- model.initialize_conversion_of_integer_to_bigint(
- :events,
- :id,
- batch_size: 300,
- sub_batch_size: 100
+ context 'when the target table does not exist' do
+ it 'raises an error' do
+ expect { model.backfill_conversion_of_integer_to_bigint(:this_table_is_not_real, column) }
+ .to raise_error('Table this_table_is_not_real does not exist')
+ end
+ end
+
+ context 'when the primary key does not exist' do
+ it 'raises an error' do
+ expect { model.backfill_conversion_of_integer_to_bigint(table, column, primary_key: :foobar) }
+ .to raise_error("Column foobar does not exist on #{table}")
+ end
+ end
+
+ context 'when the column to convert does not exist' do
+ let(:column) { :foobar }
+
+ it 'raises an error' do
+ expect { model.backfill_conversion_of_integer_to_bigint(table, column) }
+ .to raise_error("Column #{column} does not exist on #{table}")
+ end
+ end
+
+ context 'when the temporary column does not exist' do
+ it 'raises an error' do
+ expect { model.backfill_conversion_of_integer_to_bigint(table, column) }
+ .to raise_error('The temporary column does not exist, initialize it with `initialize_conversion_of_integer_to_bigint`')
+ end
+ end
+
+ context 'when the conversion is properly initialized' do
+ let(:model_class) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = :_test_backfill_table
+ end
+ end
+
+ let(:migration_relation) { Gitlab::Database::BackgroundMigration::BatchedMigration.active }
+
+ before do
+ model.initialize_conversion_of_integer_to_bigint(table, column)
+
+ model_class.create!(message: 'hello')
+ model_class.create!(message: 'so long')
+ end
+
+ it 'creates the batched migration tracking record' do
+ last_record = model_class.create!(message: 'goodbye')
+
+ expect do
+ model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
+ end.to change { migration_relation.count }.by(1)
+
+ expect(migration_relation.last).to have_attributes(
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: table.to_s,
+ column_name: column.to_s,
+ min_value: 1,
+ max_value: last_record.id,
+ interval: 120,
+ batch_size: 2,
+ sub_batch_size: 1,
+ job_arguments: [column.to_s, "#{column}_convert_to_bigint"]
)
end
+
+ context 'when the migration should be performed inline' do
+ it 'calls the runner to run the entire migration' do
+ expect(model).to receive(:perform_background_migration_inline?).and_return(true)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |scheduler|
+ expect(scheduler).to receive(:run_entire_migration) do |batched_migration|
+ expect(batched_migration).to eq(migration_relation.last)
+ end
+ end
+
+ model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
+ end
+ end
end
end
@@ -1910,9 +2017,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
-
- project
+ projects.create!(namespace_id: namespace.id)
end
it 'generates iids properly for models created after the migration' do
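
The two conversion helpers covered above are intended to be used from separate migrations: one to add the shadow bigint column and install the copy triggers, one to enqueue the batched background migration that backfills it. A hedged usage sketch (class names, table, and batch sizes are illustrative):

    # Illustrative migrations using the conversion helpers (not part of this commit).
    class InitializeConversionOfEventsIdToBigint < ActiveRecord::Migration[6.0]
      include Gitlab::Database::MigrationHelpers

      def up
        # Adds events.id_convert_to_bigint and installs the copy triggers.
        initialize_conversion_of_integer_to_bigint(:events, :id)
      end

      def down
        # Reverting (dropping the column and triggers) is left out of this sketch.
      end
    end

    class BackfillConversionOfEventsIdToBigint < ActiveRecord::Migration[6.0]
      include Gitlab::Database::MigrationHelpers

      def up
        # Enqueues CopyColumnUsingBackgroundMigrationJob batches via the tracking tables.
        backfill_conversion_of_integer_to_bigint(:events, :id, batch_size: 20_000, sub_batch_size: 1_000)
      end

      def down
        # Cleanup of the batched migration records is left out of this sketch.
      end
    end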
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index e25e4af2e86..c6d456964cf 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -263,7 +263,15 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
end
describe '#queue_batched_background_migration' do
+ let(:pgclass_info) { instance_double('Gitlab::Database::PgClass', cardinality_estimate: 42) }
+
+ before do
+ allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original
+ end
+
it 'creates the database record for the migration' do
+ expect(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info)
+
expect do
model.queue_batched_background_migration(
'MyJobClass',
@@ -288,7 +296,8 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
batch_size: 100,
sub_batch_size: 10,
job_arguments: %w[],
- status: 'active')
+ status: 'active',
+ total_tuple_count: pgclass_info.cardinality_estimate)
end
context 'when the job interval is lower than the minimum' do
@@ -431,4 +440,21 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
model.bulk_migrate_in(10.minutes, [%w(Class hello world)])
end
end
+
+ describe '#delete_queued_jobs' do
+ let(:job1) { double }
+ let(:job2) { double }
+
+ it 'deletes all queued jobs for the given background migration' do
+ expect(Gitlab::BackgroundMigration).to receive(:steal).with('BackgroundMigrationClassName') do |&block|
+ expect(block.call(job1)).to be(false)
+ expect(block.call(job2)).to be(false)
+ end
+
+ expect(job1).to receive(:delete)
+ expect(job2).to receive(:delete)
+
+ model.delete_queued_jobs('BackgroundMigrationClassName')
+ end
+ end
end
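
For context, a hedged example of how a post-deployment migration might invoke the helper whose tracking record is asserted above (job class and sizes are illustrative; the :projects table and argument order follow the spec):

    # Illustrative call site for queue_batched_background_migration.
    queue_batched_background_migration(
      'MyJobClass',            # background migration job class name
      :projects,               # table whose rows are batched over
      :id,                     # batching column
      job_interval: 2.minutes,
      batch_size: 10_000,
      sub_batch_size: 1_000
    )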
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index b5d741fc5e9..5b2a29d1d2d 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -704,6 +704,72 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
end
+ describe '#drop_nonpartitioned_archive_table' do
+ subject { migration.drop_nonpartitioned_archive_table source_table }
+
+ let(:archived_table) { "#{source_table}_archived" }
+
+ before do
+ migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
+ migration.replace_with_partitioned_table source_table
+ end
+
+ it 'drops the archive table' do
+ expect(table_type(archived_table)).to eq('normal')
+
+ subject
+
+ expect(table_type(archived_table)).to eq(nil)
+ end
+
+ it 'drops the trigger on the source table' do
+ expect_valid_function_trigger(source_table, trigger_name, function_name, after: %w[delete insert update])
+
+ subject
+
+ expect_trigger_not_to_exist(source_table, trigger_name)
+ end
+
+ it 'drops the sync function' do
+ expect_function_to_exist(function_name)
+
+ subject
+
+ expect_function_not_to_exist(function_name)
+ end
+ end
+
+ describe '#create_trigger_to_sync_tables' do
+ subject { migration.create_trigger_to_sync_tables(source_table, target_table, :id) }
+
+ let(:target_table) { "#{source_table}_copy" }
+
+ before do
+ migration.create_table target_table do |t|
+ t.string :name, null: false
+ t.integer :age, null: false
+ t.datetime partition_column
+ t.datetime :updated_at
+ end
+ end
+
+ it 'creates the sync function' do
+ expect_function_not_to_exist(function_name)
+
+ subject
+
+ expect_function_to_exist(function_name)
+ end
+
+ it 'installs the trigger' do
+ expect_trigger_not_to_exist(source_table, trigger_name)
+
+ subject
+
+ expect_valid_function_trigger(source_table, trigger_name, function_name, after: %w[delete insert update])
+ end
+ end
+
def filter_columns_by_name(columns, names)
columns.reject { |c| names.include?(c.name) }
end
diff --git a/spec/lib/gitlab/database/pg_class_spec.rb b/spec/lib/gitlab/database/pg_class_spec.rb
new file mode 100644
index 00000000000..83b50415a6c
--- /dev/null
+++ b/spec/lib/gitlab/database/pg_class_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PgClass, type: :model do
+ describe '#cardinality_estimate' do
+ context 'when no information is available' do
+ subject { described_class.new(reltuples: 0.0).cardinality_estimate }
+
+ it 'returns nil for the estimate' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with reltuples available' do
+ subject { described_class.new(reltuples: 42.0).cardinality_estimate }
+
+ it 'returns the reltuples for the estimate' do
+ expect(subject).to eq(42)
+ end
+ end
+ end
+
+ describe '.for_table' do
+ let(:relname) { :projects }
+
+ subject { described_class.for_table(relname) }
+
+ it 'returns PgClass for this table' do
+ expect(subject).to be_a(described_class)
+ end
+
+ it 'matches the relname' do
+ expect(subject.relname).to eq(relname.to_s)
+ end
+ end
+end
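
Gitlab::Database::PgClass wraps PostgreSQL's pg_class catalog so migrations can read the planner's row estimate (reltuples) instead of running COUNT(*). A minimal sketch consistent with these examples (illustrative; the relkind filter is an assumption):

    # Illustrative pg_class-backed read model.
    class PgClassSketch < ActiveRecord::Base
      self.table_name = 'pg_class'
      self.primary_key = 'oid'

      def self.for_table(relname)
        # Assumption: restrict to ordinary ('r') and partitioned ('p') tables.
        find_by(relname: relname, relkind: %w[r p])
      end

      def cardinality_estimate
        return if reltuples.to_f < 1

        reltuples.to_i
      end
    end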
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 757da2d9092..1edcd890370 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -246,7 +246,8 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :
subject.track_rename('namespace', 'path/to/namespace', 'path/to/renamed')
- old_path, new_path = [nil, nil]
+ old_path = nil
+ new_path = nil
Gitlab::Redis::SharedState.with do |redis|
rename_info = redis.lpop(key)
old_path, new_path = Gitlab::Json.parse(rename_info)
diff --git a/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb b/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb
new file mode 100644
index 00000000000..2955c208f16
--- /dev/null
+++ b/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb
@@ -0,0 +1,191 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::UnidirectionalCopyTrigger do
+ include Database::TriggerHelpers
+
+ let(:table_name) { '_test_table' }
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:copy_trigger) { described_class.on_table(table_name) }
+
+ describe '#name' do
+ context 'when a single column name is given' do
+ subject(:trigger_name) { copy_trigger.name('id', 'other_id') }
+
+ it 'returns the trigger name' do
+ expect(trigger_name).to eq('trigger_cfce7a56a9d6')
+ end
+ end
+
+ context 'when multiple column names are given' do
+ subject(:trigger_name) { copy_trigger.name(%w[id fk_id], %w[other_id other_fk_id]) }
+
+ it 'returns the trigger name' do
+ expect(trigger_name).to eq('trigger_166626e51481')
+ end
+ end
+
+ context 'when a different number of new and old column names are given' do
+ it 'raises an error' do
+ expect do
+ copy_trigger.name(%w[id fk_id], %w[other_id])
+ end.to raise_error(ArgumentError, 'number of source and destination columns must match')
+ end
+ end
+ end
+
+ describe '#create' do
+ let(:model) { Class.new(ActiveRecord::Base) }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ other_id integer,
+ fk_id bigint,
+ other_fk_id bigint);
+ SQL
+
+ model.table_name = table_name
+ end
+
+ context 'when a single column name is given' do
+ let(:trigger_name) { 'trigger_cfce7a56a9d6' }
+
+ it 'creates the trigger and function' do
+ expect_function_not_to_exist(trigger_name)
+ expect_trigger_not_to_exist(table_name, trigger_name)
+
+ copy_trigger.create('id', 'other_id')
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+ end
+
+ it 'properly copies the column data using the trigger function' do
+ copy_trigger.create('id', 'other_id')
+
+ record = model.create!(id: 10)
+ expect(record.reload).to have_attributes(other_id: 10)
+
+ record.update!({ id: 20 })
+ expect(record.reload).to have_attributes(other_id: 20)
+ end
+ end
+
+ context 'when multiple column names are given' do
+ let(:trigger_name) { 'trigger_166626e51481' }
+
+ it 'creates the trigger and function to set all the columns' do
+ expect_function_not_to_exist(trigger_name)
+ expect_trigger_not_to_exist(table_name, trigger_name)
+
+ copy_trigger.create(%w[id fk_id], %w[other_id other_fk_id])
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+ end
+
+ it 'properly copies the columns using the trigger function' do
+ copy_trigger.create(%w[id fk_id], %w[other_id other_fk_id])
+
+ record = model.create!(id: 10, fk_id: 20)
+ expect(record.reload).to have_attributes(other_id: 10, other_fk_id: 20)
+
+ record.update!(id: 30, fk_id: 50)
+ expect(record.reload).to have_attributes(other_id: 30, other_fk_id: 50)
+ end
+ end
+
+ context 'when a custom trigger name is given' do
+ let(:trigger_name) { '_test_trigger' }
+
+ it 'creates the trigger and function with the custom name' do
+ expect_function_not_to_exist(trigger_name)
+ expect_trigger_not_to_exist(table_name, trigger_name)
+
+ copy_trigger.create('id', 'other_id', trigger_name: trigger_name)
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+ end
+ end
+
+ context 'when the trigger function already exists' do
+ let(:trigger_name) { 'trigger_cfce7a56a9d6' }
+
+ it 'does not raise an error' do
+ expect_function_not_to_exist(trigger_name)
+ expect_trigger_not_to_exist(table_name, trigger_name)
+
+ copy_trigger.create('id', 'other_id')
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+
+ copy_trigger.create('id', 'other_id')
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+ end
+ end
+
+ context 'when a different number of new and old column names are given' do
+ it 'raises an error' do
+ expect do
+ copy_trigger.create(%w[id fk_id], %w[other_id])
+ end.to raise_error(ArgumentError, 'number of source and destination columns must match')
+ end
+ end
+ end
+
+ describe '#drop' do
+ let(:trigger_name) { '_test_trigger' }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ other_id integer NOT NULL);
+
+ CREATE FUNCTION #{trigger_name}()
+ RETURNS trigger
+ LANGUAGE plpgsql AS
+ $$
+ BEGIN
+ RAISE NOTICE 'hello';
+ RETURN NEW;
+ END
+ $$;
+
+ CREATE TRIGGER #{trigger_name}
+ BEFORE INSERT OR UPDATE
+ ON #{table_name}
+ FOR EACH ROW
+ EXECUTE FUNCTION #{trigger_name}();
+ SQL
+ end
+
+ it 'drops the trigger and function for the given arguments' do
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+
+ copy_trigger.drop(trigger_name)
+
+ expect_trigger_not_to_exist(table_name, trigger_name)
+ expect_function_not_to_exist(trigger_name)
+ end
+
+ context 'when the trigger does not exist' do
+ it 'does not raise an error' do
+ copy_trigger.drop(trigger_name)
+
+ expect_trigger_not_to_exist(table_name, trigger_name)
+ expect_function_not_to_exist(trigger_name)
+
+ copy_trigger.drop(trigger_name)
+ end
+ end
+ end
+end
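
For context, the function and trigger that copy_trigger.create('id', 'other_id') installs are expected to look roughly like the SQL below, keeping other_id in sync with id on every insert and update (a hedged sketch of the generated statements, reusing the trigger name from the spec):

    # Hypothetical shape of the generated copy trigger for _test_table.
    expected_copy_trigger_sql = <<~SQL
      CREATE OR REPLACE FUNCTION trigger_cfce7a56a9d6()
      RETURNS trigger
      LANGUAGE plpgsql AS
      $$
      BEGIN
        NEW."other_id" := NEW."id";
        RETURN NEW;
      END
      $$;

      CREATE TRIGGER trigger_cfce7a56a9d6
      BEFORE INSERT OR UPDATE
      ON _test_table
      FOR EACH ROW
      EXECUTE FUNCTION trigger_cfce7a56a9d6();
    SQL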