gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2020-10-21 10:08:36 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-10-21 10:08:36 +0300
commit     48aff82709769b098321c738f3444b9bdaa694c6 (patch)
tree       e00c7c43e2d9b603a5a6af576b1685e400410dee /spec/lib/gitlab/database
parent     879f5329ee916a948223f8f43d77fba4da6cd028 (diff)
Add latest changes from gitlab-org/gitlab@13-5-stable-ee (tag: v13.5.0-rc42)
Diffstat (limited to 'spec/lib/gitlab/database')
-rw-r--r--  spec/lib/gitlab/database/background_migration_job_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/batch_count_spec.rb | 131
-rw-r--r--  spec/lib/gitlab/database/bulk_update_spec.rb | 139
-rw-r--r--  spec/lib/gitlab/database/concurrent_reindex_spec.rb | 207
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 343
-rw-r--r--  spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/database/postgres_index_spec.rb | 116
-rw-r--r--  spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb | 255
-rw-r--r--  spec/lib/gitlab/database/reindexing/coordinator_spec.rb | 68
-rw-r--r--  spec/lib/gitlab/database/reindexing/reindex_action_spec.rb | 86
-rw-r--r--  spec/lib/gitlab/database/reindexing_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/database/similarity_score_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/database/with_lock_retries_spec.rb | 64
16 files changed, 1232 insertions(+), 249 deletions(-)
diff --git a/spec/lib/gitlab/database/background_migration_job_spec.rb b/spec/lib/gitlab/database/background_migration_job_spec.rb
index dd5bf8b512f..42695925a1c 100644
--- a/spec/lib/gitlab/database/background_migration_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration_job_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::Database::BackgroundMigrationJob do
let!(:job1) { create(:background_migration_job, :succeeded, created_at: initial_time, updated_at: initial_time) }
it 'does not update non-pending jobs' do
- Timecop.freeze(initial_time + 1.day) do
+ travel_to(initial_time + 1.day) do
expect { described_class.mark_all_as_succeeded('TestJob', [1, 100]) }
.to change { described_class.succeeded.count }.from(1).to(2)
end
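
The hunk above replaces Timecop.freeze with the Rails-native travel_to helper from ActiveSupport::Testing::TimeHelpers. A minimal standalone sketch of how that helper behaves, with made-up timestamps:

require 'active_support/time'
require 'active_support/testing/time_helpers'

include ActiveSupport::Testing::TimeHelpers

initial_time = Time.utc(2020, 10, 20, 12, 0, 0)

# Inside the block, "now" is pinned to the given moment; it is restored afterwards.
travel_to(initial_time + 1.day) do
  puts Time.current # => 2020-10-21 12:00:00 UTC
end
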
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 71d3666602f..31a8b4afa03 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::BatchCount do
let_it_be(:fallback) { ::Gitlab::Database::BatchCounter::FALLBACK }
- let_it_be(:small_batch_size) { ::Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE - 1 }
+ let_it_be(:small_batch_size) { calculate_batch_size(::Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE) }
let(:model) { Issue }
let(:column) { :author_id }
@@ -22,6 +22,12 @@ RSpec.describe Gitlab::Database::BatchCount do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction)
end
+ def calculate_batch_size(batch_size)
+ zero_offset_modifier = -1
+
+ batch_size + zero_offset_modifier
+ end
+
shared_examples 'disallowed configurations' do |method|
it 'returns fallback if start is bigger than finish' do
expect(described_class.public_send(method, *args, start: 1, finish: 0)).to eq(fallback)
@@ -45,6 +51,46 @@ RSpec.describe Gitlab::Database::BatchCount do
end
end
+ shared_examples 'when batch fetch query is canceled' do
+ let(:batch_size) { 22_000 }
+ let(:relation) { instance_double(ActiveRecord::Relation) }
+
+ it 'reduces batch size by half and retry fetch' do
+ too_big_batch_relation_mock = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ allow(relation).to receive(:where).with("id" => 0..calculate_batch_size(batch_size)).and_return(too_big_batch_relation_mock)
+ allow(too_big_batch_relation_mock).to receive(:send).and_raise(ActiveRecord::QueryCanceled)
+
+ expect(relation).to receive(:where).with("id" => 0..calculate_batch_size(batch_size / 2)).and_return(double(send: 1))
+
+ subject.call(model, column, batch_size: batch_size, start: 0)
+ end
+
+ context 'when all retries fail' do
+ let(:batch_count_query) { 'SELECT COUNT(id) FROM relation WHERE id BETWEEN 0 and 1' }
+
+ before do
+ allow(model).to receive_message_chain(:select, :public_send, where: relation)
+ allow(relation).to receive(:send).and_raise(ActiveRecord::QueryCanceled.new('query timed out'))
+ allow(relation).to receive(:to_sql).and_return(batch_count_query)
+ end
+
+ it 'logs failing query' do
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(
+ event: 'batch_count',
+ relation: model.table_name,
+ operation: operation,
+ operation_args: operation_args,
+ start: 0,
+ mode: mode,
+ query: batch_count_query,
+ message: 'Query has been canceled with message: query timed out'
+ )
+ expect(subject.call(model, column, batch_size: batch_size, start: 0)).to eq(-1)
+ end
+ end
+ end
+
describe '#batch_count' do
it 'counts table' do
expect(described_class.batch_count(model)).to eq(5)
@@ -86,10 +132,11 @@ RSpec.describe Gitlab::Database::BatchCount do
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE}" do
min_id = model.minimum(:id)
+ relation = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE)
- expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
- expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE + min_id, :itself).once.and_call_original
- end
+ expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1))
described_class.batch_count(model)
end
@@ -98,6 +145,15 @@ RSpec.describe Gitlab::Database::BatchCount do
subject { described_class.batch_count(model) }
end
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :itself }
+ let(:operation) { :count }
+ let(:operation_args) { nil }
+ let(:column) { nil }
+
+ subject { described_class.method(:batch_count) }
+ end
+
context 'disallowed_configurations' do
include_examples 'disallowed configurations', :batch_count do
let(:args) { [Issue] }
@@ -108,6 +164,24 @@ RSpec.describe Gitlab::Database::BatchCount do
expect { described_class.batch_count(model.distinct(column)) }.to raise_error 'Use distinct count for optimized distinct counting'
end
end
+
+ context 'when a relation is grouped' do
+ let!(:one_more_issue) { create(:issue, author: user, project: model.first.project) }
+
+ before do
+ stub_const('Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE', 1)
+ end
+
+ context 'count by default column' do
+ let(:count) do
+ described_class.batch_count(model.group(column), batch_size: 2)
+ end
+
+ it 'counts grouped records' do
+ expect(count).to eq({ user.id => 4, another_user.id => 2 })
+ end
+ end
+ end
end
describe '#batch_distinct_count' do
@@ -151,10 +225,11 @@ RSpec.describe Gitlab::Database::BatchCount do
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE}" do
min_id = model.minimum(:id)
+ relation = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE)
- expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
- expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE + min_id, :distinct).once.and_call_original
- end
+ expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1))
described_class.batch_distinct_count(model)
end
@@ -175,6 +250,33 @@ RSpec.describe Gitlab::Database::BatchCount do
end.to raise_error 'Use distinct count only with non id fields'
end
end
+
+ context 'when a relation is grouped' do
+ let!(:one_more_issue) { create(:issue, author: user, project: model.first.project) }
+
+ before do
+ stub_const('Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE', 1)
+ end
+
+ context 'distinct count by non-unique column' do
+ let(:count) do
+ described_class.batch_distinct_count(model.group(column), :project_id, batch_size: 2)
+ end
+
+ it 'counts grouped records' do
+ expect(count).to eq({ user.id => 3, another_user.id => 2 })
+ end
+ end
+ end
+
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :distinct }
+ let(:operation) { :count }
+ let(:operation_args) { nil }
+ let(:column) { nil }
+
+ subject { described_class.method(:batch_distinct_count) }
+ end
end
describe '#batch_sum' do
@@ -209,10 +311,11 @@ RSpec.describe Gitlab::Database::BatchCount do
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE}" do
min_id = model.minimum(:id)
+ relation = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE)
- expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
- expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE + min_id, :itself).once.and_call_original
- end
+ expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1))
described_class.batch_sum(model, column)
end
@@ -226,5 +329,13 @@ RSpec.describe Gitlab::Database::BatchCount do
let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE }
let(:small_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE - 1 }
end
+
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :itself }
+ let(:operation) { :sum }
+ let(:operation_args) { [column] }
+
+ subject { described_class.method(:batch_sum) }
+ end
end
end
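
The shared examples added above ('when batch fetch query is canceled') assert that the counter halves its batch size and retries when PostgreSQL cancels a query, and falls back to -1 once every retry has failed. Below is a toy, framework-free sketch of that retry shape; the threshold, minimum and fallback values are invented for the example, and this is not the Gitlab::Database::BatchCounter implementation:

# Stand-in error and limits, for the sketch only.
class QueryCanceled < StandardError; end

FALLBACK = -1
MIN_BATCH_SIZE = 1_000

# Stand-in for a batched COUNT that times out on overly large windows.
count_window = ->(batch_size) do
  raise QueryCanceled, 'query timed out' if batch_size > 10_000

  batch_size # pretend every row in the window matched
end

def resilient_count(batch_size, &counter)
  while batch_size >= MIN_BATCH_SIZE
    begin
      return counter.call(batch_size)
    rescue QueryCanceled
      batch_size /= 2 # shrink the window and retry
    end
  end

  FALLBACK # every retry failed, so return the fallback value
end

puts resilient_count(22_000, &count_window) # => 5500, after two halvings
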
diff --git a/spec/lib/gitlab/database/bulk_update_spec.rb b/spec/lib/gitlab/database/bulk_update_spec.rb
new file mode 100644
index 00000000000..f2a7d6e69d8
--- /dev/null
+++ b/spec/lib/gitlab/database/bulk_update_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BulkUpdate do
+ describe 'error states' do
+ let(:columns) { %i[title] }
+
+ let_it_be(:mapping) do
+ create_default(:user)
+ create_default(:project)
+
+ i_a, i_b = create_list(:issue, 2)
+
+ {
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
+ }
+ end
+
+ it 'does not raise errors on valid inputs' do
+ expect { described_class.execute(columns, mapping) }.not_to raise_error
+ end
+
+ it 'expects a non-empty list of column names' do
+ expect { described_class.execute([], mapping) }.to raise_error(ArgumentError)
+ end
+
+ it 'expects all columns to be symbols' do
+ expect { described_class.execute([1], mapping) }.to raise_error(ArgumentError)
+ end
+
+ it 'expects all columns to be valid columns on the tables' do
+ expect { described_class.execute([:foo], mapping) }.to raise_error(ArgumentError)
+ end
+
+ it 'refuses to set ID' do
+ expect { described_class.execute([:id], mapping) }.to raise_error(ArgumentError)
+ end
+
+ it 'expects a non-empty mapping' do
+ expect { described_class.execute(columns, []) }.to raise_error(ArgumentError)
+ end
+
+ it 'expects all map values to be Hash instances' do
+ bad_map = mapping.merge(build(:issue) => 2)
+
+ expect { described_class.execute(columns, bad_map) }.to raise_error(ArgumentError)
+ end
+ end
+
+ it 'is possible to update all objects in a single query' do
+ users = create_list(:user, 3)
+ mapping = users.zip(%w(foo bar baz)).to_h do |u, name|
+ [u, { username: name, admin: true }]
+ end
+
+ expect do
+ described_class.execute(%i[username admin], mapping)
+ end.not_to exceed_query_limit(1)
+
+ # We have optimistically updated the values
+ expect(users).to all(be_admin)
+ expect(users.map(&:username)).to eq(%w(foo bar baz))
+
+ users.each(&:reset)
+
+ # The values are correct on reset
+ expect(users).to all(be_admin)
+ expect(users.map(&:username)).to eq(%w(foo bar baz))
+ end
+
+ it 'is possible to update heterogeneous sets' do
+ create_default(:user)
+ create_default(:project)
+
+ mr_a = create(:merge_request)
+ i_a, i_b = create_list(:issue, 2)
+
+ mapping = {
+ mr_a => { title: 'MR a' },
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
+ }
+
+ expect do
+ described_class.execute(%i[title], mapping)
+ end.not_to exceed_query_limit(2)
+
+ expect([mr_a, i_a, i_b].map { |x| x.reset.title })
+ .to eq(['MR a', 'Issue a', 'Issue b'])
+ end
+
+ shared_examples 'basic functionality' do
+ it 'sets multiple values' do
+ create_default(:user)
+ create_default(:project)
+
+ i_a, i_b = create_list(:issue, 2)
+
+ mapping = {
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
+ }
+
+ described_class.execute(%i[title], mapping)
+
+ expect([i_a, i_b].map { |x| x.reset.title })
+ .to eq(['Issue a', 'Issue b'])
+ end
+ end
+
+ include_examples 'basic functionality'
+
+ context 'when prepared statements are configured differently to the normal test environment' do
+ # rubocop: disable RSpec/LeakyConstantDeclaration
+ # This cop is disabled because you cannot call establish_connection on
+ # an anonymous class.
+ class ActiveRecordBasePreparedStatementsInverted < ActiveRecord::Base
+ def self.abstract_class?
+ true # So it gets its own connection
+ end
+ end
+ # rubocop: enable RSpec/LeakyConstantDeclaration
+
+ before_all do
+ c = ActiveRecord::Base.connection.instance_variable_get(:@config)
+ inverted = c.merge(prepared_statements: !ActiveRecord::Base.connection.prepared_statements)
+ ActiveRecordBasePreparedStatementsInverted.establish_connection(inverted)
+ end
+
+ before do
+ allow(ActiveRecord::Base).to receive(:connection_specification_name)
+ .and_return(ActiveRecordBasePreparedStatementsInverted.connection_specification_name)
+ end
+
+ include_examples 'basic functionality'
+ end
+end
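
The new bulk_update_spec.rb pins Gitlab::Database::BulkUpdate to a single UPDATE statement per mapping (see the exceed_query_limit expectations). One common PostgreSQL shape for such an update is UPDATE ... FROM (VALUES ...); the snippet below only builds that SQL to illustrate the idea, is not the BulkUpdate implementation, and simplifies the table name and quoting:

# Naive quoting, good enough for the illustration only.
quote = ->(value) { "'#{value.to_s.gsub("'", "''")}'" }

rows = { 1 => 'Issue a', 2 => 'Issue b' } # id => new title

values_list = rows.map { |id, title| "(#{id}, #{quote.call(title)})" }.join(', ')

sql = <<~SQL
  UPDATE issues
  SET title = v.title
  FROM (VALUES #{values_list}) AS v(id, title)
  WHERE issues.id = v.id
SQL

puts sql
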
diff --git a/spec/lib/gitlab/database/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/concurrent_reindex_spec.rb
deleted file mode 100644
index 4e2c3f547d4..00000000000
--- a/spec/lib/gitlab/database/concurrent_reindex_spec.rb
+++ /dev/null
@@ -1,207 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::ConcurrentReindex, '#execute' do
- subject { described_class.new(index_name, logger: logger) }
-
- let(:table_name) { '_test_reindex_table' }
- let(:column_name) { '_test_column' }
- let(:index_name) { '_test_reindex_index' }
- let(:logger) { double('logger', debug: nil, info: nil, error: nil ) }
- let(:connection) { ActiveRecord::Base.connection }
-
- before do
- connection.execute(<<~SQL)
- CREATE TABLE #{table_name} (
- id serial NOT NULL PRIMARY KEY,
- #{column_name} integer NOT NULL);
-
- CREATE INDEX #{index_name} ON #{table_name} (#{column_name});
- SQL
- end
-
- context 'when the index does not exist' do
- before do
- connection.execute(<<~SQL)
- DROP INDEX #{index_name}
- SQL
- end
-
- it 'raises an error' do
- expect { subject.execute }.to raise_error(described_class::ReindexError, /does not exist/)
- end
- end
-
- context 'when the index is unique' do
- before do
- connection.execute(<<~SQL)
- DROP INDEX #{index_name};
- CREATE UNIQUE INDEX #{index_name} ON #{table_name} (#{column_name})
- SQL
- end
-
- it 'raises an error' do
- expect do
- subject.execute
- end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
- end
- end
-
- context 'replacing the original index with a rebuilt copy' do
- let(:replacement_name) { 'tmp_reindex__test_reindex_index' }
- let(:replaced_name) { 'old_reindex__test_reindex_index' }
-
- let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
- let(:drop_index) { "DROP INDEX CONCURRENTLY IF EXISTS #{replacement_name}" }
-
- let!(:original_index) { find_index_create_statement }
-
- before do
- allow(subject).to receive(:connection).and_return(connection)
- allow(subject).to receive(:disable_statement_timeout).and_yield
- end
-
- it 'replaces the existing index with an identical index' do
- expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
-
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_to_execute_in_order("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- expect_to_execute_in_order("ALTER INDEX #{replacement_name} RENAME TO #{index_name}")
- expect_to_execute_in_order("ALTER INDEX #{replaced_name} RENAME TO #{replacement_name}")
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- subject.execute
-
- check_index_exists
- end
-
- context 'when a dangling index is left from a previous run' do
- before do
- connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
- end
-
- it 'replaces the existing index with an identical index' do
- expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
-
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_to_execute_in_order("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- expect_to_execute_in_order("ALTER INDEX #{replacement_name} RENAME TO #{index_name}")
- expect_to_execute_in_order("ALTER INDEX #{replaced_name} RENAME TO #{replacement_name}")
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- subject.execute
-
- check_index_exists
- end
- end
-
- context 'when it fails to create the replacement index' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect(connection).to receive(:execute).with(create_index).ordered
- .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when the replacement index is not valid' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect(subject).to receive(:replacement_index_valid?).and_return(false)
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
-
- check_index_exists
- end
- end
-
- context 'when a database error occurs while swapping the indexes' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect(connection).to receive(:execute).ordered
- .with("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when with_lock_retries fails to acquire the lock' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true)
- .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /exhausted/)
-
- check_index_exists
- end
- end
- end
-
- def expect_to_execute_concurrently_in_order(sql)
- # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
- # verify the original call but pass through the non-concurrent form.
- expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
- method.call(sql.sub(/CONCURRENTLY/, ''))
- end
- end
-
- def expect_to_execute_in_order(sql)
- expect(connection).to receive(:execute).with(sql).ordered.and_call_original
- end
-
- def find_index_create_statement
- ActiveRecord::Base.connection.select_value(<<~SQL)
- SELECT indexdef
- FROM pg_indexes
- WHERE schemaname = 'public'
- AND indexname = #{ActiveRecord::Base.connection.quote(index_name)}
- SQL
- end
-
- def check_index_exists
- expect(find_index_create_statement).to eq(original_index)
- end
-end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 0bdcca630aa..a8edcc5f7e5 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -699,6 +699,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:copy_indexes).with(:users, :old, :new)
expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new)
+ expect(model).to receive(:copy_check_constraints).with(:users, :old, :new)
model.rename_column_concurrently(:users, :old, :new)
end
@@ -761,6 +762,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:change_column_default)
.with(:users, :new, old_column.default)
+ expect(model).to receive(:copy_check_constraints)
+ .with(:users, :old, :new)
+
model.rename_column_concurrently(:users, :old, :new)
end
end
@@ -856,6 +860,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:copy_indexes).with(:users, :new, :old)
expect(model).to receive(:copy_foreign_keys).with(:users, :new, :old)
+ expect(model).to receive(:copy_check_constraints).with(:users, :new, :old)
model.undo_cleanup_concurrent_column_rename(:users, :old, :new)
end
@@ -894,6 +899,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:change_column_default)
.with(:users, :old, new_column.default)
+ expect(model).to receive(:copy_check_constraints)
+ .with(:users, :new, :old)
+
model.undo_cleanup_concurrent_column_rename(:users, :old, :new)
end
end
@@ -925,6 +933,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#undo_change_column_type_concurrently' do
+ it 'reverses the operations of change_column_type_concurrently' do
+ expect(model).to receive(:check_trigger_permissions!).with(:users)
+
+ expect(model).to receive(:remove_rename_triggers_for_postgresql)
+ .with(:users, /trigger_.{12}/)
+
+ expect(model).to receive(:remove_column).with(:users, "old_for_type_change")
+
+ model.undo_change_column_type_concurrently(:users, :old)
+ end
+ end
+
describe '#cleanup_concurrent_column_type_change' do
it 'cleans up the type changing procedure' do
expect(model).to receive(:cleanup_concurrent_column_rename)
@@ -937,6 +958,94 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#undo_cleanup_concurrent_column_type_change' do
+ context 'in a transaction' do
+ it 'raises RuntimeError' do
+ allow(model).to receive(:transaction_open?).and_return(true)
+
+ expect { model.undo_cleanup_concurrent_column_type_change(:users, :old, :new) }
+ .to raise_error(RuntimeError)
+ end
+ end
+
+ context 'outside a transaction' do
+ let(:temp_column) { "old_for_type_change" }
+
+ let(:temp_undo_cleanup_column) do
+ identifier = "users_old_for_type_change"
+ hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
+ "tmp_undo_cleanup_column_#{hashed_identifier}"
+ end
+
+ let(:trigger_name) { model.rename_trigger_name(:users, :old, :old_for_type_change) }
+
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'reverses the operations of cleanup_concurrent_column_type_change' do
+ expect(model).to receive(:check_trigger_permissions!).with(:users)
+
+ expect(model).to receive(:create_column_from).with(
+ :users,
+ :old,
+ temp_undo_cleanup_column,
+ type: :string,
+ batch_column_name: :id,
+ type_cast_function: nil
+ ).and_return(true)
+
+ expect(model).to receive(:rename_column)
+ .with(:users, :old, temp_column)
+
+ expect(model).to receive(:rename_column)
+ .with(:users, temp_undo_cleanup_column, :old)
+
+ expect(model).to receive(:install_rename_triggers_for_postgresql)
+ .with(trigger_name, '"users"', '"old"', '"old_for_type_change"')
+
+ model.undo_cleanup_concurrent_column_type_change(:users, :old, :string)
+ end
+
+ it 'passes the type_cast_function and batch_column_name' do
+ expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true)
+ expect(model).to receive(:check_trigger_permissions!).with(:users)
+
+ expect(model).to receive(:create_column_from).with(
+ :users,
+ :old,
+ temp_undo_cleanup_column,
+ type: :string,
+ batch_column_name: :other_batch_column,
+ type_cast_function: :custom_type_cast_function
+ ).and_return(true)
+
+ expect(model).to receive(:rename_column)
+ .with(:users, :old, temp_column)
+
+ expect(model).to receive(:rename_column)
+ .with(:users, temp_undo_cleanup_column, :old)
+
+ expect(model).to receive(:install_rename_triggers_for_postgresql)
+ .with(trigger_name, '"users"', '"old"', '"old_for_type_change"')
+
+ model.undo_cleanup_concurrent_column_type_change(
+ :users,
+ :old,
+ :string,
+ type_cast_function: :custom_type_cast_function,
+ batch_column_name: :other_batch_column
+ )
+ end
+
+ it 'raises an error with invalid batch_column_name' do
+ expect do
+ model.undo_cleanup_concurrent_column_type_change(:users, :old, :new, batch_column_name: :invalid)
+ end.to raise_error(RuntimeError, /Column invalid does not exist on users/)
+ end
+ end
+ end
+
describe '#install_rename_triggers_for_postgresql' do
it 'installs the triggers for PostgreSQL' do
expect(model).to receive(:execute)
@@ -1128,7 +1237,65 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
name: 'index_on_issues_gl_project_id',
length: [],
order: [],
- opclasses: { 'gl_project_id' => 'bar' })
+ opclass: { 'gl_project_id' => 'bar' })
+
+ model.copy_indexes(:issues, :project_id, :gl_project_id)
+ end
+ end
+
+ context 'using an index with multiple columns and custom operator classes' do
+ it 'copies the index' do
+ index = double(:index,
+ columns: %w(project_id foobar),
+ name: 'index_on_issues_project_id_foobar',
+ using: :gin,
+ where: nil,
+ opclasses: { 'project_id' => 'bar', 'foobar' => :gin_trgm_ops },
+ unique: false,
+ lengths: [],
+ orders: [])
+
+ allow(model).to receive(:indexes_for).with(:issues, 'project_id')
+ .and_return([index])
+
+ expect(model).to receive(:add_concurrent_index)
+ .with(:issues,
+ %w(gl_project_id foobar),
+ unique: false,
+ name: 'index_on_issues_gl_project_id_foobar',
+ length: [],
+ order: [],
+ opclass: { 'gl_project_id' => 'bar', 'foobar' => :gin_trgm_ops },
+ using: :gin)
+
+ model.copy_indexes(:issues, :project_id, :gl_project_id)
+ end
+ end
+
+ context 'using an index with multiple columns and a custom operator class on the non affected column' do
+ it 'copies the index' do
+ index = double(:index,
+ columns: %w(project_id foobar),
+ name: 'index_on_issues_project_id_foobar',
+ using: :gin,
+ where: nil,
+ opclasses: { 'foobar' => :gin_trgm_ops },
+ unique: false,
+ lengths: [],
+ orders: [])
+
+ allow(model).to receive(:indexes_for).with(:issues, 'project_id')
+ .and_return([index])
+
+ expect(model).to receive(:add_concurrent_index)
+ .with(:issues,
+ %w(gl_project_id foobar),
+ unique: false,
+ name: 'index_on_issues_gl_project_id_foobar',
+ length: [],
+ order: [],
+ opclass: { 'foobar' => :gin_trgm_ops },
+ using: :gin)
model.copy_indexes(:issues, :project_id, :gl_project_id)
end
@@ -1400,15 +1567,32 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
)
end
- after do
- 'DROP INDEX IF EXISTS test_index;'
- end
-
it 'returns true if an index exists' do
expect(model.index_exists_by_name?(:projects, 'test_index'))
.to be_truthy
end
end
+
+ context 'when an index exists for a table with the same name in another schema' do
+ before do
+ ActiveRecord::Base.connection.execute(
+ 'CREATE SCHEMA new_test_schema'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE INDEX test_index_on_name ON new_test_schema.projects (LOWER(name));'
+ )
+ end
+
+ it 'returns false if the index does not exist in the current schema' do
+ expect(model.index_exists_by_name?(:projects, 'test_index_on_name'))
+ .to be_falsy
+ end
+ end
end
describe '#create_or_update_plan_limit' do
@@ -1863,11 +2047,17 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
ActiveRecord::Base.connection.execute(
'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
)
- end
- after do
ActiveRecord::Base.connection.execute(
- 'ALTER TABLE projects DROP CONSTRAINT IF EXISTS check_1'
+ 'CREATE SCHEMA new_test_schema'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
)
end
@@ -1885,6 +2075,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model.check_constraint_exists?(:users, 'check_1'))
.to be_falsy
end
+
+ it 'returns false if a constraint with the same name exists for the same table in another schema' do
+ expect(model.check_constraint_exists?(:projects, 'check_2'))
+ .to be_falsy
+ end
end
describe '#add_check_constraint' do
@@ -2086,6 +2281,138 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#copy_check_constraints' do
+ context 'inside a transaction' do
+ it 'raises an error' do
+ expect(model).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ allow(model).to receive(:column_exists?).and_return(true)
+ end
+
+ let(:old_column_constraints) do
+ [
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'check_d7d49d475d',
+ 'constraint_def' => 'CHECK ((old_column IS NOT NULL))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'check_48560e521e',
+ 'constraint_def' => 'CHECK ((char_length(old_column) <= 255))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'custom_check_constraint',
+ 'constraint_def' => 'CHECK (((old_column IS NOT NULL) AND (another_column IS NULL)))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'not_valid_check_constraint',
+ 'constraint_def' => 'CHECK ((old_column IS NOT NULL)) NOT VALID'
+ }
+ ]
+ end
+
+ it 'copies check constraints from one column to another' do
+ allow(model).to receive(:check_constraints_for)
+ .with(:test_table, :old_column, schema: nil)
+ .and_return(old_column_constraints)
+
+ allow(model).to receive(:not_null_constraint_name).with(:test_table, :new_column)
+ .and_return('check_1')
+
+ allow(model).to receive(:text_limit_name).with(:test_table, :new_column)
+ .and_return('check_2')
+
+ allow(model).to receive(:check_constraint_name)
+ .with(:test_table, :new_column, 'copy_check_constraint')
+ .and_return('check_3')
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(new_column IS NOT NULL)',
+ 'check_1',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(char_length(new_column) <= 255)',
+ 'check_2',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '((new_column IS NOT NULL) AND (another_column IS NULL))',
+ 'check_3',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(new_column IS NOT NULL)',
+ 'check_1',
+ validate: false
+ ).once
+
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end
+
+ it 'does nothing if there are no constraints defined for the old column' do
+ allow(model).to receive(:check_constraints_for)
+ .with(:test_table, :old_column, schema: nil)
+ .and_return([])
+
+ expect(model).not_to receive(:add_check_constraint)
+
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end
+
+ it 'raises an error when the originating column does not exist' do
+ allow(model).to receive(:column_exists?).with(:test_table, :old_column).and_return(false)
+
+ error_message = /Column old_column does not exist on test_table/
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError, error_message)
+ end
+
+ it 'raises an error when the target column does not exist' do
+ allow(model).to receive(:column_exists?).with(:test_table, :new_column).and_return(false)
+
+ error_message = /Column new_column does not exist on test_table/
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError, error_message)
+ end
+ end
+ end
+
describe '#add_text_limit' do
context 'when it is called with the default options' do
it 'calls add_check_constraint with an inferred constraint name and validate: true' do
diff --git a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
index 034bf966db7..8a35d8149ad 100644
--- a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
+++ b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns do
describe '#execute' do
it 'returns a list of class names and columns pairs' do
- Timecop.freeze(REMOVE_DATE) do
+ travel_to(REMOVE_DATE) do
expect(subject.execute).to eq([
['Testing::A', {
'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0'),
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index 334cac653cf..885eef5723e 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
let(:partitioning_key) { :created_at }
around do |example|
- Timecop.freeze(Date.parse('2020-08-22')) { example.run }
+ travel_to(Date.parse('2020-08-22')) { example.run }
end
context 'with existing partitions' do
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
index ec3d0a6dbcb..c43b51e10a0 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
@@ -116,23 +116,6 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
expect(jobs_updated).to eq(1)
end
- context 'when the feature flag is disabled' do
- let(:mock_connection) { double('connection') }
-
- before do
- allow(subject).to receive(:connection).and_return(mock_connection)
- stub_feature_flags(backfill_partitioned_audit_events: false)
- end
-
- it 'exits without attempting to copy data' do
- expect(mock_connection).not_to receive(:execute)
-
- subject.perform(1, 100, source_table, destination_table, unique_key)
-
- expect(destination_model.count).to eq(0)
- end
- end
-
context 'when the job is run within an explicit transaction block' do
let(:mock_connection) { double('connection') }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 44ef0b307fe..147637cf471 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -213,7 +213,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
it 'creates partitions including the next month from today' do
today = Date.new(2020, 5, 8)
- Timecop.freeze(today) do
+ travel_to(today) do
migration.partition_table_by_date source_table, partition_column, min_date: min_date
expect_range_partitions_for(partitioned_table, {
@@ -233,7 +233,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
context 'without min_date, max_date' do
it 'creates partitions for the current and next month' do
current_date = Date.new(2020, 05, 22)
- Timecop.freeze(current_date.to_time) do
+ travel_to(current_date.to_time) do
migration.partition_table_by_date source_table, partition_column
expect_range_partitions_for(partitioned_table, {
@@ -514,6 +514,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
allow(migration).to receive(:table_exists?).with(partitioned_table).and_return(true)
allow(migration).to receive(:copy_missed_records)
allow(migration).to receive(:execute).with(/VACUUM/)
+ allow(migration).to receive(:execute).with(/^(RE)?SET/)
end
it 'finishes remaining jobs for the correct table' do
@@ -567,6 +568,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
allow(Gitlab::BackgroundMigration).to receive(:steal)
allow(migration).to receive(:execute).with(/VACUUM/)
+ allow(migration).to receive(:execute).with(/^(RE)?SET/)
end
it 'idempotently cleans up after failed background migrations' do
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
new file mode 100644
index 00000000000..1da67a5a6c0
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresIndex do
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE INDEX foo_idx ON public.users (name);
+ CREATE UNIQUE INDEX bar_key ON public.users (id);
+
+ CREATE TABLE example_table (id serial primary key);
+ SQL
+ end
+
+ def find(name)
+ described_class.by_identifier(name)
+ end
+
+ describe '.by_identifier' do
+ it 'finds the index' do
+ expect(find('public.foo_idx')).to be_a(Gitlab::Database::PostgresIndex)
+ end
+
+ it 'raises an error if not found' do
+ expect { find('public.idontexist') }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'raises ArgumentError if given a non-fully qualified index name' do
+ expect { find('foo') }.to raise_error(ArgumentError, /not fully qualified/)
+ end
+ end
+
+ describe '.regular' do
+ it 'only includes non-unique indexes' do
+ expect(described_class.regular).to all(have_attributes(unique: false))
+ end
+
+ it 'only includes non-partitioned indexes' do
+ expect(described_class.regular).to all(have_attributes(partitioned: false))
+ end
+
+ it 'only includes indexes that do not serve an exclusion constraint' do
+ expect(described_class.regular).to all(have_attributes(exclusion: false))
+ end
+ end
+
+ describe '.not_match' do
+ it 'excludes indexes matching the given regex' do
+ expect(described_class.not_match('^bar_k').map(&:name)).to all(match(/^(?!bar_k).*/))
+ end
+
+ it 'matches indexes without this prefix regex' do
+ expect(described_class.not_match('^bar_k')).not_to be_empty
+ end
+ end
+
+ describe '.random_few' do
+ it 'limits to two records by default' do
+ expect(described_class.random_few(2).size).to eq(2)
+ end
+ end
+
+ describe '#unique?' do
+ it 'returns true for a unique index' do
+ expect(find('public.bar_key')).to be_unique
+ end
+
+ it 'returns false for a regular, non-unique index' do
+ expect(find('public.foo_idx')).not_to be_unique
+ end
+
+ it 'returns true for a primary key index' do
+ expect(find('public.example_table_pkey')).to be_unique
+ end
+ end
+
+ describe '#valid_index?' do
+ it 'returns true if the index is valid' do
+ expect(find('public.foo_idx')).to be_valid_index
+ end
+
+ it 'returns false if the index is marked as invalid' do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ UPDATE pg_index SET indisvalid=false
+ FROM pg_class
+ WHERE pg_class.relname = 'foo_idx' AND pg_index.indexrelid = pg_class.oid
+ SQL
+
+ expect(find('public.foo_idx')).not_to be_valid_index
+ end
+ end
+
+ describe '#to_s' do
+ it 'returns the index name' do
+ expect(find('public.foo_idx').to_s).to eq('foo_idx')
+ end
+ end
+
+ describe '#name' do
+ it 'returns the name' do
+ expect(find('public.foo_idx').name).to eq('foo_idx')
+ end
+ end
+
+ describe '#schema' do
+ it 'returns the index schema' do
+ expect(find('public.foo_idx').schema).to eq('public')
+ end
+ end
+
+ describe '#definition' do
+ it 'returns the index definition' do
+ expect(find('public.foo_idx').definition).to eq('CREATE INDEX foo_idx ON public.users USING btree (name)')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
new file mode 100644
index 00000000000..2d6765aac2e
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
@@ -0,0 +1,255 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
+ subject { described_class.new(index, logger: logger) }
+
+ let(:table_name) { '_test_reindex_table' }
+ let(:column_name) { '_test_column' }
+ let(:index_name) { '_test_reindex_index' }
+ let(:index) { instance_double(Gitlab::Database::PostgresIndex, indexrelid: 42, name: index_name, schema: 'public', partitioned?: false, unique?: false, exclusion?: false, definition: 'CREATE INDEX _test_reindex_index ON public._test_reindex_table USING btree (_test_column)') }
+ let(:logger) { double('logger', debug: nil, info: nil, error: nil ) }
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ #{column_name} integer NOT NULL);
+
+ CREATE INDEX #{index.name} ON #{table_name} (#{column_name});
+ SQL
+ end
+
+ context 'when the index is unique' do
+ before do
+ allow(index).to receive(:unique?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
+ end
+ end
+
+ context 'when the index is partitioned' do
+ before do
+ allow(index).to receive(:partitioned?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /partitioned indexes are currently not supported/)
+ end
+ end
+
+ context 'when the index serves an exclusion constraint' do
+ before do
+ allow(index).to receive(:exclusion?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /indexes serving an exclusion constraint are currently not supported/)
+ end
+ end
+
+ context 'when the index is a lingering temporary index from a previous reindexing run' do
+ context 'with the temporary index prefix' do
+ let(:index_name) { 'tmp_reindex_something' }
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+
+ context 'with the replaced index prefix' do
+ let(:index_name) { 'old_reindex_something' }
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+ end
+
+ context 'replacing the original index with a rebuilt copy' do
+ let(:replacement_name) { 'tmp_reindex_42' }
+ let(:replaced_name) { 'old_reindex_42' }
+
+ let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
+ let(:drop_index) do
+ <<~SQL
+ DROP INDEX CONCURRENTLY
+ IF EXISTS "public"."#{replacement_name}"
+ SQL
+ end
+
+ let!(:original_index) { find_index_create_statement }
+
+ it 'integration test: executing full index replacement without mocks' do
+ allow(connection).to receive(:execute).and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+
+ subject.perform
+
+ check_index_exists
+ end
+
+ context 'mocked specs' do
+ before do
+ allow(subject).to receive(:connection).and_return(connection)
+ allow(connection).to receive(:execute).and_call_original
+ end
+
+ it 'replaces the existing index with an identical index' do
+ expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').twice
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name)
+ expect_index_rename(replacement_name, index.name)
+ expect_index_rename(replaced_name, replacement_name)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ subject.perform
+
+ check_index_exists
+ end
+
+ context 'when a dangling index is left from a previous run' do
+ before do
+ connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
+ end
+
+ it 'replaces the existing index with an identical index' do
+ expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').exactly(3).times
+
+ expect_to_execute_concurrently_in_order(drop_index)
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name)
+ expect_index_rename(replacement_name, index.name)
+ expect_index_rename(replaced_name, replacement_name)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ subject.perform
+
+ check_index_exists
+ end
+ end
+
+ context 'when it fails to create the replacement index' do
+ it 'safely cleans up and signals the error' do
+ expect(connection).to receive(:execute).with(create_index).ordered
+ .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when the replacement index is not valid' do
+ it 'safely cleans up and signals the error' do
+ replacement_index = double('replacement index', valid_index?: false)
+ allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when a database error occurs while swapping the indexes' do
+ it 'safely cleans up and signals the error' do
+ replacement_index = double('replacement index', valid_index?: true)
+ allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when with_lock_retries fails to acquire the lock' do
+ it 'safely cleans up and signals the error' do
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true)
+ .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
+ end
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, /exhausted/)
+
+ check_index_exists
+ end
+ end
+ end
+ end
+
+ def expect_to_execute_concurrently_in_order(sql)
+ # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
+ # verify the original call but pass through the non-concurrent form.
+ expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+ end
+
+ def expect_index_rename(from, to)
+ expect(connection).to receive(:execute).with(<<~SQL).ordered
+ ALTER INDEX "public"."#{from}"
+ RENAME TO "#{to}"
+ SQL
+ end
+
+ def find_index_create_statement
+ ActiveRecord::Base.connection.select_value(<<~SQL)
+ SELECT indexdef
+ FROM pg_indexes
+ WHERE schemaname = 'public'
+ AND indexname = #{ActiveRecord::Base.connection.quote(index.name)}
+ SQL
+ end
+
+ def check_index_exists
+ expect(find_index_create_statement).to eq(original_index)
+ end
+end
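
The ConcurrentReindex spec above exercises an online rebuild: create a replacement index CONCURRENTLY under a temporary name, swap names inside with_lock_retries, then drop the leftover index CONCURRENTLY. The listing below just prints that SQL sequence with placeholder names, derived from the expectations in the spec rather than from the implementation itself:

index_name       = '_test_reindex_index'
replacement_name = 'tmp_reindex_42'
replaced_name    = 'old_reindex_42'

steps = [
  # 1. Build an identical index under a temporary name without blocking writes.
  "CREATE INDEX CONCURRENTLY #{replacement_name} ON public._test_reindex_table USING btree (_test_column)",
  # 2. Swap the names (the spec runs these inside a with_lock_retries block).
  %(ALTER INDEX "public"."#{index_name}" RENAME TO "#{replaced_name}"),
  %(ALTER INDEX "public"."#{replacement_name}" RENAME TO "#{index_name}"),
  %(ALTER INDEX "public"."#{replaced_name}" RENAME TO "#{replacement_name}"),
  # 3. Drop whatever now carries the temporary name, i.e. the old index.
  %(DROP INDEX CONCURRENTLY IF EXISTS "public"."#{replacement_name}")
]

steps.each { |sql| puts sql }
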
diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
new file mode 100644
index 00000000000..f45d959c0de
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::Coordinator do
+ include ExclusiveLeaseHelpers
+
+ describe '.perform' do
+ subject { described_class.new(indexes).perform }
+
+ let(:indexes) { [instance_double(Gitlab::Database::PostgresIndex), instance_double(Gitlab::Database::PostgresIndex)] }
+ let(:reindexers) { [instance_double(Gitlab::Database::Reindexing::ConcurrentReindex), instance_double(Gitlab::Database::Reindexing::ConcurrentReindex)] }
+
+ let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
+ let(:lease_key) { 'gitlab/database/reindexing/coordinator' }
+ let(:lease_timeout) { 1.day }
+ let(:uuid) { 'uuid' }
+
+ before do
+ allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:keep_track_of).and_yield
+
+ indexes.zip(reindexers).each do |index, reindexer|
+ allow(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).and_return(reindexer)
+ allow(reindexer).to receive(:perform)
+ end
+ end
+
+ it 'performs concurrent reindexing for each index' do
+ indexes.zip(reindexers).each do |index, reindexer|
+ expect(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).ordered.and_return(reindexer)
+ expect(reindexer).to receive(:perform)
+ end
+
+ subject
+ end
+
+ it 'keeps track of actions and creates ReindexAction records' do
+ indexes.each do |index|
+ expect(Gitlab::Database::Reindexing::ReindexAction).to receive(:keep_track_of).with(index).and_yield
+ end
+
+ subject
+ end
+
+ context 'locking' do
+ it 'acquires a lock while reindexing' do
+ indexes.each do |index|
+ expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
+ action = instance_double(Gitlab::Database::Reindexing::ConcurrentReindex)
+ expect(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).ordered.with(index).and_return(action)
+ expect(action).to receive(:perform).ordered
+ expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
+ end
+
+ subject
+ end
+
+ it 'does not perform reindexing actions if lease is not granted' do
+ indexes.each do |index|
+ expect(lease).to receive(:try_obtain).ordered.and_return(false)
+ expect(Gitlab::Database::Reindexing::ConcurrentReindex).not_to receive(:new)
+ end
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
new file mode 100644
index 00000000000..efb5b8463a1
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::ReindexAction, '.keep_track_of' do
+ let(:index) { double('index', identifier: 'public.something', ondisk_size_bytes: 10240, reload: nil) }
+ let(:size_after) { 512 }
+
+ it 'yields to the caller' do
+ expect { |b| described_class.keep_track_of(index, &b) }.to yield_control
+ end
+
+ def find_record
+ described_class.find_by(index_identifier: index.identifier)
+ end
+
+ it 'creates the record with a start time and updates its end time' do
+ freeze_time do
+ described_class.keep_track_of(index) do
+ expect(find_record.action_start).to be_within(1.second).of(Time.zone.now)
+
+ travel(10.seconds)
+ end
+
+ duration = find_record.action_end - find_record.action_start
+
+ expect(duration).to be_within(1.second).of(10.seconds)
+ end
+ end
+
+ it 'creates the record with its status set to :started and updates its state to :finished' do
+ described_class.keep_track_of(index) do
+ expect(find_record).to be_started
+ end
+
+ expect(find_record).to be_finished
+ end
+
+ it 'creates the record with the indexes start size and updates its end size' do
+ described_class.keep_track_of(index) do
+ expect(find_record.ondisk_size_bytes_start).to eq(index.ondisk_size_bytes)
+
+ expect(index).to receive(:reload).once
+ allow(index).to receive(:ondisk_size_bytes).and_return(size_after)
+ end
+
+ expect(find_record.ondisk_size_bytes_end).to eq(size_after)
+ end
+
+ context 'in case of errors' do
+ it 'sets the state to failed' do
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record).to be_failed
+ end
+
+ it 'records the end time' do
+ freeze_time do
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record.action_end).to be_within(1.second).of(Time.zone.now)
+ end
+ end
+
+ it 'records the resulting index size' do
+ expect(index).to receive(:reload).once
+ allow(index).to receive(:ondisk_size_bytes).and_return(size_after)
+
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record.ondisk_size_bytes_end).to eq(size_after)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
new file mode 100644
index 00000000000..86b3c029944
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing do
+ include ExclusiveLeaseHelpers
+
+ describe '.perform' do
+ subject { described_class.perform(indexes) }
+
+ let(:coordinator) { instance_double(Gitlab::Database::Reindexing::Coordinator) }
+ let(:indexes) { double }
+
+ it 'delegates to Coordinator' do
+ expect(Gitlab::Database::Reindexing::Coordinator).to receive(:new).with(indexes).and_return(coordinator)
+ expect(coordinator).to receive(:perform)
+
+ subject
+ end
+ end
+
+ describe '.candidate_indexes' do
+ subject { described_class.candidate_indexes }
+
+ it 'retrieves regular indexes that are not left-overs from previous runs' do
+ result = double
+ expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.not_match.not_match').with(no_args).with('^tmp_reindex_').with('^old_reindex_').and_return(result)
+
+ expect(subject).to eq(result)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/similarity_score_spec.rb b/spec/lib/gitlab/database/similarity_score_spec.rb
index e36a4f610e1..cf75e5a72d9 100644
--- a/spec/lib/gitlab/database/similarity_score_spec.rb
+++ b/spec/lib/gitlab/database/similarity_score_spec.rb
@@ -90,4 +90,15 @@ RSpec.describe Gitlab::Database::SimilarityScore do
expect(subject).to eq(%w[different same gitlab-danger])
end
end
+
+ describe 'annotation' do
+ it 'annotates the generated SQL expression' do
+ expression = Gitlab::Database::SimilarityScore.build_expression(search: 'test', rules: [
+ { column: Arel.sql('path'), multiplier: 1 },
+ { column: Arel.sql('name'), multiplier: 0.8 }
+ ])
+
+ expect(Gitlab::Database::SimilarityScore).to be_order_by_similarity(expression)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 2cc6e175500..220ae705e71 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -104,9 +104,69 @@ RSpec.describe Gitlab::Database::WithLockRetries do
end
context 'after 3 iterations' do
- let(:retry_count) { 4 }
+ it_behaves_like 'retriable exclusive lock on `projects`' do
+ let(:retry_count) { 4 }
+ end
+
+ context 'setting the idle transaction timeout' do
+ context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
+ it 'does not disable the idle transaction timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_transaction).once
+
+ expect(subject).not_to receive(:disable_idle_in_transaction_timeout)
+
+ subject.run {}
+ end
+ end
- it_behaves_like 'retriable exclusive lock on `projects`'
+ context 'when there is an outer transaction: disable_ddl_transaction! is not set in the migration' do
+ it 'disables the idle transaction timeout so the code can sleep and retry' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+
+ n = 0
+ allow(subject).to receive(:run_block_with_transaction).twice do
+ n += 1
+ raise(ActiveRecord::LockWaitTimeout) if n == 1
+ end
+
+ expect(subject).to receive(:disable_idle_in_transaction_timeout).once
+
+ subject.run {}
+ end
+ end
+ end
+ end
+
+ context 'after the retries are exhausted' do
+ let(:timing_configuration) do
+ [
+ [1.second, 1.second]
+ ]
+ end
+
+ context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
+ it 'does not disable the lock_timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+
+ expect(subject).not_to receive(:disable_lock_timeout)
+
+ subject.run {}
+ end
+ end
+
+ context 'when there is an outer transaction: disable_ddl_transaction! is not set in the migration' do
+ it 'disables the lock_timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+
+ expect(subject).to receive(:disable_lock_timeout)
+
+ subject.run {}
+ end
+ end
end
context 'after the retries, without setting lock_timeout' do