gitlab.com/gitlab-org/gitlab-foss.git
commit    6e4e1050d9dba2b7b2523fdd1768823ab85feef4 (patch)
tree      78be5963ec075d80116a932011d695dd33910b4e /spec/lib/gitlab/database
parent    1ce776de4ae122aba3f349c02c17cebeaa8ecf07 (diff)
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-08-20 21:42:06 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-08-20 21:42:06 +0300

    Add latest changes from gitlab-org/gitlab@13-3-stable-ee
Diffstat (limited to 'spec/lib/gitlab/database')

 -rw-r--r--  spec/lib/gitlab/database/batch_count_spec.rb                                              | 140
 -rw-r--r--  spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb                         |   1
 -rw-r--r--  spec/lib/gitlab/database/custom_structure_spec.rb                                         |   1
 -rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb                                        |   2
 -rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb       |   1
 -rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb  | 110
 -rw-r--r--  spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb            |  35
 -rw-r--r--  spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb            |   1
 -rw-r--r--  spec/lib/gitlab/database/postgresql_adapter/schema_versions_copy_mixin_spec.rb            |  42
 -rw-r--r--  spec/lib/gitlab/database/schema_version_files_spec.rb                                     |  95
 -rw-r--r--  spec/lib/gitlab/database/similarity_score_spec.rb                                         |  93
 -rw-r--r--  spec/lib/gitlab/database/with_lock_retries_spec.rb                                        |  38
 12 files changed, 445 insertions(+), 114 deletions(-)
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 656501dbf56..1f84a915cdc 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -13,11 +13,34 @@ RSpec.describe Gitlab::Database::BatchCount do
let(:another_user) { create(:user) }
before do
- create_list(:issue, 3, author: user )
- create_list(:issue, 2, author: another_user )
+ create_list(:issue, 3, author: user)
+ create_list(:issue, 2, author: another_user)
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction)
end
+ shared_examples 'disallowed configurations' do |method|
+ it 'returns fallback if start is bigger than finish' do
+ expect(described_class.public_send(method, *args, start: 1, finish: 0)).to eq(fallback)
+ end
+
+ it 'returns fallback if loops more than allowed' do
+ large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * default_batch_size + 1
+ expect(described_class.public_send(method, *args, start: 1, finish: large_finish)).to eq(fallback)
+ end
+
+ it 'returns fallback if batch size is less than min required' do
+ expect(described_class.public_send(method, *args, batch_size: small_batch_size)).to eq(fallback)
+ end
+ end
+
+ shared_examples 'when a transaction is open' do
+ let(:in_transaction) { true }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('BatchCount can not be run inside a transaction')
+ end
+ end
+
describe '#batch_count' do
it 'counts table' do
expect(described_class.batch_count(model)).to eq(5)
@@ -53,38 +76,32 @@ RSpec.describe Gitlab::Database::BatchCount do
[1, 2, 4, 5, 6].each { |i| expect(described_class.batch_count(model, batch_size: i)).to eq(5) }
end
- it 'will raise an error if distinct count is requested' do
- expect do
- described_class.batch_count(model.distinct(column))
- end.to raise_error 'Use distinct count for optimized distinct counting'
+ it 'counts with a start and finish' do
+ expect(described_class.batch_count(model, start: model.minimum(:id), finish: model.maximum(:id))).to eq(5)
end
- context 'in a transaction' do
- let(:in_transaction) { true }
+ it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE}" do
+ min_id = model.minimum(:id)
- it 'cannot count' do
- expect do
- described_class.batch_count(model)
- end.to raise_error 'BatchCount can not be run inside a transaction'
+ expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
+ expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE + min_id, :itself).once.and_call_original
end
- end
- it 'counts with a start and finish' do
- expect(described_class.batch_count(model, start: model.minimum(:id), finish: model.maximum(:id))).to eq(5)
+ described_class.batch_count(model)
end
- context 'disallowed configurations' do
- it 'returns fallback if start is bigger than finish' do
- expect(described_class.batch_count(model, start: 1, finish: 0)).to eq(fallback)
- end
+ it_behaves_like 'when a transaction is open' do
+ subject { described_class.batch_count(model) }
+ end
- it 'returns fallback if loops more than allowed' do
- large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE + 1
- expect(described_class.batch_count(model, start: 1, finish: large_finish)).to eq(fallback)
+ context 'disallowed_configurations' do
+ include_examples 'disallowed configurations', :batch_count do
+ let(:args) { [Issue] }
+ let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE }
end
- it 'returns fallback if batch size is less than min required' do
- expect(described_class.batch_count(model, batch_size: small_batch_size)).to eq(fallback)
+ it 'raises an error if distinct count is requested' do
+ expect { described_class.batch_count(model.distinct(column)) }.to raise_error 'Use distinct count for optimized distinct counting'
end
end
end
@@ -128,18 +145,24 @@ RSpec.describe Gitlab::Database::BatchCount do
expect(described_class.batch_distinct_count(model, column, start: User.minimum(:id), finish: User.maximum(:id))).to eq(2)
end
- context 'disallowed configurations' do
- it 'returns fallback if start is bigger than finish' do
- expect(described_class.batch_distinct_count(model, column, start: 1, finish: 0)).to eq(fallback)
- end
+ it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE}" do
+ min_id = model.minimum(:id)
- it 'returns fallback if loops more than allowed' do
- large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE + 1
- expect(described_class.batch_distinct_count(model, column, start: 1, finish: large_finish)).to eq(fallback)
+ expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
+ expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE + min_id, :distinct).once.and_call_original
end
- it 'returns fallback if batch size is less than min required' do
- expect(described_class.batch_distinct_count(model, column, batch_size: small_batch_size)).to eq(fallback)
+ described_class.batch_distinct_count(model)
+ end
+
+ it_behaves_like 'when a transaction is open' do
+ subject { described_class.batch_distinct_count(model, column) }
+ end
+
+ context 'disallowed configurations' do
+ include_examples 'disallowed configurations', :batch_distinct_count do
+ let(:args) { [model, column] }
+ let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE }
end
it 'will raise an error if distinct count with the :id column is requested' do
@@ -149,4 +172,55 @@ RSpec.describe Gitlab::Database::BatchCount do
end
end
end
+
+ describe '#batch_sum' do
+ let(:column) { :weight }
+
+ before do
+ Issue.first.update_attribute(column, 3)
+ Issue.last.update_attribute(column, 4)
+ end
+
+ it 'returns the sum of values in the given column' do
+ expect(described_class.batch_sum(model, column)).to eq(7)
+ end
+
+ it 'works when given an Arel column' do
+ expect(described_class.batch_sum(model, model.arel_table[column])).to eq(7)
+ end
+
+ it 'works with a batch size of 50K' do
+ expect(described_class.batch_sum(model, column, batch_size: 50_000)).to eq(7)
+ end
+
+ it 'works with start and finish provided' do
+ expect(described_class.batch_sum(model, column, start: model.minimum(:id), finish: model.maximum(:id))).to eq(7)
+ end
+
+ it 'returns the same result regardless of batch size' do
+ stub_const('Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE', 0)
+
+ (1..(model.count + 1)).each { |i| expect(described_class.batch_sum(model, column, batch_size: i)).to eq(7) }
+ end
+
+ it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE}" do
+ min_id = model.minimum(:id)
+
+ expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
+ expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE + min_id, :itself).once.and_call_original
+ end
+
+ described_class.batch_sum(model, column)
+ end
+
+ it_behaves_like 'when a transaction is open' do
+ subject { described_class.batch_sum(model, column) }
+ end
+
+ it_behaves_like 'disallowed configurations', :batch_sum do
+ let(:args) { [model, column] }
+ let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE }
+ let(:small_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE - 1 }
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
index e488bf5ee4c..c2028f8c238 100644
--- a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
+++ b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do
Namespace => threshold + 1
}
end
+
let(:threshold) { Gitlab::Database::Count::TablesampleCountStrategy::EXACT_COUNT_THRESHOLD }
before do
diff --git a/spec/lib/gitlab/database/custom_structure_spec.rb b/spec/lib/gitlab/database/custom_structure_spec.rb
index beda9df3684..b3bdca0acdd 100644
--- a/spec/lib/gitlab/database/custom_structure_spec.rb
+++ b/spec/lib/gitlab/database/custom_structure_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Gitlab::Database::CustomStructure do
Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
cascade_delete: true, from_table: 'issues', from_column: 'project_id', to_table: 'projects', to_column: 'id')
end
+
let!(:second_fk) do
Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
cascade_delete: false, from_table: 'issues', from_column: 'moved_to_id', to_table: 'issues', to_column: 'id')
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 48e1c97e97f..4b7f371b25a 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -712,7 +712,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:add_not_null_constraint).with(:users, :new)
expect(model).to receive(:execute).with("UPDATE \"users\" SET \"new\" = cast_to_jsonb_with_default(\"users\".\"id\") WHERE \"users\".\"id\" >= #{user.id}")
expect(model).to receive(:execute).with("DROP TRIGGER IF EXISTS #{trigger_name}\nON \"users\"\n")
- expect(model).to receive(:execute).with("CREATE TRIGGER #{trigger_name}\nBEFORE INSERT OR UPDATE\nON \"users\"\nFOR EACH ROW\nEXECUTE PROCEDURE #{trigger_name}()\n")
+ expect(model).to receive(:execute).with("CREATE TRIGGER #{trigger_name}\nBEFORE INSERT OR UPDATE\nON \"users\"\nFOR EACH ROW\nEXECUTE FUNCTION #{trigger_name}()\n")
expect(model).to receive(:execute).with("CREATE OR REPLACE FUNCTION #{trigger_name}()\nRETURNS trigger AS\n$BODY$\nBEGIN\n NEW.\"new\" := NEW.\"id\";\n RETURN NEW;\nEND;\n$BODY$\nLANGUAGE 'plpgsql'\nVOLATILE\n")
model.rename_column_concurrently(:users, :id, :new, type_cast_function: 'cast_to_jsonb_with_default')
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index efa9c83b2d2..7d88c17c9b3 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
let(:model) do
ActiveRecord::Migration.new.extend(described_class)
end
+
let_it_be(:connection) { ActiveRecord::Base.connection }
let(:referenced_table) { :issues }
let(:function_name) { '_test_partitioned_foreign_keys_function' }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 9b24ab7cad4..86f79b213ae 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -315,42 +315,13 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
expect(model.find(second_todo.id).attributes).to eq(second_todo.attributes)
end
end
-
- describe 'copying historic data to the partitioned table' do
- let(:source_table) { 'todos' }
- let(:migration_class) { '::Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
- let(:sub_batch_size) { described_class::SUB_BATCH_SIZE }
- let(:pause_seconds) { described_class::PAUSE_SECONDS }
- let!(:first_id) { create(:todo).id }
- let!(:second_id) { create(:todo).id }
- let!(:third_id) { create(:todo).id }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- expect(migration).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original
- end
-
- it 'enqueues jobs to copy each batch of data' do
- Sidekiq::Testing.fake! do
- migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
-
- first_job_arguments = [first_id, second_id, source_table, partitioned_table, 'id']
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([migration_class, first_job_arguments])
-
- second_job_arguments = [third_id, third_id, source_table, partitioned_table, 'id']
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([migration_class, second_job_arguments])
- end
- end
- end
end
describe '#drop_partitioned_table_for' do
let(:expected_tables) do
%w[000000 201912 202001 202002].map { |suffix| "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{partitioned_table}_#{suffix}" }.unshift(partitioned_table)
end
+
let(:migration_class) { 'Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
context 'when the table is not allowed' do
@@ -390,16 +361,85 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
expect(connection.table_exists?(table)).to be(false)
end
end
+ end
+
+ describe '#enqueue_partitioning_data_migration' do
+ context 'when the table is not allowed' do
+ let(:source_table) { :this_table_is_not_allowed }
+
+ it 'raises an error' do
+ expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original
+
+ expect do
+ migration.enqueue_partitioning_data_migration source_table
+ end.to raise_error(/#{source_table} is not allowed for use/)
+ end
+ end
- context 'cleaning up background migration tracking records' do
+ context 'when run inside a transaction block' do
+ it 'raises an error' do
+ expect(migration).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ migration.enqueue_partitioning_data_migration source_table
+ end.to raise_error(/can not be run inside a transaction/)
+ end
+ end
+
+ context 'when records exist in the source table' do
+ let(:source_table) { 'todos' }
+ let(:migration_class) { '::Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
+ let(:sub_batch_size) { described_class::SUB_BATCH_SIZE }
+ let(:pause_seconds) { described_class::PAUSE_SECONDS }
+ let!(:first_id) { create(:todo).id }
+ let!(:second_id) { create(:todo).id }
+ let!(:third_id) { create(:todo).id }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ expect(migration).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original
+ end
+
+ it 'enqueues jobs to copy each batch of data' do
+ migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
+
+ Sidekiq::Testing.fake! do
+ migration.enqueue_partitioning_data_migration source_table
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+
+ first_job_arguments = [first_id, second_id, source_table, partitioned_table, 'id']
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([migration_class, first_job_arguments])
+
+ second_job_arguments = [third_id, third_id, source_table, partitioned_table, 'id']
+ expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([migration_class, second_job_arguments])
+ end
+ end
+ end
+ end
+
+ describe '#cleanup_partitioning_data_migration' do
+ context 'when the table is not allowed' do
+ let(:source_table) { :this_table_is_not_allowed }
+
+ it 'raises an error' do
+ expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original
+
+ expect do
+ migration.cleanup_partitioning_data_migration source_table
+ end.to raise_error(/#{source_table} is not allowed for use/)
+ end
+ end
+
+ context 'when tracking records exist in the background_migration_jobs table' do
+ let(:migration_class) { 'Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
let!(:job1) { create(:background_migration_job, class_name: migration_class, arguments: [1, 10, source_table]) }
let!(:job2) { create(:background_migration_job, class_name: migration_class, arguments: [11, 20, source_table]) }
let!(:job3) { create(:background_migration_job, class_name: migration_class, arguments: [1, 10, 'other_table']) }
- it 'deletes any tracking records from the background_migration_jobs table' do
- migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
-
- expect { migration.drop_partitioned_table_for(source_table) }
+ it 'deletes those pertaining to the given table' do
+ expect { migration.cleanup_partitioning_data_migration(source_table) }
.to change { ::Gitlab::Database::BackgroundMigrationJob.count }.from(3).to(1)
remaining_record = ::Gitlab::Database::BackgroundMigrationJob.first
diff --git a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
new file mode 100644
index 00000000000..ca9f4af9187
--- /dev/null
+++ b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresqlAdapter::DumpSchemaVersionsMixin do
+ let(:schema_migration) { double('schema_migration', all_versions: versions) }
+
+ let(:instance) do
+ Object.new.extend(described_class)
+ end
+
+ before do
+ allow(instance).to receive(:schema_migration).and_return(schema_migration)
+ end
+
+ context 'when version files exist' do
+ let(:versions) { %w(5 2 1000 200 4 93 2) }
+
+ it 'touches version files' do
+ expect(Gitlab::Database::SchemaVersionFiles).to receive(:touch_all).with(versions)
+
+ instance.dump_schema_information
+ end
+ end
+
+ context 'when version files do not exist' do
+ let(:versions) { [] }
+
+ it 'does not touch version files' do
+ expect(Gitlab::Database::SchemaVersionFiles).not_to receive(:touch_all)
+
+ instance.dump_schema_information
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
index 8b3a0ceb804..ea8c9e2cfd7 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin do
end
end
end
+
let(:config) { Rails.application.config_for(:database).merge(pool: 1) }
let(:pool) { model.establish_connection(config) }
diff --git a/spec/lib/gitlab/database/postgresql_adapter/schema_versions_copy_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/schema_versions_copy_mixin_spec.rb
deleted file mode 100644
index c6333e4a4dc..00000000000
--- a/spec/lib/gitlab/database/postgresql_adapter/schema_versions_copy_mixin_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::PostgresqlAdapter::SchemaVersionsCopyMixin do
- let(:schema_migration) { double('schem_migration', table_name: table_name, all_versions: versions) }
- let(:versions) { %w(5 2 1000 200 4 93 2) }
- let(:table_name) { "schema_migrations" }
-
- let(:instance) do
- Object.new.extend(described_class)
- end
-
- before do
- allow(instance).to receive(:schema_migration).and_return(schema_migration)
- allow(instance).to receive(:quote_table_name).with(table_name).and_return("\"#{table_name}\"")
- end
-
- subject { instance.dump_schema_information }
-
- it 'uses COPY FROM STDIN' do
- expect(subject.split("\n").first).to match(/COPY "schema_migrations" \(version\) FROM STDIN;/)
- end
-
- it 'contains a sorted list of versions by their numeric value' do
- version_lines = subject.split("\n")[1..-2].map(&:to_i)
-
- expect(version_lines).to eq(versions.map(&:to_i).sort)
- end
-
- it 'contains a end-of-data marker' do
- expect(subject).to end_with("\\.\n")
- end
-
- context 'with non-Integer versions' do
- let(:versions) { %w(5 2 4 abc) }
-
- it 'raises an error' do
- expect { subject }.to raise_error(/invalid value for Integer/)
- end
- end
-end
diff --git a/spec/lib/gitlab/database/schema_version_files_spec.rb b/spec/lib/gitlab/database/schema_version_files_spec.rb
new file mode 100644
index 00000000000..c3b3ae0a07f
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_version_files_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaVersionFiles do
+ describe '.touch_all' do
+ let(:version1) { '20200123' }
+ let(:version2) { '20200410' }
+ let(:version3) { '20200602' }
+ let(:version4) { '20200809' }
+ let(:relative_schema_directory) { 'db/schema_migrations' }
+ let(:relative_migrate_directory) { 'db/migrate' }
+ let(:relative_post_migrate_directory) { 'db/post_migrate' }
+
+ it 'creates a file containing a checksum for each version with a matching migration' do
+ Dir.mktmpdir do |tmpdir|
+ schema_directory = Pathname.new(tmpdir).join(relative_schema_directory)
+ migrate_directory = Pathname.new(tmpdir).join(relative_migrate_directory)
+ post_migrate_directory = Pathname.new(tmpdir).join(relative_post_migrate_directory)
+
+ FileUtils.mkdir_p(migrate_directory)
+ FileUtils.mkdir_p(post_migrate_directory)
+ FileUtils.mkdir_p(schema_directory)
+
+ migration1_filepath = migrate_directory.join("#{version1}_migration.rb")
+ FileUtils.touch(migration1_filepath)
+
+ migration2_filepath = post_migrate_directory.join("#{version2}_post_migration.rb")
+ FileUtils.touch(migration2_filepath)
+
+ old_version_filepath = schema_directory.join('20200101')
+ FileUtils.touch(old_version_filepath)
+
+ expect(File.exist?(old_version_filepath)).to be(true)
+
+ allow(described_class).to receive(:schema_directory).and_return(schema_directory)
+ allow(described_class).to receive(:migration_directories).and_return([migrate_directory, post_migrate_directory])
+
+ described_class.touch_all([version1, version2, version3, version4])
+
+ expect(File.exist?(old_version_filepath)).to be(false)
+ [version1, version2].each do |version|
+ version_filepath = schema_directory.join(version)
+ expect(File.exist?(version_filepath)).to be(true)
+
+ hashed_value = Digest::SHA256.hexdigest(version)
+ expect(File.read(version_filepath)).to eq(hashed_value)
+ end
+
+ [version3, version4].each do |version|
+ version_filepath = schema_directory.join(version)
+ expect(File.exist?(version_filepath)).to be(false)
+ end
+ end
+ end
+ end
+
+ describe '.load_all' do
+ let(:connection) { double('connection') }
+
+ before do
+ allow(described_class).to receive(:connection).and_return(connection)
+ allow(described_class).to receive(:find_version_filenames).and_return(filenames)
+ end
+
+ context 'when there are no version files' do
+ let(:filenames) { [] }
+
+ it 'does nothing' do
+ expect(connection).not_to receive(:quote_string)
+ expect(connection).not_to receive(:execute)
+
+ described_class.load_all
+ end
+ end
+
+ context 'when there are version files' do
+ let(:filenames) { %w[123 456 789] }
+
+ it 'inserts the missing versions into schema_migrations' do
+ filenames.each do |filename|
+ expect(connection).to receive(:quote_string).with(filename).and_return(filename)
+ end
+
+ expect(connection).to receive(:execute).with(<<~SQL)
+ INSERT INTO schema_migrations (version)
+ VALUES ('123'),('456'),('789')
+ ON CONFLICT DO NOTHING
+ SQL
+
+ described_class.load_all
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/similarity_score_spec.rb b/spec/lib/gitlab/database/similarity_score_spec.rb
new file mode 100644
index 00000000000..e36a4f610e1
--- /dev/null
+++ b/spec/lib/gitlab/database/similarity_score_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SimilarityScore do
+ let(:search) { '' }
+ let(:query_result) { ActiveRecord::Base.connection.execute(query).to_a }
+
+ let(:query) do
+ # In memory query, with the id as the tie breaker.
+ <<-SQL
+ SELECT *, #{order_expression} AS similarity
+ FROM (
+ VALUES (1, 'Git', 'git', 'git source code mirror. this is a publish-only repository.'),
+ (2, 'GitLab Runner', 'gitlab-runner', 'official helm chart for the gitlab runner'),
+ (3, 'gitaly', 'gitaly', 'gitaly is a git rpc service for handling all the git calls made by gitlab'),
+ (4, 'GitLab', 'gitlab', 'gitlab is an open source end-to-end software development platform with built-in version control'),
+ (5, 'Gitlab Danger', 'gitlab-danger', 'this gem provides common dangerfile and plugins for gitlab projects'),
+ (6, 'different', 'same', 'same'),
+ (7, 'same', 'different', 'same'),
+ (8, 'gitlab-styles', 'gitlab-styles', 'gitlab style guides and shared style configs.'),
+ (9, '🔒 gitaly', 'gitaly-sec', 'security mirror for gitaly')
+ ) tbl (id, name, path, descrption) ORDER BY #{order_expression} DESC, id DESC;
+ SQL
+ end
+
+ let(:order_expression) do
+ Gitlab::Database::SimilarityScore.build_expression(search: search, rules: [{ column: Arel.sql('path') }]).to_sql
+ end
+
+ subject { query_result.take(3).map { |row| row['path'] } }
+
+ context 'when passing empty values' do
+ context 'when search is nil' do
+ let(:search) { nil }
+
+ it 'orders by a constant 0 value' do
+ expect(query).to include('ORDER BY CAST(0 AS integer) DESC')
+ end
+ end
+
+ context 'when rules are empty' do
+ let(:search) { 'text' }
+
+ let(:order_expression) do
+ Gitlab::Database::SimilarityScore.build_expression(search: search, rules: []).to_sql
+ end
+
+ it 'orders by a constant 0 value' do
+ expect(query).to include('ORDER BY CAST(0 AS integer) DESC')
+ end
+ end
+ end
+
+ context 'when similarity scoring based on the path' do
+ let(:search) { 'git' }
+
+ context 'when searching for `git`' do
+ let(:search) { 'git' }
+
+ it { expect(subject).to eq(%w[git gitlab gitaly]) }
+ end
+
+ context 'when searching for `gitlab`' do
+ let(:search) { 'gitlab' }
+
+ it { expect(subject).to eq(%w[gitlab gitlab-styles gitlab-danger]) }
+ end
+
+ context 'when searching for something unrelated' do
+ let(:search) { 'xyz' }
+
+ it 'results have 0 similarity score' do
+ expect(query_result.map { |row| row['similarity'] }).to all(eq(0))
+ end
+ end
+ end
+
+ describe 'score multiplier' do
+ let(:order_expression) do
+ Gitlab::Database::SimilarityScore.build_expression(search: search, rules: [
+ { column: Arel.sql('path'), multiplier: 1 },
+ { column: Arel.sql('name'), multiplier: 0.8 }
+ ]).to_sql
+ end
+
+ let(:search) { 'different' }
+
+ it 'ranks `path` matches higher' do
+ expect(subject).to eq(%w[different same gitlab-danger])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 70cbddbb7b7..2cc6e175500 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -72,9 +72,14 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_attempts = 0
lock_acquired = false
- expect_any_instance_of(Gitlab::Database::WithLockRetries).to receive(:sleep).exactly(retry_count - 1).times # we don't sleep in the last iteration
-
- allow_any_instance_of(Gitlab::Database::WithLockRetries).to receive(:run_block_with_transaction).and_wrap_original do |method|
+ # the actual number of attempts to run_block_with_transaction can never exceed the number of
+ # timings_configurations, so here we limit the retry_count if it exceeds that value
+ #
+ # also, there is no call to sleep after the final attempt, which is why it will always be one less
+ expected_runs_with_timeout = [retry_count, timing_configuration.size].min
+ expect(subject).to receive(:sleep).exactly(expected_runs_with_timeout - 1).times
+
+ expect(subject).to receive(:run_block_with_transaction).exactly(expected_runs_with_timeout).times.and_wrap_original do |method|
lock_fiber.resume if lock_attempts == retry_count
method.call
@@ -114,6 +119,33 @@ RSpec.describe Gitlab::Database::WithLockRetries do
end
end
+ context 'after the retries, when requested to raise an error' do
+ let(:expected_attempts_with_timeout) { timing_configuration.size }
+ let(:retry_count) { timing_configuration.size + 1 }
+
+ it 'raises an error instead of waiting indefinitely for the lock' do
+ lock_attempts = 0
+ lock_acquired = false
+
+ expect(subject).to receive(:sleep).exactly(expected_attempts_with_timeout - 1).times
+ expect(subject).to receive(:run_block_with_transaction).exactly(expected_attempts_with_timeout).times.and_call_original
+
+ expect do
+ subject.run(raise_on_exhaustion: true) do
+ lock_attempts += 1
+
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ lock_acquired = true
+ end
+ end
+ end.to raise_error(described_class::AttemptsExhaustedError)
+
+ expect(lock_attempts).to eq(retry_count - 1)
+ expect(lock_acquired).to eq(false)
+ end
+ end
+
context 'when statement timeout is reached' do
it 'raises QueryCanceled error' do
lock_acquired = false