gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-12-20 17:22:11 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-12-20 17:22:11 +0300
commit    0c872e02b2c822e3397515ec324051ff540f0cd5 (patch)
tree      ce2fb6ce7030e4dad0f4118d21ab6453e5938cdd /spec/lib/gitlab/database
parent    f7e05a6853b12f02911494c4b3fe53d9540d74fc (diff)

Add latest changes from gitlab-org/gitlab@15-7-stable-ee (v15.7.0-rc42)
Diffstat (limited to 'spec/lib/gitlab/database')

-rw-r--r--  spec/lib/gitlab/database/gitlab_schema_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb | 45
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/database/lock_writes_manager_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb | 334
-rw-r--r--  spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 206
-rw-r--r--  spec/lib/gitlab/database/migrations/batched_migration_last_id_spec.rb | 72
-rw-r--r--  spec/lib/gitlab/database/migrations/runner_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/migrations/sidekiq_helpers_spec.rb | 276
-rw-r--r--  spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb | 77
-rw-r--r--  spec/lib/gitlab/database/reindexing_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/database/schema_cleaner_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/database/tables_truncate_spec.rb | 94
-rw-r--r--  spec/lib/gitlab/database/transaction/context_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/type/indifferent_jsonb_spec.rb | 66
24 files changed, 1126 insertions, 339 deletions
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 72950895022..4b37cbda047 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -1,42 +1,86 @@
# frozen_string_literal: true
require 'spec_helper'
+RSpec.shared_examples 'validate path globs' do |path_globs|
+ it 'returns an array of path globs' do
+ expect(path_globs).to be_an(Array)
+ expect(path_globs).to all(be_an(Pathname))
+ end
+end
+
RSpec.describe Gitlab::Database::GitlabSchema do
- describe '.tables_to_schema' do
- it 'all tables have assigned a known gitlab_schema' do
- expect(described_class.tables_to_schema).to all(
+ describe '.views_and_tables_to_schema' do
+ it 'all tables and views have assigned a known gitlab_schema' do
+ expect(described_class.views_and_tables_to_schema).to all(
match([be_a(String), be_in(Gitlab::Database.schemas_to_base_models.keys.map(&:to_sym))])
)
end
# This being run across different databases indirectly also tests
# a general consistency of structure across databases
- Gitlab::Database.database_base_models.select { |k, _| k != 'geo' }.each do |db_config_name, db_class|
+ Gitlab::Database.database_base_models.except(:geo).each do |db_config_name, db_class|
context "for #{db_config_name} using #{db_class}" do
let(:db_data_sources) { db_class.connection.data_sources }
# The Geo database does not share the same structure as all decomposed databases
- subject { described_class.tables_to_schema.select { |_, v| v != :gitlab_geo } }
+ subject { described_class.views_and_tables_to_schema.select { |_, v| v != :gitlab_geo } }
it 'new data sources are added' do
- missing_tables = db_data_sources.to_set - subject.keys
+ missing_data_sources = db_data_sources.to_set - subject.keys
- expect(missing_tables).to be_empty, \
- "Missing table(s) #{missing_tables.to_a} not found in #{described_class}.tables_to_schema. " \
- "Any new tables must be added to #{described_class::GITLAB_SCHEMAS_FILE}."
+ expect(missing_data_sources).to be_empty, \
+ "Missing table/view(s) #{missing_data_sources.to_a} not found in " \
+ "#{described_class}.views_and_tables_to_schema. " \
+ "Any new tables or views must be added to the database dictionary. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
end
it 'non-existing data sources are removed' do
- extra_tables = subject.keys.to_set - db_data_sources
+ extra_data_sources = subject.keys.to_set - db_data_sources
- expect(extra_tables).to be_empty, \
- "Extra table(s) #{extra_tables.to_a} found in #{described_class}.tables_to_schema. " \
- "Any removed or renamed tables must be removed from #{described_class::GITLAB_SCHEMAS_FILE}."
+ expect(extra_data_sources).to be_empty, \
+ "Extra table/view(s) #{extra_data_sources.to_a} found in #{described_class}.views_and_tables_to_schema. " \
+ "Any removed or renamed tables or views must be removed from the database dictionary. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
end
end
end
end
+ describe '.dictionary_path_globs' do
+ include_examples 'validate path globs', described_class.dictionary_path_globs
+ end
+
+ describe '.view_path_globs' do
+ include_examples 'validate path globs', described_class.view_path_globs
+ end
+
+ describe '.tables_to_schema' do
+ let(:database_models) { Gitlab::Database.database_base_models.except(:geo) }
+ let(:views) { database_models.flat_map { |_, m| m.connection.views }.sort.uniq }
+
+ subject { described_class.tables_to_schema }
+
+ it 'returns only tables' do
+ tables = subject.keys
+
+ expect(tables).not_to include(views.to_set)
+ end
+ end
+
+ describe '.views_to_schema' do
+ let(:database_models) { Gitlab::Database.database_base_models.except(:geo) }
+ let(:tables) { database_models.flat_map { |_, m| m.connection.tables }.sort.uniq }
+
+ subject { described_class.views_to_schema }
+
+ it 'returns only views' do
+ views = subject.keys
+
+ expect(views).not_to include(tables.to_set)
+ end
+ end
+
describe '.table_schema' do
using RSpec::Parameterized::TableSyntax
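Note: the new assertions above hinge on every table and view resolving to a known gitlab_schema via the database dictionary. A minimal sketch of querying that mapping; the concrete schema symbols in the comments are assumptions, not values taken from this commit:

    # Illustrative only; the schema symbols shown are assumptions.
    mapping = Gitlab::Database::GitlabSchema.views_and_tables_to_schema
    mapping['projects']  # => :gitlab_main (assumed)
    mapping['ci_builds'] # => :gitlab_ci (assumed)

    # The split accessors covered by the new specs return disjoint key sets:
    tables = Gitlab::Database::GitlabSchema.tables_to_schema.keys
    views  = Gitlab::Database::GitlabSchema.views_to_schema.keys
    (tables & views).empty? # => true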
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
index b7915e6cf69..7eb20f77417 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
+RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware, feature_category: :database do
let(:middleware) { described_class.new }
let(:worker_class) { 'TestDataConsistencyWorker' }
@@ -34,8 +34,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
data_consistency data_consistency, feature_flag: feature_flag
- def perform(*args)
- end
+ def perform(*args); end
end
end
@@ -83,21 +82,41 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(false)
end
- it 'passes database_replica_location' do
- expected_location = {}
+ context 'when replica hosts are available' do
+ it 'passes database_replica_location' do
+ expected_location = {}
- Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
- expect(lb.host)
- .to receive(:database_replica_location)
- .and_return(location)
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ expect(lb.host)
+ .to receive(:database_replica_location)
+ .and_return(location)
- expected_location[lb.name] = location
+ expected_location[lb.name] = location
+ end
+
+ run_middleware
+
+ expect(job['wal_locations']).to eq(expected_location)
+ expect(job['wal_location_source']).to eq(:replica)
end
+ end
- run_middleware
+ context 'when no replica hosts are available' do
+ it 'passes primary_write_location' do
+ expected_location = {}
- expect(job['wal_locations']).to eq(expected_location)
- expect(job['wal_location_source']).to eq(:replica)
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ expect(lb).to receive(:host).and_return(nil)
+ expect(lb).to receive(:primary_write_location).and_return(location)
+
+ expected_location[lb.name] = location
+ end
+
+ run_middleware
+
+ expect(job['wal_locations']).to eq(expected_location)
+ expect(job['wal_location_source']).to eq(:replica)
+ end
end
include_examples 'job data consistency'
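Note: the two contexts above pin down a fallback: when a load balancer has no replica host, the primary write location is recorded instead. A simplified sketch of that selection, using only the methods stubbed in the spec; the surrounding structure is an assumption, not the middleware's actual code:

    wal_locations = {}

    Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
      wal_locations[lb.name] =
        if lb.host
          lb.host.database_replica_location
        else
          lb.primary_write_location
        end
    end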
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index 61b63016f1a..abf10456d0a 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -33,8 +33,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
data_consistency data_consistency, feature_flag: feature_flag
- def perform(*args)
- end
+ def perform(*args); end
end
end
@@ -332,28 +331,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
expect(middleware.send(:databases_in_sync?, locations))
.to eq(false)
end
-
- context 'when "indifferent_wal_location_keys" FF is off' do
- before do
- stub_feature_flags(indifferent_wal_location_keys: false)
- end
-
- it 'returns true when the load balancers are not in sync' do
- locations = {}
-
- Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
- locations[lb.name.to_s] = 'foo'
-
- allow(lb)
- .to receive(:select_up_to_date_host)
- .with('foo')
- .and_return(false)
- end
-
- expect(middleware.send(:databases_in_sync?, locations))
- .to eq(true)
- end
- end
end
end
diff --git a/spec/lib/gitlab/database/lock_writes_manager_spec.rb b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
index b1cc8add55a..242b2040eaa 100644
--- a/spec/lib/gitlab/database/lock_writes_manager_spec.rb
+++ b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
@@ -37,6 +37,14 @@ RSpec.describe Gitlab::Database::LockWritesManager do
it 'returns true for a table that is locked for writes' do
expect { subject.lock_writes }.to change { subject.table_locked_for_writes?(test_table) }.from(false).to(true)
end
+
+ context 'for detached partition tables in another schema' do
+ let(:test_table) { 'gitlab_partitions_dynamic._test_table_20220101' }
+
+ it 'returns true for a table that is locked for writes' do
+ expect { subject.lock_writes }.to change { subject.table_locked_for_writes?(test_table) }.from(false).to(true)
+ end
+ end
end
describe '#lock_writes' do
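Note: the added context exercises schema-qualified table names such as detached partitions in gitlab_partitions_dynamic. A minimal usage sketch; the keyword arguments are assumptions inferred from how the spec drives the manager:

    manager = Gitlab::Database::LockWritesManager.new(
      table_name: 'gitlab_partitions_dynamic._test_table_20220101',
      connection: ApplicationRecord.connection, # assumed
      database_name: 'main'                     # assumed
    )

    manager.lock_writes
    manager.table_locked_for_writes?('gitlab_partitions_dynamic._test_table_20220101') # => true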
diff --git a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb
new file mode 100644
index 00000000000..9fd49b312eb
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb
@@ -0,0 +1,334 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
+ :reestablished_active_record_base, query_analyzers: false do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:schema_class) { Class.new(Gitlab::Database::Migration[2.1]) }
+ let(:gitlab_main_table_name) { :_test_gitlab_main_table }
+ let(:gitlab_ci_table_name) { :_test_gitlab_ci_table }
+ let(:gitlab_geo_table_name) { :_test_gitlab_geo_table }
+ let(:gitlab_shared_table_name) { :_test_table }
+
+ before do
+ stub_feature_flags(automatic_lock_writes_on_table: true)
+ reconfigure_db_connection(model: ActiveRecord::Base, config_model: config_model)
+ end
+
+ shared_examples 'does not lock writes on table' do |config_model|
+ let(:config_model) { config_model }
+
+ it 'allows deleting records from the table' do
+ allow_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
+ expect(instance).not_to receive(:lock_writes)
+ end
+
+ run_migration
+
+ expect do
+ migration_class.connection.execute("DELETE FROM #{table_name}")
+ end.not_to raise_error
+ end
+ end
+
+ shared_examples 'locks writes on table' do |config_model|
+ let(:config_model) { config_model }
+
+ it 'errors on deleting' do
+ allow_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
+ expect(instance).to receive(:lock_writes).and_call_original
+ end
+
+ run_migration
+
+ expect do
+ migration_class.connection.execute("DELETE FROM #{table_name}")
+ end.to raise_error(ActiveRecord::StatementInvalid, /is write protected/)
+ end
+ end
+
+ context 'when executing create_table migrations' do
+ let(:create_gitlab_main_table_migration_class) { create_table_migration(gitlab_main_table_name) }
+ let(:create_gitlab_ci_table_migration_class) { create_table_migration(gitlab_ci_table_name) }
+ let(:create_gitlab_shared_table_migration_class) { create_table_migration(gitlab_shared_table_name) }
+
+ context 'when single database' do
+ let(:config_model) { Gitlab::Database.database_base_models[:main] }
+
+ before do
+ skip_if_multiple_databases_are_setup
+ end
+
+ it 'does not lock any newly created tables' do
+ allow_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
+ expect(instance).not_to receive(:lock_writes)
+ end
+
+ create_gitlab_main_table_migration_class.migrate(:up)
+ create_gitlab_ci_table_migration_class.migrate(:up)
+ create_gitlab_shared_table_migration_class.migrate(:up)
+
+ expect do
+ create_gitlab_main_table_migration_class.connection.execute("DELETE FROM #{gitlab_main_table_name}")
+ create_gitlab_ci_table_migration_class.connection.execute("DELETE FROM #{gitlab_ci_table_name}")
+ create_gitlab_shared_table_migration_class.connection.execute("DELETE FROM #{gitlab_shared_table_name}")
+ end.not_to raise_error
+ end
+ end
+
+ context 'when multiple databases' do
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ let(:skip_automatic_lock_on_writes) { false }
+ let(:migration_class) { create_table_migration(table_name, skip_automatic_lock_on_writes) }
+ let(:run_migration) { migration_class.migrate(:up) }
+
+ context 'for creating a gitlab_main table' do
+ let(:table_name) { gitlab_main_table_name }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ it_behaves_like 'locks writes on table', Gitlab::Database.database_base_models[:ci]
+
+ context 'when table listed as a deleted table' do
+ before do
+ stub_const("Gitlab::Database::GitlabSchema::DELETED_TABLES", { table_name.to_s => :gitlab_main })
+ end
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ end
+
+ context 'when the migration skips automatic locking of tables' do
+ let(:skip_automatic_lock_on_writes) { true }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ end
+
+ context 'when the SKIP_AUTOMATIC_LOCK_ON_WRITES feature flag is set' do
+ before do
+ stub_env('SKIP_AUTOMATIC_LOCK_ON_WRITES' => 'true')
+ end
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ end
+
+ context 'when the automatic_lock_writes_on_table feature flag is disabled' do
+ before do
+ stub_feature_flags(automatic_lock_writes_on_table: false)
+ end
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ end
+ end
+
+ context 'for creating a gitlab_ci table' do
+ let(:table_name) { gitlab_ci_table_name }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ it_behaves_like 'locks writes on table', Gitlab::Database.database_base_models[:main]
+
+ context 'when table listed as a deleted table' do
+ before do
+ stub_const("Gitlab::Database::GitlabSchema::DELETED_TABLES", { table_name.to_s => :gitlab_ci })
+ end
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ end
+
+ context 'when the migration skips automatic locking of tables' do
+ let(:skip_automatic_lock_on_writes) { true }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ end
+
+ context 'when the SKIP_AUTOMATIC_LOCK_ON_WRITES feature flag is set' do
+ before do
+ stub_env('SKIP_AUTOMATIC_LOCK_ON_WRITES' => 'true')
+ end
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ end
+
+ context 'when the automatic_lock_writes_on_table feature flag is disabled' do
+ before do
+ stub_feature_flags(automatic_lock_writes_on_table: false)
+ end
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ end
+ end
+
+ context 'for creating gitlab_shared table' do
+ let(:table_name) { gitlab_shared_table_name }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ end
+
+ context 'for creating a gitlab_geo table' do
+ before do
+ skip unless geo_configured?
+ end
+
+ let(:table_name) { gitlab_geo_table_name }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:geo]
+ end
+
+ context 'for creating an unknown gitlab_schema table' do
+ let(:table_name) { :foobar } # no gitlab_schema defined
+ let(:config_model) { Gitlab::Database.database_base_models[:main] }
+
+ it "raises an error about undefined gitlab_schema" do
+ expected_error_message = <<~ERROR
+ No gitlab_schema is defined for the table #{table_name}. Please consider
+ adding it to the database dictionary.
+ More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html
+ ERROR
+
+ expect { run_migration }.to raise_error(expected_error_message)
+ end
+ end
+ end
+ end
+
+ context 'when renaming a table' do
+ before do
+ skip_if_multiple_databases_not_setup
+ create_table_migration(old_table_name).migrate(:up) # create the table first before renaming it
+ end
+
+ let(:migration_class) { rename_table_migration(old_table_name, table_name) }
+ let(:run_migration) { migration_class.migrate(:up) }
+
+ context 'when a gitlab_main table' do
+ let(:old_table_name) { gitlab_main_table_name }
+ let(:table_name) { :_test_gitlab_main_new_table }
+ let(:database_base_model) { Gitlab::Database.database_base_models[:main] }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ it_behaves_like 'locks writes on table', Gitlab::Database.database_base_models[:ci]
+ end
+
+ context 'when a gitlab_ci table' do
+ let(:old_table_name) { gitlab_ci_table_name }
+ let(:table_name) { :_test_gitlab_ci_new_table }
+ let(:database_base_model) { Gitlab::Database.database_base_models[:ci] }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ it_behaves_like 'locks writes on table', Gitlab::Database.database_base_models[:main]
+ end
+ end
+
+ context 'when reversing drop_table migrations' do
+ let(:drop_gitlab_main_table_migration_class) { drop_table_migration(gitlab_main_table_name) }
+ let(:drop_gitlab_ci_table_migration_class) { drop_table_migration(gitlab_ci_table_name) }
+ let(:drop_gitlab_shared_table_migration_class) { drop_table_migration(gitlab_shared_table_name) }
+
+ context 'when single database' do
+ let(:config_model) { Gitlab::Database.database_base_models[:main] }
+
+ before do
+ skip_if_multiple_databases_are_setup
+ end
+
+ it 'does not lock any newly created tables' do
+ allow_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
+ expect(instance).not_to receive(:lock_writes)
+ end
+
+ drop_gitlab_main_table_migration_class.connection.execute("CREATE TABLE #{gitlab_main_table_name}()")
+ drop_gitlab_ci_table_migration_class.connection.execute("CREATE TABLE #{gitlab_ci_table_name}()")
+ drop_gitlab_shared_table_migration_class.connection.execute("CREATE TABLE #{gitlab_shared_table_name}()")
+
+ drop_gitlab_main_table_migration_class.migrate(:up)
+ drop_gitlab_ci_table_migration_class.migrate(:up)
+ drop_gitlab_shared_table_migration_class.migrate(:up)
+
+ drop_gitlab_main_table_migration_class.migrate(:down)
+ drop_gitlab_ci_table_migration_class.migrate(:down)
+ drop_gitlab_shared_table_migration_class.migrate(:down)
+
+ expect do
+ drop_gitlab_main_table_migration_class.connection.execute("DELETE FROM #{gitlab_main_table_name}")
+ drop_gitlab_ci_table_migration_class.connection.execute("DELETE FROM #{gitlab_ci_table_name}")
+ drop_gitlab_shared_table_migration_class.connection.execute("DELETE FROM #{gitlab_shared_table_name}")
+ end.not_to raise_error
+ end
+ end
+
+ context 'when multiple databases' do
+ before do
+ skip_if_multiple_databases_not_setup
+ migration_class.connection.execute("CREATE TABLE #{table_name}()")
+ migration_class.migrate(:up)
+ end
+
+ let(:migration_class) { drop_table_migration(table_name) }
+ let(:run_migration) { migration_class.migrate(:down) }
+
+ context 'for re-creating a gitlab_main table' do
+ let(:table_name) { gitlab_main_table_name }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ it_behaves_like 'locks writes on table', Gitlab::Database.database_base_models[:ci]
+ end
+
+ context 'for re-creating a gitlab_ci table' do
+ let(:table_name) { gitlab_ci_table_name }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ it_behaves_like 'locks writes on table', Gitlab::Database.database_base_models[:main]
+ end
+
+ context 'for re-creating a gitlab_shared table' do
+ let(:table_name) { gitlab_shared_table_name }
+
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
+ it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
+ end
+ end
+ end
+
+ def create_table_migration(table_name, skip_lock_on_writes = false)
+ migration_class = Class.new(schema_class) do
+ class << self; attr_accessor :table_name; end
+ def change
+ create_table self.class.table_name
+ end
+ end
+ migration_class.skip_automatic_lock_on_writes = skip_lock_on_writes
+ migration_class.tap { |klass| klass.table_name = table_name }
+ end
+
+ def rename_table_migration(old_table_name, new_table_name)
+ migration_class = Class.new(schema_class) do
+ class << self; attr_accessor :old_table_name, :new_table_name; end
+ def change
+ rename_table self.class.old_table_name, self.class.new_table_name
+ end
+ end
+
+ migration_class.tap do |klass|
+ klass.old_table_name = old_table_name
+ klass.new_table_name = new_table_name
+ end
+ end
+
+ def drop_table_migration(table_name)
+ migration_class = Class.new(schema_class) do
+ class << self; attr_accessor :table_name; end
+ def change
+ drop_table(self.class.table_name) {}
+ end
+ end
+ migration_class.tap { |klass| klass.table_name = table_name }
+ end
+
+ def geo_configured?
+ !!ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'geo')
+ end
+end
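Note: the spec's create_table_migration helper builds anonymous migration classes and toggles skip_automatic_lock_on_writes on them. A sketch of an equivalent named migration; treating the attribute as settable in a real migration class body is an assumption based on that helper:

    class CreateTestGitlabMainTable < Gitlab::Database::Migration[2.1]
      # Opt out of the automatic write lock on the "other" database
      # (assumed to behave like migration_class.skip_automatic_lock_on_writes above).
      self.skip_automatic_lock_on_writes = true

      def change
        create_table :_test_gitlab_main_table do |t|
          t.timestamps
        end
      end
    end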
diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
index e43cfe0814e..e8045f5afec 100644
--- a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
describe '#restrict_gitlab_migration' do
it 'invalid schema raises exception' do
- expect { schema_class.restrict_gitlab_migration gitlab_schema: :gitlab_non_exisiting }
+ expect { schema_class.restrict_gitlab_migration gitlab_schema: :gitlab_non_existing }
.to raise_error /Unknown 'gitlab_schema:/
end
@@ -102,7 +102,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
"does add index to projects in gitlab_main and gitlab_ci" => {
migration: ->(klass) do
def change
- # Due to running in transactin we cannot use `add_concurrent_index`
+ # Due to running in transaction we cannot use `add_concurrent_index`
add_index :projects, :hidden
end
end,
@@ -185,8 +185,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
execute("create schema __test_schema")
end
- def down
- end
+ def down; end
end,
query_matcher: /create schema __test_schema/,
expected: {
@@ -306,8 +305,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
detached_partitions_class.create!(drop_after: Time.current, table_name: '_test_table')
end
- def down
- end
+ def down; end
def detached_partitions_class
Class.new(Gitlab::Database::Migration[2.0]::MigrationRecord) do
@@ -450,8 +448,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
ApplicationSetting.last
end
- def down
- end
+ def down; end
end,
query_matcher: /FROM "application_settings"/,
expected: {
@@ -475,8 +472,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
Feature.enabled?(:redis_hll_tracking, type: :ops)
end
- def down
- end
+ def down; end
end,
query_matcher: /FROM "features"/,
expected: {
@@ -505,8 +501,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
end
end
- def down
- end
+ def down; end
end,
query_matcher: /FROM ci_builds/,
setup: -> (_) { skip_if_multiple_databases_not_setup },
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 65fbc8d9935..30eeff31326 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1199,18 +1199,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#add_column_with_default' do
- let(:column) { Project.columns.find { |c| c.name == "id" } }
-
- it 'delegates to #add_column' do
- expect(model).to receive(:add_column).with(:projects, :foo, :integer, default: 10, limit: nil, null: true)
-
- model.add_column_with_default(:projects, :foo, :integer,
- default: 10,
- allow_null: true)
- end
- end
-
describe '#rename_column_concurrently' do
context 'in a transaction' do
it 'raises RuntimeError' do
@@ -2006,170 +1994,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe 'sidekiq migration helpers', :redis do
- let(:worker) do
- Class.new do
- include Sidekiq::Worker
-
- sidekiq_options queue: 'test'
-
- def self.name
- 'WorkerClass'
- end
- end
- end
-
- let(:same_queue_different_worker) do
- Class.new do
- include Sidekiq::Worker
-
- sidekiq_options queue: 'test'
-
- def self.name
- 'SameQueueDifferentWorkerClass'
- end
- end
- end
-
- let(:unrelated_worker) do
- Class.new do
- include Sidekiq::Worker
-
- sidekiq_options queue: 'unrelated'
-
- def self.name
- 'UnrelatedWorkerClass'
- end
- end
- end
-
- before do
- stub_const(worker.name, worker)
- stub_const(unrelated_worker.name, unrelated_worker)
- stub_const(same_queue_different_worker.name, same_queue_different_worker)
- end
-
- describe '#sidekiq_remove_jobs', :clean_gitlab_redis_queues do
- def clear_queues
- Sidekiq::Queue.new('test').clear
- Sidekiq::Queue.new('unrelated').clear
- Sidekiq::RetrySet.new.clear
- Sidekiq::ScheduledSet.new.clear
- end
-
- around do |example|
- clear_queues
- Sidekiq::Testing.disable!(&example)
- clear_queues
- end
-
- it "removes all related job instances from the job class's queue" do
- worker.perform_async
- same_queue_different_worker.perform_async
- unrelated_worker.perform_async
-
- queue_we_care_about = Sidekiq::Queue.new(worker.queue)
- unrelated_queue = Sidekiq::Queue.new(unrelated_worker.queue)
-
- expect(queue_we_care_about.size).to eq(2)
- expect(unrelated_queue.size).to eq(1)
-
- model.sidekiq_remove_jobs(job_klass: worker)
-
- expect(queue_we_care_about.size).to eq(1)
- expect(queue_we_care_about.map(&:klass)).not_to include(worker.name)
- expect(queue_we_care_about.map(&:klass)).to include(
- same_queue_different_worker.name
- )
- expect(unrelated_queue.size).to eq(1)
- end
-
- context 'when job instances are in the scheduled set' do
- it 'removes all related job instances from the scheduled set' do
- worker.perform_in(1.hour)
- unrelated_worker.perform_in(1.hour)
-
- scheduled = Sidekiq::ScheduledSet.new
-
- expect(scheduled.size).to eq(2)
- expect(scheduled.map(&:klass)).to include(
- worker.name,
- unrelated_worker.name
- )
-
- model.sidekiq_remove_jobs(job_klass: worker)
-
- expect(scheduled.size).to eq(1)
- expect(scheduled.map(&:klass)).not_to include(worker.name)
- expect(scheduled.map(&:klass)).to include(unrelated_worker.name)
- end
- end
-
- context 'when job instances are in the retry set' do
- include_context 'when handling retried jobs'
-
- it 'removes all related job instances from the retry set' do
- retry_in(worker, 1.hour)
- retry_in(worker, 2.hours)
- retry_in(worker, 3.hours)
- retry_in(unrelated_worker, 4.hours)
-
- retries = Sidekiq::RetrySet.new
-
- expect(retries.size).to eq(4)
- expect(retries.map(&:klass)).to include(
- worker.name,
- unrelated_worker.name
- )
-
- model.sidekiq_remove_jobs(job_klass: worker)
-
- expect(retries.size).to eq(1)
- expect(retries.map(&:klass)).not_to include(worker.name)
- expect(retries.map(&:klass)).to include(unrelated_worker.name)
- end
- end
- end
-
- describe '#sidekiq_queue_length' do
- context 'when queue is empty' do
- it 'returns zero' do
- Sidekiq::Testing.disable! do
- expect(model.sidekiq_queue_length('test')).to eq 0
- end
- end
- end
-
- context 'when queue contains jobs' do
- it 'returns correct size of the queue' do
- Sidekiq::Testing.disable! do
- worker.perform_async('Something', [1])
- worker.perform_async('Something', [2])
-
- expect(model.sidekiq_queue_length('test')).to eq 2
- end
- end
- end
- end
-
- describe '#sidekiq_queue_migrate' do
- it 'migrates jobs from one sidekiq queue to another' do
- Sidekiq::Testing.disable! do
- worker.perform_async('Something', [1])
- worker.perform_async('Something', [2])
-
- expect(model.sidekiq_queue_length('test')).to eq 2
- expect(model.sidekiq_queue_length('new_test')).to eq 0
-
- model.sidekiq_queue_migrate('test', to: 'new_test')
-
- expect(model.sidekiq_queue_length('test')).to eq 0
- expect(model.sidekiq_queue_length('new_test')).to eq 2
- end
- end
- end
- end
-
describe '#check_trigger_permissions!' do
it 'does nothing when the user has the correct permissions' do
expect { model.check_trigger_permissions!('users') }
@@ -2790,18 +2614,18 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue = issue_class.create!(project_id: project.id)
+ issue = issue_class.create!(project_id: project.id, namespace_id: project.project_namespace_id)
expect(issue.iid).to eq(1)
end
it 'generates iids properly for models created after the migration when iids are backfilled' do
project = setup
- issue_a = issues.create!(project_id: project.id, work_item_type_id: issue_type.id)
+ issue_a = issues.create!(project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: issue_type.id)
model.backfill_iids('issues')
- issue_b = issue_class.create!(project_id: project.id)
+ issue_b = issue_class.create!(project_id: project.id, namespace_id: project.project_namespace_id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.iid).to eq(2)
@@ -2810,14 +2634,14 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'generates iids properly for models created after the migration across multiple projects' do
project_a = setup
project_b = setup
- issues.create!(project_id: project_a.id, work_item_type_id: issue_type.id)
- issues.create!(project_id: project_b.id, work_item_type_id: issue_type.id)
- issues.create!(project_id: project_b.id, work_item_type_id: issue_type.id)
+ issues.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id)
+ issues.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id, work_item_type_id: issue_type.id)
+ issues.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id, work_item_type_id: issue_type.id)
model.backfill_iids('issues')
- issue_a = issue_class.create!(project_id: project_a.id, work_item_type_id: issue_type.id)
- issue_b = issue_class.create!(project_id: project_b.id, work_item_type_id: issue_type.id)
+ issue_a = issue_class.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id)
+ issue_b = issue_class.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id, work_item_type_id: issue_type.id)
expect(issue_a.iid).to eq(2)
expect(issue_b.iid).to eq(3)
@@ -2827,11 +2651,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'generates an iid' do
project_a = setup
project_b = setup
- issue_a = issues.create!(project_id: project_a.id, work_item_type_id: issue_type.id)
+ issue_a = issues.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id)
model.backfill_iids('issues')
- issue_b = issue_class.create!(project_id: project_b.id)
+ issue_b = issue_class.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(1)
@@ -2841,8 +2665,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when a row already has an iid set in the database' do
it 'backfills iids' do
project = setup
- issue_a = issues.create!(project_id: project.id, work_item_type_id: issue_type.id, iid: 1)
- issue_b = issues.create!(project_id: project.id, work_item_type_id: issue_type.id, iid: 2)
+ issue_a = issues.create!(project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: issue_type.id, iid: 1)
+ issue_b = issues.create!(project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: issue_type.id, iid: 2)
model.backfill_iids('issues')
@@ -2853,9 +2677,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'backfills for multiple projects' do
project_a = setup
project_b = setup
- issue_a = issues.create!(project_id: project_a.id, work_item_type_id: issue_type.id, iid: 1)
- issue_b = issues.create!(project_id: project_b.id, work_item_type_id: issue_type.id, iid: 1)
- issue_c = issues.create!(project_id: project_a.id, work_item_type_id: issue_type.id, iid: 2)
+ issue_a = issues.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id, iid: 1)
+ issue_b = issues.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id, work_item_type_id: issue_type.id, iid: 1)
+ issue_c = issues.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id, iid: 2)
model.backfill_iids('issues')
diff --git a/spec/lib/gitlab/database/migrations/batched_migration_last_id_spec.rb b/spec/lib/gitlab/database/migrations/batched_migration_last_id_spec.rb
new file mode 100644
index 00000000000..97b432406eb
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/batched_migration_last_id_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::BatchedMigrationLastId, feature_category: :pipeline_insights do
+ subject(:test_sampling) { described_class.new(connection, base_dir) }
+
+ let(:base_dir) { Pathname.new(Dir.mktmpdir) }
+ let(:file_name) { 'last-batched-background-migration-id.txt' }
+ let(:file_path) { base_dir.join(file_name) }
+ let(:file_contents) { nil }
+
+ where(:base_model) do
+ [
+ [ApplicationRecord], [Ci::ApplicationRecord]
+ ]
+ end
+
+ with_them do
+ let(:connection) { base_model.connection }
+
+ after do
+ FileUtils.rm_rf(file_path)
+ end
+
+ describe '#read' do
+ before do
+ File.write(file_path, file_contents)
+ end
+
+ context 'when the file exists and has content' do
+ let(:file_contents) { 99 }
+
+ it { expect(test_sampling.read).to eq(file_contents) }
+ end
+
+ context 'when the file exists and is blank' do
+ it { expect(test_sampling.read).to be_nil }
+ end
+
+ context "when the file doesn't exists" do
+ before do
+ FileUtils.rm_rf(file_path)
+ end
+
+ it { expect(test_sampling.read).to be_nil }
+ end
+ end
+
+ describe '#store' do
+ let(:file_contents) { File.read(file_path) }
+ let(:migration) do
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ create(:batched_background_migration)
+ end
+ end
+
+ it 'creates the file properly' do
+ test_sampling.store
+
+ expect(File).to exist(file_path)
+ end
+
+ it 'stores the proper id in the file' do
+ migration
+ test_sampling.store
+
+ expect(file_contents).to eq(migration.id.to_s)
+ end
+ end
+ end
+end
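Note: a sketch of the read/store API the new spec covers; the directory argument and the exact type of the returned value are assumptions:

    sampler = Gitlab::Database::Migrations::BatchedMigrationLastId.new(
      ApplicationRecord.connection,
      Pathname.new('tmp/migration-testing') # assumed location
    )

    sampler.store # writes last-batched-background-migration-id.txt
    sampler.read  # => the stored id, or nil when the file is blank or missing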
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
index bd382547689..66eb5a5de51 100644
--- a/spec/lib/gitlab/database/migrations/runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -230,5 +230,13 @@ RSpec.describe Gitlab::Database::Migrations::Runner, :reestablished_active_recor
end
end
end
+
+ describe '.batched_migrations_last_id' do
+ let(:runner_class) { Gitlab::Database::Migrations::BatchedMigrationLastId }
+
+ it 'matches the expected runner class' do
+ expect(described_class.batched_migrations_last_id(database)).to be_a(runner_class)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/database/migrations/sidekiq_helpers_spec.rb b/spec/lib/gitlab/database/migrations/sidekiq_helpers_spec.rb
new file mode 100644
index 00000000000..fb1cb46171f
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/sidekiq_helpers_spec.rb
@@ -0,0 +1,276 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
+ let(:model) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ describe "sidekiq migration helpers", :redis do
+ let(:worker) do
+ Class.new do
+ include Sidekiq::Worker
+
+ sidekiq_options queue: "test"
+
+ def self.name
+ "WorkerClass"
+ end
+ end
+ end
+
+ let(:worker_two) do
+ Class.new do
+ include Sidekiq::Worker
+
+ sidekiq_options queue: "test_two"
+
+ def self.name
+ "WorkerTwoClass"
+ end
+ end
+ end
+
+ let(:same_queue_different_worker) do
+ Class.new do
+ include Sidekiq::Worker
+
+ sidekiq_options queue: "test"
+
+ def self.name
+ "SameQueueDifferentWorkerClass"
+ end
+ end
+ end
+
+ let(:unrelated_worker) do
+ Class.new do
+ include Sidekiq::Worker
+
+ sidekiq_options queue: "unrelated"
+
+ def self.name
+ "UnrelatedWorkerClass"
+ end
+ end
+ end
+
+ before do
+ stub_const(worker.name, worker)
+ stub_const(worker_two.name, worker_two)
+ stub_const(unrelated_worker.name, unrelated_worker)
+ stub_const(same_queue_different_worker.name, same_queue_different_worker)
+ end
+
+ describe "#sidekiq_remove_jobs", :clean_gitlab_redis_queues do
+ def clear_queues
+ Sidekiq::Queue.new("test").clear
+ Sidekiq::Queue.new("test_two").clear
+ Sidekiq::Queue.new("unrelated").clear
+ Sidekiq::RetrySet.new.clear
+ Sidekiq::ScheduledSet.new.clear
+ end
+
+ around do |example|
+ clear_queues
+ Sidekiq::Testing.disable!(&example)
+ clear_queues
+ end
+
+ context "when the constant is not defined" do
+ it "doesn't try to delete it" do
+ my_non_constant = +"SomeThingThatIsNotAConstant"
+
+ expect(Sidekiq::Queue).not_to receive(:new).with(any_args)
+ model.sidekiq_remove_jobs(job_klasses: [my_non_constant])
+ end
+ end
+
+ context "when the constant is defined" do
+ it "will use it find job instances to delete" do
+ my_constant = worker.name
+ expect(Sidekiq::Queue)
+ .to receive(:new)
+ .with(worker.queue)
+ .and_call_original
+ model.sidekiq_remove_jobs(job_klasses: [my_constant])
+ end
+ end
+
+ it "removes all related job instances from the job classes' queues" do
+ worker.perform_async
+ worker_two.perform_async
+ same_queue_different_worker.perform_async
+ unrelated_worker.perform_async
+
+ worker_queue = Sidekiq::Queue.new(worker.queue)
+ worker_two_queue = Sidekiq::Queue.new(worker_two.queue)
+ unrelated_queue = Sidekiq::Queue.new(unrelated_worker.queue)
+
+ expect(worker_queue.size).to eq(2)
+ expect(worker_two_queue.size).to eq(1)
+ expect(unrelated_queue.size).to eq(1)
+
+ model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
+
+ expect(worker_queue.size).to eq(1)
+ expect(worker_two_queue.size).to eq(0)
+ expect(worker_queue.map(&:klass)).not_to include(worker.name)
+ expect(worker_queue.map(&:klass)).to include(
+ same_queue_different_worker.name
+ )
+ expect(worker_two_queue.map(&:klass)).not_to include(worker_two.name)
+ expect(unrelated_queue.size).to eq(1)
+ end
+
+ context "when job instances are in the scheduled set" do
+ it "removes all related job instances from the scheduled set" do
+ worker.perform_in(1.hour)
+ worker_two.perform_in(1.hour)
+ unrelated_worker.perform_in(1.hour)
+
+ scheduled = Sidekiq::ScheduledSet.new
+
+ expect(scheduled.size).to eq(3)
+ expect(scheduled.map(&:klass)).to include(
+ worker.name,
+ worker_two.name,
+ unrelated_worker.name
+ )
+
+ model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
+
+ expect(scheduled.size).to eq(1)
+ expect(scheduled.map(&:klass)).not_to include(worker.name)
+ expect(scheduled.map(&:klass)).not_to include(worker_two.name)
+ expect(scheduled.map(&:klass)).to include(unrelated_worker.name)
+ end
+ end
+
+ context "when job instances are in the retry set" do
+ include_context "when handling retried jobs"
+
+ it "removes all related job instances from the retry set" do
+ retry_in(worker, 1.hour)
+ retry_in(worker, 2.hours)
+ retry_in(worker, 3.hours)
+ retry_in(worker_two, 4.hours)
+ retry_in(unrelated_worker, 5.hours)
+
+ retries = Sidekiq::RetrySet.new
+
+ expect(retries.size).to eq(5)
+ expect(retries.map(&:klass)).to include(
+ worker.name,
+ worker_two.name,
+ unrelated_worker.name
+ )
+
+ model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
+
+ expect(retries.size).to eq(1)
+ expect(retries.map(&:klass)).not_to include(worker.name)
+ expect(retries.map(&:klass)).not_to include(worker_two.name)
+ expect(retries.map(&:klass)).to include(unrelated_worker.name)
+ end
+ end
+
+ # Imitate job deletion returning zero and then non zero.
+ context "when job fails to be deleted" do
+ let(:job_double) do
+ instance_double(
+ "Sidekiq::JobRecord",
+ klass: worker.name
+ )
+ end
+
+ context "and does not work enough times in a row before max attempts" do
+ it "tries the max attempts without succeeding" do
+ worker.perform_async
+
+ allow(job_double).to receive(:delete).and_return(true)
+
+ # Scheduled set runs last so only need to stub out its values.
+ allow(Sidekiq::ScheduledSet)
+ .to receive(:new)
+ .and_return([job_double])
+
+ expect(model.sidekiq_remove_jobs(job_klasses: [worker.name]))
+ .to eq(
+ {
+ attempts: 5,
+ success: false
+ }
+ )
+ end
+ end
+
+ context "and then it works enough times in a row before max attempts" do
+ it "succeeds" do
+ worker.perform_async
+
+ # attempt 1: false will increment the streak once to 1
+ # attempt 2: true resets it back to 0
+ # attempt 3: false will increment the streak once to 1
+ # attempt 4: false will increment the streak once to 2, loop breaks
+ allow(job_double).to receive(:delete).and_return(false, true, false)
+
+ worker.perform_async
+
+ # Scheduled set runs last so only need to stub out its values.
+ allow(Sidekiq::ScheduledSet)
+ .to receive(:new)
+ .and_return([job_double])
+
+ expect(model.sidekiq_remove_jobs(job_klasses: [worker.name]))
+ .to eq(
+ {
+ attempts: 4,
+ success: true
+ }
+ )
+ end
+ end
+ end
+ end
+
+ describe "#sidekiq_queue_length" do
+ context "when queue is empty" do
+ it "returns zero" do
+ Sidekiq::Testing.disable! do
+ expect(model.sidekiq_queue_length("test")).to eq 0
+ end
+ end
+ end
+
+ context "when queue contains jobs" do
+ it "returns correct size of the queue" do
+ Sidekiq::Testing.disable! do
+ worker.perform_async("Something", [1])
+ worker.perform_async("Something", [2])
+
+ expect(model.sidekiq_queue_length("test")).to eq 2
+ end
+ end
+ end
+ end
+
+ describe "#sidekiq_queue_migrate" do
+ it "migrates jobs from one sidekiq queue to another" do
+ Sidekiq::Testing.disable! do
+ worker.perform_async("Something", [1])
+ worker.perform_async("Something", [2])
+
+ expect(model.sidekiq_queue_length("test")).to eq 2
+ expect(model.sidekiq_queue_length("new_test")).to eq 0
+
+ model.sidekiq_queue_migrate("test", to: "new_test")
+
+ expect(model.sidekiq_queue_length("test")).to eq 0
+ expect(model.sidekiq_queue_length("new_test")).to eq 2
+ end
+ end
+ end
+ end
+end
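Note: these helpers now take explicit job class names. A sketch of how a migration might call them; the worker names and the assumption that a Gitlab::Database::Migration class exposes the helpers are illustrative, not taken from this commit:

    class RemoveObsoleteWorkerJobs < Gitlab::Database::Migration[2.1]
      DEPRECATED_JOB_CLASSES = %w[SomeDeprecatedWorker AnotherDeprecatedWorker] # hypothetical names

      def up
        # Returns { attempts: <n>, success: true/false }, as asserted above.
        sidekiq_remove_jobs(job_klasses: DEPRECATED_JOB_CLASSES)
      end

      def down
        # Removed jobs cannot be restored.
      end
    end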
diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
index 07226f3d025..73d69d55e5a 100644
--- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
@@ -55,6 +55,8 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
let(:table_name) { "_test_column_copying" }
+ let(:from_id) { 0 }
+
before do
connection.execute(<<~SQL)
CREATE TABLE #{table_name} (
@@ -76,7 +78,8 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
end
subject(:sample_migration) do
- described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 1.minute)
+ described_class.new(result_dir: result_dir, connection: connection,
+ from_id: from_id).run_jobs(for_duration: 1.minute)
end
it 'runs sampled jobs from the batched background migration' do
@@ -111,20 +114,26 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
job_interval: 5.minutes,
batch_size: 100)
- described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 3.minutes)
+ described_class.new(result_dir: result_dir, connection: connection,
+ from_id: from_id).run_jobs(for_duration: 3.minutes)
expect(calls).not_to be_empty
end
context 'with multiple jobs to run' do
- it 'runs all jobs created within the last 3 hours' do
+ let(:last_id) do
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ Gitlab::Database::BackgroundMigration::BatchedMigration.maximum(:id)
+ end
+ end
+
+ it 'runs all pending jobs based on the last migration id' do
old_migration = define_background_migration(migration_name)
queue_migration(migration_name, table_name, :id,
job_interval: 5.minutes,
batch_size: 100)
- travel 4.hours
-
+ last_id
new_migration = define_background_migration('NewMigration') { travel 1.second }
queue_migration('NewMigration', table_name, :id,
job_interval: 5.minutes,
@@ -138,14 +147,15 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
sub_batch_size: 5)
expect_migration_runs(new_migration => 3, other_new_migration => 2, old_migration => 0) do
- described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 5.seconds)
+ described_class.new(result_dir: result_dir, connection: connection,
+ from_id: last_id).run_jobs(for_duration: 5.seconds)
end
end
end
end
context 'choosing uniform batches to run' do
- subject { described_class.new(result_dir: result_dir, connection: connection) }
+ subject { described_class.new(result_dir: result_dir, connection: connection, from_id: from_id) }
describe '#uniform_fractions' do
it 'generates evenly distributed sequences of fractions' do
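Note: the runner now selects pending batched migrations by a recorded id (from_id) rather than a time window. A sketch of the constructor call exercised throughout the spec; the result directory and the source of the id are assumptions:

    runner = Gitlab::Database::Migrations::TestBatchedBackgroundRunner.new(
      result_dir: Pathname.new('tmp/migration-testing'), # assumed
      connection: ApplicationRecord.connection,          # assumed
      from_id: last_recorded_id                          # e.g. what BatchedMigrationLastId#read returns (assumed)
    )

    runner.run_jobs(for_duration: 30.minutes)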
diff --git a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
index 336dec3a8a0..646ae50fb44 100644
--- a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
SQL
end
- def create_partition(name:, table: 'parent_table', from:, to:, attached:, drop_after:)
+ def create_partition(name:, from:, to:, attached:, drop_after:, table: 'parent_table')
from = from.beginning_of_month
to = to.beginning_of_month
full_name = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{name}"
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index 550f254c4da..e6014f81b74 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -229,11 +229,9 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
next_partition_if: method(:next_partition_if_wrapper),
detach_partition_if: method(:detach_partition_if_wrapper)
- def self.next_partition?(current_partition)
- end
+ def self.next_partition?(current_partition); end
- def self.detach_partition?(partition)
- end
+ def self.detach_partition?(partition); end
end
end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
index 8b06f068503..884c4f625dd 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
@@ -9,8 +9,7 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::DumpSchemaVersionsMixin do
original_dump_schema_information
end
- def original_dump_schema_information
- end
+ def original_dump_schema_information; end
end
klass.prepend(described_class)
diff --git a/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb
index 3e675a85999..3bb206c6627 100644
--- a/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb
@@ -9,8 +9,7 @@ RSpec.describe Gitlab::Database::PostgresqlDatabaseTasks::LoadSchemaVersionsMixi
original_structure_load
end
- def original_structure_load
- end
+ def original_structure_load; end
end
klass.prepend(described_class)
diff --git a/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb b/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
index ec01ae623ae..bcc39c0c3db 100644
--- a/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
@@ -10,29 +10,76 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::QueryRecorder, query_analyzers:
end
end
- context 'when analyzer is enabled for tests' do
+ context 'with query analyzer' do
let(:query) { 'SELECT 1 FROM projects' }
- let(:log_path) { Rails.root.join(described_class::LOG_FILE) }
+ let(:log_path) { Rails.root.join(described_class::LOG_PATH) }
+ let(:log_file) { described_class.log_file }
- before do
- stub_env('CI', 'true')
+ after do
+ ::Gitlab::Database::QueryAnalyzer.instance.end!([described_class])
+ end
- # This is needed to be able to stub_env the CI variable
- ::Gitlab::Database::QueryAnalyzer.instance.begin!([described_class])
+ shared_examples_for 'an enabled query recorder' do
+ it 'logs queries to a file' do
+ allow(FileUtils).to receive(:mkdir_p)
+ .with(log_path)
+ expect(File).to receive(:write)
+ .with(log_file, /^{"sql":"#{query}/, mode: 'a')
+ expect(described_class).to receive(:analyze).with(/^#{query}/).and_call_original
+
+ expect { ApplicationRecord.connection.execute(query) }.not_to raise_error
+ end
end
- after do
- ::Gitlab::Database::QueryAnalyzer.instance.end!([described_class])
+ context 'on default branch' do
+ before do
+ stub_env('CI_MERGE_REQUEST_LABELS', nil)
+ stub_env('CI_DEFAULT_BRANCH', 'default_branch_name')
+ stub_env('CI_COMMIT_REF_NAME', 'default_branch_name')
+
+ # This is needed to be able to stub_env the CI variable
+ ::Gitlab::Database::QueryAnalyzer.instance.begin!([described_class])
+ end
+
+ it_behaves_like 'an enabled query recorder'
+ end
+
+ context 'on database merge requests' do
+ before do
+ stub_env('CI_MERGE_REQUEST_LABELS', 'database')
+
+ # This is needed to be able to stub_env the CI variable
+ ::Gitlab::Database::QueryAnalyzer.instance.begin!([described_class])
+ end
+
+ it_behaves_like 'an enabled query recorder'
+ end
+ end
+
+ describe '.log_file' do
+ let(:folder) { 'query_recorder' }
+ let(:extension) { 'ndjson' }
+ let(:default_name) { 'rspec' }
+ let(:job_name) { 'test-job-1' }
+
+ subject { described_class.log_file.to_s }
+
+ context 'when in CI' do
+ before do
+ stub_env('CI_JOB_NAME_SLUG', job_name)
+ end
+
+ it { is_expected.to include("#{folder}/#{job_name}.#{extension}") }
+ it { is_expected.not_to include("#{folder}/#{default_name}.#{extension}") }
end
- it 'logs queries to a file' do
- allow(FileUtils).to receive(:mkdir_p)
- .with(File.dirname(log_path))
- expect(File).to receive(:write)
- .with(log_path, /^{"sql":"#{query}/, mode: 'a')
- expect(described_class).to receive(:analyze).with(/^#{query}/).and_call_original
+ context 'when not in CI' do
+ before do
+ stub_env('CI_JOB_NAME_SLUG', nil)
+ end
- expect { ApplicationRecord.connection.execute(query) }.not_to raise_error
+ it { is_expected.to include("#{folder}/#{default_name}.#{extension}") }
+ it { is_expected.not_to include("#{folder}/#{job_name}.#{extension}") }
end
end
end
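Note: the new .log_file specs pin the file name to the CI job slug, falling back to rspec, under a query_recorder directory with an ndjson extension. A sketch of that naming rule; the actual implementation and base path are assumptions:

    name = ENV['CI_JOB_NAME_SLUG'].presence || 'rspec'
    log_file = Rails.root.join(
      Gitlab::Database::QueryAnalyzers::QueryRecorder::LOG_PATH, # assumed to end in query_recorder/
      "#{name}.ndjson"
    )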
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index 4c98185e780..fa26aa59329 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Reindexing do
+RSpec.describe Gitlab::Database::Reindexing, feature_category: :database do
include ExclusiveLeaseHelpers
include Database::DatabaseHelpers
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index ac2de43b7c6..c507bce634e 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -97,39 +97,48 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
let(:namespace) { create(:group, name: 'hello-group') }
it 'moves a project for a namespace' do
- create(:project, :repository, :legacy_storage, namespace: namespace, path: 'hello-project')
- expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(TestEnv.repos_path, 'bye-group', 'hello-project.git')
- end
+ project = create(:project, :repository, :legacy_storage, namespace: namespace, path: 'hello-project')
+ expected_repository = Gitlab::Git::Repository.new(
+ project.repository_storage,
+ 'bye-group/hello-project.git',
+ nil,
+ nil
+ )
subject.move_repositories(namespace, 'hello-group', 'bye-group')
- expect(File.directory?(expected_path)).to be(true)
+ expect(expected_repository).to exist
end
it 'moves a namespace in a subdirectory correctly' do
child_namespace = create(:group, name: 'sub-group', parent: namespace)
- create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
+ project = create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
- expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(TestEnv.repos_path, 'hello-group', 'renamed-sub-group', 'hello-project.git')
- end
+ expected_repository = Gitlab::Git::Repository.new(
+ project.repository_storage,
+ 'hello-group/renamed-sub-group/hello-project.git',
+ nil,
+ nil
+ )
subject.move_repositories(child_namespace, 'hello-group/sub-group', 'hello-group/renamed-sub-group')
- expect(File.directory?(expected_path)).to be(true)
+ expect(expected_repository).to exist
end
it 'moves a parent namespace with subdirectories' do
child_namespace = create(:group, name: 'sub-group', parent: namespace)
- create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
- expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(TestEnv.repos_path, 'renamed-group', 'sub-group', 'hello-project.git')
- end
+ project = create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
+ expected_repository = Gitlab::Git::Repository.new(
+ project.repository_storage,
+ 'renamed-group/sub-group/hello-project.git',
+ nil,
+ nil
+ )
subject.move_repositories(child_namespace, 'hello-group', 'renamed-group')
- expect(File.directory?(expected_path)).to be(true)
+ expect(expected_repository).to exist
end
end
@@ -175,14 +184,17 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
describe '#rename_namespace_dependencies' do
it "moves the repository for a project in the namespace" do
- create(:project, :repository, :legacy_storage, namespace: namespace, path: "the-path-project")
- expected_repo = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(TestEnv.repos_path, "the-path0", "the-path-project.git")
- end
+ project = create(:project, :repository, :legacy_storage, namespace: namespace, path: "the-path-project")
+ expected_repository = Gitlab::Git::Repository.new(
+ project.repository_storage,
+ "the-path0/the-path-project.git",
+ nil,
+ nil
+ )
subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
- expect(File.directory?(expected_repo)).to be(true)
+ expect(expected_repository).to exist
end
it "moves the uploads for the namespace" do
@@ -276,9 +288,7 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
project.create_repository
subject.rename_namespace(namespace)
- expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(TestEnv.repos_path, 'the-path', 'a-project.git')
- end
+ expected_repository = Gitlab::Git::Repository.new(project.repository_storage, 'the-path/a-project.git', nil, nil)
expect(subject).to receive(:rename_namespace_dependencies)
.with(
@@ -289,7 +299,7 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
subject.revert_renames
- expect(File.directory?(expected_path)).to be_truthy
+ expect(expected_repository).to exist
end
it "doesn't break when the namespace was renamed" do
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 6292f0246f7..aa2a3329477 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -126,13 +126,16 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProject
let(:project) { create(:project, :repository, :legacy_storage, path: 'the-path', namespace: known_parent) }
it 'moves the repository for a project' do
- expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(TestEnv.repos_path, 'known-parent', 'new-repo.git')
- end
+ expected_repository = Gitlab::Git::Repository.new(
+ project.repository_storage,
+ 'known-parent/new-repo.git',
+ nil,
+ nil
+ )
subject.move_repository(project, 'known-parent/the-path', 'known-parent/new-repo')
- expect(File.directory?(expected_path)).to be(true)
+ expect(expected_repository).to exist
end
end
@@ -157,9 +160,12 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProject
project.create_repository
subject.rename_project(project)
- expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(TestEnv.repos_path, 'known-parent', 'the-path.git')
- end
+ expected_repository = Gitlab::Git::Repository.new(
+ project.repository_storage,
+ 'known-parent/the-path.git',
+ nil,
+ nil
+ )
expect(subject).to receive(:move_project_folders)
.with(
@@ -170,7 +176,7 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProject
subject.revert_renames
- expect(File.directory?(expected_path)).to be_truthy
+ expect(expected_repository).to exist
end
it "doesn't break when the project was renamed" do
diff --git a/spec/lib/gitlab/database/schema_cleaner_spec.rb b/spec/lib/gitlab/database/schema_cleaner_spec.rb
index 950759c7f96..5283b34ca86 100644
--- a/spec/lib/gitlab/database/schema_cleaner_spec.rb
+++ b/spec/lib/gitlab/database/schema_cleaner_spec.rb
@@ -19,6 +19,15 @@ RSpec.describe Gitlab::Database::SchemaCleaner do
expect(subject).not_to match(/public\.\w+/)
end
+ it 'cleans up all the gitlab_schema_prevent_write table triggers' do
+ expect(subject).not_to match(/CREATE TRIGGER gitlab_schema_write_trigger_for_\w+/)
+ expect(subject).not_to match(/FOR EACH STATEMENT EXECUTE FUNCTION gitlab_schema_prevent_write/)
+ end
+
+ it 'keeps the lock_writes trigger functions' do
+ expect(subject).to match(/CREATE FUNCTION gitlab_schema_prevent_write/)
+ end
+
it 'cleans up the full schema as expected (blackbox test with example)' do
expected_schema = fixture_file(File.join('gitlab', 'database', 'structure_example_cleaned.sql'))
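
The two new examples assert that the per-table lock-writes triggers are stripped from the dumped structure.sql while the shared gitlab_schema_prevent_write() function definition is kept. A hedged sketch of that filtering, assuming each trigger statement ends with its EXECUTE FUNCTION clause; the method name is illustrative, not the cleaner's real API.

    # Illustrative only: drops CREATE TRIGGER gitlab_schema_write_trigger_for_* statements
    # but leaves the CREATE FUNCTION gitlab_schema_prevent_write definition untouched.
    def strip_lock_writes_triggers(schema_sql)
      schema_sql.gsub(
        /^CREATE TRIGGER gitlab_schema_write_trigger_for_\w+.*?EXECUTE FUNCTION gitlab_schema_prevent_write\(\);\n/m,
        ''
      )
    end
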
diff --git a/spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb b/spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb
index 97abd6d23bd..aa25590ed58 100644
--- a/spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/tables_sorted_by_foreign_keys_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::TablesSortedByForeignKeys do
let(:connection) { ApplicationRecord.connection }
- let(:tables) { %w[_test_gitlab_main_items _test_gitlab_main_references] }
+ let(:tables) do
+ %w[_test_gitlab_main_items _test_gitlab_main_references _test_gitlab_partition_parent
+ gitlab_partitions_dynamic._test_gitlab_partition_20220101]
+ end
subject do
described_class.new(connection, tables).execute
@@ -19,13 +22,33 @@ RSpec.describe Gitlab::Database::TablesSortedByForeignKeys do
item_id BIGINT NOT NULL,
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
);
+
+ CREATE TABLE _test_gitlab_partition_parent (
+ id bigserial not null,
+ created_at timestamptz not null,
+ item_id BIGINT NOT NULL,
+ primary key (id, created_at),
+ CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
+ ) PARTITION BY RANGE(created_at);
+
+ CREATE TABLE gitlab_partitions_dynamic._test_gitlab_partition_20220101
+ PARTITION OF _test_gitlab_partition_parent
+ FOR VALUES FROM ('20220101') TO ('20220131');
+
+ ALTER TABLE _test_gitlab_partition_parent DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_partition_20220101;
SQL
connection.execute(statement)
end
describe '#execute' do
it 'returns the tables sorted by the foreign keys dependency' do
- expect(subject).to eq([['_test_gitlab_main_references'], ['_test_gitlab_main_items']])
+ expect(subject).to eq(
+ [
+ ['_test_gitlab_main_references'],
+ ['_test_gitlab_partition_parent'],
+ ['gitlab_partitions_dynamic._test_gitlab_partition_20220101'],
+ ['_test_gitlab_main_items']
+ ])
end
it 'returns both tables together if they are strongly connected' do
@@ -35,7 +58,12 @@ RSpec.describe Gitlab::Database::TablesSortedByForeignKeys do
SQL
connection.execute(statement)
- expect(subject).to eq([tables])
+ expect(subject).to eq(
+ [
+ ['_test_gitlab_partition_parent'],
+ ['gitlab_partitions_dynamic._test_gitlab_partition_20220101'],
+ %w[_test_gitlab_main_items _test_gitlab_main_references]
+ ])
end
end
end
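
Both expectations above describe the same contract: tables come back grouped by strongly connected component of the foreign-key graph, with referencing tables ahead of the tables they point at, and a detached partition still ordered alongside its former parent. Ruby's stdlib TSort exposes exactly that grouping; the class and graph below are a self-contained illustration, not the production implementation.

    require 'tsort'

    # Illustrative sorter: fk_graph maps a table to the tables that hold a foreign
    # key pointing at it, so dependents are emitted before their targets.
    class FkTopologicalSorter
      include TSort

      def initialize(fk_graph)
        @fk_graph = fk_graph
      end

      # Strongly connected components come back in dependency order, which is why
      # mutually referencing tables are returned together in the second example.
      def sorted_groups
        strongly_connected_components
      end

      def tsort_each_node(&block)
        @fk_graph.each_key(&block)
      end

      def tsort_each_child(table, &block)
        @fk_graph.fetch(table, []).each(&block)
      end
    end

    graph = {
      '_test_gitlab_main_items' => %w[_test_gitlab_main_references _test_gitlab_partition_parent],
      '_test_gitlab_main_references' => [],
      '_test_gitlab_partition_parent' => []
    }

    FkTopologicalSorter.new(graph).sorted_groups
    # => [["_test_gitlab_main_references"], ["_test_gitlab_partition_parent"], ["_test_gitlab_main_items"]]
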
diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb
index 4f68cd93a8e..4d04bd67a1e 100644
--- a/spec/lib/gitlab/database/tables_truncate_spec.rb
+++ b/spec/lib/gitlab/database/tables_truncate_spec.rb
@@ -6,14 +6,9 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
:suppress_gitlab_schemas_validate_connection do
include MigrationsHelpers
- let(:logger) { instance_double(Logger) }
- let(:dry_run) { false }
- let(:until_table) { nil }
let(:min_batch_size) { 1 }
let(:main_connection) { ApplicationRecord.connection }
let(:ci_connection) { Ci::ApplicationRecord.connection }
- let(:test_gitlab_main_table) { '_test_gitlab_main_table' }
- let(:test_gitlab_ci_table) { '_test_gitlab_ci_table' }
# Main Database
let(:main_db_main_item_model) { table("_test_gitlab_main_items", database: "main") }
@@ -21,24 +16,37 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
let(:main_db_ci_item_model) { table("_test_gitlab_ci_items", database: "main") }
let(:main_db_ci_reference_model) { table("_test_gitlab_ci_references", database: "main") }
let(:main_db_shared_item_model) { table("_test_gitlab_shared_items", database: "main") }
+ let(:main_db_partitioned_item) { table("_test_gitlab_hook_logs", database: "main") }
+ let(:main_db_partitioned_item_detached) do
+ table("gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101", database: "main")
+ end
+
# CI Database
let(:ci_db_main_item_model) { table("_test_gitlab_main_items", database: "ci") }
let(:ci_db_main_reference_model) { table("_test_gitlab_main_references", database: "ci") }
let(:ci_db_ci_item_model) { table("_test_gitlab_ci_items", database: "ci") }
let(:ci_db_ci_reference_model) { table("_test_gitlab_ci_references", database: "ci") }
let(:ci_db_shared_item_model) { table("_test_gitlab_shared_items", database: "ci") }
-
- subject(:truncate_legacy_tables) do
- described_class.new(
- database_name: database_name,
- min_batch_size: min_batch_size,
- logger: logger,
- dry_run: dry_run,
- until_table: until_table
- ).execute
+ let(:ci_db_partitioned_item) { table("_test_gitlab_hook_logs", database: "ci") }
+ let(:ci_db_partitioned_item_detached) do
+ table("gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101", database: "ci")
end
shared_examples 'truncating legacy tables on a database' do
+ let(:logger) { instance_double(Logger) }
+ let(:dry_run) { false }
+ let(:until_table) { nil }
+
+ subject(:truncate_legacy_tables) do
+ described_class.new(
+ database_name: connection.pool.db_config.name,
+ min_batch_size: min_batch_size,
+ logger: logger,
+ dry_run: dry_run,
+ until_table: until_table
+ ).execute
+ end
+
before do
skip_if_multiple_databases_not_setup
@@ -51,6 +59,24 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
item_id BIGINT NOT NULL,
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
);
+
+ CREATE TABLE _test_gitlab_hook_logs (
+ id bigserial not null,
+ created_at timestamptz not null,
+ item_id BIGINT NOT NULL,
+ primary key (id, created_at),
+ CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
+ ) PARTITION BY RANGE(created_at);
+
+ CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101
+ PARTITION OF _test_gitlab_hook_logs
+ FOR VALUES FROM ('20220101') TO ('20220131');
+
+ CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_20220201
+ PARTITION OF _test_gitlab_hook_logs
+ FOR VALUES FROM ('20220201') TO ('20220228');
+
+ ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101;
SQL
main_connection.execute(main_tables_sql)
@@ -84,18 +110,49 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
main_db_ci_item_model.create!(id: i)
main_db_ci_reference_model.create!(item_id: i)
main_db_shared_item_model.create!(id: i)
+ main_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
+ main_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
# CI Database
ci_db_main_item_model.create!(id: i)
ci_db_main_reference_model.create!(item_id: i)
ci_db_ci_item_model.create!(id: i)
ci_db_ci_reference_model.create!(item_id: i)
ci_db_shared_item_model.create!(id: i)
+ ci_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
+ ci_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
+ end
+
+ Gitlab::Database::SharedModel.using_connection(main_connection) do
+ Postgresql::DetachedPartition.create!(
+ table_name: '_test_gitlab_hook_logs_20220101',
+ drop_after: Time.current
+ )
+ end
+
+ Gitlab::Database::SharedModel.using_connection(ci_connection) do
+ Postgresql::DetachedPartition.create!(
+ table_name: '_test_gitlab_hook_logs_20220101',
+ drop_after: Time.current
+ )
end
allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return(
{
"_test_gitlab_main_items" => :gitlab_main,
"_test_gitlab_main_references" => :gitlab_main,
+ "_test_gitlab_hook_logs" => :gitlab_main,
+ "_test_gitlab_ci_items" => :gitlab_ci,
+ "_test_gitlab_ci_references" => :gitlab_ci,
+ "_test_gitlab_shared_items" => :gitlab_shared,
+ "_test_gitlab_geo_items" => :gitlab_geo
+ }
+ )
+
+ allow(Gitlab::Database::GitlabSchema).to receive(:views_and_tables_to_schema).and_return(
+ {
+ "_test_gitlab_main_items" => :gitlab_main,
+ "_test_gitlab_main_references" => :gitlab_main,
+ "_test_gitlab_hook_logs" => :gitlab_main,
"_test_gitlab_ci_items" => :gitlab_ci,
"_test_gitlab_ci_references" => :gitlab_ci,
"_test_gitlab_shared_items" => :gitlab_shared,
@@ -119,7 +176,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
Gitlab::Database::LockWritesManager.new(
table_name: table,
connection: connection,
- database_name: database_name
+ database_name: connection.pool.db_config.name
).lock_writes
end
end
@@ -199,7 +256,6 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
context 'when truncating gitlab_ci tables on the main database' do
let(:connection) { ApplicationRecord.connection }
- let(:database_name) { "main" }
let(:legacy_tables_models) { [main_db_ci_item_model, main_db_ci_reference_model] }
let(:referencing_table_model) { main_db_ci_reference_model }
let(:referenced_table_model) { main_db_ci_item_model }
@@ -217,8 +273,10 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
context 'when truncating gitlab_main tables on the ci database' do
let(:connection) { Ci::ApplicationRecord.connection }
- let(:database_name) { "ci" }
- let(:legacy_tables_models) { [ci_db_main_item_model, ci_db_main_reference_model] }
+ let(:legacy_tables_models) do
+ [ci_db_main_item_model, ci_db_main_reference_model, ci_db_partitioned_item, ci_db_partitioned_item_detached]
+ end
+
let(:referencing_table_model) { ci_db_main_reference_model }
let(:referenced_table_model) { ci_db_main_item_model }
let(:other_tables_models) do
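
The partitioned fixtures above exist to prove that truncation also reaches a detached partition, not just the partitions still attached to the parent. Detaching turns the partition into an ordinary table that keeps its rows, so it must be addressed by its schema-qualified name. The sketch below reuses the fixture table names but the calls are illustrative, not the TablesTruncate internals.

    conn = ApplicationRecord.connection

    # Truncating the parent only empties partitions that are still attached ...
    conn.execute('TRUNCATE TABLE _test_gitlab_hook_logs')

    # ... so the detached January partition has to be truncated under its
    # schema-qualified name, which is why it appears separately in
    # legacy_tables_models and in Postgresql::DetachedPartition above.
    conn.execute('TRUNCATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101')
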
diff --git a/spec/lib/gitlab/database/transaction/context_spec.rb b/spec/lib/gitlab/database/transaction/context_spec.rb
index 33a47150060..1681098e20c 100644
--- a/spec/lib/gitlab/database/transaction/context_spec.rb
+++ b/spec/lib/gitlab/database/transaction/context_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Transaction::Context do
+RSpec.describe Gitlab::Database::Transaction::Context, feature_category: :database do
subject { described_class.new }
let(:data) { subject.context }
diff --git a/spec/lib/gitlab/database/type/indifferent_jsonb_spec.rb b/spec/lib/gitlab/database/type/indifferent_jsonb_spec.rb
new file mode 100644
index 00000000000..6d27cbe180d
--- /dev/null
+++ b/spec/lib/gitlab/database/type/indifferent_jsonb_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Type::IndifferentJsonb do
+ let(:type) { described_class.new }
+
+ describe '#deserialize' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { type.deserialize(json) }
+
+ where(:json, :value) do
+ nil | nil
+ '{"key":"value"}' | { key: 'value' }
+ '{"key":[1,2,3]}' | { key: [1, 2, 3] }
+ '{"key":{"subkey":"value"}}' | { key: { subkey: 'value' } }
+ '{"key":{"a":[{"b":"c"},{"d":"e"}]}}' | { key: { a: [{ b: 'c' }, { d: 'e' }] } }
+ end
+
+ with_them do
+ it { is_expected.to match(value) }
+ it { is_expected.to match(value&.deep_stringify_keys) }
+ end
+ end
+
+ context 'when used by a model' do
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = :_test_indifferent_jsonb
+
+ attribute :options, :ind_jsonb
+ end
+ end
+
+ let(:record) do
+ model.create!(name: 'test', options: { key: 'value' })
+ end
+
+ before do
+ model.connection.execute(<<~SQL)
+ CREATE TABLE _test_indifferent_jsonb(
+ id serial NOT NULL PRIMARY KEY,
+ name text,
+ options jsonb);
+ SQL
+
+ model.reset_column_information
+ end
+
+ it { expect(record.options).to match({ key: 'value' }) }
+ it { expect(record.options).to match({ 'key' => 'value' }) }
+
+ it 'ignores changes to other attributes' do
+ record.name = 'other test'
+
+ expect(record.changes).to match('name' => ['test', 'other test'])
+ end
+
+ it 'tracks changes to options' do
+ record.options = { key: 'other value' }
+
+ expect(record.changes).to match('options' => [{ 'key' => 'value' }, { 'key' => 'other value' }])
+ end
+ end
+end
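
For context on the behaviour exercised by this new spec: one way to get jsonb attributes whose hashes answer to both string and symbol keys is to wrap the deserialized value in HashWithIndifferentAccess. The sketch below is an assumption about the shape of such a type, not the actual Gitlab::Database::Type::IndifferentJsonb source; the registration line mirrors the `attribute :options, :ind_jsonb` usage in the spec.

    # Hypothetical implementation sketch of an indifferent-access jsonb attribute type.
    class IndifferentJsonbSketch < ActiveRecord::Type::Json
      def deserialize(value)
        data = super
        return if data.nil?

        # Nested hashes (including hashes inside arrays) become indifferent too.
        data.is_a?(Hash) ? ActiveSupport::HashWithIndifferentAccess.new(data) : data
      end
    end

    # Assumed registration, matching `attribute :options, :ind_jsonb` above:
    ActiveRecord::Type.register(:ind_jsonb, IndifferentJsonbSketch)
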