From 43a25d93ebdabea52f99b05e15b06250cd8f07d7 Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Wed, 17 May 2023 16:05:49 +0000 Subject: Add latest changes from gitlab-org/gitlab@16-0-stable-ee --- .../async_constraints/migration_helpers_spec.rb | 288 ++++++++++++++++ .../postgres_async_constraint_validation_spec.rb | 109 ++++++ .../validators/check_constraint_spec.rb | 20 ++ .../validators/foreign_key_spec.rb | 35 ++ .../database/async_constraints/validators_spec.rb | 21 ++ spec/lib/gitlab/database/async_constraints_spec.rb | 29 ++ .../foreign_key_validator_spec.rb | 152 --------- .../async_foreign_keys/migration_helpers_spec.rb | 167 ---------- .../postgres_async_foreign_key_validation_spec.rb | 52 --- .../lib/gitlab/database/async_foreign_keys_spec.rb | 23 -- .../async_indexes/migration_helpers_spec.rb | 86 +++++ .../background_migration/batch_optimizer_spec.rb | 9 + .../background_migration/batched_job_spec.rb | 151 +++++++++ .../background_migration/batched_migration_spec.rb | 13 +- .../batched_migration_wrapper_spec.rb | 15 +- .../health_status/indicators/patroni_apdex_spec.rb | 148 +++++++++ .../background_migration/health_status_spec.rb | 26 +- .../database/background_migration_job_spec.rb | 20 -- .../gitlab/database/consistency_checker_spec.rb | 2 +- .../gitlab/database/dynamic_model_helpers_spec.rb | 44 +++ spec/lib/gitlab/database/gitlab_schema_spec.rb | 80 ++--- .../load_balancing/action_cable_callbacks_spec.rb | 11 +- .../gitlab/database/load_balancing/logger_spec.rb | 13 + .../load_balancing/service_discovery_spec.rb | 17 +- .../sidekiq_client_middleware_spec.rb | 78 ++--- .../sidekiq_server_middleware_spec.rb | 65 ++-- spec/lib/gitlab/database/load_balancing_spec.rb | 7 +- .../gitlab/database/lock_writes_manager_spec.rb | 24 +- .../lib/gitlab/database/loose_foreign_keys_spec.rb | 49 +-- .../automatic_lock_writes_on_tables_spec.rb | 25 +- .../migration_helpers/convert_to_bigint_spec.rb | 35 ++ .../loose_foreign_key_helpers_spec.rb | 16 +- .../restrict_gitlab_schema_spec.rb | 4 +- .../gitlab/database/migration_helpers/v2_spec.rb | 79 +++++ .../wraparound_vacuum_helpers_spec.rb | 97 ++++++ spec/lib/gitlab/database/migration_helpers_spec.rb | 365 ++++++++++----------- .../batched_background_migration_helpers_spec.rb | 44 ++- .../migrations/constraints_helpers_spec.rb | 35 +- .../database/migrations/instrumentation_spec.rb | 19 +- .../database/migrations/pg_backend_pid_spec.rb | 52 +++ .../runner_backoff/active_record_mixin_spec.rb | 106 ++++++ .../migrations/runner_backoff/communicator_spec.rb | 84 +++++ .../runner_backoff/migration_helpers_spec.rb | 41 +++ spec/lib/gitlab/database/migrations/runner_spec.rb | 2 +- .../database/migrations/sidekiq_helpers_spec.rb | 264 ++++++++------- .../test_batched_background_runner_spec.rb | 21 +- .../database/no_cross_db_foreign_keys_spec.rb | 2 +- .../database/obsolete_ignored_columns_spec.rb | 7 +- .../partitioning/ci_sliding_list_strategy_spec.rb | 178 ++++++++++ .../convert_table_to_first_list_partition_spec.rb | 273 --------------- .../detached_partition_dropper_spec.rb | 48 +-- .../partitioning/list/convert_table_spec.rb | 365 +++++++++++++++++++++ .../list/locking_configuration_spec.rb | 46 +++ .../partitioning/partition_manager_spec.rb | 35 +- .../backfill_partitioned_table_spec.rb | 7 +- .../foreign_key_helpers_spec.rb | 122 ++++++- .../table_management_helpers_spec.rb | 225 ++++--------- spec/lib/gitlab/database/partitioning_spec.rb | 80 ++--- spec/lib/gitlab/database/pg_depend_spec.rb | 21 ++ 
.../gitlab/database/postgres_foreign_key_spec.rb | 58 +++- .../lib/gitlab/database/postgres_partition_spec.rb | 14 +- .../query_analyzers/gitlab_schemas_metrics_spec.rb | 16 +- .../gitlab_schemas_validate_connection_spec.rb | 17 +- .../prevent_cross_database_modification_spec.rb | 29 +- spec/lib/gitlab/database/reflection_spec.rb | 8 +- spec/lib/gitlab/database/reindexing_spec.rb | 4 +- .../adapters/column_database_adapter_spec.rb | 72 ++++ .../adapters/column_structure_sql_adapter_spec.rb | 78 +++++ .../database/schema_validation/database_spec.rb | 96 ++++-- .../schema_validation/inconsistency_spec.rb | 96 ++++++ .../database/schema_validation/index_spec.rb | 22 -- .../database/schema_validation/indexes_spec.rb | 56 ---- .../database/schema_validation/runner_spec.rb | 50 +++ .../schema_validation/schema_inconsistency_spec.rb | 17 + .../schema_objects/column_spec.rb | 25 ++ .../schema_validation/schema_objects/index_spec.rb | 11 + .../schema_validation/schema_objects/table_spec.rb | 45 +++ .../schema_objects/trigger_spec.rb | 11 + .../schema_validation/structure_sql_spec.rb | 66 ++++ .../schema_validation/track_inconsistency_spec.rb | 82 +++++ .../validators/base_validator_spec.rb | 36 ++ .../different_definition_indexes_spec.rb | 8 + .../validators/different_definition_tables_spec.rb | 7 + .../different_definition_triggers_spec.rb | 8 + .../validators/extra_indexes_spec.rb | 7 + .../validators/extra_table_columns_spec.rb | 7 + .../validators/extra_tables_spec.rb | 7 + .../validators/extra_triggers_spec.rb | 7 + .../validators/missing_indexes_spec.rb | 14 + .../validators/missing_table_columns_spec.rb | 7 + .../validators/missing_tables_spec.rb | 9 + .../validators/missing_triggers_spec.rb | 9 + spec/lib/gitlab/database/tables_locker_spec.rb | 237 ++++++++----- spec/lib/gitlab/database/tables_truncate_spec.rb | 20 +- .../database/transaction_timeout_settings_spec.rb | 2 +- .../with_lock_retries_outside_transaction_spec.rb | 4 +- spec/lib/gitlab/database/with_lock_retries_spec.rb | 4 +- 97 files changed, 3996 insertions(+), 1722 deletions(-) create mode 100644 spec/lib/gitlab/database/async_constraints/migration_helpers_spec.rb create mode 100644 spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb create mode 100644 spec/lib/gitlab/database/async_constraints/validators/check_constraint_spec.rb create mode 100644 spec/lib/gitlab/database/async_constraints/validators/foreign_key_spec.rb create mode 100644 spec/lib/gitlab/database/async_constraints/validators_spec.rb create mode 100644 spec/lib/gitlab/database/async_constraints_spec.rb delete mode 100644 spec/lib/gitlab/database/async_foreign_keys/foreign_key_validator_spec.rb delete mode 100644 spec/lib/gitlab/database/async_foreign_keys/migration_helpers_spec.rb delete mode 100644 spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb delete mode 100644 spec/lib/gitlab/database/async_foreign_keys_spec.rb create mode 100644 spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb create mode 100644 spec/lib/gitlab/database/load_balancing/logger_spec.rb create mode 100644 spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb create mode 100644 spec/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers_spec.rb create mode 100644 spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb create mode 100644 spec/lib/gitlab/database/migrations/runner_backoff/active_record_mixin_spec.rb create mode 100644 
spec/lib/gitlab/database/migrations/runner_backoff/communicator_spec.rb create mode 100644 spec/lib/gitlab/database/migrations/runner_backoff/migration_helpers_spec.rb create mode 100644 spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb delete mode 100644 spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb create mode 100644 spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb create mode 100644 spec/lib/gitlab/database/partitioning/list/locking_configuration_spec.rb create mode 100644 spec/lib/gitlab/database/pg_depend_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/adapters/column_database_adapter_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/inconsistency_spec.rb delete mode 100644 spec/lib/gitlab/database/schema_validation/index_spec.rb delete mode 100644 spec/lib/gitlab/database/schema_validation/indexes_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/runner_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/schema_objects/column_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/schema_objects/index_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/schema_objects/table_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/schema_objects/trigger_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/structure_sql_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/different_definition_indexes_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/different_definition_tables_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/different_definition_triggers_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/extra_indexes_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/extra_table_columns_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/extra_tables_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/extra_triggers_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/missing_indexes_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/missing_table_columns_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/missing_tables_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/missing_triggers_spec.rb (limited to 'spec/lib/gitlab/database') diff --git a/spec/lib/gitlab/database/async_constraints/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_constraints/migration_helpers_spec.rb new file mode 100644 index 00000000000..4dd510499ab --- /dev/null +++ b/spec/lib/gitlab/database/async_constraints/migration_helpers_spec.rb @@ -0,0 +1,288 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncConstraints::MigrationHelpers, feature_category: :database do + let(:migration) { Gitlab::Database::Migration[2.1].new } + let(:connection) { 
ApplicationRecord.connection } + let(:constraint_model) { Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation } + let(:table_name) { '_test_async_fks' } + let(:column_name) { 'parent_id' } + let(:fk_name) { nil } + + context 'with async FK validation on regular tables' do + before do + allow(migration).to receive(:puts) + allow(migration.connection).to receive(:transaction_open?).and_return(false) + + connection.create_table(table_name) do |t| + t.integer column_name + end + + migration.add_concurrent_foreign_key( + table_name, table_name, + column: column_name, validate: false, name: fk_name) + end + + describe '#prepare_async_foreign_key_validation' do + it 'creates the record for the async FK validation' do + expect do + migration.prepare_async_foreign_key_validation(table_name, column_name) + end.to change { constraint_model.where(table_name: table_name).count }.by(1) + + record = constraint_model.find_by(table_name: table_name) + + expect(record.name).to start_with('fk_') + expect(record).to be_foreign_key + end + + context 'when an explicit name is given' do + let(:fk_name) { 'my_fk_name' } + + it 'creates the record with the given name' do + expect do + migration.prepare_async_foreign_key_validation(table_name, name: fk_name) + end.to change { constraint_model.where(name: fk_name).count }.by(1) + + record = constraint_model.find_by(name: fk_name) + + expect(record.table_name).to eq(table_name) + expect(record).to be_foreign_key + end + end + + context 'when the FK does not exist' do + it 'returns an error' do + expect do + migration.prepare_async_foreign_key_validation(table_name, name: 'no_fk') + end.to raise_error RuntimeError, /Could not find foreign key "no_fk" on table "_test_async_fks"/ + end + end + + context 'when the record already exists' do + let(:fk_name) { 'my_fk_name' } + + it 'does attempt to create the record' do + create(:postgres_async_constraint_validation, table_name: table_name, name: fk_name) + + expect do + migration.prepare_async_foreign_key_validation(table_name, name: fk_name) + end.not_to change { constraint_model.where(name: fk_name).count } + end + end + + context 'when the async FK validation table does not exist' do + it 'does not raise an error' do + connection.drop_table(constraint_model.table_name) + + expect(constraint_model).not_to receive(:safe_find_or_create_by!) 
+ + expect { migration.prepare_async_foreign_key_validation(table_name, column_name) }.not_to raise_error + end + end + end + + describe '#unprepare_async_foreign_key_validation' do + context 'with foreign keys' do + before do + migration.prepare_async_foreign_key_validation(table_name, column_name, name: fk_name) + end + + it 'destroys the record' do + expect do + migration.unprepare_async_foreign_key_validation(table_name, column_name) + end.to change { constraint_model.where(table_name: table_name).count }.by(-1) + end + + context 'when an explicit name is given' do + let(:fk_name) { 'my_test_async_fk' } + + it 'destroys the record' do + expect do + migration.unprepare_async_foreign_key_validation(table_name, name: fk_name) + end.to change { constraint_model.where(name: fk_name).count }.by(-1) + end + end + + context 'when the async fk validation table does not exist' do + it 'does not raise an error' do + connection.drop_table(constraint_model.table_name) + + expect(constraint_model).not_to receive(:find_by) + + expect { migration.unprepare_async_foreign_key_validation(table_name, column_name) }.not_to raise_error + end + end + end + + context 'with other types of constraints' do + let(:name) { 'my_test_async_constraint' } + let(:constraint) { create(:postgres_async_constraint_validation, table_name: table_name, name: name) } + + it 'does not destroy the record' do + constraint.update_column(:constraint_type, 99) + + expect do + migration.unprepare_async_foreign_key_validation(table_name, name: name) + end.not_to change { constraint_model.where(name: name).count } + + expect(constraint).to be_present + end + end + end + end + + context 'with partitioned tables' do + let(:partition_schema) { 'gitlab_partitions_dynamic' } + let(:partition1_name) { "#{partition_schema}.#{table_name}_202001" } + let(:partition2_name) { "#{partition_schema}.#{table_name}_202002" } + let(:fk_name) { 'my_partitioned_fk_name' } + + before do + connection.execute(<<~SQL) + CREATE TABLE #{table_name} ( + id serial NOT NULL, + #{column_name} int NOT NULL, + created_at timestamptz NOT NULL, + PRIMARY KEY (id, created_at) + ) PARTITION BY RANGE (created_at); + + CREATE TABLE #{partition1_name} PARTITION OF #{table_name} + FOR VALUES FROM ('2020-01-01') TO ('2020-02-01'); + + CREATE TABLE #{partition2_name} PARTITION OF #{table_name} + FOR VALUES FROM ('2020-02-01') TO ('2020-03-01'); + SQL + end + + describe '#prepare_partitioned_async_foreign_key_validation' do + it 'delegates to prepare_async_foreign_key_validation for each partition' do + expect(migration) + .to receive(:prepare_async_foreign_key_validation) + .with(partition1_name, column_name, name: fk_name) + + expect(migration) + .to receive(:prepare_async_foreign_key_validation) + .with(partition2_name, column_name, name: fk_name) + + migration.prepare_partitioned_async_foreign_key_validation(table_name, column_name, name: fk_name) + end + end + + describe '#unprepare_partitioned_async_foreign_key_validation' do + it 'delegates to unprepare_async_foreign_key_validation for each partition' do + expect(migration) + .to receive(:unprepare_async_foreign_key_validation) + .with(partition1_name, column_name, name: fk_name) + + expect(migration) + .to receive(:unprepare_async_foreign_key_validation) + .with(partition2_name, column_name, name: fk_name) + + migration.unprepare_partitioned_async_foreign_key_validation(table_name, column_name, name: fk_name) + end + end + end + + context 'with async check constraint validations' do + let(:table_name) { 
'_test_async_check_constraints' } + let(:check_name) { 'partitioning_constraint' } + + before do + allow(migration).to receive(:puts) + allow(migration.connection).to receive(:transaction_open?).and_return(false) + + connection.create_table(table_name) do |t| + t.integer column_name + end + + migration.add_check_constraint( + table_name, "#{column_name} = 1", + check_name, validate: false) + end + + describe '#prepare_async_check_constraint_validation' do + it 'creates the record for async validation' do + expect do + migration.prepare_async_check_constraint_validation(table_name, name: check_name) + end.to change { constraint_model.where(name: check_name).count }.by(1) + + record = constraint_model.find_by(name: check_name) + + expect(record.table_name).to eq(table_name) + expect(record).to be_check_constraint + end + + context 'when the check constraint does not exist' do + it 'returns an error' do + expect do + migration.prepare_async_check_constraint_validation(table_name, name: 'missing') + end.to raise_error RuntimeError, /Could not find check constraint "missing" on table "#{table_name}"/ + end + end + + context 'when the record already exists' do + it 'does attempt to create the record' do + create(:postgres_async_constraint_validation, + table_name: table_name, + name: check_name, + constraint_type: :check_constraint) + + expect do + migration.prepare_async_check_constraint_validation(table_name, name: check_name) + end.not_to change { constraint_model.where(name: check_name).count } + end + end + + context 'when the async validation table does not exist' do + it 'does not raise an error' do + connection.drop_table(constraint_model.table_name) + + expect(constraint_model).not_to receive(:safe_find_or_create_by!) + + expect { migration.prepare_async_check_constraint_validation(table_name, name: check_name) } + .not_to raise_error + end + end + end + + describe '#unprepare_async_check_constraint_validation' do + context 'with check constraints' do + before do + migration.prepare_async_check_constraint_validation(table_name, name: check_name) + end + + it 'destroys the record' do + expect do + migration.unprepare_async_check_constraint_validation(table_name, name: check_name) + end.to change { constraint_model.where(name: check_name).count }.by(-1) + end + + context 'when the async validation table does not exist' do + it 'does not raise an error' do + connection.drop_table(constraint_model.table_name) + + expect(constraint_model).not_to receive(:find_by) + + expect { migration.unprepare_async_check_constraint_validation(table_name, name: check_name) } + .not_to raise_error + end + end + end + + context 'with other types of constraints' do + let(:constraint) { create(:postgres_async_constraint_validation, table_name: table_name, name: check_name) } + + it 'does not destroy the record' do + constraint.update_column(:constraint_type, 99) + + expect do + migration.unprepare_async_check_constraint_validation(table_name, name: check_name) + end.not_to change { constraint_model.where(name: check_name).count } + + expect(constraint).to be_present + end + end + end + end +end diff --git a/spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb b/spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb new file mode 100644 index 00000000000..52fbf6d2f9b --- /dev/null +++ b/spec/lib/gitlab/database/async_constraints/postgres_async_constraint_validation_spec.rb @@ -0,0 +1,109 @@ +# frozen_string_literal: true + +require 
'spec_helper' + +RSpec.describe Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation, type: :model, + feature_category: :database do + it { is_expected.to be_a Gitlab::Database::SharedModel } + + describe 'validations' do + let_it_be(:constraint_validation) { create(:postgres_async_constraint_validation) } + let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH } + let(:last_error_limit) { described_class::MAX_LAST_ERROR_LENGTH } + + subject { constraint_validation } + + it { is_expected.to validate_presence_of(:name) } + it { is_expected.to validate_uniqueness_of(:name).scoped_to(:table_name) } + it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) } + it { is_expected.to validate_presence_of(:table_name) } + it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) } + it { is_expected.to validate_length_of(:last_error).is_at_most(last_error_limit) } + end + + describe 'scopes' do + let!(:failed_validation) { create(:postgres_async_constraint_validation, attempts: 1) } + let!(:new_validation) { create(:postgres_async_constraint_validation) } + + describe '.ordered' do + subject { described_class.ordered } + + it { is_expected.to eq([new_validation, failed_validation]) } + end + + describe '.foreign_key_type' do + before do + new_validation.update_column(:constraint_type, 99) + end + + subject { described_class.foreign_key_type } + + it { is_expected.to eq([failed_validation]) } + + it 'does not apply the filter if the column is not present' do + expect(described_class) + .to receive(:constraint_type_exists?) + .and_return(false) + + is_expected.to match_array([failed_validation, new_validation]) + end + end + + describe '.check_constraint_type' do + before do + new_validation.update!(constraint_type: :check_constraint) + end + + subject { described_class.check_constraint_type } + + it { is_expected.to eq([new_validation]) } + end + end + + describe '.table_available?' do + subject { described_class.table_available? } + + it { is_expected.to be_truthy } + + context 'when the table does not exist' do + before do + described_class + .connection + .drop_table(described_class.table_name) + end + + it { is_expected.to be_falsy } + end + end + + describe '.constraint_type_exists?' do + it { expect(described_class.constraint_type_exists?).to be_truthy } + + it 'always asks the database' do + control = ActiveRecord::QueryRecorder.new(skip_schema_queries: false) do + described_class.constraint_type_exists? + end + + expect(control.count).to be >= 1 + expect { described_class.constraint_type_exists? }.to issue_same_number_of_queries_as(control) + end + end + + describe '#handle_exception!' 
do + let_it_be_with_reload(:constraint_validation) { create(:postgres_async_constraint_validation) } + + let(:error) { instance_double(StandardError, message: 'Oups', backtrace: %w[this that]) } + + subject { constraint_validation.handle_exception!(error) } + + it 'increases the attempts number' do + expect { subject }.to change { constraint_validation.reload.attempts }.by(1) + end + + it 'saves error details' do + subject + + expect(constraint_validation.reload.last_error).to eq("Oups\nthis\nthat") + end + end +end diff --git a/spec/lib/gitlab/database/async_constraints/validators/check_constraint_spec.rb b/spec/lib/gitlab/database/async_constraints/validators/check_constraint_spec.rb new file mode 100644 index 00000000000..7622b39feb1 --- /dev/null +++ b/spec/lib/gitlab/database/async_constraints/validators/check_constraint_spec.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncConstraints::Validators::CheckConstraint, feature_category: :database do + it_behaves_like 'async constraints validation' do + let(:constraint_type) { :check_constraint } + + before do + connection.create_table(table_name) do |t| + t.integer :parent_id + end + + connection.execute(<<~SQL.squish) + ALTER TABLE #{table_name} ADD CONSTRAINT #{constraint_name} + CHECK ( parent_id = 101 ) NOT VALID; + SQL + end + end +end diff --git a/spec/lib/gitlab/database/async_constraints/validators/foreign_key_spec.rb b/spec/lib/gitlab/database/async_constraints/validators/foreign_key_spec.rb new file mode 100644 index 00000000000..0e345e0e9ae --- /dev/null +++ b/spec/lib/gitlab/database/async_constraints/validators/foreign_key_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncConstraints::Validators::ForeignKey, feature_category: :database do + it_behaves_like 'async constraints validation' do + let(:constraint_type) { :foreign_key } + + before do + connection.create_table(table_name) do |t| + t.references :parent, foreign_key: { to_table: table_name, validate: false, name: constraint_name } + end + end + + context 'with fully qualified table names' do + let(:validation) do + create(:postgres_async_constraint_validation, + table_name: "public.#{table_name}", + name: constraint_name, + constraint_type: constraint_type + ) + end + + it 'validates the constraint' do + allow(connection).to receive(:execute).and_call_original + + expect(connection).to receive(:execute) + .with(/ALTER TABLE "public"."#{table_name}" VALIDATE CONSTRAINT "#{constraint_name}";/) + .ordered.and_call_original + + subject.perform + end + end + end +end diff --git a/spec/lib/gitlab/database/async_constraints/validators_spec.rb b/spec/lib/gitlab/database/async_constraints/validators_spec.rb new file mode 100644 index 00000000000..e903b79dd1b --- /dev/null +++ b/spec/lib/gitlab/database/async_constraints/validators_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncConstraints::Validators, feature_category: :database do + describe '.for' do + subject { described_class.for(record) } + + context 'with foreign keys validations' do + let(:record) { build(:postgres_async_constraint_validation, :foreign_key) } + + it { is_expected.to be_a(described_class::ForeignKey) } + end + + context 'with check constraint validations' do + let(:record) { build(:postgres_async_constraint_validation, :check_constraint) } + + it { is_expected.to 
be_a(described_class::CheckConstraint) } + end + end +end diff --git a/spec/lib/gitlab/database/async_constraints_spec.rb b/spec/lib/gitlab/database/async_constraints_spec.rb new file mode 100644 index 00000000000..e5cf782485f --- /dev/null +++ b/spec/lib/gitlab/database/async_constraints_spec.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncConstraints, feature_category: :database do + describe '.validate_pending_entries!' do + subject { described_class.validate_pending_entries! } + + let!(:fk_validation) do + create(:postgres_async_constraint_validation, :foreign_key, attempts: 2) + end + + let(:check_validation) do + create(:postgres_async_constraint_validation, :check_constraint, attempts: 1) + end + + it 'executes pending validations' do + expect_next_instance_of(described_class::Validators::ForeignKey, fk_validation) do |validator| + expect(validator).to receive(:perform) + end + + expect_next_instance_of(described_class::Validators::CheckConstraint, check_validation) do |validator| + expect(validator).to receive(:perform) + end + + subject + end + end +end diff --git a/spec/lib/gitlab/database/async_foreign_keys/foreign_key_validator_spec.rb b/spec/lib/gitlab/database/async_foreign_keys/foreign_key_validator_spec.rb deleted file mode 100644 index 90137e259f5..00000000000 --- a/spec/lib/gitlab/database/async_foreign_keys/foreign_key_validator_spec.rb +++ /dev/null @@ -1,152 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::AsyncForeignKeys::ForeignKeyValidator, feature_category: :database do - include ExclusiveLeaseHelpers - - describe '#perform' do - let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) } - let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } - let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION } - - let(:fk_model) { Gitlab::Database::AsyncForeignKeys::PostgresAsyncForeignKeyValidation } - let(:table_name) { '_test_async_fks' } - let(:fk_name) { 'fk_parent_id' } - let(:validation) { create(:postgres_async_foreign_key_validation, table_name: table_name, name: fk_name) } - let(:connection) { validation.connection } - - subject { described_class.new(validation) } - - before do - connection.create_table(table_name) do |t| - t.references :parent, foreign_key: { to_table: table_name, validate: false, name: fk_name } - end - end - - it 'validates the FK while controlling statement timeout' do - allow(connection).to receive(:execute).and_call_original - expect(connection).to receive(:execute) - .with("SET statement_timeout TO '43200s'").ordered.and_call_original - expect(connection).to receive(:execute) - .with('ALTER TABLE "_test_async_fks" VALIDATE CONSTRAINT "fk_parent_id";').ordered.and_call_original - expect(connection).to receive(:execute) - .with("RESET statement_timeout").ordered.and_call_original - - subject.perform - end - - context 'with fully qualified table names' do - let(:validation) do - create(:postgres_async_foreign_key_validation, - table_name: "public.#{table_name}", - name: fk_name - ) - end - - it 'validates the FK' do - allow(connection).to receive(:execute).and_call_original - - expect(connection).to receive(:execute) - .with('ALTER TABLE "public"."_test_async_fks" VALIDATE CONSTRAINT "fk_parent_id";').ordered.and_call_original - - subject.perform - end - end - - it 'removes the FK validation record from table' do - expect(validation).to 
receive(:destroy!).and_call_original - - expect { subject.perform }.to change { fk_model.count }.by(-1) - end - - it 'skips logic if not able to acquire exclusive lease' do - expect(lease).to receive(:try_obtain).ordered.and_return(false) - expect(connection).not_to receive(:execute).with(/ALTER TABLE/) - expect(validation).not_to receive(:destroy!) - - expect { subject.perform }.not_to change { fk_model.count } - end - - it 'logs messages around execution' do - allow(Gitlab::AppLogger).to receive(:info).and_call_original - - subject.perform - - expect(Gitlab::AppLogger) - .to have_received(:info) - .with(a_hash_including(message: 'Starting to validate foreign key')) - - expect(Gitlab::AppLogger) - .to have_received(:info) - .with(a_hash_including(message: 'Finished validating foreign key')) - end - - context 'when the FK does not exist' do - before do - connection.create_table(table_name, force: true) - end - - it 'skips validation and removes the record' do - expect(connection).not_to receive(:execute).with(/ALTER TABLE/) - - expect { subject.perform }.to change { fk_model.count }.by(-1) - end - - it 'logs an appropriate message' do - expected_message = "Skipping #{fk_name} validation since it does not exist. The queuing entry will be deleted" - - allow(Gitlab::AppLogger).to receive(:info).and_call_original - - subject.perform - - expect(Gitlab::AppLogger) - .to have_received(:info) - .with(a_hash_including(message: expected_message)) - end - end - - context 'with error handling' do - before do - allow(connection).to receive(:execute).and_call_original - - allow(connection).to receive(:execute) - .with('ALTER TABLE "_test_async_fks" VALIDATE CONSTRAINT "fk_parent_id";') - .and_raise(ActiveRecord::StatementInvalid) - end - - context 'on production' do - before do - allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false) - end - - it 'increases execution attempts' do - expect { subject.perform }.to change { validation.attempts }.by(1) - - expect(validation.last_error).to be_present - expect(validation).not_to be_destroyed - end - - it 'logs an error message including the fk_name' do - expect(Gitlab::AppLogger) - .to receive(:error) - .with(a_hash_including(:message, :fk_name)) - .and_call_original - - subject.perform - end - end - - context 'on development' do - it 'also raises errors' do - expect { subject.perform } - .to raise_error(ActiveRecord::StatementInvalid) - .and change { validation.attempts }.by(1) - - expect(validation.last_error).to be_present - expect(validation).not_to be_destroyed - end - end - end - end -end diff --git a/spec/lib/gitlab/database/async_foreign_keys/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_foreign_keys/migration_helpers_spec.rb deleted file mode 100644 index 0bd0e8045ff..00000000000 --- a/spec/lib/gitlab/database/async_foreign_keys/migration_helpers_spec.rb +++ /dev/null @@ -1,167 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::AsyncForeignKeys::MigrationHelpers, feature_category: :database do - let(:migration) { Gitlab::Database::Migration[2.1].new } - let(:connection) { ApplicationRecord.connection } - let(:fk_model) { Gitlab::Database::AsyncForeignKeys::PostgresAsyncForeignKeyValidation } - let(:table_name) { '_test_async_fks' } - let(:column_name) { 'parent_id' } - let(:fk_name) { nil } - - context 'with regular tables' do - before do - allow(migration).to receive(:puts) - allow(migration.connection).to receive(:transaction_open?).and_return(false) - - 
connection.create_table(table_name) do |t| - t.integer column_name - end - - migration.add_concurrent_foreign_key( - table_name, table_name, - column: column_name, validate: false, name: fk_name) - end - - describe '#prepare_async_foreign_key_validation' do - it 'creates the record for the async FK validation' do - expect do - migration.prepare_async_foreign_key_validation(table_name, column_name) - end.to change { fk_model.where(table_name: table_name).count }.by(1) - - record = fk_model.find_by(table_name: table_name) - - expect(record.name).to start_with('fk_') - end - - context 'when an explicit name is given' do - let(:fk_name) { 'my_fk_name' } - - it 'creates the record with the given name' do - expect do - migration.prepare_async_foreign_key_validation(table_name, name: fk_name) - end.to change { fk_model.where(name: fk_name).count }.by(1) - - record = fk_model.find_by(name: fk_name) - - expect(record.table_name).to eq(table_name) - end - end - - context 'when the FK does not exist' do - it 'returns an error' do - expect do - migration.prepare_async_foreign_key_validation(table_name, name: 'no_fk') - end.to raise_error RuntimeError, /Could not find foreign key "no_fk" on table "_test_async_fks"/ - end - end - - context 'when the record already exists' do - let(:fk_name) { 'my_fk_name' } - - it 'does attempt to create the record' do - create(:postgres_async_foreign_key_validation, table_name: table_name, name: fk_name) - - expect do - migration.prepare_async_foreign_key_validation(table_name, name: fk_name) - end.not_to change { fk_model.where(name: fk_name).count } - end - end - - context 'when the async FK validation table does not exist' do - it 'does not raise an error' do - connection.drop_table(:postgres_async_foreign_key_validations) - - expect(fk_model).not_to receive(:safe_find_or_create_by!) 
- - expect { migration.prepare_async_foreign_key_validation(table_name, column_name) }.not_to raise_error - end - end - end - - describe '#unprepare_async_foreign_key_validation' do - before do - migration.prepare_async_foreign_key_validation(table_name, column_name, name: fk_name) - end - - it 'destroys the record' do - expect do - migration.unprepare_async_foreign_key_validation(table_name, column_name) - end.to change { fk_model.where(table_name: table_name).count }.by(-1) - end - - context 'when an explicit name is given' do - let(:fk_name) { 'my_test_async_fk' } - - it 'destroys the record' do - expect do - migration.unprepare_async_foreign_key_validation(table_name, name: fk_name) - end.to change { fk_model.where(name: fk_name).count }.by(-1) - end - end - - context 'when the async fk validation table does not exist' do - it 'does not raise an error' do - connection.drop_table(:postgres_async_foreign_key_validations) - - expect(fk_model).not_to receive(:find_by) - - expect { migration.unprepare_async_foreign_key_validation(table_name, column_name) }.not_to raise_error - end - end - end - end - - context 'with partitioned tables' do - let(:partition_schema) { 'gitlab_partitions_dynamic' } - let(:partition1_name) { "#{partition_schema}.#{table_name}_202001" } - let(:partition2_name) { "#{partition_schema}.#{table_name}_202002" } - let(:fk_name) { 'my_partitioned_fk_name' } - - before do - connection.execute(<<~SQL) - CREATE TABLE #{table_name} ( - id serial NOT NULL, - #{column_name} int NOT NULL, - created_at timestamptz NOT NULL, - PRIMARY KEY (id, created_at) - ) PARTITION BY RANGE (created_at); - - CREATE TABLE #{partition1_name} PARTITION OF #{table_name} - FOR VALUES FROM ('2020-01-01') TO ('2020-02-01'); - - CREATE TABLE #{partition2_name} PARTITION OF #{table_name} - FOR VALUES FROM ('2020-02-01') TO ('2020-03-01'); - SQL - end - - describe '#prepare_partitioned_async_foreign_key_validation' do - it 'delegates to prepare_async_foreign_key_validation for each partition' do - expect(migration) - .to receive(:prepare_async_foreign_key_validation) - .with(partition1_name, column_name, name: fk_name) - - expect(migration) - .to receive(:prepare_async_foreign_key_validation) - .with(partition2_name, column_name, name: fk_name) - - migration.prepare_partitioned_async_foreign_key_validation(table_name, column_name, name: fk_name) - end - end - - describe '#unprepare_partitioned_async_foreign_key_validation' do - it 'delegates to unprepare_async_foreign_key_validation for each partition' do - expect(migration) - .to receive(:unprepare_async_foreign_key_validation) - .with(partition1_name, column_name, name: fk_name) - - expect(migration) - .to receive(:unprepare_async_foreign_key_validation) - .with(partition2_name, column_name, name: fk_name) - - migration.unprepare_partitioned_async_foreign_key_validation(table_name, column_name, name: fk_name) - end - end - end -end diff --git a/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb b/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb deleted file mode 100644 index ba201d93f52..00000000000 --- a/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::AsyncForeignKeys::PostgresAsyncForeignKeyValidation, type: :model, - feature_category: :database do - it { is_expected.to be_a Gitlab::Database::SharedModel } 
- - describe 'validations' do - let_it_be(:fk_validation) { create(:postgres_async_foreign_key_validation) } - let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH } - let(:last_error_limit) { described_class::MAX_LAST_ERROR_LENGTH } - - subject { fk_validation } - - it { is_expected.to validate_presence_of(:name) } - it { is_expected.to validate_uniqueness_of(:name) } - it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) } - it { is_expected.to validate_presence_of(:table_name) } - it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) } - it { is_expected.to validate_length_of(:last_error).is_at_most(last_error_limit) } - end - - describe 'scopes' do - let!(:failed_validation) { create(:postgres_async_foreign_key_validation, attempts: 1) } - let!(:new_validation) { create(:postgres_async_foreign_key_validation) } - - describe '.ordered' do - subject { described_class.ordered } - - it { is_expected.to eq([new_validation, failed_validation]) } - end - end - - describe '#handle_exception!' do - let_it_be_with_reload(:fk_validation) { create(:postgres_async_foreign_key_validation) } - - let(:error) { instance_double(StandardError, message: 'Oups', backtrace: %w[this that]) } - - subject { fk_validation.handle_exception!(error) } - - it 'increases the attempts number' do - expect { subject }.to change { fk_validation.reload.attempts }.by(1) - end - - it 'saves error details' do - subject - - expect(fk_validation.reload.last_error).to eq("Oups\nthis\nthat") - end - end -end diff --git a/spec/lib/gitlab/database/async_foreign_keys_spec.rb b/spec/lib/gitlab/database/async_foreign_keys_spec.rb deleted file mode 100644 index f15eb364929..00000000000 --- a/spec/lib/gitlab/database/async_foreign_keys_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::AsyncForeignKeys, feature_category: :database do - describe '.validate_pending_entries!' do - subject { described_class.validate_pending_entries! } - - before do - create_list(:postgres_async_foreign_key_validation, 3) - end - - it 'takes 2 pending FK validations and executes them' do - validations = described_class::PostgresAsyncForeignKeyValidation.ordered.limit(2).to_a - - expect_next_instances_of(described_class::ForeignKeyValidator, 2, validations) do |validator| - expect(validator).to receive(:perform) - end - - subject - end - end -end diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb index 7c5c368fcb5..b2ba1a60fbb 100644 --- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb @@ -143,6 +143,92 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers, feature_categor end end + describe '#prepare_async_index_from_sql' do + let(:index_definition) { "CREATE INDEX CONCURRENTLY #{index_name} ON #{table_name} USING btree(id)" } + + subject(:prepare_async_index_from_sql) do + migration.prepare_async_index_from_sql(index_definition) + end + + before do + connection.create_table(table_name) + + allow(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_ddl_mode!).and_call_original + end + + it 'requires ddl mode' do + prepare_async_index_from_sql + + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to have_received(:require_ddl_mode!) 
+ end + + context 'when the given index is invalid' do + let(:index_definition) { "SELECT FROM users" } + + it 'raises a RuntimeError' do + expect { prepare_async_index_from_sql }.to raise_error(RuntimeError, 'Index statement not found!') + end + end + + context 'when the given index is valid' do + context 'when the index algorithm is not concurrent' do + let(:index_definition) { "CREATE INDEX #{index_name} ON #{table_name} USING btree(id)" } + + it 'raises a RuntimeError' do + expect { prepare_async_index_from_sql }.to raise_error(RuntimeError, 'Index must be created concurrently!') + end + end + + context 'when the index algorithm is concurrent' do + context 'when the statement tries to create an index for non-existing table' do + let(:index_definition) { "CREATE INDEX CONCURRENTLY #{index_name} ON foo_table USING btree(id)" } + + it 'raises a RuntimeError' do + expect { prepare_async_index_from_sql }.to raise_error(RuntimeError, 'Table does not exist!') + end + end + + context 'when the statement tries to create an index for an existing table' do + context 'when the async index creation is not available' do + before do + connection.drop_table(:postgres_async_indexes) + end + + it 'does not raise an error' do + expect { prepare_async_index_from_sql }.not_to raise_error + end + end + + context 'when the async index creation is available' do + context 'when there is already an index with the given name' do + before do + connection.add_index(table_name, 'id', name: index_name) + end + + it 'does not create the async index record' do + expect { prepare_async_index_from_sql }.not_to change { index_model.where(name: index_name).count } + end + end + + context 'when there is no index with the given name' do + let(:async_index) { index_model.find_by(name: index_name) } + + it 'creates the async index record' do + expect { prepare_async_index_from_sql }.to change { index_model.where(name: index_name).count }.by(1) + end + + it 'sets the async index attributes correctly' do + prepare_async_index_from_sql + + expect(async_index).to have_attributes(table_name: table_name, definition: index_definition) + end + end + end + end + end + end + end + describe '#prepare_async_index_removal' do before do connection.create_table(table_name) diff --git a/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb b/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb index c367f4a4493..fb9b16d46d6 100644 --- a/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb @@ -113,5 +113,14 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchOptimizer do expect { subject }.to change { migration.reload.batch_size }.to(1_000) end end + + context 'when migration max_batch_size is less than MIN_BATCH_SIZE' do + let(:migration_params) { { max_batch_size: 900 } } + + it 'does not raise an error' do + mock_efficiency(0.7) + expect { subject }.not_to raise_error + end + end end end diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb index cc9f3d5b7f1..d9b81a2be30 100644 --- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb @@ -184,6 +184,35 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d expect(transition_log.exception_message).to eq('RuntimeError') end end + + context 'when job 
fails during sub batch processing' do + let(:args) { { error: ActiveRecord::StatementTimeout.new, from_sub_batch: true } } + let(:attempts) { 0 } + let(:failure) { job.failure!(**args) } + let(:job) do + create(:batched_background_migration_job, :running, batch_size: 20, sub_batch_size: 10, attempts: attempts) + end + + context 'when sub batch size can be reduced in 25%' do + it { expect { failure }.to change { job.sub_batch_size }.to 7 } + end + + context 'when retries exceeds 2 attempts' do + let(:attempts) { 3 } + + before do + allow(job).to receive(:split_and_retry!) + end + + it 'calls split_and_retry! once sub_batch_size cannot be decreased anymore' do + failure + + expect(job).to have_received(:split_and_retry!).once + end + + it { expect { failure }.not_to change { job.sub_batch_size } } + end + end end describe 'scopes' do @@ -271,6 +300,24 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d end end + describe '.extract_transition_options' do + let(:perform) { subject.class.extract_transition_options(args) } + + where(:args, :expected_result) do + [ + [[], []], + [[{ error: StandardError }], [StandardError, nil]], + [[{ error: StandardError, from_sub_batch: true }], [StandardError, true]] + ] + end + + with_them do + it 'matches expected keys and result' do + expect(perform).to match_array(expected_result) + end + end + end + describe '#can_split?' do subject { job.can_split?(exception) } @@ -327,6 +374,34 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d end end + describe '#can_reduce_sub_batch_size?' do + let(:attempts) { 0 } + let(:batch_size) { 10 } + let(:sub_batch_size) { 6 } + let(:job) do + create(:batched_background_migration_job, attempts: attempts, + batch_size: batch_size, sub_batch_size: sub_batch_size) + end + + context 'when the number of attempts is lower than the limit and batch size are within boundaries' do + let(:attempts) { 1 } + + it { expect(job.can_reduce_sub_batch_size?).to be(true) } + end + + context 'when the number of attempts is lower than the limit and batch size are outside boundaries' do + let(:batch_size) { 1 } + + it { expect(job.can_reduce_sub_batch_size?).to be(false) } + end + + context 'when the number of attempts is greater than the limit and batch size are within boundaries' do + let(:attempts) { 3 } + + it { expect(job.can_reduce_sub_batch_size?).to be(false) } + end + end + describe '#time_efficiency' do subject { job.time_efficiency } @@ -465,4 +540,80 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d end end end + + describe '#reduce_sub_batch_size!' do + let(:migration_batch_size) { 20 } + let(:migration_sub_batch_size) { 10 } + let(:job_batch_size) { 20 } + let(:job_sub_batch_size) { 10 } + let(:status) { :failed } + + let(:migration) do + create(:batched_background_migration, :active, batch_size: migration_batch_size, + sub_batch_size: migration_sub_batch_size) + end + + let(:job) do + create(:batched_background_migration_job, status, sub_batch_size: job_sub_batch_size, + batch_size: job_batch_size, batched_migration: migration) + end + + context 'when the job sub batch size can be reduced' do + let(:expected_sub_batch_size) { 7 } + + it 'reduces sub batch size in 25%' do + expect { job.reduce_sub_batch_size! 
}.to change { job.sub_batch_size }.to(expected_sub_batch_size) + end + + it 'log the changes' do + expect(Gitlab::AppLogger).to receive(:warn).with( + message: 'Sub batch size reduced due to timeout', + batched_job_id: job.id, + sub_batch_size: job_sub_batch_size, + reduced_sub_batch_size: expected_sub_batch_size, + attempts: job.attempts, + batched_migration_id: migration.id, + job_class_name: job.migration_job_class_name, + job_arguments: job.migration_job_arguments + ) + + job.reduce_sub_batch_size! + end + end + + context 'when reduced sub_batch_size is greater than sub_batch' do + let(:job_batch_size) { 5 } + + it "doesn't allow sub_batch_size to greater than sub_batch" do + expect { job.reduce_sub_batch_size! }.to change { job.sub_batch_size }.to 5 + end + end + + context 'when sub_batch_size is already 1' do + let(:job_sub_batch_size) { 1 } + + it "updates sub_batch_size to it's minimum value" do + expect { job.reduce_sub_batch_size! }.not_to change { job.sub_batch_size } + end + end + + context 'when job has not failed' do + let(:status) { :succeeded } + let(:error) { Gitlab::Database::BackgroundMigration::ReduceSubBatchSizeError } + + it 'raises an exception' do + expect { job.reduce_sub_batch_size! }.to raise_error(error) + end + end + + context 'when the amount to be reduced exceeds the threshold' do + let(:migration_batch_size) { 150 } + let(:migration_sub_batch_size) { 100 } + let(:job_sub_batch_size) { 30 } + + it 'prevents sub batch size to be reduced' do + expect { job.reduce_sub_batch_size! }.not_to change { job.sub_batch_size } + end + end + end end diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb index d132559acea..546f9353808 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :model do +RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :model, feature_category: :database do it_behaves_like 'having unique enum values' it { is_expected.to be_a Gitlab::Database::SharedModel } @@ -328,6 +328,17 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m end end + describe '.finalizing' do + let!(:migration1) { create(:batched_background_migration, :active) } + let!(:migration2) { create(:batched_background_migration, :paused) } + let!(:migration3) { create(:batched_background_migration, :finalizing) } + let!(:migration4) { create(:batched_background_migration, :finished) } + + it 'returns only finalizing migrations' do + expect(described_class.finalizing).to contain_exactly(migration3) + end + end + describe '.successful_rows_counts' do let!(:migration1) { create(:batched_background_migration) } let!(:migration2) { create(:batched_background_migration) } diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb index f3a292abbae..8d74d16f4e5 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb @@ -8,6 +8,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' let(:connection) { 
Gitlab::Database.database_base_models[:main].connection } let(:metrics_tracker) { instance_double('::Gitlab::Database::BackgroundMigration::PrometheusMetrics', track: nil) } let(:job_class) { Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) } + let(:sub_batch_exception) { Gitlab::Database::BackgroundMigration::SubBatchTimeoutError } let_it_be(:pause_ms) { 250 } let_it_be(:active_migration) { create(:batched_background_migration, :active, job_arguments: [:id, :other_id]) } @@ -39,7 +40,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' sub_batch_size: 1, pause_ms: pause_ms, job_arguments: active_migration.job_arguments, - connection: connection) + connection: connection, + sub_batch_exception: sub_batch_exception) .and_return(job_instance) expect(job_instance).to receive(:perform).with(no_args) @@ -119,12 +121,14 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' end context 'when the migration job raises an error' do - shared_examples 'an error is raised' do |error_class| + shared_examples 'an error is raised' do |error_class, cause| + let(:expected_to_raise) { cause || error_class } + it 'marks the tracking record as failed' do expect(job_instance).to receive(:perform).with(no_args).and_raise(error_class) freeze_time do - expect { perform }.to raise_error(error_class) + expect { perform }.to raise_error(expected_to_raise) reloaded_job_record = job_record.reload @@ -137,13 +141,16 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' expect(job_instance).to receive(:perform).with(no_args).and_raise(error_class) expect(metrics_tracker).to receive(:track).with(job_record) - expect { perform }.to raise_error(error_class) + expect { perform }.to raise_error(expected_to_raise) end end it_behaves_like 'an error is raised', RuntimeError.new('Something broke!') it_behaves_like 'an error is raised', SignalException.new('SIGTERM') it_behaves_like 'an error is raised', ActiveRecord::StatementTimeout.new('Timeout!') + + error = StandardError.new + it_behaves_like('an error is raised', Gitlab::Database::BackgroundMigration::SubBatchTimeoutError.new(error), error) end context 'when the batched background migration does not inherit from BatchedMigrationJob' do diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb b/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb new file mode 100644 index 00000000000..d3102a105ea --- /dev/null +++ b/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb @@ -0,0 +1,148 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::PatroniApdex, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength + let(:schema) { :main } + let(:connection) { Gitlab::Database.database_base_models[schema].connection } + + around do |example| + Gitlab::Database::SharedModel.using_connection(connection) do + example.run + end + end + + describe '#evaluate' do + let(:prometheus_url) { 'http://thanos:9090' } + let(:prometheus_config) { [prometheus_url, { allow_local_requests: true, verify: true }] } + + let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) } + + let(:context) do + Gitlab::Database::BackgroundMigration::HealthStatus::Context + .new(connection, ['users'], gitlab_schema) + end + + let(:gitlab_schema) { 
"gitlab_#{schema}" } + let(:client_ready) { true } + let(:database_apdex_sli_query_main) { 'Apdex query for main' } + let(:database_apdex_sli_query_ci) { 'Apdex query for ci' } + let(:database_apdex_slo_main) { 0.99 } + let(:database_apdex_slo_ci) { 0.95 } + let(:database_apdex_settings) do + { + prometheus_api_url: prometheus_url, + apdex_sli_query: { + main: database_apdex_sli_query_main, + ci: database_apdex_sli_query_ci + }, + apdex_slo: { + main: database_apdex_slo_main, + ci: database_apdex_slo_ci + } + } + end + + subject(:evaluate) { described_class.new(context).evaluate } + + before do + stub_application_setting(database_apdex_settings: database_apdex_settings) + + allow(Gitlab::PrometheusClient).to receive(:new).with(*prometheus_config).and_return(prometheus_client) + allow(prometheus_client).to receive(:ready?).and_return(client_ready) + end + + shared_examples 'Patroni Apdex Evaluator' do |schema| + context "with #{schema} schema" do + let(:schema) { schema } + let(:apdex_slo_above_sli) { { main: 0.991, ci: 0.951 } } + let(:apdex_slo_below_sli) { { main: 0.989, ci: 0.949 } } + + it 'returns NoSignal signal in case the feature flag is disabled' do + stub_feature_flags(batched_migrations_health_status_patroni_apdex: false) + + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable) + expect(evaluate.reason).to include('indicator disabled') + end + + context 'without database_apdex_settings' do + let(:database_apdex_settings) { nil } + + it 'returns Unknown signal' do + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Patroni Apdex Settings not configured') + end + end + + context 'when Prometheus client is not ready' do + let(:client_ready) { false } + + it 'returns Unknown signal' do + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Prometheus client is not ready') + end + end + + context 'when apdex SLI query is not configured' do + let(:"database_apdex_sli_query_#{schema}") { nil } + + it 'returns Unknown signal' do + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Apdex SLI query is not configured') + end + end + + context 'when slo is not configured' do + let(:"database_apdex_slo_#{schema}") { nil } + + it 'returns Unknown signal' do + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Apdex SLO is not configured') + end + end + + it 'returns Normal signal when Patroni apdex SLI is above SLO' do + expect(prometheus_client).to receive(:query) + .with(send("database_apdex_sli_query_#{schema}")) + .and_return([{ "value" => [1662423310.878, apdex_slo_above_sli[schema]] }]) + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal) + expect(evaluate.reason).to include('Patroni service apdex is above SLO') + end + + it 'returns Stop signal when Patroni apdex is below SLO' do + expect(prometheus_client).to receive(:query) + .with(send("database_apdex_sli_query_#{schema}")) + .and_return([{ "value" => [1662423310.878, apdex_slo_below_sli[schema]] }]) + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop) + expect(evaluate.reason).to include('Patroni service apdex is below SLO') + end + + context 'when Patroni apdex can not be 
calculated' do + where(:result) do + [ + nil, + [], + [{}], + [{ 'value' => 1 }], + [{ 'value' => [1] }] + ] + end + + with_them do + it 'returns Unknown signal' do + expect(prometheus_client).to receive(:query).and_return(result) + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Patroni service apdex can not be calculated') + end + end + end + end + end + + Gitlab::Database.database_base_models.each do |database_base_model, connection| + next unless connection.present? + + it_behaves_like 'Patroni Apdex Evaluator', database_base_model.to_sym + end + end +end diff --git a/spec/lib/gitlab/database/background_migration/health_status_spec.rb b/spec/lib/gitlab/database/background_migration/health_status_spec.rb index 8bc04d80fa1..4d6c729f080 100644 --- a/spec/lib/gitlab/database/background_migration/health_status_spec.rb +++ b/spec/lib/gitlab/database/background_migration/health_status_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do +RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus, feature_category: :database do let(:connection) { Gitlab::Database.database_base_models[:main].connection } around do |example| @@ -19,8 +19,10 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus } let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable } let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog } + let(:patroni_apdex_indicator_class) { health_status::Indicators::PatroniApdex } let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) } let(:wal_indicator) { instance_double(wal_indicator_class) } + let(:patroni_apdex_indicator) { instance_double(patroni_apdex_indicator_class) } before do allow(autovacuum_indicator_class).to receive(:new).with(migration.health_context).and_return(autovacuum_indicator) @@ -36,8 +38,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal) expect(wal_indicator_class).to receive(:new).with(migration.health_context).and_return(wal_indicator) expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal) + expect(patroni_apdex_indicator_class).to receive(:new).with(migration.health_context) + .and_return(patroni_apdex_indicator) + expect(patroni_apdex_indicator).to receive(:evaluate).and_return(not_available_signal) - expect(evaluate).to contain_exactly(normal_signal, not_available_signal) + expect(evaluate).to contain_exactly(normal_signal, not_available_signal, not_available_signal) end end @@ -50,10 +55,23 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do end it 'logs interesting signals' do - signal = instance_double("#{health_status}::Signals::Stop", log_info?: true) + signal = instance_double( + "#{health_status}::Signals::Stop", + log_info?: true, + indicator_class: autovacuum_indicator_class, + short_name: 'Stop', + reason: 'Test Exception' + ) expect(autovacuum_indicator).to receive(:evaluate).and_return(signal) - expect(described_class).to receive(:log_signal).with(signal, migration) + + expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with( + migration_id: migration.id, + health_status_indicator: autovacuum_indicator_class.to_s, + indicator_signal: 'Stop', + signal_reason: 'Test Exception', + 
message: "#{migration} signaled: #{signal}" + ) evaluate end diff --git a/spec/lib/gitlab/database/background_migration_job_spec.rb b/spec/lib/gitlab/database/background_migration_job_spec.rb index 1117c17c84a..6a1bedd800b 100644 --- a/spec/lib/gitlab/database/background_migration_job_spec.rb +++ b/spec/lib/gitlab/database/background_migration_job_spec.rb @@ -27,26 +27,6 @@ RSpec.describe Gitlab::Database::BackgroundMigrationJob do end end - describe '.for_partitioning_migration' do - let!(:job1) { create(:background_migration_job, arguments: [1, 100, 'other_table']) } - let!(:job2) { create(:background_migration_job, arguments: [1, 100, 'audit_events']) } - let!(:job3) { create(:background_migration_job, class_name: 'OtherJob', arguments: [1, 100, 'audit_events']) } - - it 'returns jobs matching class_name and the table_name job argument' do - relation = described_class.for_partitioning_migration('TestJob', 'audit_events') - - expect(relation.count).to eq(1) - expect(relation.first).to have_attributes(class_name: 'TestJob', arguments: [1, 100, 'audit_events']) - end - - it 'normalizes class names by removing leading ::' do - relation = described_class.for_partitioning_migration('::TestJob', 'audit_events') - - expect(relation.count).to eq(1) - expect(relation.first).to have_attributes(class_name: 'TestJob', arguments: [1, 100, 'audit_events']) - end - end - describe '.mark_all_as_succeeded' do let!(:job1) { create(:background_migration_job, arguments: [1, 100]) } let!(:job2) { create(:background_migration_job, arguments: [1, 100]) } diff --git a/spec/lib/gitlab/database/consistency_checker_spec.rb b/spec/lib/gitlab/database/consistency_checker_spec.rb index c0f0c349ddd..be03bd00619 100644 --- a/spec/lib/gitlab/database/consistency_checker_spec.rb +++ b/spec/lib/gitlab/database/consistency_checker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::ConsistencyChecker, feature_category: :pods do +RSpec.describe Gitlab::Database::ConsistencyChecker, feature_category: :cell do let(:batch_size) { 10 } let(:max_batches) { 4 } let(:max_runtime) { described_class::MAX_RUNTIME } diff --git a/spec/lib/gitlab/database/dynamic_model_helpers_spec.rb b/spec/lib/gitlab/database/dynamic_model_helpers_spec.rb index 31486240bfa..fe423b3639b 100644 --- a/spec/lib/gitlab/database/dynamic_model_helpers_spec.rb +++ b/spec/lib/gitlab/database/dynamic_model_helpers_spec.rb @@ -49,6 +49,21 @@ RSpec.describe Gitlab::Database::DynamicModelHelpers do expect { |b| each_batch_size.call(&b) } .to yield_successive_args(1, 1) end + + context 'when a column to be batched over is specified' do + let(:projects) { Project.order(project_namespace_id: :asc) } + + it 'iterates table in batches using the given column' do + each_batch_ids = ->(&block) do + subject.each_batch(table_name, connection: connection, of: 1, column: :project_namespace_id) do |batch| + block.call(batch.pluck(:project_namespace_id)) + end + end + + expect { |b| each_batch_ids.call(&b) } + .to yield_successive_args([projects.first.project_namespace_id], [projects.last.project_namespace_id]) + end + end end context 'when transaction is open' do @@ -95,6 +110,35 @@ RSpec.describe Gitlab::Database::DynamicModelHelpers do expect { |b| each_batch_limited.call(&b) } .to yield_successive_args([first_project.id, first_project.id]) end + + context 'when primary key is not named id' do + let(:namespace_settings1) { create(:namespace_settings) } + let(:namespace_settings2) { create(:namespace_settings) } + let(:table_name) { 
NamespaceSetting.table_name } + let(:connection) { NamespaceSetting.connection } + let(:primary_key) { subject.define_batchable_model(table_name, connection: connection).primary_key } + + it 'iterates table in batch ranges using the correct primary key' do + expect(primary_key).to eq("namespace_id") # Sanity check the primary key is not id + expect { |b| subject.each_batch_range(table_name, connection: connection, of: 1, &b) } + .to yield_successive_args( + [namespace_settings1.namespace_id, namespace_settings1.namespace_id], + [namespace_settings2.namespace_id, namespace_settings2.namespace_id] + ) + end + end + + context 'when a column to be batched over is specified' do + it 'iterates table in batch ranges using the given column' do + expect do |b| + subject.each_batch_range(table_name, connection: connection, of: 1, column: :project_namespace_id, &b) + end + .to yield_successive_args( + [first_project.project_namespace_id, first_project.project_namespace_id], + [second_project.project_namespace_id, second_project.project_namespace_id] + ) + end + end end context 'when transaction is open' do diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb index 28a087d5401..5d3260a77c9 100644 --- a/spec/lib/gitlab/database/gitlab_schema_spec.rb +++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb @@ -16,19 +16,28 @@ RSpec.shared_examples 'validate schema data' do |tables_and_views| end end -RSpec.describe Gitlab::Database::GitlabSchema do +RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do shared_examples 'maps table name to table schema' do using RSpec::Parameterized::TableSyntax + before do + ApplicationRecord.connection.execute(<<~SQL) + CREATE INDEX index_name_on_table_belonging_to_gitlab_main ON public.projects (name); + SQL + end + where(:name, :classification) do - 'ci_builds' | :gitlab_ci - 'my_schema.ci_builds' | :gitlab_ci - 'information_schema.columns' | :gitlab_internal - 'audit_events_part_5fc467ac26' | :gitlab_main - '_test_gitlab_main_table' | :gitlab_main - '_test_gitlab_ci_table' | :gitlab_ci - '_test_my_table' | :gitlab_shared - 'pg_attribute' | :gitlab_internal + 'ci_builds' | :gitlab_ci + 'my_schema.ci_builds' | :gitlab_ci + 'my_schema.ci_runner_machine_builds_100' | :gitlab_ci + 'my_schema._test_gitlab_main_table' | :gitlab_main + 'information_schema.columns' | :gitlab_internal + 'audit_events_part_5fc467ac26' | :gitlab_main + '_test_gitlab_main_table' | :gitlab_main + '_test_gitlab_ci_table' | :gitlab_ci + '_test_my_table' | :gitlab_shared + 'pg_attribute' | :gitlab_internal + 'index_name_on_table_belonging_to_gitlab_main' | :gitlab_main end with_them do @@ -49,8 +58,10 @@ RSpec.describe Gitlab::Database::GitlabSchema do context "for #{db_config_name} using #{db_class}" do let(:db_data_sources) { db_class.connection.data_sources } - # The Geo database does not share the same structure as all decomposed databases - subject { described_class.views_and_tables_to_schema.select { |_, v| v != :gitlab_geo } } + # The embedding and Geo databases do not share the same structure as all decomposed databases + subject do + described_class.views_and_tables_to_schema.reject { |_, v| v == :gitlab_embedding || v == :gitlab_geo } + end it 'new data sources are added' do missing_data_sources = db_data_sources.to_set - subject.keys @@ -116,10 +127,10 @@ RSpec.describe Gitlab::Database::GitlabSchema do end end - describe '.table_schemas' do + describe '.table_schemas!' 
do let(:tables) { %w[users projects ci_builds] } - subject { described_class.table_schemas(tables) } + subject { described_class.table_schemas!(tables) } it 'returns the matched schemas' do expect(subject).to match_array %i[gitlab_main gitlab_ci].to_set @@ -128,26 +139,8 @@ RSpec.describe Gitlab::Database::GitlabSchema do context 'when one of the tables does not have a matching table schema' do let(:tables) { %w[users projects unknown ci_builds] } - context 'and undefined parameter is false' do - subject { described_class.table_schemas(tables, undefined: false) } - - it 'includes a nil value' do - is_expected.to match_array [:gitlab_main, nil, :gitlab_ci].to_set - end - end - - context 'and undefined parameter is true' do - subject { described_class.table_schemas(tables, undefined: true) } - - it 'includes "undefined_"' do - is_expected.to match_array [:gitlab_main, :undefined_unknown, :gitlab_ci].to_set - end - end - - context 'and undefined parameter is not specified' do - it 'includes a nil value' do - is_expected.to match_array [:gitlab_main, :undefined_unknown, :gitlab_ci].to_set - end + it 'raises error' do + expect { subject }.to raise_error(/Could not find gitlab schema for table unknown/) end end end @@ -160,23 +153,7 @@ RSpec.describe Gitlab::Database::GitlabSchema do context 'when mapping fails' do let(:name) { 'unknown_table' } - context "and parameter 'undefined' is set to true" do - subject { described_class.table_schema(name, undefined: true) } - - it { is_expected.to eq(:undefined_unknown_table) } - end - - context "and parameter 'undefined' is set to false" do - subject { described_class.table_schema(name, undefined: false) } - - it { is_expected.to be_nil } - end - - context "and parameter 'undefined' is not set" do - subject { described_class.table_schema(name) } - - it { is_expected.to eq(:undefined_unknown_table) } - end + it { is_expected.to be_nil } end end @@ -192,7 +169,8 @@ RSpec.describe Gitlab::Database::GitlabSchema do expect { subject }.to raise_error( Gitlab::Database::GitlabSchema::UnknownSchemaError, "Could not find gitlab schema for table #{name}: " \ - "Any new tables must be added to the database dictionary" + "Any new or deleted tables must be added to the database dictionary " \ + "See https://docs.gitlab.com/ee/development/database/database_dictionary.html" ) end end diff --git a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb index 768855464c1..a57f02b22df 100644 --- a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb @@ -2,18 +2,13 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LoadBalancing::ActionCableCallbacks, :request_store do +RSpec.describe Gitlab::Database::LoadBalancing::ActionCableCallbacks, :request_store, feature_category: :shared do describe '.wrapper' do - it 'uses primary and then releases the connection and clears the session' do + it 'releases the connection and clears the session' do expect(Gitlab::Database::LoadBalancing).to receive(:release_hosts) expect(Gitlab::Database::LoadBalancing::Session).to receive(:clear_session) - described_class.wrapper.call( - nil, - lambda do - expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).to eq(true) - end - ) + described_class.wrapper.call(nil, lambda {}) end context 'with an exception' do diff --git a/spec/lib/gitlab/database/load_balancing/logger_spec.rb 
b/spec/lib/gitlab/database/load_balancing/logger_spec.rb new file mode 100644 index 00000000000..81883fa6f1a --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/logger_spec.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::Logger, feature_category: :database do + subject { described_class.new('/dev/null') } + + it_behaves_like 'a json logger', {} + + it 'excludes context' do + expect(described_class.exclude_context?).to be(true) + end +end diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb index bfd9c644ffa..9a559c7ccb4 100644 --- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb @@ -90,7 +90,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego end end - context 'with failures' do + context 'with StandardError' do before do allow(Gitlab::ErrorTracking).to receive(:track_exception) allow(service).to receive(:sleep) @@ -142,6 +142,21 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego service.perform_service_discovery end end + + context 'with Exception' do + it 'logs error and re-raises the exception' do + error = Exception.new('uncaught-test-error') + + expect(service).to receive(:refresh_if_necessary).and_raise(error) + + expect(Gitlab::Database::LoadBalancing::Logger).to receive(:error).with( + event: :service_discovery_unexpected_exception, + message: "Service discovery encountered an uncaught error: uncaught-test-error" + ) + + expect { service.perform_service_discovery }.to raise_error(Exception, error.message) + end + end end describe '#refresh_if_necessary' do diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb index 7eb20f77417..5a52394742f 100644 --- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb @@ -62,59 +62,40 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware, feature include_examples 'job data consistency' end - shared_examples_for 'mark data consistency location' do |data_consistency| - include_context 'data consistency worker class', data_consistency, :load_balancing_for_test_data_consistency_worker - + shared_examples_for 'mark data consistency location' do |data_consistency, worker_klass| let(:location) { '0/D525E3A8' } + include_context 'when tracking WAL location reference' - context 'when feature flag is disabled' do - let(:expected_consistency) { :always } - - before do - stub_feature_flags(load_balancing_for_test_data_consistency_worker: false) - end - - include_examples 'does not pass database locations' + if worker_klass + let(:worker_class) { worker_klass } + let(:expected_consistency) { data_consistency } + else + include_context 'data consistency worker class', data_consistency, :load_balancing_for_test_data_consistency_worker end context 'when write was not performed' do before do - allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(false) + stub_no_writes_performed! 
end context 'when replica hosts are available' do it 'passes database_replica_location' do - expected_location = {} - - Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - expect(lb.host) - .to receive(:database_replica_location) - .and_return(location) - - expected_location[lb.name] = location - end + expected_locations = expect_tracked_locations_when_replicas_available run_middleware - expect(job['wal_locations']).to eq(expected_location) + expect(job['wal_locations']).to eq(expected_locations) expect(job['wal_location_source']).to eq(:replica) end end context 'when no replica hosts are available' do it 'passes primary_write_location' do - expected_location = {} - - Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - expect(lb).to receive(:host).and_return(nil) - expect(lb).to receive(:primary_write_location).and_return(location) - - expected_location[lb.name] = location - end + expected_locations = expect_tracked_locations_when_no_replicas_available run_middleware - expect(job['wal_locations']).to eq(expected_location) + expect(job['wal_locations']).to eq(expected_locations) expect(job['wal_location_source']).to eq(:replica) end end @@ -124,23 +105,15 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware, feature context 'when write was performed' do before do - allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(true) + stub_write_performed! end it 'passes primary write location', :aggregate_failures do - expected_location = {} - - Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - expect(lb) - .to receive(:primary_write_location) - .and_return(location) - - expected_location[lb.name] = location - end + expected_locations = expect_tracked_locations_from_primary_only run_middleware - expect(job['wal_locations']).to eq(expected_location) + expect(job['wal_locations']).to eq(expected_locations) expect(job['wal_location_source']).to eq(:primary) end @@ -149,19 +122,36 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware, feature end context 'when worker cannot be constantized' do - let(:worker_class) { 'ActionMailer::MailDeliveryJob' } + let(:worker_class) { 'InvalidWorker' } let(:expected_consistency) { :always } include_examples 'does not pass database locations' end context 'when worker class does not include ApplicationWorker' do - let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper } + let(:worker_class) { Gitlab::SidekiqConfig::DummyWorker } let(:expected_consistency) { :always } include_examples 'does not pass database locations' end + context 'when job contains wrapped worker' do + let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper } + + context 'when wrapped worker does not include WorkerAttributes' do + let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wrapped" => Gitlab::SidekiqConfig::DummyWorker } } + let(:expected_consistency) { :always } + + include_examples 'does not pass database locations' + end + + context 'when wrapped worker includes WorkerAttributes' do + let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wrapped" => ActionMailer::MailDeliveryJob } } + + include_examples 'mark data consistency location', :delayed, ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper + end + end + context 'database wal location was already provided' do let(:old_location) { '0/D525E3A8' } let(:new_location) { 'AB/12345' } diff --git 
a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb index abf10456d0a..7703b5680c2 100644 --- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_gitlab_redis_queues do +RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_gitlab_redis_queues, feature_category: :scalability do let(:middleware) { described_class.new } let(:worker) { worker_class.new } let(:location) { '0/D525E3A8' } @@ -15,6 +15,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ replication_lag!(false) Gitlab::Database::LoadBalancing::Session.clear_session + + stub_const("#{described_class.name}::REPLICA_WAIT_SLEEP_SECONDS", 0.0) end after do @@ -76,14 +78,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end shared_examples_for 'sticks based on data consistency' do - context 'when load_balancing_for_test_data_consistency_worker is disabled' do - before do - stub_feature_flags(load_balancing_for_test_data_consistency_worker: false) - end - - include_examples 'stick to the primary', 'primary' - end - context 'when database wal location is set' do let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'wal_locations' => wal_locations } } @@ -119,9 +113,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end end - shared_examples_for 'sleeps when necessary' do + shared_examples_for 'essential sleep' do context 'when WAL locations are blank', :freeze_time do - let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => {}, "created_at" => Time.current.to_f - (described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3) } } + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => {}, "created_at" => Time.current.to_f - (described_class::REPLICA_WAIT_SLEEP_SECONDS + 0.2) } } it 'does not sleep' do expect(middleware).not_to receive(:sleep) @@ -134,7 +128,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } } context 'when delay interval has not elapsed' do - let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 } + let(:elapsed_time) { described_class::REPLICA_WAIT_SLEEP_SECONDS + 0.2 } context 'when replica is up to date' do before do @@ -158,41 +152,46 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end it 'sleeps until the minimum delay is reached' do - expect(middleware).to receive(:sleep).with(be_within(0.01).of(described_class::MINIMUM_DELAY_INTERVAL_SECONDS - elapsed_time)) + expect(middleware).to receive(:sleep).with(described_class::REPLICA_WAIT_SLEEP_SECONDS) run_middleware end end - end - - context 'when delay interval has elapsed' do - let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS + 0.3 } - - it 'does not sleep' do - expect(middleware).not_to receive(:sleep) - - run_middleware - end - end - context 'when created_at is in the future' do - let(:elapsed_time) { -5 } + context 'when replica is never up to date' do
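+            # No load balancer ever reports an up-to-date replica here, so the middleware keeps sleeping between attempts.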
+ before do + Gitlab::Database::LoadBalancing.each_load_balancer do |lb| + allow(lb).to receive(:select_up_to_date_host).and_return(false, false) + end + end - it 'does not sleep' do - expect(middleware).not_to receive(:sleep) + it 'sleeps until the maximum delay is reached' do + expect(middleware).to receive(:sleep).exactly(3).times.with(described_class::REPLICA_WAIT_SLEEP_SECONDS) - run_middleware + run_middleware + end end end end end - context 'when worker class does not include ApplicationWorker' do + context 'when worker class does not include WorkerAttributes' do let(:worker) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper.new } include_examples 'stick to the primary', 'primary' end + context 'when job contains wrapped worker class' do + let(:worker) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper.new } + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, 'wrapped' => 'ActionMailer::MailDeliveryJob' } } + + it 'uses wrapped job if available' do + expect(middleware).to receive(:select_load_balancing_strategy).with(ActionMailer::MailDeliveryJob, job).and_call_original + + run_middleware + end + end + context 'when worker data consistency is :always' do include_context 'data consistency worker class', :always, :load_balancing_for_test_data_consistency_worker @@ -200,7 +199,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ context 'when delay interval has not elapsed', :freeze_time do let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } } - let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 } + let(:elapsed_time) { described_class::REPLICA_WAIT_SLEEP_SECONDS + 0.2 } it 'does not sleep' do expect(middleware).not_to receive(:sleep) @@ -214,7 +213,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ include_context 'data consistency worker class', :delayed, :load_balancing_for_test_data_consistency_worker include_examples 'sticks based on data consistency' - include_examples 'sleeps when necessary' + include_examples 'essential sleep' context 'when replica is not up to date' do before do @@ -263,7 +262,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ include_context 'data consistency worker class', :sticky, :load_balancing_for_test_data_consistency_worker include_examples 'sticks based on data consistency' - include_examples 'sleeps when necessary' + include_examples 'essential sleep' context 'when replica is not up to date' do before do diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb index 59e16e6ca8b..f65c27688b8 100644 --- a/spec/lib/gitlab/database/load_balancing_spec.rb +++ b/spec/lib/gitlab/database/load_balancing_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validate_connection, feature_category: :pods do +RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validate_connection, feature_category: :cell do describe '.base_models' do it 'returns the models to apply load balancing to' do models = described_class.base_models @@ -497,14 +497,15 @@ RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validat where(:queries, :expected_role) do [ # Reload cache. 
The schema loading queries should be handled by - # primary. + # replica even when the current session is stuck to the primary. [ -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_primary! model.connection.clear_cache! model.connection.schema_cache.add('users') model.connection.pool.release_connection }, - :primary + :replica ], # Call model's connection method diff --git a/spec/lib/gitlab/database/lock_writes_manager_spec.rb b/spec/lib/gitlab/database/lock_writes_manager_spec.rb index c06c463d918..2aa95372338 100644 --- a/spec/lib/gitlab/database/lock_writes_manager_spec.rb +++ b/spec/lib/gitlab/database/lock_writes_manager_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :pods do +RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :cell do let(:connection) { ApplicationRecord.connection } let(:test_table) { '_test_table' } let(:logger) { instance_double(Logger) } @@ -122,6 +122,13 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: : } end + it 'returns result hash with action skipped' do + subject.lock_writes + + expect(subject.lock_writes).to eq({ action: "skipped", database: "main", dry_run: false, +table: test_table }) + end + context 'when running in dry_run mode' do let(:dry_run) { true } @@ -146,6 +153,11 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: : connection.execute("truncate #{test_table}") end.not_to raise_error end + + it 'returns result hash with action locked' do + expect(subject.lock_writes).to eq({ action: "locked", database: "main", dry_run: dry_run, +table: test_table }) + end end end @@ -186,6 +198,11 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: : subject.unlock_writes end + it 'returns result hash with action unlocked' do + expect(subject.unlock_writes).to eq({ action: "unlocked", database: "main", dry_run: dry_run, +table: test_table }) + end + context 'when running in dry_run mode' do let(:dry_run) { true } @@ -206,6 +223,11 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: : connection.execute("delete from #{test_table}") end.to raise_error(ActiveRecord::StatementInvalid, /Table: "#{test_table}" is write protected/) end + + it 'returns result hash with dry_run true' do + expect(subject.unlock_writes).to eq({ action: "unlocked", database: "main", dry_run: dry_run, +table: test_table }) + end end end diff --git a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb index 3c2d9ca82f2..552df64096a 100644 --- a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb +++ b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb @@ -85,31 +85,40 @@ RSpec.describe Gitlab::Database::LooseForeignKeys do end end - describe '.definitions' do - subject(:definitions) { described_class.definitions } - - it 'contains at least all parent tables that have triggers' do - all_definition_parent_tables = definitions.map { |d| d.to_table }.to_set + context 'all tables have correct triggers installed' do + let(:all_tables_from_yaml) { described_class.definitions.pluck(:to_table).uniq } + let(:all_tables_with_triggers) do triggers_query = <<~SQL - SELECT event_object_table, trigger_name - FROM information_schema.triggers + SELECT event_object_table FROM information_schema.triggers WHERE trigger_name LIKE '%_loose_fk_trigger' - GROUP BY event_object_table, trigger_name SQL - all_triggers = 
ApplicationRecord.connection.execute(triggers_query) - - all_triggers.each do |trigger| - table = trigger['event_object_table'] - trigger_name = trigger['trigger_name'] - error_message = <<~END - Missing a loose foreign key definition for parent table: #{table} with trigger: #{trigger_name}. - Loose foreign key definitions must be added before triggers are added and triggers must be removed before removing the loose foreign key definition. - Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html ." - END - expect(all_definition_parent_tables).to include(table), error_message - end + ApplicationRecord.connection.execute(triggers_query) + .pluck('event_object_table').uniq + end + + it 'all YAML tables have `track_record_deletions` installed' do + missing_trigger_tables = all_tables_from_yaml - all_tables_with_triggers + + expect(missing_trigger_tables).to be_empty, <<~END + The loose foreign key definitions require using `track_record_deletions` + for the following tables: #{missing_trigger_tables}. + Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html. + END + end + + it 'no extra tables have `track_record_deletions` installed' do + extra_trigger_tables = all_tables_with_triggers - all_tables_from_yaml + + pending 'The result of this test is informational, not critical' if extra_trigger_tables.any? + + expect(extra_trigger_tables).to be_empty, <<~END + The following tables have unused `track_record_deletions` triggers installed, + but they are not referenced by any of the loose foreign key definitions: #{extra_trigger_tables}. + You can remove them in one of the future releases as part of `db/post_migrate`. + Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html.
+ END end end diff --git a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb index be9346e3829..e4241348b54 100644 --- a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, - :reestablished_active_record_base, :delete, query_analyzers: false, feature_category: :pods do + :reestablished_active_record_base, :delete, query_analyzers: false, feature_category: :cell do using RSpec::Parameterized::TableSyntax let(:schema_class) { Class.new(Gitlab::Database::Migration[2.1]) } @@ -86,7 +86,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, let(:create_gitlab_shared_table_migration_class) { create_table_migration(gitlab_shared_table_name) } before do - skip_if_multiple_databases_are_setup(:ci) + skip_if_database_exists(:ci) end it 'does not lock any newly created tables' do @@ -106,7 +106,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, context 'when multiple databases' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) end let(:migration_class) { create_table_migration(table_name, skip_automatic_lock_on_writes) } @@ -224,13 +224,12 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, let(:config_model) { Gitlab::Database.database_base_models[:main] } it "raises an error about undefined gitlab_schema" do - expected_error_message = <<~ERROR - No gitlab_schema is defined for the table #{table_name}. Please consider - adding it to the database dictionary. 
- More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html - ERROR - - expect { run_migration }.to raise_error(expected_error_message) + expect { run_migration }.to raise_error( + Gitlab::Database::GitlabSchema::UnknownSchemaError, + "Could not find gitlab schema for table foobar: " \ + "Any new or deleted tables must be added to the database dictionary " \ + "See https://docs.gitlab.com/ee/development/database/database_dictionary.html" + ) end end end @@ -238,7 +237,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, context 'when renaming a table' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) create_table_migration(old_table_name).migrate(:up) # create the table first before renaming it end @@ -277,7 +276,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, let(:config_model) { Gitlab::Database.database_base_models[:main] } before do - skip_if_multiple_databases_are_setup(:ci) + skip_if_database_exists(:ci) end it 'does not lock any newly created tables' do @@ -305,7 +304,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, context 'when multiple databases' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) migration_class.connection.execute("CREATE TABLE #{table_name}()") migration_class.migrate(:up) end diff --git a/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb b/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb new file mode 100644 index 00000000000..cee5f54bd6a --- /dev/null +++ b/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::MigrationHelpers::ConvertToBigint, feature_category: :database do + describe 'com_or_dev_or_test_but_not_jh?' 
do + using RSpec::Parameterized::TableSyntax + + where(:dot_com, :dev_or_test, :jh, :expectation) do + true | true | true | true + true | false | true | false + false | true | true | true + false | false | true | false + true | true | false | true + true | false | false | true + false | true | false | true + false | false | false | false + end + + with_them do + it 'returns true for GitLab.com (but not JH), dev, or test' do + allow(Gitlab).to receive(:com?).and_return(dot_com) + allow(Gitlab).to receive(:dev_or_test_env?).and_return(dev_or_test) + allow(Gitlab).to receive(:jh?).and_return(jh) + + migration = Class + .new + .include(Gitlab::Database::MigrationHelpers::ConvertToBigint) + .new + + expect(migration.com_or_dev_or_test_but_not_jh?).to eq(expectation) + end + end + end +end diff --git a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb index 25fc676d09e..2b58cdff931 100644 --- a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb @@ -7,20 +7,22 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do ActiveRecord::Migration.new.extend(described_class) end + let_it_be(:table_name) { :_test_loose_fk_test_table } + let(:model) do Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_test_table' + self.table_name = :_test_loose_fk_test_table end end before(:all) do - migration.create_table :_test_loose_fk_test_table do |t| + migration.create_table table_name do |t| t.timestamps end end after(:all) do - migration.drop_table :_test_loose_fk_test_table + migration.drop_table table_name end before do @@ -33,11 +35,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do expect(LooseForeignKeys::DeletedRecord.count).to eq(0) end + + it { expect(migration.has_loose_foreign_key?(table_name)).to be_falsy } end context 'when the record deletion tracker trigger is installed' do before do - migration.track_record_deletions(:_test_loose_fk_test_table) + migration.track_record_deletions(table_name) end it 'stores the record deletion' do @@ -55,7 +59,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do .first expect(deleted_record.primary_key_value).to eq(record_to_be_deleted.id) - expect(deleted_record.fully_qualified_table_name).to eq('public._test_loose_fk_test_table') + expect(deleted_record.fully_qualified_table_name).to eq("public.#{table_name}") expect(deleted_record.partition_number).to eq(1) end @@ -64,5 +68,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do expect(LooseForeignKeys::DeletedRecord.count).to eq(3) end + + it { expect(migration.has_loose_foreign_key?(table_name)).to be_truthy } end end diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb index 714fbab5aff..faf0447c054 100644 --- a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_analyzers: false, - stub_feature_flags: false, feature_category: :pods do + stub_feature_flags: false, feature_category: :cell do let(:schema_class) { 
Class.new(Gitlab::Database::Migration[1.0]).include(described_class) } # We keep only the GitlabSchemasValidateConnection analyzer running @@ -506,7 +506,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a def down; end end, query_matcher: /FROM ci_builds/, - setup: -> (_) { skip_if_multiple_databases_not_setup }, + setup: -> (_) { skip_if_shared_database(:ci) }, expected: { no_gitlab_schema: { main: :cross_schema_error, diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb index 0d75094a2fd..8b653e2d89d 100644 --- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb @@ -416,4 +416,83 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do end end end + + describe '#truncate_tables!' do + before do + ApplicationRecord.connection.execute(<<~SQL) + CREATE TABLE _test_gitlab_main_table (id serial primary key); + CREATE TABLE _test_gitlab_main_table2 (id serial primary key); + + INSERT INTO _test_gitlab_main_table DEFAULT VALUES; + INSERT INTO _test_gitlab_main_table2 DEFAULT VALUES; + SQL + + Ci::ApplicationRecord.connection.execute(<<~SQL) + CREATE TABLE _test_gitlab_ci_table (id serial primary key); + SQL + end + + it 'truncates the table' do + expect(migration).to receive(:execute).with('TRUNCATE TABLE "_test_gitlab_main_table"').and_call_original + + expect { migration.truncate_tables!('_test_gitlab_main_table') } + .to change { ApplicationRecord.connection.select_value('SELECT count(1) from _test_gitlab_main_table') }.to(0) + end + + it 'truncates multiple tables' do + expect(migration).to receive(:execute).with('TRUNCATE TABLE "_test_gitlab_main_table", "_test_gitlab_main_table2"').and_call_original + + expect { migration.truncate_tables!('_test_gitlab_main_table', '_test_gitlab_main_table2') } + .to change { ApplicationRecord.connection.select_value('SELECT count(1) from _test_gitlab_main_table') }.to(0) + .and change { ApplicationRecord.connection.select_value('SELECT count(1) from _test_gitlab_main_table2') }.to(0) + end + + it 'raises an ArgumentError if truncating multiple gitlab_schema' do + expect do + migration.truncate_tables!('_test_gitlab_main_table', '_test_gitlab_ci_table') + end.to raise_error(ArgumentError, /one `gitlab_schema`/) + end + + context 'with multiple databases' do + before do + skip_if_shared_database(:ci) + end + + context 'for ci database' do + before do + migration.instance_variable_set :@connection, Ci::ApplicationRecord.connection + end + + it 'skips the TRUNCATE statement tables not in schema for connection' do + expect(migration).not_to receive(:execute) + + migration.truncate_tables!('_test_gitlab_main_table') + end + end + + context 'for main database' do + before do + migration.instance_variable_set :@connection, ApplicationRecord.connection + end + + it 'executes a TRUNCATE statement' do + expect(migration).to receive(:execute).with('TRUNCATE TABLE "_test_gitlab_main_table"') + + migration.truncate_tables!('_test_gitlab_main_table') + end + end + end + + context 'with single database' do + before do + skip_if_database_exists(:ci) + end + + it 'executes a TRUNCATE statement' do + expect(migration).to receive(:execute).with('TRUNCATE TABLE "_test_gitlab_main_table"') + + migration.truncate_tables!('_test_gitlab_main_table') + end + end + end end diff --git a/spec/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers_spec.rb 
b/spec/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers_spec.rb new file mode 100644 index 00000000000..f7d11184ac7 --- /dev/null +++ b/spec/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers_spec.rb @@ -0,0 +1,97 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::MigrationHelpers::WraparoundVacuumHelpers, feature_category: :database do + include Database::DatabaseHelpers + + let(:table_name) { 'ci_builds' } + + describe '#check_if_wraparound_in_progress' do + let(:migration) do + ActiveRecord::Migration.new.extend(described_class) + end + + subject { migration.check_if_wraparound_in_progress(table_name) } + + it 'delegates to the wraparound class' do + expect(described_class::WraparoundCheck) + .to receive(:new) + .with(table_name, migration: migration) + .and_call_original + + expect { subject }.not_to raise_error + end + end + + describe described_class::WraparoundCheck do + let(:migration) do + ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers::WraparoundVacuumHelpers) + end + + describe '#execute' do + subject do + described_class.new(table_name, migration: migration).execute + end + + context 'with wraparound vacuum running' do + before do + swapout_view_for_table(:pg_stat_activity, connection: migration.connection, schema: 'pg_temp') + + migration.connection.execute(<<~SQL.squish) + INSERT INTO pg_stat_activity ( + datid, datname, pid, backend_start, xact_start, query_start, + state_change, wait_event_type, wait_event, state, backend_xmin, + query, backend_type) + VALUES ( + 16401, current_database(), 178, '2023-03-30 08:10:50.851322+00', + '2023-03-30 08:10:50.890485+00', now() - '150 minutes'::interval, + '2023-03-30 08:10:50.890485+00', 'IO', 'DataFileRead', 'active','3214790381'::xid, + 'autovacuum: VACUUM public.ci_builds (to prevent wraparound)', 'autovacuum worker') + SQL + end + + it 'outputs a message related to autovacuum' do + expect { subject } + .to output(/Autovacuum with wraparound prevention mode is running on `ci_builds`/).to_stdout + end + + it { expect { subject }.to output(/autovacuum: VACUUM public.ci_builds \(to prevent wraparound\)/).to_stdout } + it { expect { subject }.to output(/Current duration: 2 hours, 30 minutes/).to_stdout } + + context 'when GITLAB_MIGRATIONS_DISABLE_WRAPAROUND_CHECK is set' do + before do + stub_env('GITLAB_MIGRATIONS_DISABLE_WRAPAROUND_CHECK' => 'true') + end + + it { expect { subject }.not_to output(/autovacuum/i).to_stdout } + + it 'is disabled on .com' do + expect(Gitlab).to receive(:com?).and_return(true) + + expect { subject }.not_to raise_error + end + end + + context 'when executed by self-managed' do + before do + allow(Gitlab).to receive(:com?).and_return(false) + allow(Gitlab).to receive(:dev_or_test_env?).and_return(false) + end + + it { expect { subject }.not_to output(/autovacuum/i).to_stdout } + end + end + + context 'with wraparound vacuum not running' do + it { expect { subject }.not_to output(/autovacuum/i).to_stdout } + end + + context 'when the table does not exist' do + let(:table_name) { :no_table } + + it { expect { subject }.to raise_error described_class::WraparoundError, /no_table/ } + end + end + end +end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index 9df23776be8..b1e8301d69f 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper'
-RSpec.describe Gitlab::Database::MigrationHelpers do +RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database do include Database::TableSchemaHelpers include Database::TriggerHelpers @@ -14,8 +14,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do allow(model).to receive(:puts) end + it { expect(model.singleton_class.ancestors).to include(described_class::WraparoundVacuumHelpers) } + describe 'overridden dynamic model helpers' do - let(:test_table) { '_test_batching_table' } + let(:test_table) { :_test_batching_table } before do model.connection.execute(<<~SQL) @@ -120,157 +122,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end end - describe '#create_table_with_constraints' do - let(:table_name) { :test_table } - let(:column_attributes) do - [ - { name: 'id', sql_type: 'bigint', null: false, default: nil }, - { name: 'created_at', sql_type: 'timestamp with time zone', null: false, default: nil }, - { name: 'updated_at', sql_type: 'timestamp with time zone', null: false, default: nil }, - { name: 'some_id', sql_type: 'integer', null: false, default: nil }, - { name: 'active', sql_type: 'boolean', null: false, default: 'true' }, - { name: 'name', sql_type: 'text', null: true, default: nil } - ] - end - - before do - allow(model).to receive(:transaction_open?).and_return(true) - end - - context 'when no check constraints are defined' do - it 'creates the table as expected' do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - end - - expect_table_columns_to_match(column_attributes, table_name) - end - end - - context 'when check constraints are defined' do - context 'when the text_limit is explicity named' do - it 'creates the table as expected' do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255, name: 'check_name_length' - t.check_constraint :some_id_is_positive, 'some_id > 0' - end - - expect_table_columns_to_match(column_attributes, table_name) - - expect_check_constraint(table_name, 'check_name_length', 'char_length(name) <= 255') - expect_check_constraint(table_name, 'some_id_is_positive', 'some_id > 0') - end - end - - context 'when the text_limit is not named' do - it 'creates the table as expected, naming the text limit' do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255 - t.check_constraint :some_id_is_positive, 'some_id > 0' - end - - expect_table_columns_to_match(column_attributes, table_name) - - expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 255') - expect_check_constraint(table_name, 'some_id_is_positive', 'some_id > 0') - end - end - - it 'runs the change within a with_lock_retries' do - expect(model).to receive(:with_lock_retries).ordered.and_yield - expect(model).to receive(:create_table).ordered.and_call_original - expect(model).to receive(:execute).with(<<~SQL).ordered - ALTER TABLE "#{table_name}"\nADD CONSTRAINT "check_cda6f69506" CHECK (char_length("name") <= 255) - SQL - - model.create_table_with_constraints table_name do |t| - t.text :name - t.text_limit :name, 255 - end - end - - context 'when with_lock_retries re-runs the block' do - it 'only 
creates constraint for unique definitions' do - expected_sql = <<~SQL - ALTER TABLE "#{table_name}"\nADD CONSTRAINT "check_cda6f69506" CHECK (char_length("name") <= 255) - SQL - - expect(model).to receive(:create_table).twice.and_call_original - - expect(model).to receive(:execute).with(expected_sql).and_raise(ActiveRecord::LockWaitTimeout) - expect(model).to receive(:execute).with(expected_sql).and_call_original - - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255 - end - - expect_table_columns_to_match(column_attributes, table_name) - - expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 255') - end - end - - context 'when constraints are given invalid names' do - let(:expected_max_length) { described_class::MAX_IDENTIFIER_NAME_LENGTH } - let(:expected_error_message) { "The maximum allowed constraint name is #{expected_max_length} characters" } - - context 'when the explicit text limit name is not valid' do - it 'raises an error' do - too_long_length = expected_max_length + 1 - - expect do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255, name: ('a' * too_long_length) - t.check_constraint :some_id_is_positive, 'some_id > 0' - end - end.to raise_error(expected_error_message) - end - end - - context 'when a check constraint name is not valid' do - it 'raises an error' do - too_long_length = expected_max_length + 1 - - expect do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255 - t.check_constraint ('a' * too_long_length), 'some_id > 0' - end - end.to raise_error(expected_error_message) - end - end - end - end - end - describe '#add_concurrent_index' do context 'outside a transaction' do before do @@ -392,7 +243,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do context 'when targeting a partition table' do let(:schema) { 'public' } - let(:name) { '_test_partition_01' } + let(:name) { :_test_partition_01 } let(:identifier) { "#{schema}.#{name}" } before do @@ -471,10 +322,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do context 'when targeting a partition table' do let(:schema) { 'public' } - let(:partition_table_name) { '_test_partition_01' } + let(:partition_table_name) { :_test_partition_01 } let(:identifier) { "#{schema}.#{partition_table_name}" } - let(:index_name) { '_test_partitioned_index' } - let(:partition_index_name) { '_test_partition_01_partition_id_idx' } + let(:index_name) { :_test_partitioned_index } + let(:partition_index_name) { :_test_partition_01_partition_id_idx } let(:column_name) { 'partition_id' } before do @@ -544,10 +395,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do context 'when targeting a partition table' do let(:schema) { 'public' } - let(:partition_table_name) { '_test_partition_01' } + let(:partition_table_name) { :_test_partition_01 } let(:identifier) { "#{schema}.#{partition_table_name}" } - let(:index_name) { '_test_partitioned_index' } - let(:partition_index_name) { '_test_partition_01_partition_id_idx' } + let(:index_name) { :_test_partitioned_index } + let(:partition_index_name) { :_test_partition_01_partition_id_idx } before 
do model.execute(<<~SQL) @@ -928,13 +779,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers do it 'references the custom target columns when provided', :aggregate_failures do expect(model).to receive(:with_lock_retries).and_yield expect(model).to receive(:execute).with( - "ALTER TABLE projects\n" \ - "ADD CONSTRAINT fk_multiple_columns\n" \ - "FOREIGN KEY \(partition_number, user_id\)\n" \ - "REFERENCES users \(partition_number, id\)\n" \ - "ON UPDATE CASCADE\n" \ - "ON DELETE CASCADE\n" \ - "NOT VALID;\n" + "ALTER TABLE projects " \ + "ADD CONSTRAINT fk_multiple_columns " \ + "FOREIGN KEY \(partition_number, user_id\) " \ + "REFERENCES users \(partition_number, id\) " \ + "ON UPDATE CASCADE " \ + "ON DELETE CASCADE " \ + "NOT VALID;" ) model.add_concurrent_foreign_key( @@ -979,6 +830,80 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end end end + + context 'when creating foreign key on a partitioned table' do + let(:source) { :_test_source_partitioned_table } + let(:dest) { :_test_dest_partitioned_table } + let(:args) { [source, dest] } + let(:options) { { column: [:partition_id, :owner_id], target_column: [:partition_id, :id] } } + + before do + model.execute(<<~SQL) + CREATE TABLE public.#{source} ( + id serial NOT NULL, + partition_id serial NOT NULL, + owner_id bigint NOT NULL, + PRIMARY KEY (id, partition_id) + ) PARTITION BY LIST(partition_id); + + CREATE TABLE #{source}_1 + PARTITION OF public.#{source} + FOR VALUES IN (1); + + CREATE TABLE public.#{dest} ( + id serial NOT NULL, + partition_id serial NOT NULL, + PRIMARY KEY (id, partition_id) + ); + SQL + end + + it 'creates the FK without using NOT VALID', :aggregate_failures do + allow(model).to receive(:execute).and_call_original + + expect(model).to receive(:with_lock_retries).and_yield + + expect(model).to receive(:execute).with( + "ALTER TABLE #{source} " \ + "ADD CONSTRAINT fk_multiple_columns " \ + "FOREIGN KEY \(partition_id, owner_id\) " \ + "REFERENCES #{dest} \(partition_id, id\) " \ + "ON UPDATE CASCADE ON DELETE CASCADE ;" + ) + + model.add_concurrent_foreign_key( + *args, + name: :fk_multiple_columns, + on_update: :cascade, + allow_partitioned: true, + **options + ) + end + + it 'raises an error if allow_partitioned is not set' do + expect(model).not_to receive(:with_lock_retries).and_yield + expect(model).not_to receive(:execute).with(/FOREIGN KEY/) + + expect { model.add_concurrent_foreign_key(*args, **options) } + .to raise_error ArgumentError, /use add_concurrent_partitioned_foreign_key/ + end + + context 'when the reverse_lock_order flag is set' do + it 'explicitly locks the tables in target-source order', :aggregate_failures do + expect(model).to receive(:with_lock_retries).and_call_original + expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) + expect(model).to receive(:execute).with(/SET statement_timeout TO/) + expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) + expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) + + expect(model).to receive(:execute).with("LOCK TABLE #{dest}, #{source} IN ACCESS EXCLUSIVE MODE") + expect(model).to receive(:execute).with(/REFERENCES #{dest} \(partition_id, id\)/) + + model.add_concurrent_foreign_key(*args, reverse_lock_order: true, allow_partitioned: true, **options) + end + end + end end end @@ -1047,8 +972,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end describe '#foreign_key_exists?' 
do - let(:referenced_table_name) { '_test_gitlab_main_referenced' } - let(:referencing_table_name) { '_test_gitlab_main_referencing' } + let(:referenced_table_name) { :_test_gitlab_main_referenced } + let(:referencing_table_name) { :_test_gitlab_main_referencing } + let(:schema) { 'public' } + let(:identifier) { "#{schema}.#{referencing_table_name}" } before do model.connection.execute(<<~SQL) @@ -1085,6 +1012,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do expect(model.foreign_key_exists?(referencing_table_name, target_table)).to be_truthy end + it 'finds existing foreign_keys by identifier' do + expect(model.foreign_key_exists?(identifier, target_table)).to be_truthy + end + it 'compares by column name if given' do expect(model.foreign_key_exists?(referencing_table_name, target_table, column: :user_id)).to be_falsey end @@ -1119,6 +1050,38 @@ RSpec.describe Gitlab::Database::MigrationHelpers do it_behaves_like 'foreign key checks' end + context 'if the schema cache does not include the constrained_columns column' do + let(:target_table) { nil } + + around do |ex| + model.transaction do + require_migration!('add_columns_to_postgres_foreign_keys') + AddColumnsToPostgresForeignKeys.new.down + Gitlab::Database::PostgresForeignKey.reset_column_information + Gitlab::Database::PostgresForeignKey.columns_hash # Force populate the column hash in the old schema + AddColumnsToPostgresForeignKeys.new.up + + # Rolling back reverts the schema cache information, so we need to run the example here before the rollback. + ex.run + + raise ActiveRecord::Rollback + end + + # make sure that we're resetting the schema cache here so that we don't leak the change to other tests. + Gitlab::Database::PostgresForeignKey.reset_column_information + # Double-check that the column information is back to normal + expect(Gitlab::Database::PostgresForeignKey.columns_hash.keys).to include('constrained_columns') + end + + # This test verifies that the situation we're trying to set up for the shared examples is actually being + # set up correctly + it 'correctly sets up the test without the column in the columns_hash' do + expect(Gitlab::Database::PostgresForeignKey.columns_hash.keys).not_to include('constrained_columns') + end + + it_behaves_like 'foreign key checks' + end + it 'compares by target table if no column given' do expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey end @@ -1129,8 +1092,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end context 'with foreign key using multiple columns' do - let(:p_referenced_table_name) { '_test_gitlab_main_p_referenced' } - let(:p_referencing_table_name) { '_test_gitlab_main_p_referencing' } + let(:p_referenced_table_name) { :_test_gitlab_main_p_referenced } + let(:p_referencing_table_name) { :_test_gitlab_main_p_referencing } before do model.connection.execute(<<~SQL) @@ -1254,7 +1217,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end context 'when the table is write-locked' do - let(:test_table) { '_test_table' } + let(:test_table) { :_test_table } let(:lock_writes_manager) do Gitlab::Database::LockWritesManager.new( table_name: test_table, @@ -1436,7 +1399,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end context 'when the table in the other database is write-locked' do - let(:test_table) { '_test_table' } + let(:test_table) { :_test_table } let(:lock_writes_manager) do Gitlab::Database::LockWritesManager.new( table_name: test_table, @@ -2129,7 +2092,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end 
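# The hunks above pin down two behaviours of the foreign-key helpers: a partitioned
# source table is only accepted when allow_partitioned: true is passed (otherwise the
# helper raises and points at add_concurrent_partitioned_foreign_key), and
# foreign_key_exists? also accepts a schema-qualified identifier. A minimal migration
# sketch along those lines; table, column and constraint names are illustrative only
# and are not part of this patch.
class AddOwnerFkToTestSourcePartitionedTable < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  SOURCE_TABLE = :_test_source_partitioned_table
  TARGET_TABLE = :_test_dest_partitioned_table
  FK_NAME = :fk_multiple_columns

  def up
    # Guard: skip if any FK from source to target already exists; the
    # schema-qualified identifier form is accepted as well as a plain name.
    return if foreign_key_exists?("public.#{SOURCE_TABLE}", TARGET_TABLE)

    add_concurrent_foreign_key(
      SOURCE_TABLE, TARGET_TABLE,
      name: FK_NAME,
      column: [:partition_id, :owner_id],
      target_column: [:partition_id, :id],
      on_update: :cascade,
      on_delete: :cascade,
      allow_partitioned: true # required when the source table is partitioned
    )
  end

  def down
    with_lock_retries do
      remove_foreign_key_if_exists(SOURCE_TABLE, TARGET_TABLE, name: FK_NAME)
    end
  end
end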
describe '#create_temporary_columns_and_triggers' do - let(:table) { :test_table } + let(:table) { :_test_table } let(:column) { :id } let(:mappings) do { @@ -2223,7 +2186,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end describe '#initialize_conversion_of_integer_to_bigint' do - let(:table) { :test_table } + let(:table) { :_test_table } let(:column) { :id } let(:tmp_column) { model.convert_to_bigint_column(column) } @@ -2308,7 +2271,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end describe '#restore_conversion_of_integer_to_bigint' do - let(:table) { :test_table } + let(:table) { :_test_table } let(:column) { :id } let(:tmp_column) { model.convert_to_bigint_column(column) } @@ -2363,7 +2326,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end describe '#revert_initialize_conversion_of_integer_to_bigint' do - let(:table) { :test_table } + let(:table) { :_test_table } before do model.create_table table, id: false do |t| @@ -2794,39 +2757,39 @@ RSpec.describe Gitlab::Database::MigrationHelpers do describe '#add_primary_key_using_index' do it "executes the statement to add the primary key" do - expect(model).to receive(:execute).with /ALTER TABLE "test_table" ADD CONSTRAINT "old_name" PRIMARY KEY USING INDEX "new_name"/ + expect(model).to receive(:execute).with /ALTER TABLE "_test_table" ADD CONSTRAINT "old_name" PRIMARY KEY USING INDEX "new_name"/ - model.add_primary_key_using_index(:test_table, :old_name, :new_name) + model.add_primary_key_using_index(:_test_table, :old_name, :new_name) end end context 'when changing the primary key of a given table' do before do - model.create_table(:test_table, primary_key: :id) do |t| + model.create_table(:_test_table, primary_key: :id) do |t| t.integer :partition_number, default: 1 end - model.add_index(:test_table, :id, unique: true, name: :old_index_name) - model.add_index(:test_table, [:id, :partition_number], unique: true, name: :new_index_name) + model.add_index(:_test_table, :id, unique: true, name: :old_index_name) + model.add_index(:_test_table, [:id, :partition_number], unique: true, name: :new_index_name) end describe '#swap_primary_key' do it 'executes statements to swap primary key', :aggregate_failures do expect(model).to receive(:with_lock_retries).with(raise_on_exhaustion: true).ordered.and_yield - expect(model).to receive(:execute).with(/ALTER TABLE "test_table" DROP CONSTRAINT "test_table_pkey" CASCADE/).and_call_original - expect(model).to receive(:execute).with(/ALTER TABLE "test_table" ADD CONSTRAINT "test_table_pkey" PRIMARY KEY USING INDEX "new_index_name"/).and_call_original + expect(model).to receive(:execute).with(/ALTER TABLE "_test_table" DROP CONSTRAINT "_test_table_pkey" CASCADE/).and_call_original + expect(model).to receive(:execute).with(/ALTER TABLE "_test_table" ADD CONSTRAINT "_test_table_pkey" PRIMARY KEY USING INDEX "new_index_name"/).and_call_original - model.swap_primary_key(:test_table, :test_table_pkey, :new_index_name) + model.swap_primary_key(:_test_table, :_test_table_pkey, :new_index_name) end context 'when new index does not exist' do before do - model.remove_index(:test_table, column: [:id, :partition_number]) + model.remove_index(:_test_table, column: [:id, :partition_number]) end it 'raises ActiveRecord::StatementInvalid' do expect do - model.swap_primary_key(:test_table, :test_table_pkey, :new_index_name) + model.swap_primary_key(:_test_table, :_test_table_pkey, :new_index_name) end.to raise_error(ActiveRecord::StatementInvalid) end end @@ -2835,27 +2798,27 @@ 
RSpec.describe Gitlab::Database::MigrationHelpers do describe '#unswap_primary_key' do it 'executes statements to unswap primary key' do expect(model).to receive(:with_lock_retries).with(raise_on_exhaustion: true).ordered.and_yield - expect(model).to receive(:execute).with(/ALTER TABLE "test_table" DROP CONSTRAINT "test_table_pkey" CASCADE/).ordered.and_call_original - expect(model).to receive(:execute).with(/ALTER TABLE "test_table" ADD CONSTRAINT "test_table_pkey" PRIMARY KEY USING INDEX "old_index_name"/).ordered.and_call_original + expect(model).to receive(:execute).with(/ALTER TABLE "_test_table" DROP CONSTRAINT "_test_table_pkey" CASCADE/).ordered.and_call_original + expect(model).to receive(:execute).with(/ALTER TABLE "_test_table" ADD CONSTRAINT "_test_table_pkey" PRIMARY KEY USING INDEX "old_index_name"/).ordered.and_call_original - model.unswap_primary_key(:test_table, :test_table_pkey, :old_index_name) + model.unswap_primary_key(:_test_table, :_test_table_pkey, :old_index_name) end end end describe '#drop_sequence' do it "executes the statement to drop the sequence" do - expect(model).to receive(:execute).with /ALTER TABLE "test_table" ALTER COLUMN "test_column" DROP DEFAULT;\nDROP SEQUENCE IF EXISTS "test_table_id_seq"/ + expect(model).to receive(:execute).with /ALTER TABLE "_test_table" ALTER COLUMN "test_column" DROP DEFAULT;\nDROP SEQUENCE IF EXISTS "_test_table_id_seq"/ - model.drop_sequence(:test_table, :test_column, :test_table_id_seq) + model.drop_sequence(:_test_table, :test_column, :_test_table_id_seq) end end describe '#add_sequence' do it "executes the statement to add the sequence" do - expect(model).to receive(:execute).with "CREATE SEQUENCE \"test_table_id_seq\" START 1;\nALTER TABLE \"test_table\" ALTER COLUMN \"test_column\" SET DEFAULT nextval(\'test_table_id_seq\')\n" + expect(model).to receive(:execute).with "CREATE SEQUENCE \"_test_table_id_seq\" START 1;\nALTER TABLE \"_test_table\" ALTER COLUMN \"test_column\" SET DEFAULT nextval(\'_test_table_id_seq\')\n" - model.add_sequence(:test_table, :test_column, :test_table_id_seq, 1) + model.add_sequence(:_test_table, :test_column, :_test_table_id_seq, 1) end end @@ -2890,4 +2853,18 @@ RSpec.describe Gitlab::Database::MigrationHelpers do it { is_expected.to be_falsey } end end + + describe "#table_partitioned?" do + subject { model.table_partitioned?(table_name) } + + let(:table_name) { 'p_ci_builds_metadata' } + + it { is_expected.to be_truthy } + + context 'with a non-partitioned table' do + let(:table_name) { 'users' } + + it { is_expected.to be_falsey } + end + end end diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb index 3e249b14f2e..f5ce207773f 100644 --- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb @@ -482,16 +482,46 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d .not_to raise_error end - it 'logs a warning when migration does not exist' do - expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!) 
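# A sketch of how the #swap_primary_key / #unswap_primary_key helpers shown in the
# MigrationHelpers hunk above fit together: the replacement unique index must exist
# before the swap (otherwise ActiveRecord::StatementInvalid is raised), and the swap
# itself runs inside with_lock_retries with raise_on_exhaustion: true. Table and
# index names below are illustrative only.
class SwapTestTablePrimaryKey < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  TABLE_NAME = :_test_table
  PRIMARY_KEY = :_test_table_pkey
  OLD_INDEX_NAME = :old_index_name
  NEW_INDEX_NAME = :new_index_name

  def up
    # Unique index covering the new composite key backs the swapped constraint.
    add_concurrent_index TABLE_NAME, [:id, :partition_number], unique: true, name: NEW_INDEX_NAME

    # Drops "_test_table_pkey" with CASCADE and re-adds it USING INDEX new_index_name.
    swap_primary_key TABLE_NAME, PRIMARY_KEY, NEW_INDEX_NAME
  end

  def down
    add_concurrent_index TABLE_NAME, :id, unique: true, name: OLD_INDEX_NAME
    unswap_primary_key TABLE_NAME, PRIMARY_KEY, OLD_INDEX_NAME
  end
end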
+ context 'when specified migration does not exist' do + let(:lab_key) { 'DBLAB_ENVIRONMENT' } - create(:batched_background_migration, :active, migration_attributes.merge(gitlab_schema: :gitlab_something_else)) + context 'when DBLAB_ENVIRONMENT is not set' do + it 'logs a warning' do + stub_env(lab_key, nil) + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!) - expect(Gitlab::AppLogger).to receive(:warn) - .with("Could not find batched background migration for the given configuration: #{configuration}") + create(:batched_background_migration, :active, migration_attributes.merge(gitlab_schema: :gitlab_something_else)) - expect { ensure_batched_background_migration_is_finished } - .not_to raise_error + expect(Gitlab::AppLogger).to receive(:warn) + .with("Could not find batched background migration for the given configuration: #{configuration}") + + expect { ensure_batched_background_migration_is_finished } + .not_to raise_error + end + end + + context 'when DBLAB_ENVIRONMENT is set' do + it 'raises an error' do + stub_env(lab_key, 'foo') + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!) + + create(:batched_background_migration, :active, migration_attributes.merge(gitlab_schema: :gitlab_something_else)) + + expect { ensure_batched_background_migration_is_finished } + .to raise_error(Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::NonExistentMigrationError) + end + end + end + + context 'when within transaction' do + before do + allow(migration).to receive(:transaction_open?).and_return(true) + end + + it 'does raise an exception' do + expect { ensure_batched_background_migration_is_finished } + .to raise_error /`ensure_batched_background_migration_is_finished` cannot be run inside a transaction./ + end end it 'finalizes the migration' do diff --git a/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb b/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb index 6848fc85aa1..07d913cf5cc 100644 --- a/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb +++ b/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb @@ -23,43 +23,46 @@ RSpec.describe Gitlab::Database::Migrations::ConstraintsHelpers do end end - describe '#check_constraint_exists?' 
do + describe '#check_constraint_exists?', :aggregate_failures do before do - ActiveRecord::Migration.connection.execute( - 'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID' - ) - - ActiveRecord::Migration.connection.execute( - 'CREATE SCHEMA new_test_schema' - ) - - ActiveRecord::Migration.connection.execute( - 'CREATE TABLE new_test_schema.projects (id integer, name character varying)' - ) - - ActiveRecord::Migration.connection.execute( - 'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)' - ) + ActiveRecord::Migration.connection.execute(<<~SQL) + ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID; + CREATE SCHEMA new_test_schema; + CREATE TABLE new_test_schema.projects (id integer, name character varying); + ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5); + SQL end it 'returns true if a constraint exists' do expect(model) .to be_check_constraint_exists(:projects, 'check_1') + + expect(described_class) + .to be_check_constraint_exists(:projects, 'check_1', connection: model.connection) end it 'returns false if a constraint does not exist' do expect(model) .not_to be_check_constraint_exists(:projects, 'this_does_not_exist') + + expect(described_class) + .not_to be_check_constraint_exists(:projects, 'this_does_not_exist', connection: model.connection) end it 'returns false if a constraint with the same name exists in another table' do expect(model) .not_to be_check_constraint_exists(:users, 'check_1') + + expect(described_class) + .not_to be_check_constraint_exists(:users, 'check_1', connection: model.connection) end it 'returns false if a constraint with the same name exists for the same table in another schema' do expect(model) .not_to be_check_constraint_exists(:projects, 'check_2') + + expect(described_class) + .not_to be_check_constraint_exists(:projects, 'check_2', connection: model.connection) end end diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb index 4f347034c0b..0b25389c667 100644 --- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb +++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb @@ -18,7 +18,9 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do let(:migration_name) { 'test' } let(:migration_version) { '12345' } let(:migration_meta) { { 'max_batch_size' => 1, 'total_tuple_count' => 10, 'interval' => 60 } } - let(:expected_json_keys) { %w[version name walltime success total_database_size_change query_statistics] } + let(:expected_json_keys) do + %w[version name walltime success total_database_size_change query_statistics error_message] + end it 'executes the given block' do expect do |b| @@ -90,16 +92,14 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do end context 'upon failure' do - where(exception: ['something went wrong', SystemStackError, Interrupt]) + where(:exception, :error_message) do + [[StandardError, 'something went wrong'], [ActiveRecord::StatementTimeout, 'timeout']] + end with_them do subject(:observe) do instrumentation.observe(version: migration_version, name: migration_name, - connection: connection, meta: migration_meta) { raise exception } - end - - it 'raises the exception' do - expect { observe }.to raise_error(exception) + connection: connection, meta: migration_meta) { raise exception, error_message } end context 'retrieving observations' do @@ -107,10 
+107,6 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do before do observe - # rubocop:disable Lint/RescueException - rescue Exception - # rubocop:enable Lint/RescueException - # ignore (we expect this exception) end it 'records a valid observation', :aggregate_failures do @@ -118,6 +114,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do expect(subject['success']).to be_falsey expect(subject['version']).to eq(migration_version) expect(subject['name']).to eq(migration_name) + expect(subject['error_message']).to eq(error_message) end it 'transforms observation to expected json' do diff --git a/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb b/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb new file mode 100644 index 00000000000..33e83ea2575 --- /dev/null +++ b/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::PgBackendPid, feature_category: :database do + describe Gitlab::Database::Migrations::PgBackendPid::MigratorPgBackendPid do + let(:klass) do + Class.new do + def with_advisory_lock_connection + yield :conn + end + end + end + + it 're-yields with same arguments and wraps it with calls to .say' do + patched_instance = klass.prepend(described_class).new + expect(Gitlab::Database::Migrations::PgBackendPid).to receive(:say).twice + + expect { |b| patched_instance.with_advisory_lock_connection(&b) }.to yield_with_args(:conn) + end + end + + describe '.patch!' do + it 'patches ActiveRecord::Migrator' do + expect(ActiveRecord::Migrator).to receive(:prepend).with(described_class::MigratorPgBackendPid) + + described_class.patch! + end + end + + describe '.say' do + it 'outputs the connection information' do + conn = ActiveRecord::Base.connection + + expect(conn).to receive(:object_id).and_return(9876) + expect(conn).to receive(:select_value).with('SELECT pg_backend_pid()').and_return(12345) + expect(Gitlab::Database).to receive(:db_config_name).with(conn).and_return('main') + + expected_output = "main: == [advisory_lock_connection] object_id: 9876, pg_backend_pid: 12345\n" + + expect { described_class.say(conn) }.to output(expected_output).to_stdout + end + + it 'outputs nothing if ActiveRecord::Migration.verbose is false' do + conn = ActiveRecord::Base.connection + + allow(ActiveRecord::Migration).to receive(:verbose).and_return(false) + + expect { described_class.say(conn) }.not_to output.to_stdout + end + end +end diff --git a/spec/lib/gitlab/database/migrations/runner_backoff/active_record_mixin_spec.rb b/spec/lib/gitlab/database/migrations/runner_backoff/active_record_mixin_spec.rb new file mode 100644 index 00000000000..ddf11598d21 --- /dev/null +++ b/spec/lib/gitlab/database/migrations/runner_backoff/active_record_mixin_spec.rb @@ -0,0 +1,106 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::RunnerBackoff::ActiveRecordMixin, feature_category: :database do + let(:migration_class) { Gitlab::Database::Migration[2.1] } + + describe described_class::ActiveRecordMigrationProxyRunnerBackoff do + let(:migration) { instance_double(migration_class) } + + let(:class_def) do + Class.new do + attr_reader :migration + + def initialize(migration) + @migration = migration + end + end.prepend(described_class) + end + + describe '#enable_runner_backoff?' do + subject { class_def.new(migration).enable_runner_backoff? 
} + + it 'delegates to #migration' do + expect(migration).to receive(:enable_runner_backoff?).and_return(true) + + expect(subject).to eq(true) + end + + it 'returns false if migration does not implement it' do + expect(migration).to receive(:respond_to?).with(:enable_runner_backoff?).and_return(false) + + expect(subject).to eq(false) + end + end + end + + describe described_class::ActiveRecordMigratorRunnerBackoff do + let(:class_def) do + Class.new do + attr_reader :receiver + + def initialize(receiver) + @receiver = receiver + end + + def execute_migration_in_transaction(migration) + receiver.execute_migration_in_transaction(migration) + end + end.prepend(described_class) + end + + let(:receiver) { instance_double(ActiveRecord::Migrator, 'receiver') } + + subject { class_def.new(receiver) } + + before do + allow(migration).to receive(:name).and_return('TestClass') + allow(receiver).to receive(:execute_migration_in_transaction) + end + + context 'with runner backoff disabled' do + let(:migration) { instance_double(migration_class, enable_runner_backoff?: false) } + + it 'calls super method' do + expect(receiver).to receive(:execute_migration_in_transaction).with(migration) + + subject.execute_migration_in_transaction(migration) + end + end + + context 'with runner backoff enabled' do + let(:migration) { instance_double(migration_class, enable_runner_backoff?: true) } + + it 'calls super method' do + expect(Gitlab::Database::Migrations::RunnerBackoff::Communicator) + .to receive(:execute_with_lock).with(migration).and_call_original + + expect(receiver).to receive(:execute_migration_in_transaction) + .with(migration) + + subject.execute_migration_in_transaction(migration) + end + end + end + + describe '.patch!' do + subject { described_class.patch! } + + it 'patches MigrationProxy' do + expect(ActiveRecord::MigrationProxy) + .to receive(:prepend) + .with(described_class::ActiveRecordMigrationProxyRunnerBackoff) + + subject + end + + it 'patches Migrator' do + expect(ActiveRecord::Migrator) + .to receive(:prepend) + .with(described_class::ActiveRecordMigratorRunnerBackoff) + + subject + end + end +end diff --git a/spec/lib/gitlab/database/migrations/runner_backoff/communicator_spec.rb b/spec/lib/gitlab/database/migrations/runner_backoff/communicator_spec.rb new file mode 100644 index 00000000000..cfc3fb398e2 --- /dev/null +++ b/spec/lib/gitlab/database/migrations/runner_backoff/communicator_spec.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::RunnerBackoff::Communicator, :clean_gitlab_redis_shared_state, feature_category: :database do + let(:migration) { instance_double(Gitlab::Database::Migration[2.1], name: 'TestClass') } + + describe '.execute_with_lock' do + it 'delegates to a new instance object' do + expect_next_instance_of(described_class, migration) do |communicator| + expect(communicator).to receive(:execute_with_lock).and_call_original + end + + expect { |b| described_class.execute_with_lock(migration, &b) }.to yield_control + end + end + + describe '.backoff_runner?' do + subject { described_class.backoff_runner? 
} + + it { is_expected.to be_falsey } + + it 'is true when the lock is held' do + described_class.execute_with_lock(migration) do + is_expected.to be_truthy + end + end + + it 'reads from Redis' do + recorder = RedisCommands::Recorder.new { subject } + expect(recorder.log).to include([:exists, 'gitlab:exclusive_lease:gitlab/database/migration/runner/backoff']) + end + + context 'with runner_migrations_backoff disabled' do + before do + stub_feature_flags(runner_migrations_backoff: false) + end + + it 'is false when the lock is held' do + described_class.execute_with_lock(migration) do + is_expected.to be_falsey + end + end + end + end + + describe '#execute_with_lock' do + include ExclusiveLeaseHelpers + + let(:communicator) { described_class.new(migration) } + let!(:lease) { stub_exclusive_lease(described_class::KEY, :uuid, timeout: described_class::EXPIRY) } + + it { expect { |b| communicator.execute_with_lock(&b) }.to yield_control } + + it 'raises error if it can not set the key' do + expect(lease).to receive(:try_obtain).ordered.and_return(false) + + expect { communicator.execute_with_lock { 1 / 0 } }.to raise_error 'Could not set backoff key' + end + + it 'removes the lease after executing the migration' do + expect(lease).to receive(:try_obtain).ordered.and_return(true) + expect(lease).to receive(:cancel).ordered.and_return(true) + + expect { communicator.execute_with_lock }.not_to raise_error + end + + context 'with logger' do + let(:logger) { instance_double(Gitlab::AppLogger) } + let(:communicator) { described_class.new(migration, logger: logger) } + + it 'logs messages around execution' do + expect(logger).to receive(:info).ordered + .with({ class: 'TestClass', message: 'Executing migration with Runner backoff' }) + expect(logger).to receive(:info).ordered + .with({ class: 'TestClass', message: 'Runner backoff key is set' }) + expect(logger).to receive(:info).ordered + .with({ class: 'TestClass', message: 'Runner backoff key was removed' }) + + communicator.execute_with_lock + end + end + end +end diff --git a/spec/lib/gitlab/database/migrations/runner_backoff/migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/runner_backoff/migration_helpers_spec.rb new file mode 100644 index 00000000000..9eefc34a7cc --- /dev/null +++ b/spec/lib/gitlab/database/migrations/runner_backoff/migration_helpers_spec.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::RunnerBackoff::MigrationHelpers, feature_category: :database do + let(:class_def) do + Class.new.prepend(described_class) + end + + describe '.enable_runner_backoff!' do + it 'sets the flag' do + expect { class_def.enable_runner_backoff! } + .to change { class_def.enable_runner_backoff? } + .from(false).to(true) + end + end + + describe '.enable_runner_backoff?' do + subject { class_def.enable_runner_backoff? } + + it { is_expected.to be_falsy } + + it 'returns true if the flag is set' do + class_def.enable_runner_backoff! + + is_expected.to be_truthy + end + end + + describe '#enable_runner_backoff?' do + subject { class_def.new.enable_runner_backoff? } + + it { is_expected.to be_falsy } + + it 'returns true if the flag is set' do + class_def.enable_runner_backoff! 
+ + is_expected.to be_truthy + end + end +end diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb index 66eb5a5de51..7c71076e8f3 100644 --- a/spec/lib/gitlab/database/migrations/runner_spec.rb +++ b/spec/lib/gitlab/database/migrations/runner_spec.rb @@ -65,7 +65,7 @@ RSpec.describe Gitlab::Database::Migrations::Runner, :reestablished_active_recor end before do - skip_if_multiple_databases_not_setup unless database == :main + skip_if_shared_database(database) stub_const('Gitlab::Database::Migrations::Runner::BASE_RESULT_DIR', base_result_dir) allow(ActiveRecord::Migrator).to receive(:new) do |dir, _all_migrations, _schema_migration_class, version_to_migrate| diff --git a/spec/lib/gitlab/database/migrations/sidekiq_helpers_spec.rb b/spec/lib/gitlab/database/migrations/sidekiq_helpers_spec.rb index fb1cb46171f..bf3a9e16548 100644 --- a/spec/lib/gitlab/database/migrations/sidekiq_helpers_spec.rb +++ b/spec/lib/gitlab/database/migrations/sidekiq_helpers_spec.rb @@ -78,158 +78,174 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do clear_queues end - context "when the constant is not defined" do - it "doesn't try to delete it" do - my_non_constant = +"SomeThingThatIsNotAConstant" + context 'when inside a transaction' do + it 'raises RuntimeError' do + expect(model).to receive(:transaction_open?).and_return(true) - expect(Sidekiq::Queue).not_to receive(:new).with(any_args) - model.sidekiq_remove_jobs(job_klasses: [my_non_constant]) + expect { model.sidekiq_remove_jobs(job_klasses: [worker.name]) } + .to raise_error(RuntimeError) end end - context "when the constant is defined" do - it "will use it find job instances to delete" do - my_constant = worker.name - expect(Sidekiq::Queue) - .to receive(:new) - .with(worker.queue) - .and_call_original - model.sidekiq_remove_jobs(job_klasses: [my_constant]) + context 'when outside a transaction' do + before do + allow(model).to receive(:transaction_open?).and_return(false) + allow(model).to receive(:disable_statement_timeout).and_call_original end - end - it "removes all related job instances from the job classes' queues" do - worker.perform_async - worker_two.perform_async - same_queue_different_worker.perform_async - unrelated_worker.perform_async - - worker_queue = Sidekiq::Queue.new(worker.queue) - worker_two_queue = Sidekiq::Queue.new(worker_two.queue) - unrelated_queue = Sidekiq::Queue.new(unrelated_worker.queue) - - expect(worker_queue.size).to eq(2) - expect(worker_two_queue.size).to eq(1) - expect(unrelated_queue.size).to eq(1) - - model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name]) - - expect(worker_queue.size).to eq(1) - expect(worker_two_queue.size).to eq(0) - expect(worker_queue.map(&:klass)).not_to include(worker.name) - expect(worker_queue.map(&:klass)).to include( - same_queue_different_worker.name - ) - expect(worker_two_queue.map(&:klass)).not_to include(worker_two.name) - expect(unrelated_queue.size).to eq(1) - end + context "when the constant is not defined" do + it "doesn't try to delete it" do + my_non_constant = +"SomeThingThatIsNotAConstant" - context "when job instances are in the scheduled set" do - it "removes all related job instances from the scheduled set" do - worker.perform_in(1.hour) - worker_two.perform_in(1.hour) - unrelated_worker.perform_in(1.hour) + expect(Sidekiq::Queue).not_to receive(:new).with(any_args) + model.sidekiq_remove_jobs(job_klasses: [my_non_constant]) + end + end - scheduled = Sidekiq::ScheduledSet.new + 
context "when the constant is defined" do + it "will use it find job instances to delete" do + my_constant = worker.name + expect(Sidekiq::Queue) + .to receive(:new) + .with(worker.queue) + .and_call_original + model.sidekiq_remove_jobs(job_klasses: [my_constant]) + end + end - expect(scheduled.size).to eq(3) - expect(scheduled.map(&:klass)).to include( - worker.name, - worker_two.name, - unrelated_worker.name - ) + it "removes all related job instances from the job classes' queues" do + worker.perform_async + worker_two.perform_async + same_queue_different_worker.perform_async + unrelated_worker.perform_async + + worker_queue = Sidekiq::Queue.new(worker.queue) + worker_two_queue = Sidekiq::Queue.new(worker_two.queue) + unrelated_queue = Sidekiq::Queue.new(unrelated_worker.queue) + + expect(worker_queue.size).to eq(2) + expect(worker_two_queue.size).to eq(1) + expect(unrelated_queue.size).to eq(1) model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name]) - expect(scheduled.size).to eq(1) - expect(scheduled.map(&:klass)).not_to include(worker.name) - expect(scheduled.map(&:klass)).not_to include(worker_two.name) - expect(scheduled.map(&:klass)).to include(unrelated_worker.name) + expect(worker_queue.size).to eq(1) + expect(worker_two_queue.size).to eq(0) + expect(worker_queue.map(&:klass)).not_to include(worker.name) + expect(worker_queue.map(&:klass)).to include( + same_queue_different_worker.name + ) + expect(worker_two_queue.map(&:klass)).not_to include(worker_two.name) + expect(unrelated_queue.size).to eq(1) end - end - - context "when job instances are in the retry set" do - include_context "when handling retried jobs" - it "removes all related job instances from the retry set" do - retry_in(worker, 1.hour) - retry_in(worker, 2.hours) - retry_in(worker, 3.hours) - retry_in(worker_two, 4.hours) - retry_in(unrelated_worker, 5.hours) + context "when job instances are in the scheduled set" do + it "removes all related job instances from the scheduled set" do + worker.perform_in(1.hour) + worker_two.perform_in(1.hour) + unrelated_worker.perform_in(1.hour) - retries = Sidekiq::RetrySet.new + scheduled = Sidekiq::ScheduledSet.new - expect(retries.size).to eq(5) - expect(retries.map(&:klass)).to include( - worker.name, - worker_two.name, - unrelated_worker.name - ) + expect(scheduled.size).to eq(3) + expect(scheduled.map(&:klass)).to include( + worker.name, + worker_two.name, + unrelated_worker.name + ) - model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name]) + model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name]) - expect(retries.size).to eq(1) - expect(retries.map(&:klass)).not_to include(worker.name) - expect(retries.map(&:klass)).not_to include(worker_two.name) - expect(retries.map(&:klass)).to include(unrelated_worker.name) + expect(scheduled.size).to eq(1) + expect(scheduled.map(&:klass)).not_to include(worker.name) + expect(scheduled.map(&:klass)).not_to include(worker_two.name) + expect(scheduled.map(&:klass)).to include(unrelated_worker.name) + end end - end - # Imitate job deletion returning zero and then non zero. 
- context "when job fails to be deleted" do - let(:job_double) do - instance_double( - "Sidekiq::JobRecord", - klass: worker.name - ) - end + context "when job instances are in the retry set" do + include_context "when handling retried jobs" - context "and does not work enough times in a row before max attempts" do - it "tries the max attempts without succeeding" do - worker.perform_async + it "removes all related job instances from the retry set" do + retry_in(worker, 1.hour) + retry_in(worker, 2.hours) + retry_in(worker, 3.hours) + retry_in(worker_two, 4.hours) + retry_in(unrelated_worker, 5.hours) - allow(job_double).to receive(:delete).and_return(true) + retries = Sidekiq::RetrySet.new - # Scheduled set runs last so only need to stub out its values. - allow(Sidekiq::ScheduledSet) - .to receive(:new) - .and_return([job_double]) - - expect(model.sidekiq_remove_jobs(job_klasses: [worker.name])) - .to eq( - { - attempts: 5, - success: false - } - ) + expect(retries.size).to eq(5) + expect(retries.map(&:klass)).to include( + worker.name, + worker_two.name, + unrelated_worker.name + ) + + model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name]) + + expect(retries.size).to eq(1) + expect(retries.map(&:klass)).not_to include(worker.name) + expect(retries.map(&:klass)).not_to include(worker_two.name) + expect(retries.map(&:klass)).to include(unrelated_worker.name) end end - context "and then it works enough times in a row before max attempts" do - it "succeeds" do - worker.perform_async - - # attempt 1: false will increment the streak once to 1 - # attempt 2: true resets it back to 0 - # attempt 3: false will increment the streak once to 1 - # attempt 4: false will increment the streak once to 2, loop breaks - allow(job_double).to receive(:delete).and_return(false, true, false) + # Imitate job deletion returning zero and then non zero. + context "when job fails to be deleted" do + let(:job_double) do + instance_double( + "Sidekiq::JobRecord", + klass: worker.name + ) + end - worker.perform_async + context "and does not work enough times in a row before max attempts" do + it "tries the max attempts without succeeding" do + worker.perform_async + + allow(job_double).to receive(:delete).and_return(true) + + # Scheduled set runs last so only need to stub out its values. + allow(Sidekiq::ScheduledSet) + .to receive(:new) + .and_return([job_double]) + + expect(model.sidekiq_remove_jobs(job_klasses: [worker.name])) + .to eq( + { + attempts: 5, + success: false + } + ) + end + end - # Scheduled set runs last so only need to stub out its values. - allow(Sidekiq::ScheduledSet) - .to receive(:new) - .and_return([job_double]) - - expect(model.sidekiq_remove_jobs(job_klasses: [worker.name])) - .to eq( - { - attempts: 4, - success: true - } - ) + context "and then it works enough times in a row before max attempts" do + it "succeeds" do + worker.perform_async + + # attempt 1: false will increment the streak once to 1 + # attempt 2: true resets it back to 0 + # attempt 3: false will increment the streak once to 1 + # attempt 4: false will increment the streak once to 2, loop breaks + allow(job_double).to receive(:delete).and_return(false, true, false) + + worker.perform_async + + # Scheduled set runs last so only need to stub out its values. 
+ allow(Sidekiq::ScheduledSet) + .to receive(:new) + .and_return([job_double]) + + expect(model.sidekiq_remove_jobs(job_klasses: [worker.name])) + .to eq( + { + attempts: 4, + success: true + } + ) + end end end end diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb index 57c5011590c..6bcefa455cf 100644 --- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb +++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb @@ -48,6 +48,7 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez let(:result_dir) { Pathname.new(Dir.mktmpdir) } let(:connection) { base_model.connection } let(:table_name) { "_test_column_copying" } + let(:num_rows_in_table) { 1000 } let(:from_id) { 0 } after do @@ -61,7 +62,7 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez data bigint default 0 ); - insert into #{table_name} (id) select i from generate_series(1, 1000) g(i); + insert into #{table_name} (id) select i from generate_series(1, #{num_rows_in_table}) g(i); SQL end @@ -134,6 +135,24 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez expect(calls).not_to be_empty end + it 'samples 1 job with a batch size higher than the table size' do + calls = [] + define_background_migration(migration_name) do |*args| + travel 1.minute + calls << args + end + + queue_migration(migration_name, table_name, :id, + job_interval: 5.minutes, + batch_size: num_rows_in_table * 2, + sub_batch_size: num_rows_in_table * 2) + + described_class.new(result_dir: result_dir, connection: connection, + from_id: from_id).run_jobs(for_duration: 3.minutes) + + expect(calls.size).to eq(1) + end + context 'with multiple jobs to run' do let(:last_id) do Gitlab::Database::SharedModel.using_connection(connection) do diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb index f94a40c93e1..e48937037fa 100644 --- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb +++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb @@ -16,7 +16,7 @@ RSpec.describe 'cross-database foreign keys' do end def is_cross_db?(fk_record) - Gitlab::Database::GitlabSchema.table_schemas([fk_record.from_table, fk_record.to_table]).many? + Gitlab::Database::GitlabSchema.table_schemas!([fk_record.from_table, fk_record.to_table]).many? 
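# A sketch of the calling convention exercised in the SidekiqHelpers hunk above:
# sidekiq_remove_jobs raises when called inside a transaction, scrubs the queues
# plus the scheduled and retry sets for the given job classes (silently skipping
# class names that are no longer defined), and returns { attempts:, success: }.
# The worker class name below is hypothetical.
class RemoveObsoleteWorkerJobs < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  DEPRECATED_JOB_CLASSES = %w[SomeDeprecatedWorker].freeze

  def up
    sidekiq_remove_jobs(job_klasses: DEPRECATED_JOB_CLASSES)
  end

  def down
    # No-op: removed job instances cannot be restored.
  end
end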
end it 'onlies have allowed list of cross-database foreign keys', :aggregate_failures do diff --git a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb index b39b273bba9..fa7645d581c 100644 --- a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb +++ b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns do +RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns, feature_category: :database do before do stub_const('Testing', Module.new) stub_const('Testing::MyBase', Class.new(ActiveRecord::Base)) @@ -16,11 +16,10 @@ RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns do Testing.module_eval do Testing::MyBase.class_eval do + include IgnorableColumns end SomeAbstract.class_eval do - include IgnorableColumns - self.abstract_class = true self.table_name = 'projects' @@ -29,8 +28,6 @@ RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns do end Testing::B.class_eval do - include IgnorableColumns - self.table_name = 'issues' ignore_column :id, :other, remove_after: '2019-01-01', remove_with: '12.0' diff --git a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb new file mode 100644 index 00000000000..f415e892818 --- /dev/null +++ b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb @@ -0,0 +1,178 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_category: :database do + let(:connection) { ActiveRecord::Base.connection } + let(:table_name) { :_test_gitlab_ci_partitioned_test } + let(:model) { class_double(ApplicationRecord, table_name: table_name, connection: connection) } + let(:next_partition_if) { nil } + let(:detach_partition_if) { nil } + + subject(:strategy) do + described_class.new(model, :partition, + next_partition_if: next_partition_if, + detach_partition_if: detach_partition_if) + end + + before do + next if table_name.to_s.starts_with?('p_') + + connection.execute(<<~SQL) + create table #{table_name} + ( + id serial not null, + partition_id bigint not null, + created_at timestamptz not null, + primary key (id, partition_id) + ) + partition by list(partition_id); + + create table #{table_name}_100 + partition of #{table_name} for values in (100); + + create table #{table_name}_101 + partition of #{table_name} for values in (101); + SQL + end + + describe '#current_partitions' do + it 'detects both partitions' do + expect(strategy.current_partitions).to eq( + [ + Gitlab::Database::Partitioning::SingleNumericListPartition.new( + table_name, 100, partition_name: "#{table_name}_100" + ), + Gitlab::Database::Partitioning::SingleNumericListPartition.new( + table_name, 101, partition_name: "#{table_name}_101" + ) + ]) + end + end + + describe '#validate_and_fix' do + it 'does not call change_column_default' do + expect(strategy.model.connection).not_to receive(:change_column_default) + + strategy.validate_and_fix + end + end + + describe '#active_partition' do + it 'is the partition with the largest value' do + expect(strategy.active_partition.value).to eq(101) + end + end + + describe '#missing_partitions' do + context 'when next_partition_if returns true' do + let(:next_partition_if) { proc { true } } + + it 'is a partition definition for the next partition in the series' do + extra = strategy.missing_partitions 
+ + expect(extra.length).to eq(1) + expect(extra.first.value).to eq(102) + end + end + + context 'when next_partition_if returns false' do + let(:next_partition_if) { proc { false } } + + it 'is empty' do + expect(strategy.missing_partitions).to be_empty + end + end + + context 'when there are no partitions for the table' do + it 'returns a partition for value 1' do + connection.execute("drop table #{table_name}_100; drop table #{table_name}_101;") + + missing_partitions = strategy.missing_partitions + + expect(missing_partitions.size).to eq(1) + missing_partition = missing_partitions.first + + expect(missing_partition.value).to eq(100) + end + end + end + + describe '#extra_partitions' do + context 'when all partitions are true for detach_partition_if' do + let(:detach_partition_if) { ->(_p) { true } } + + it { expect(strategy.extra_partitions).to be_empty } + end + + context 'when all partitions are false for detach_partition_if' do + let(:detach_partition_if) { proc { false } } + + it { expect(strategy.extra_partitions).to be_empty } + end + end + + describe '#initial_partition' do + it 'starts with the value 100', :aggregate_failures do + initial_partition = strategy.initial_partition + expect(initial_partition.value).to eq(100) + expect(initial_partition.table).to eq(strategy.table_name) + expect(initial_partition.partition_name).to eq("#{strategy.table_name}_100") + end + + context 'with routing tables' do + let(:table_name) { :p_test_gitlab_ci_partitioned_test } + + it 'removes the prefix', :aggregate_failures do + initial_partition = strategy.initial_partition + + expect(initial_partition.value).to eq(100) + expect(initial_partition.table).to eq(strategy.table_name) + expect(initial_partition.partition_name).to eq('test_gitlab_ci_partitioned_test_100') + end + end + end + + describe '#next_partition' do + before do + allow(strategy) + .to receive(:active_partition) + .and_return(instance_double(Gitlab::Database::Partitioning::SingleNumericListPartition, value: 105)) + end + + it 'is one after the active partition', :aggregate_failures do + next_partition = strategy.next_partition + + expect(next_partition.value).to eq(106) + expect(next_partition.table).to eq(strategy.table_name) + expect(next_partition.partition_name).to eq("#{strategy.table_name}_106") + end + + context 'with routing tables' do + let(:table_name) { :p_test_gitlab_ci_partitioned_test } + + it 'removes the prefix', :aggregate_failures do + next_partition = strategy.next_partition + + expect(next_partition.value).to eq(106) + expect(next_partition.table).to eq(strategy.table_name) + expect(next_partition.partition_name).to eq('test_gitlab_ci_partitioned_test_106') + end + end + end + + describe '#ensure_partitioning_column_ignored_or_readonly!' 
do + it 'does not raise when the column is not ignored' do + expect do + Class.new(ApplicationRecord) do + include PartitionedTable + + partitioned_by :partition_id, + strategy: :ci_sliding_list, + next_partition_if: proc { false }, + detach_partition_if: proc { false } + end + end.not_to raise_error + end + end +end diff --git a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb b/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb deleted file mode 100644 index cd3a94f5737..00000000000 --- a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb +++ /dev/null @@ -1,273 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition do - include Gitlab::Database::DynamicModelHelpers - include Database::TableSchemaHelpers - - let(:migration_context) { Gitlab::Database::Migration[2.0].new } - - let(:connection) { migration_context.connection } - let(:table_name) { '_test_table_to_partition' } - let(:table_identifier) { "#{connection.current_schema}.#{table_name}" } - let(:partitioning_column) { :partition_number } - let(:partitioning_default) { 1 } - let(:referenced_table_name) { '_test_referenced_table' } - let(:other_referenced_table_name) { '_test_other_referenced_table' } - let(:parent_table_name) { "#{table_name}_parent" } - let(:lock_tables) { [] } - - let(:model) { define_batchable_model(table_name, connection: connection) } - - let(:parent_model) { define_batchable_model(parent_table_name, connection: connection) } - - let(:converter) do - described_class.new( - migration_context: migration_context, - table_name: table_name, - partitioning_column: partitioning_column, - parent_table_name: parent_table_name, - zero_partition_value: partitioning_default, - lock_tables: lock_tables - ) - end - - before do - # Suppress printing migration progress - allow(migration_context).to receive(:puts) - allow(migration_context.connection).to receive(:transaction_open?).and_return(false) - - connection.execute(<<~SQL) - create table #{referenced_table_name} ( - id bigserial primary key not null - ) - SQL - - connection.execute(<<~SQL) - create table #{other_referenced_table_name} ( - id bigserial primary key not null - ) - SQL - - connection.execute(<<~SQL) - insert into #{referenced_table_name} default values; - insert into #{other_referenced_table_name} default values; - SQL - - connection.execute(<<~SQL) - create table #{table_name} ( - id bigserial not null, - #{partitioning_column} bigint not null default #{partitioning_default}, - referenced_id bigint not null references #{referenced_table_name} (id) on delete cascade, - other_referenced_id bigint not null references #{other_referenced_table_name} (id) on delete set null, - primary key (id, #{partitioning_column}) - ) - SQL - - connection.execute(<<~SQL) - insert into #{table_name} (referenced_id, other_referenced_id) - select #{referenced_table_name}.id, #{other_referenced_table_name}.id - from #{referenced_table_name}, #{other_referenced_table_name}; - SQL - end - - describe "#prepare_for_partitioning" do - subject(:prepare) { converter.prepare_for_partitioning } - - it 'adds a check constraint' do - expect { prepare }.to change { - Gitlab::Database::PostgresConstraint - .check_constraints - .by_table_identifier(table_identifier) - .count - }.from(0).to(1) - end - end - - describe '#revert_prepare_for_partitioning' do - before do - converter.prepare_for_partitioning 
- end - - subject(:revert_prepare) { converter.revert_preparation_for_partitioning } - - it 'removes a check constraint' do - expect { revert_prepare }.to change { - Gitlab::Database::PostgresConstraint - .check_constraints - .by_table_identifier("#{connection.current_schema}.#{table_name}") - .count - }.from(1).to(0) - end - end - - describe "#convert_to_zero_partition" do - subject(:partition) { converter.partition } - - before do - converter.prepare_for_partitioning - end - - context 'when the primary key is incorrect' do - before do - connection.execute(<<~SQL) - alter table #{table_name} drop constraint #{table_name}_pkey; - alter table #{table_name} add constraint #{table_name}_pkey PRIMARY KEY (id); - SQL - end - - it 'throws a reasonable error message' do - expect { partition }.to raise_error(described_class::UnableToPartition, /#{partitioning_column}/) - end - end - - context 'when there is not a supporting check constraint' do - before do - connection.execute(<<~SQL) - alter table #{table_name} drop constraint partitioning_constraint; - SQL - end - - it 'throws a reasonable error message' do - expect { partition }.to raise_error(described_class::UnableToPartition, /constraint /) - end - end - - it 'migrates the table to a partitioned table' do - fks_before = migration_context.foreign_keys(table_name) - - partition - - expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1) - expect(migration_context.foreign_keys(parent_table_name).map(&:options)).to match_array(fks_before.map(&:options)) - - connection.execute(<<~SQL) - insert into #{table_name} (referenced_id, other_referenced_id) select #{referenced_table_name}.id, #{other_referenced_table_name}.id from #{referenced_table_name}, #{other_referenced_table_name}; - SQL - - # Create a second partition - connection.execute(<<~SQL) - create table #{table_name}2 partition of #{parent_table_name} FOR VALUES IN (2) - SQL - - parent_model.create!(partitioning_column => 2, :referenced_id => 1, :other_referenced_id => 1) - expect(parent_model.pluck(:id)).to match_array([1, 2, 3]) - end - - context 'when the existing table is owned by a different user' do - before do - connection.execute(<<~SQL) - CREATE USER other_user SUPERUSER; - ALTER TABLE #{table_name} OWNER TO other_user; - SQL - end - - let(:current_user) { model.connection.select_value('select current_user') } - - it 'partitions without error' do - expect { partition }.not_to raise_error - end - end - - context 'with locking tables' do - let(:lock_tables) { [table_name] } - - it 'locks the table' do - recorder = ActiveRecord::QueryRecorder.new { partition } - - expect(recorder.log).to include(/LOCK "_test_table_to_partition" IN ACCESS EXCLUSIVE MODE/) - end - end - - context 'when an error occurs during the conversion' do - def fail_first_time - # We can't directly use a boolean here, as we need something that will be passed by-reference to the proc - fault_status = { faulted: false } - proc do |m, *args, **kwargs| - next m.call(*args, **kwargs) if fault_status[:faulted] - - fault_status[:faulted] = true - raise 'fault!' 
- end - end - - def fail_sql_matching(regex) - proc do - allow(migration_context.connection).to receive(:execute).and_call_original - allow(migration_context.connection).to receive(:execute).with(regex).and_wrap_original(&fail_first_time) - end - end - - def fail_adding_fk(from_table, to_table) - proc do - allow(migration_context.connection).to receive(:add_foreign_key).and_call_original - expect(migration_context.connection).to receive(:add_foreign_key).with(from_table, to_table, any_args) - .and_wrap_original(&fail_first_time) - end - end - - where(:case_name, :fault) do - [ - ["creating parent table", lazy { fail_sql_matching(/CREATE/i) }], - ["adding the first foreign key", lazy { fail_adding_fk(parent_table_name, referenced_table_name) }], - ["adding the second foreign key", lazy { fail_adding_fk(parent_table_name, other_referenced_table_name) }], - ["attaching table", lazy { fail_sql_matching(/ATTACH/i) }] - ] - end - - before do - # Set up the fault that we'd like to inject - fault.call - end - - with_them do - it 'recovers from a fault', :aggregate_failures do - expect { converter.partition }.to raise_error(/fault/) - expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(0) - - expect { converter.partition }.not_to raise_error - expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1) - end - end - end - end - - describe '#revert_conversion_to_zero_partition' do - before do - converter.prepare_for_partitioning - converter.partition - end - - subject(:revert_conversion) { converter.revert_partitioning } - - it 'detaches the partition' do - expect { revert_conversion }.to change { - Gitlab::Database::PostgresPartition - .for_parent_table(parent_table_name).count - }.from(1).to(0) - end - - it 'does not drop the child partition' do - expect { revert_conversion }.not_to change { table_oid(table_name) } - end - - it 'removes the parent table' do - expect { revert_conversion }.to change { table_oid(parent_table_name).present? 
}.from(true).to(false) - end - - it 're-adds the check constraint' do - expect { revert_conversion }.to change { - Gitlab::Database::PostgresConstraint - .check_constraints - .by_table_identifier(table_identifier) - .count - }.by(1) - end - - it 'moves sequences back to the original table' do - expect { revert_conversion }.to change { converter.send(:sequences_owned_by, table_name).count }.from(0) - .and change { converter.send(:sequences_owned_by, parent_table_name).count }.to(0) - end - end -end diff --git a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb index 646ae50fb44..04940028aee 100644 --- a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb +++ b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb @@ -25,23 +25,23 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do before do connection.execute(<<~SQL) - CREATE TABLE referenced_table ( + CREATE TABLE _test_referenced_table ( id bigserial primary key not null ) SQL connection.execute(<<~SQL) - CREATE TABLE parent_table ( + CREATE TABLE _test_parent_table ( id bigserial not null, referenced_id bigint not null, created_at timestamptz not null, primary key (id, created_at), - constraint fk_referenced foreign key (referenced_id) references referenced_table(id) + constraint fk_referenced foreign key (referenced_id) references _test_referenced_table(id) ) PARTITION BY RANGE(created_at) SQL end - def create_partition(name:, from:, to:, attached:, drop_after:, table: 'parent_table') + def create_partition(name:, from:, to:, attached:, drop_after:, table: :_test_parent_table) from = from.beginning_of_month to = to.beginning_of_month full_name = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{name}" @@ -64,20 +64,20 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do describe '#perform' do context 'when the partition should not be dropped yet' do it 'does not drop the partition' do - create_partition(name: 'test_partition', + create_partition(name: :_test_partition, from: 2.months.ago, to: 1.month.ago, attached: false, drop_after: 1.day.from_now) dropper.perform - expect_partition_present('test_partition') + expect_partition_present(:_test_partition) end end context 'with a partition to drop' do before do - create_partition(name: 'test_partition', + create_partition(name: :_test_partition, from: 2.months.ago, to: 1.month.ago.beginning_of_month, attached: false, @@ -87,45 +87,45 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do it 'drops the partition' do dropper.perform - expect(table_oid('test_partition')).to be_nil + expect(table_oid(:_test_partition)).to be_nil end context 'removing foreign keys' do it 'removes foreign keys from the table before dropping it' do expect(dropper).to receive(:drop_detached_partition).and_wrap_original do |drop_method, partition| - expect(partition.table_name).to eq('test_partition') + expect(partition.table_name).to eq('_test_partition') expect(foreign_key_exists_by_name(partition.table_name, 'fk_referenced', schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)).to be_falsey drop_method.call(partition) end - expect(foreign_key_exists_by_name('test_partition', 'fk_referenced', schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)).to be_truthy + expect(foreign_key_exists_by_name(:_test_partition, 'fk_referenced', schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)).to be_truthy 
dropper.perform end it 'does not remove foreign keys from the parent table' do - expect { dropper.perform }.not_to change { foreign_key_exists_by_name('parent_table', 'fk_referenced') }.from(true) + expect { dropper.perform }.not_to change { foreign_key_exists_by_name('_test_parent_table', 'fk_referenced') }.from(true) end context 'when another process drops the foreign key' do it 'skips dropping that foreign key' do expect(dropper).to receive(:drop_foreign_key_if_present).and_wrap_original do |drop_meth, *args| - connection.execute('alter table gitlab_partitions_dynamic.test_partition drop constraint fk_referenced;') + connection.execute('alter table gitlab_partitions_dynamic._test_partition drop constraint fk_referenced;') drop_meth.call(*args) end dropper.perform - expect_partition_removed('test_partition') + expect_partition_removed(:_test_partition) end end context 'when another process drops the partition' do it 'skips dropping the foreign key' do expect(dropper).to receive(:drop_foreign_key_if_present).and_wrap_original do |drop_meth, *args| - connection.execute('drop table gitlab_partitions_dynamic.test_partition') - Postgresql::DetachedPartition.where(table_name: 'test_partition').delete_all + connection.execute('drop table gitlab_partitions_dynamic._test_partition') + Postgresql::DetachedPartition.where(table_name: :_test_partition).delete_all end expect(Gitlab::AppLogger).not_to receive(:error) @@ -159,7 +159,7 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do context 'when the partition to drop is still attached to its table' do before do - create_partition(name: 'test_partition', + create_partition(name: :_test_partition, from: 2.months.ago, to: 1.month.ago.beginning_of_month, attached: true, @@ -169,8 +169,8 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do it 'does not drop the partition, but does remove the DetachedPartition entry' do dropper.perform aggregate_failures do - expect(table_oid('test_partition')).not_to be_nil - expect(Postgresql::DetachedPartition.find_by(table_name: 'test_partition')).to be_nil + expect(table_oid(:_test_partition)).not_to be_nil + expect(Postgresql::DetachedPartition.find_by(table_name: :_test_partition)).to be_nil end end @@ -185,20 +185,20 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do dropper.perform - expect(table_oid('test_partition')).not_to be_nil + expect(table_oid(:_test_partition)).not_to be_nil end end end context 'with multiple partitions to drop' do before do - create_partition(name: 'partition_1', + create_partition(name: :_test_partition_1, from: 3.months.ago, to: 2.months.ago, attached: false, drop_after: 1.second.ago) - create_partition(name: 'partition_2', + create_partition(name: :_test_partition_2, from: 2.months.ago, to: 1.month.ago, attached: false, @@ -208,8 +208,8 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do it 'drops both partitions' do dropper.perform - expect_partition_removed('partition_1') - expect_partition_removed('partition_2') + expect_partition_removed(:_test_partition_1) + expect_partition_removed(:_test_partition_2) end context 'when the first drop returns an error' do @@ -223,7 +223,7 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do expect(Postgresql::DetachedPartition.count).to eq(1) errored_partition_name = Postgresql::DetachedPartition.first!.table_name - dropped_partition_name = (%w[partition_1 partition_2] - [errored_partition_name]).first + 
dropped_partition_name = (%w[_test_partition_1 _test_partition_2] - [errored_partition_name]).first expect_partition_present(errored_partition_name) expect_partition_removed(dropped_partition_name) end diff --git a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb new file mode 100644 index 00000000000..8e2a53ea76f --- /dev/null +++ b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb @@ -0,0 +1,365 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_category: :database do + include Gitlab::Database::DynamicModelHelpers + include Database::TableSchemaHelpers + include Database::InjectFailureHelpers + + include_context 'with a table structure for converting a table to a list partition' + + let(:converter) do + described_class.new( + migration_context: migration_context, + table_name: table_name, + partitioning_column: partitioning_column, + parent_table_name: parent_table_name, + zero_partition_value: partitioning_default, + lock_tables: lock_tables + ) + end + + describe "#prepare_for_partitioning" do + subject(:prepare) { converter.prepare_for_partitioning(async: async) } + + let(:async) { false } + + it 'adds a check constraint' do + expect { prepare }.to change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .count + }.from(0).to(1) + end + + context 'when it fails to add constraint' do + before do + allow(migration_context).to receive(:add_check_constraint) + end + + it 'raises UnableToPartition error' do + expect { prepare } + .to raise_error(described_class::UnableToPartition) + .and change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .count + }.by(0) + end + end + + context 'when async' do + let(:async) { true } + + it 'adds a NOT VALID check constraint' do + expect { prepare }.to change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .count + }.from(0).to(1) + + constraint = + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .last + + expect(constraint.definition).to end_with('NOT VALID') + end + + it 'adds a PostgresAsyncConstraintValidation record' do + expect { prepare }.to change { + Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation.count + }.by(1) + + record = Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation + .where(table_name: table_name).last + + expect(record.name).to eq described_class::PARTITIONING_CONSTRAINT_NAME + expect(record).to be_check_constraint + end + + context 'when constraint exists but is not valid' do + before do + converter.prepare_for_partitioning(async: true) + end + + it 'validates the check constraint' do + expect { prepare }.to change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier).first.constraint_valid? 
+ }.from(false).to(true) + end + + context 'when it fails to validate constraint' do + before do + allow(migration_context).to receive(:validate_check_constraint) + end + + it 'raises UnableToPartition error' do + expect { prepare } + .to raise_error(described_class::UnableToPartition, + starting_with('Error validating partitioning constraint')) + .and change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .count + }.by(0) + end + end + end + + context 'when constraint exists and is valid' do + before do + converter.prepare_for_partitioning(async: false) + end + + it 'logs that there is nothing to do' do + expect(Gitlab::AppLogger).to receive(:info).with(starting_with('Nothing to do')) + prepare + end + end + end + end + + describe '#revert_preparation_for_partitioning' do + before do + converter.prepare_for_partitioning + end + + subject(:revert_prepare) { converter.revert_preparation_for_partitioning } + + it 'removes a check constraint' do + expect { revert_prepare }.to change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier("#{connection.current_schema}.#{table_name}") + .count + }.from(1).to(0) + end + end + + describe "#partition" do + subject(:partition) { converter.partition } + + let(:async) { false } + + before do + converter.prepare_for_partitioning(async: async) + end + + context 'when the primary key is incorrect' do + before do + connection.execute(<<~SQL) + alter table #{referencing_table_name} drop constraint fk_referencing; -- this depends on the primary key + alter table #{other_referencing_table_name} drop constraint fk_referencing_other; -- this does too + alter table #{table_name} drop constraint #{table_name}_pkey; + alter table #{table_name} add constraint #{table_name}_pkey PRIMARY KEY (id); + SQL + end + + it 'throws a reasonable error message' do + expect { partition }.to raise_error(described_class::UnableToPartition, /#{partitioning_column}/) + end + end + + context 'when there is not a supporting check constraint' do + before do + connection.execute(<<~SQL) + alter table #{table_name} drop constraint partitioning_constraint; + SQL + end + + it 'throws a reasonable error message' do + expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./) + end + end + + context 'when supporting check constraint is not valid' do + let(:async) { true } + + it 'throws a reasonable error message' do + expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./) + end + end + + it 'migrates the table to a partitioned table' do + fks_before = migration_context.foreign_keys(table_name) + + partition + + expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1) + expect(migration_context.foreign_keys(parent_table_name).map(&:options)).to match_array(fks_before.map(&:options)) + + connection.execute(<<~SQL) + insert into #{table_name} (referenced_id, other_referenced_id) select #{referenced_table_name}.id, #{other_referenced_table_name}.id from #{referenced_table_name}, #{other_referenced_table_name}; + SQL + + # Create a second partition + connection.execute(<<~SQL) + create table #{table_name}2 partition of #{parent_table_name} FOR VALUES IN (2) + SQL + + parent_model.create!(partitioning_column => 2, :referenced_id => 1, :other_referenced_id => 1) + expect(parent_model.pluck(:id)).to match_array([1, 2, 3]) + + expect { referencing_model.create!(partitioning_column
=> 1, :ref_id => 1) }.not_to raise_error + end + + context 'when the existing table is owned by a different user' do + before do + connection.execute(<<~SQL) + CREATE USER other_user SUPERUSER; + ALTER TABLE #{table_name} OWNER TO other_user; + SQL + end + + let(:current_user) { model.connection.select_value('select current_user') } + + it 'partitions without error' do + expect { partition }.not_to raise_error + end + end + + context 'with locking tables' do + let(:lock_tables) { [table_name] } + + it 'locks the table' do + recorder = ActiveRecord::QueryRecorder.new { partition } + + expect(recorder.log).to include(/LOCK "_test_table_to_partition" IN ACCESS EXCLUSIVE MODE/) + end + end + + context 'when an error occurs during the conversion' do + before do + # Set up the fault that we'd like to inject + fault.call + end + + let(:old_fks) do + Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(table_identifier).not_inherited + end + + let(:new_fks) do + Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(parent_table_identifier).not_inherited + end + + context 'when partitioning fails the first time' do + where(:case_name, :fault) do + [ + ["creating parent table", lazy { fail_sql_matching(/CREATE/i) }], + ["adding the first foreign key", lazy { fail_adding_fk(parent_table_name, referenced_table_name) }], + ["adding the second foreign key", lazy { fail_adding_fk(parent_table_name, other_referenced_table_name) }], + ["attaching table", lazy { fail_sql_matching(/ATTACH/i) }] + ] + end + + with_them do + it 'recovers from a fault', :aggregate_failures do + expect { converter.partition }.to raise_error(/fault/) + expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(0) + + expect { converter.partition }.not_to raise_error + expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1) + end + end + end + end + + context 'when table has LFK triggers' do + before do + migration_context.track_record_deletions(table_name) + end + + it 'moves the trigger on the parent table', :aggregate_failures do + expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy + + expect { partition }.not_to raise_error + + expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy + expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy + end + + context 'with locking tables' do + let(:lock_tables) { [table_name] } + + it 'locks the table before dropping the triggers' do + recorder = ActiveRecord::QueryRecorder.new { partition } + + lock_index = recorder.log.find_index do |log| + log.start_with?('LOCK "_test_table_to_partition" IN ACCESS EXCLUSIVE MODE') + end + + trigger_index = recorder.log.find_index do |log| + log.start_with?('DROP TRIGGER IF EXISTS _test_table_to_partition_loose_fk_trigger') + end + + expect(lock_index).to be_present + expect(trigger_index).to be_present + expect(lock_index).to be < trigger_index + end + end + end + end + + describe '#revert_partitioning' do + before do + converter.prepare_for_partitioning + converter.partition + end + + subject(:revert_conversion) { converter.revert_partitioning } + + it 'detaches the partition' do + expect { revert_conversion }.to change { + Gitlab::Database::PostgresPartition + .for_parent_table(parent_table_name).count + }.from(1).to(0) + end + + it 'does not drop the child partition' do + expect { revert_conversion }.not_to change { table_oid(table_name) } + end + + it 'removes the parent table' do + 
expect { revert_conversion }.to change { table_oid(parent_table_name).present? }.from(true).to(false) + end + + it 're-adds the check constraint' do + expect { revert_conversion }.to change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .count + }.by(1) + end + + it 'moves sequences back to the original table' do + expect { revert_conversion }.to change { converter.send(:sequences_owned_by, table_name).count }.from(0) + .and change { converter.send(:sequences_owned_by, parent_table_name).count }.to(0) + end + + context 'when table has LFK triggers' do + before do + migration_context.track_record_deletions(parent_table_name) + migration_context.track_record_deletions(table_name) + end + + it 'restores the trigger on the partition', :aggregate_failures do + expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy + expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy + + expect { revert_conversion }.not_to raise_error + + expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy + end + end + end +end diff --git a/spec/lib/gitlab/database/partitioning/list/locking_configuration_spec.rb b/spec/lib/gitlab/database/partitioning/list/locking_configuration_spec.rb new file mode 100644 index 00000000000..851add43e3c --- /dev/null +++ b/spec/lib/gitlab/database/partitioning/list/locking_configuration_spec.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Partitioning::List::LockingConfiguration, feature_category: :database do + let(:migration_context) do + Gitlab::Database::Migration[2.1].new.tap do |migration| + migration.extend Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers + migration.extend Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers + end + end + + let(:locking_order) { %w[table_1 table_2 table_3] } + + subject(:locking_configuration) { described_class.new(migration_context, table_locking_order: locking_order) } + + describe '#locking_statement_for' do + it 'only includes locking information for tables in the locking specification' do + expect(subject.locking_statement_for(%w[table_1 table_other])).to eq(subject.locking_statement_for('table_1')) + end + + it 'is nil when none of the tables match the lock configuration' do + expect(subject.locking_statement_for('table_other')).to be_nil + end + + it 'is a lock tables statement' do + expect(subject.locking_statement_for(%w[table_3 table_2])).to eq(<<~SQL) + LOCK "table_2", "table_3" IN ACCESS EXCLUSIVE MODE + SQL + end + + it 'raises if a table name with schema is passed' do + expect { subject.locking_statement_for('public.test') }.to raise_error(ArgumentError) + end + end + + describe '#lock_ordering_for' do + it 'is the intersection with the locking specification, in the order of the specification' do + expect(subject.locking_order_for(%w[table_other table_3 table_1])).to eq(%w[table_1 table_3]) + end + + it 'raises if a table name with schema is passed' do + expect { subject.locking_order_for('public.test') }.to raise_error(ArgumentError) + end + end +end diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb index 2212cb09888..eac4a162879 100644 --- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb +++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb @@ -6,7 +6,7 @@ RSpec.describe 
Gitlab::Database::Partitioning::PartitionManager do include Database::PartitioningHelpers include ExclusiveLeaseHelpers - let(:partitioned_table_name) { "_test_gitlab_main_my_model_example_table" } + let(:partitioned_table_name) { :_test_gitlab_main_my_model_example_table } context 'creating partitions (mocked)' do subject(:sync_partitions) { described_class.new(model).sync_partitions } @@ -45,7 +45,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do sync_partitions end - context 'with eplicitly provided connection' do + context 'with explicitly provided connection' do let(:connection) { Ci::ApplicationRecord.connection } it 'uses the explicitly provided connection when any' do @@ -59,6 +59,14 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do end end + context 'when an ArgumentError occurs during partition management' do + it 'raises error' do + expect(partitioning_strategy).to receive(:missing_partitions).and_raise(ArgumentError) + + expect { sync_partitions }.to raise_error(ArgumentError) + end + end + context 'when an error occurs during partition management' do it 'does not raise an error' do expect(partitioning_strategy).to receive(:missing_partitions).and_raise('this should never happen (tm)') @@ -115,7 +123,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do let(:manager) { described_class.new(model) } let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) } let(:connection) { ActiveRecord::Base.connection } - let(:table) { "foo" } + let(:table) { :_test_foo } let(:partitioning_strategy) do double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil) end @@ -144,7 +152,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do end it 'logs an error if the partitions are not detachable' do - allow(Gitlab::Database::PostgresForeignKey).to receive(:by_referenced_table_identifier).with("public.foo") + allow(Gitlab::Database::PostgresForeignKey).to receive(:by_referenced_table_identifier).with("public._test_foo") .and_return([double(name: "fk_1", constrained_table_identifier: "public.constrainted_table_1")]) expect(Gitlab::AppLogger).to receive(:error).with( @@ -154,7 +162,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do exception_class: Gitlab::Database::Partitioning::PartitionManager::UnsafeToDetachPartitionError, exception_message: "Cannot detach foo1, it would block while checking foreign key fk_1 on public.constrainted_table_1", - table_name: "foo" + table_name: :_test_foo } ) @@ -230,23 +238,20 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do expect(pending_drop.drop_after).to eq(Time.current + described_class::RETAIN_DETACHED_PARTITIONS_FOR) end - # Postgres 11 does not support foreign keys to partitioned tables - if ApplicationRecord.database.version.to_f >= 12 - context 'when the model is the target of a foreign key' do - before do - connection.execute(<<~SQL) + context 'when the model is the target of a foreign key' do + before do + connection.execute(<<~SQL) create unique index idx_for_fk ON #{partitioned_table_name}(created_at); create table _test_gitlab_main_referencing_table ( id bigserial primary key not null, referencing_created_at timestamptz references #{partitioned_table_name}(created_at) ); - SQL - end + SQL + end - it 'does not detach partitions with a referenced foreign key' do - expect { subject }.not_to change { find_partitions(my_model.table_name).size } - 
end + it 'does not detach partitions with a referenced foreign key' do + expect { subject }.not_to change { find_partitions(my_model.table_name).size } end end end diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb index 1885e84ac4c..fc279051800 100644 --- a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb +++ b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb @@ -54,6 +54,11 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition allow(backfill_job).to receive(:sleep) end + after do + connection.drop_table source_table + connection.drop_table destination_table + end + let(:source_model) { Class.new(ActiveRecord::Base) } let(:destination_model) { Class.new(ActiveRecord::Base) } let(:timestamp) { Time.utc(2020, 1, 2).round } @@ -82,7 +87,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition end it 'breaks the assigned batch into smaller batches' do - expect_next_instance_of(described_class::BulkCopy) do |bulk_copy| + expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BulkCopy) do |bulk_copy| expect(bulk_copy).to receive(:copy_between).with(source1.id, source2.id) expect(bulk_copy).to receive(:copy_between).with(source3.id, source3.id) end diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb index f0e34476cf2..d5f4afd7ba4 100644 --- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb +++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers do +RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers, feature_category: :database do include Database::TableSchemaHelpers let(:migration) do @@ -16,15 +16,23 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers let(:partition_schema) { 'gitlab_partitions_dynamic' } let(:partition1_name) { "#{partition_schema}.#{source_table_name}_202001" } let(:partition2_name) { "#{partition_schema}.#{source_table_name}_202002" } + let(:validate) { true } let(:options) do { column: column_name, name: foreign_key_name, on_delete: :cascade, - validate: true + on_update: nil, + primary_key: :id } end + let(:create_options) do + options + .except(:primary_key) + .merge!(reverse_lock_order: false, target_column: :id, validate: validate) + end + before do allow(migration).to receive(:puts) allow(migration).to receive(:transaction_open?).and_return(false) @@ -67,12 +75,11 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers expect(migration).to receive(:concurrent_partitioned_foreign_key_name).and_return(foreign_key_name) - expect_add_concurrent_fk_and_call_original(partition1_name, target_table_name, **options) - expect_add_concurrent_fk_and_call_original(partition2_name, target_table_name, **options) + expect_add_concurrent_fk_and_call_original(partition1_name, target_table_name, **create_options) + expect_add_concurrent_fk_and_call_original(partition2_name, target_table_name, **create_options) - expect(migration).to receive(:with_lock_retries).ordered.and_yield - 
expect(migration).to receive(:add_foreign_key) - .with(source_table_name, target_table_name, **options) + expect(migration).to receive(:add_concurrent_foreign_key) + .with(source_table_name, target_table_name, allow_partitioned: true, **create_options) .ordered .and_call_original @@ -81,6 +88,39 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers expect_foreign_key_to_exist(source_table_name, foreign_key_name) end + context 'with validate: false option' do + let(:validate) { false } + let(:options) do + { + column: column_name, + name: foreign_key_name, + on_delete: :cascade, + on_update: nil, + primary_key: :id + } + end + + it 'creates the foreign key only on partitions' do + expect(migration).to receive(:foreign_key_exists?) + .with(source_table_name, target_table_name, **options) + .and_return(false) + + expect(migration).to receive(:concurrent_partitioned_foreign_key_name).and_return(foreign_key_name) + + expect_add_concurrent_fk_and_call_original(partition1_name, target_table_name, **create_options) + expect_add_concurrent_fk_and_call_original(partition2_name, target_table_name, **create_options) + + expect(migration).not_to receive(:add_concurrent_foreign_key) + .with(source_table_name, target_table_name, **create_options) + + migration.add_concurrent_partitioned_foreign_key( + source_table_name, target_table_name, + column: column_name, validate: false) + + expect_foreign_key_not_to_exist(source_table_name, foreign_key_name) + end + end + def expect_add_concurrent_fk_and_call_original(source_table_name, target_table_name, options) expect(migration).to receive(:add_concurrent_foreign_key) .ordered @@ -100,8 +140,6 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers .and_return(true) expect(migration).not_to receive(:add_concurrent_foreign_key) - expect(migration).not_to receive(:with_lock_retries) - expect(migration).not_to receive(:add_foreign_key) migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name, column: column_name) @@ -110,30 +148,43 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers end context 'when additional foreign key options are given' do - let(:options) do + let(:exists_options) do { column: column_name, name: '_my_fk_name', on_delete: :restrict, - validate: true + on_update: nil, + primary_key: :id } end + let(:create_options) do + exists_options + .except(:primary_key) + .merge!(reverse_lock_order: false, target_column: :id, validate: true) + end + it 'forwards them to the foreign key helper methods' do expect(migration).to receive(:foreign_key_exists?)
- .with(source_table_name, target_table_name, **options) + .with(source_table_name, target_table_name, **exists_options) .and_return(false) expect(migration).not_to receive(:concurrent_partitioned_foreign_key_name) - expect_add_concurrent_fk(partition1_name, target_table_name, **options) - expect_add_concurrent_fk(partition2_name, target_table_name, **options) + expect_add_concurrent_fk(partition1_name, target_table_name, **create_options) + expect_add_concurrent_fk(partition2_name, target_table_name, **create_options) - expect(migration).to receive(:with_lock_retries).ordered.and_yield - expect(migration).to receive(:add_foreign_key).with(source_table_name, target_table_name, **options).ordered + expect(migration).to receive(:add_concurrent_foreign_key) + .with(source_table_name, target_table_name, allow_partitioned: true, **create_options) + .ordered - migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name, - column: column_name, name: '_my_fk_name', on_delete: :restrict) + migration.add_concurrent_partitioned_foreign_key( + source_table_name, + target_table_name, + column: column_name, + name: '_my_fk_name', + on_delete: :restrict + ) end def expect_add_concurrent_fk(source_table_name, target_table_name, options) @@ -153,4 +204,39 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers end end end + + describe '#validate_partitioned_foreign_key' do + context 'when run inside a transaction block' do + it 'raises an error' do + expect(migration).to receive(:transaction_open?).and_return(true) + + expect do + migration.validate_partitioned_foreign_key(source_table_name, column_name, name: '_my_fk_name') + end.to raise_error(/can not be run inside a transaction/) + end + end + + context 'when run outside a transaction block' do + before do + migration.add_concurrent_partitioned_foreign_key( + source_table_name, + target_table_name, + column: column_name, + name: foreign_key_name, + validate: false + ) + end + + it 'validates FK for each partition' do + expect(migration).to receive(:execute).with(/SET statement_timeout TO 0/).twice + expect(migration).to receive(:execute).with(/RESET statement_timeout/).twice + expect(migration).to receive(:execute) + .with(/ALTER TABLE #{partition1_name} VALIDATE CONSTRAINT #{foreign_key_name}/).ordered + expect(migration).to receive(:execute) + .with(/ALTER TABLE #{partition2_name} VALIDATE CONSTRAINT #{foreign_key_name}/).ordered + + migration.validate_partitioned_foreign_key(source_table_name, column_name, name: foreign_key_name) + end + end + end end diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb index e76b1da3834..571c67db597 100644 --- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb +++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb @@ -2,10 +2,11 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers do +RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers, feature_category: :database do include Database::PartitioningHelpers include Database::TriggerHelpers include Database::TableSchemaHelpers + include MigrationsHelpers let(:migration) do ActiveRecord::Migration.new.extend(described_class) @@ -14,9 +15,9 @@ RSpec.describe
Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe let_it_be(:connection) { ActiveRecord::Base.connection } let(:source_table) { :_test_original_table } - let(:partitioned_table) { '_test_migration_partitioned_table' } - let(:function_name) { '_test_migration_function_name' } - let(:trigger_name) { '_test_migration_trigger_name' } + let(:partitioned_table) { :_test_migration_partitioned_table } + let(:function_name) { :_test_migration_function_name } + let(:trigger_name) { :_test_migration_trigger_name } let(:partition_column) { 'created_at' } let(:min_date) { Date.new(2019, 12) } let(:max_date) { Date.new(2020, 3) } @@ -42,15 +43,15 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end context 'list partitioning conversion helpers' do - shared_examples_for 'delegates to ConvertTableToFirstListPartition' do + shared_examples_for 'delegates to ConvertTable' do let(:extra_options) { {} } it 'throws an error if in a transaction' do allow(migration).to receive(:transaction_open?).and_return(true) expect { migrate }.to raise_error(/cannot be run inside a transaction/) end - it 'delegates to a method on ConvertTableToFirstListPartition' do - expect_next_instance_of(Gitlab::Database::Partitioning::ConvertTableToFirstListPartition, + it 'delegates to a method on List::ConvertTable' do + expect_next_instance_of(Gitlab::Database::Partitioning::List::ConvertTable, migration_context: migration, table_name: source_table, parent_table_name: partitioned_table, @@ -65,7 +66,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end describe '#convert_table_to_first_list_partition' do - it_behaves_like 'delegates to ConvertTableToFirstListPartition' do + it_behaves_like 'delegates to ConvertTable' do let(:lock_tables) { [source_table] } let(:extra_options) { { lock_tables: lock_tables } } let(:expected_method) { :partition } @@ -80,7 +81,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end describe '#revert_converting_table_to_first_list_partition' do - it_behaves_like 'delegates to ConvertTableToFirstListPartition' do + it_behaves_like 'delegates to ConvertTable' do let(:expected_method) { :revert_partitioning } let(:migrate) do migration.revert_converting_table_to_first_list_partition(table_name: source_table, @@ -92,19 +93,20 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end describe '#prepare_constraint_for_list_partitioning' do - it_behaves_like 'delegates to ConvertTableToFirstListPartition' do + it_behaves_like 'delegates to ConvertTable' do let(:expected_method) { :prepare_for_partitioning } let(:migrate) do migration.prepare_constraint_for_list_partitioning(table_name: source_table, partitioning_column: partition_column, parent_table_name: partitioned_table, - initial_partitioning_value: min_date) + initial_partitioning_value: min_date, + async: false) end end end describe '#revert_preparing_constraint_for_list_partitioning' do - it_behaves_like 'delegates to ConvertTableToFirstListPartition' do + it_behaves_like 'delegates to ConvertTable' do let(:expected_method) { :revert_preparation_for_partitioning } let(:migrate) do migration.revert_preparing_constraint_for_list_partitioning(table_name: source_table, @@ -121,12 +123,8 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe let(:old_primary_key) { 'id' } let(:new_primary_key) { [old_primary_key, partition_column] } - before do - allow(migration).to 
receive(:queue_background_migration_jobs_by_range_at_intervals) - end - context 'when the table is not allowed' do - let(:source_table) { :this_table_is_not_allowed } + let(:source_table) { :_test_this_table_is_not_allowed } it 'raises an error' do expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original @@ -227,7 +225,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end end - let(:non_int_table) { :another_example } + let(:non_int_table) { :_test_another_example } let(:old_primary_key) { 'identifier' } it 'does not change the primary key datatype' do @@ -422,7 +420,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe let(:migration_class) { 'Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' } context 'when the table is not allowed' do - let(:source_table) { :this_table_is_not_allowed } + let(:source_table) { :_test_this_table_is_not_allowed } it 'raises an error' do expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original @@ -462,7 +460,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe describe '#enqueue_partitioning_data_migration' do context 'when the table is not allowed' do - let(:source_table) { :this_table_is_not_allowed } + let(:source_table) { :_test_this_table_is_not_allowed } it 'raises an error' do expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original @@ -484,17 +482,15 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end context 'when records exist in the source table' do - let(:migration_class) { '::Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' } + let(:migration_class) { described_class::MIGRATION } let(:sub_batch_size) { described_class::SUB_BATCH_SIZE } - let(:pause_seconds) { described_class::PAUSE_SECONDS } let!(:first_id) { source_model.create!(name: 'Bob', age: 20).id } let!(:second_id) { source_model.create!(name: 'Alice', age: 30).id } let!(:third_id) { source_model.create!(name: 'Sam', age: 40).id } before do stub_const("#{described_class.name}::BATCH_SIZE", 2) - - expect(migration).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original + stub_const("#{described_class.name}::SUB_BATCH_SIZE", 1) end it 'enqueues jobs to copy each batch of data' do @@ -503,13 +499,13 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe Sidekiq::Testing.fake! 
do migration.enqueue_partitioning_data_migration source_table - expect(BackgroundMigrationWorker.jobs.size).to eq(2) - - first_job_arguments = [first_id, second_id, source_table.to_s, partitioned_table, 'id'] - expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([migration_class, first_job_arguments]) - - second_job_arguments = [third_id, third_id, source_table.to_s, partitioned_table, 'id'] - expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([migration_class, second_job_arguments]) + expect(migration_class).to have_scheduled_batched_migration( + table_name: source_table, + column_name: :id, + job_arguments: [partitioned_table], + batch_size: described_class::BATCH_SIZE, + sub_batch_size: described_class::SUB_BATCH_SIZE + ) end end end @@ -517,7 +513,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe describe '#cleanup_partitioning_data_migration' do context 'when the table is not allowed' do - let(:source_table) { :this_table_is_not_allowed } + let(:source_table) { :_test_this_table_is_not_allowed } it 'raises an error' do expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original @@ -528,18 +524,36 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end end - context 'when tracking records exist in the background_migration_jobs table' do - let(:migration_class) { 'Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' } - let!(:job1) { create(:background_migration_job, class_name: migration_class, arguments: [1, 10, source_table]) } - let!(:job2) { create(:background_migration_job, class_name: migration_class, arguments: [11, 20, source_table]) } - let!(:job3) { create(:background_migration_job, class_name: migration_class, arguments: [1, 10, 'other_table']) } + context 'when tracking records exist in the batched_background_migrations table' do + let(:migration_class) { described_class::MIGRATION } + + before do + create( + :batched_background_migration, + job_class_name: migration_class, + table_name: source_table, + column_name: :id, + job_arguments: [partitioned_table] + ) + + create( + :batched_background_migration, + job_class_name: migration_class, + table_name: 'other_table', + column_name: :id, + job_arguments: ['other_table_partitioned'] + ) + end it 'deletes those pertaining to the given table' do expect { migration.cleanup_partitioning_data_migration(source_table) } - .to change { ::Gitlab::Database::BackgroundMigrationJob.count }.from(3).to(1) + .to change { ::Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1) - remaining_record = ::Gitlab::Database::BackgroundMigrationJob.first - expect(remaining_record).to have_attributes(class_name: migration_class, arguments: [1, 10, 'other_table']) + expect(::Gitlab::Database::BackgroundMigration::BatchedMigration.where(table_name: 'other_table').any?) + .to be_truthy + + expect(::Gitlab::Database::BackgroundMigration::BatchedMigration.where(table_name: source_table).any?) 
+ .to be_falsy end end end @@ -577,10 +591,10 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end describe '#finalize_backfilling_partitioned_table' do - let(:source_column) { 'id' } + let(:source_column) { :id } context 'when the table is not allowed' do - let(:source_table) { :this_table_is_not_allowed } + let(:source_table) { :_test_this_table_is_not_allowed } it 'raises an error' do expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original @@ -601,131 +615,28 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end end - context 'finishing pending background migration jobs' do + context 'finishing pending batched background migration jobs' do let(:source_table_double) { double('table name') } let(:raw_arguments) { [1, 50_000, source_table_double, partitioned_table, source_column] } let(:background_job) { double('background job', args: ['background jobs', raw_arguments]) } - - before do - allow(migration).to receive(:table_exists?).with(partitioned_table).and_return(true) - allow(migration).to receive(:copy_missed_records) - allow(migration).to receive(:execute).with(/VACUUM/) - allow(migration).to receive(:execute).with(/^(RE)?SET/) - end - - it 'finishes remaining jobs for the correct table' do - expect_next_instance_of(described_class::JobArguments) do |job_arguments| - expect(job_arguments).to receive(:source_table_name).and_call_original - end - - expect(Gitlab::BackgroundMigration).to receive(:steal) - .with(described_class::MIGRATION_CLASS_NAME) - .and_yield(background_job) - - expect(source_table_double).to receive(:==).with(source_table.to_s) - - migration.finalize_backfilling_partitioned_table source_table - end - - it 'requires the migration helper to execute in DML mode' do - expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!) 
- - expect(Gitlab::BackgroundMigration).to receive(:steal) - .with(described_class::MIGRATION_CLASS_NAME) - .and_yield(background_job) - - migration.finalize_backfilling_partitioned_table source_table - end - end - - context 'when there is missed data' do - let(:partitioned_model) { Class.new(ActiveRecord::Base) } - let(:timestamp) { Time.utc(2019, 12, 1, 12).round } - let!(:record1) { source_model.create!(name: 'Bob', age: 20, created_at: timestamp, updated_at: timestamp) } - let!(:record2) { source_model.create!(name: 'Alice', age: 30, created_at: timestamp, updated_at: timestamp) } - let!(:record3) { source_model.create!(name: 'Sam', age: 40, created_at: timestamp, updated_at: timestamp) } - let!(:record4) { source_model.create!(name: 'Sue', age: 50, created_at: timestamp, updated_at: timestamp) } - - let!(:pending_job1) do - create(:background_migration_job, - class_name: described_class::MIGRATION_CLASS_NAME, - arguments: [record1.id, record2.id, source_table, partitioned_table, source_column]) - end - - let!(:pending_job2) do - create(:background_migration_job, - class_name: described_class::MIGRATION_CLASS_NAME, - arguments: [record3.id, record3.id, source_table, partitioned_table, source_column]) - end - - let!(:succeeded_job) do - create(:background_migration_job, :succeeded, - class_name: described_class::MIGRATION_CLASS_NAME, - arguments: [record4.id, record4.id, source_table, partitioned_table, source_column]) + let(:bbm_arguments) do + { + job_class_name: described_class::MIGRATION, + table_name: source_table, + column_name: connection.primary_key(source_table), + job_arguments: [partitioned_table] + } end before do - partitioned_model.primary_key = :id - partitioned_model.table_name = partitioned_table - - allow(migration).to receive(:queue_background_migration_jobs_by_range_at_intervals) - - migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date - - allow(Gitlab::BackgroundMigration).to receive(:steal) + allow(migration).to receive(:table_exists?).with(partitioned_table).and_return(true) allow(migration).to receive(:execute).with(/VACUUM/) allow(migration).to receive(:execute).with(/^(RE)?SET/) end - it 'idempotently cleans up after failed background migrations' do - expect(partitioned_model.count).to eq(0) - - partitioned_model.insert(record2.attributes, unique_by: [:id, :created_at]) - - expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable) do |backfill| - allow(backfill).to receive(:transaction_open?).and_return(false) - - expect(backfill).to receive(:perform) - .with(record1.id, record2.id, source_table, partitioned_table, source_column) - .and_call_original - - expect(backfill).to receive(:perform) - .with(record3.id, record3.id, source_table, partitioned_table, source_column) - .and_call_original - end - - migration.finalize_backfilling_partitioned_table source_table - - expect(partitioned_model.count).to eq(3) - - [record1, record2, record3].each do |original| - copy = partitioned_model.find(original.id) - expect(copy.attributes).to eq(original.attributes) - end - - expect(partitioned_model.find_by_id(record4.id)).to be_nil - - [pending_job1, pending_job2].each do |job| - expect(job.reload).to be_succeeded - end - end - - it 'raises an error if no job tracking records are marked as succeeded' do - expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable) do |backfill| - allow(backfill).to receive(:transaction_open?).and_return(false) - 
- expect(backfill).to receive(:perform).and_return(0) - end - - expect do - migration.finalize_backfilling_partitioned_table source_table - end.to raise_error(/failed to update tracking record/) - end - - it 'vacuums the table after loading is complete' do - expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable) do |backfill| - allow(backfill).to receive(:perform).and_return(1) - end + it 'ensures finishing of remaining jobs and vacuums the partitioned table' do + expect(migration).to receive(:ensure_batched_background_migration_is_finished) + .with(bbm_arguments) expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:with_suppressed).and_yield expect(migration).to receive(:disable_statement_timeout).and_call_original diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb index ae74ee60a4b..9df238a0024 100644 --- a/spec/lib/gitlab/database/partitioning_spec.rb +++ b/spec/lib/gitlab/database/partitioning_spec.rb @@ -2,11 +2,11 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::Partitioning do +RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do include Database::PartitioningHelpers include Database::TableSchemaHelpers - let(:connection) { ApplicationRecord.connection } + let(:main_connection) { ApplicationRecord.connection } around do |example| previously_registered_models = described_class.registered_models.dup @@ -65,21 +65,26 @@ RSpec.describe Gitlab::Database::Partitioning do describe '.sync_partitions' do let(:ci_connection) { Ci::ApplicationRecord.connection } - let(:table_names) { %w[partitioning_test1 partitioning_test2] } + let(:table_names) { %w[_test_partitioning_test1 _test_partitioning_test2] } let(:models) do - table_names.map do |table_name| + [ Class.new(ApplicationRecord) do include PartitionedTable - self.table_name = table_name + self.table_name = :_test_partitioning_test1 partitioned_by :created_at, strategy: :monthly + end, + Class.new(Gitlab::Database::Partitioning::TableWithoutModel).tap do |klass| + klass.table_name = :_test_partitioning_test2 + klass.partitioned_by(:created_at, strategy: :monthly) + klass.limit_connection_names = %i[main] end - end + ] end before do table_names.each do |table_name| - connection.execute(<<~SQL) + execute_on_each_database(<<~SQL) CREATE TABLE #{table_name} ( id serial not null, created_at timestamptz not null, @@ -96,32 +101,12 @@ RSpec.describe Gitlab::Database::Partitioning do end context 'with multiple databases' do - before do - table_names.each do |table_name| - ci_connection.execute("DROP TABLE IF EXISTS #{table_name}") - - ci_connection.execute(<<~SQL) - CREATE TABLE #{table_name} ( - id serial not null, - created_at timestamptz not null, - PRIMARY KEY (id, created_at)) - PARTITION BY RANGE (created_at); - SQL - end - end - - after do - table_names.each do |table_name| - ci_connection.execute("DROP TABLE IF EXISTS #{table_name}") - end - end - it 'creates partitions in each database' do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) expect { described_class.sync_partitions(models) } - .to change { find_partitions(table_names.first, conn: connection).size }.from(0) - .and change { find_partitions(table_names.last, conn: connection).size }.from(0) + .to change { find_partitions(table_names.first, conn: main_connection).size }.from(0) + .and change { find_partitions(table_names.last, conn: main_connection).size }.from(0) .and change { 
find_partitions(table_names.first, conn: ci_connection).size }.from(0) .and change { find_partitions(table_names.last, conn: ci_connection).size }.from(0) end @@ -150,16 +135,18 @@ RSpec.describe Gitlab::Database::Partitioning do Class.new(Ci::ApplicationRecord) do include PartitionedTable - self.table_name = 'partitioning_test3' + self.table_name = :_test_partitioning_test3 partitioned_by :created_at, strategy: :monthly end end before do - (table_names + ['partitioning_test3']).each do |table_name| - ci_connection.execute("DROP TABLE IF EXISTS #{table_name}") + skip_if_shared_database(:ci) + + (table_names + [:_test_partitioning_test3]).each do |table_name| + execute_on_each_database("DROP TABLE IF EXISTS #{table_name}") - ci_connection.execute(<<~SQL) + execute_on_each_database(<<~SQL) CREATE TABLE #{table_name} ( id serial not null, created_at timestamptz not null, @@ -170,20 +157,33 @@ RSpec.describe Gitlab::Database::Partitioning do end after do - (table_names + ['partitioning_test3']).each do |table_name| + (table_names + [:_test_partitioning_test3]).each do |table_name| ci_connection.execute("DROP TABLE IF EXISTS #{table_name}") end end it 'manages partitions for models for the given database', :aggregate_failures do - skip_if_multiple_databases_not_setup(:ci) - expect { described_class.sync_partitions([models.first, ci_model], only_on: 'ci') } .to change { find_partitions(ci_model.table_name, conn: ci_connection).size }.from(0) - expect(find_partitions(models.first.table_name).size).to eq(0) + expect(find_partitions(models.first.table_name, conn: main_connection).size).to eq(0) expect(find_partitions(models.first.table_name, conn: ci_connection).size).to eq(0) - expect(find_partitions(ci_model.table_name).size).to eq(0) + expect(find_partitions(ci_model.table_name, conn: main_connection).size).to eq(0) + end + end + + context 'when partition_manager_sync_partitions feature flag is disabled' do + before do + described_class.register_models(models) + stub_feature_flags(partition_manager_sync_partitions: false) + end + + it 'skips sync_partitions' do + expect(described_class::PartitionManager).not_to receive(:new) + expect(described_class).to receive(:sync_partitions) + .and_call_original + + described_class.sync_partitions(models) end end end @@ -228,7 +228,7 @@ RSpec.describe Gitlab::Database::Partitioning do end describe '.drop_detached_partitions' do - let(:table_names) { %w[detached_test_partition1 detached_test_partition2] } + let(:table_names) { %w[_test_detached_test_partition1 _test_detached_test_partition2] } before do table_names.each do |table_name| diff --git a/spec/lib/gitlab/database/pg_depend_spec.rb b/spec/lib/gitlab/database/pg_depend_spec.rb new file mode 100644 index 00000000000..547a2c84b76 --- /dev/null +++ b/spec/lib/gitlab/database/pg_depend_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::PgDepend, type: :model, feature_category: :database do + let(:connection) { described_class.connection } + + describe '.from_pg_extension' do + subject { described_class.from_pg_extension('VIEW') } + + context 'when having views as dependency' do + before do + connection.execute('CREATE EXTENSION IF NOT EXISTS pg_stat_statements;') + end + + it 'returns pg_stat_statements', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410508' do + expect(subject.pluck('relname')).to eq(['pg_stat_statements']) + end + end + end +end diff --git a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb 
b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb index ae56f66737d..03343c134ae 100644 --- a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb +++ b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb @@ -70,13 +70,29 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ end describe '#by_constrained_table_name' do - it 'finds the foreign keys for the constrained table' do - expected = described_class.where(name: %w[fk_constrained_to_referenced fk_constrained_to_other_referenced]).to_a + let(:expected) { described_class.where(name: %w[fk_constrained_to_referenced fk_constrained_to_other_referenced]).to_a } + it 'finds the foreign keys for the constrained table' do expect(described_class.by_constrained_table_name(table_name("constrained_table"))).to match_array(expected) end end + describe '#by_constrained_table_name_or_identifier' do + let(:expected) { described_class.where(name: %w[fk_constrained_to_referenced fk_constrained_to_other_referenced]).to_a } + + context 'when using table name' do + it 'finds the foreign keys for the constrained table' do + expect(described_class.by_constrained_table_name_or_identifier(table_name("constrained_table"))).to match_array(expected) + end + end + + context 'when using identifier' do + it 'finds the foreign keys for the constrained table' do + expect(described_class.by_constrained_table_name_or_identifier(schema_table_name('constrained_table'))).to match_array(expected) + end + end + end + describe '#by_name' do it 'finds foreign keys by name' do expect(described_class.by_name('fk_constrained_to_referenced').pluck(:name)).to contain_exactly('fk_constrained_to_referenced') @@ -187,10 +203,8 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ end end - context 'when supporting foreign keys to inherited tables in postgres 12' do + context 'when supporting foreign keys on partitioned tables' do before do - skip('not supported before postgres 12') if ApplicationRecord.database.version.to_f < 12 - ApplicationRecord.connection.execute(<<~SQL) create table #{schema_table_name('parent')} ( id bigserial primary key not null @@ -232,6 +246,40 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ end end + context 'with two tables both partitioned' do + before do + ApplicationRecord.connection.execute(<<~SQL) + create table #{table_name('parent')} ( + id bigserial primary key not null + ) partition by hash(id); + + create table #{table_name('child')} + partition of #{table_name('parent')} for values with (remainder 1, modulus 2); + + create table #{table_name('ref_parent')} ( + id bigserial primary key not null + ) partition by hash(id); + + create table #{table_name('ref_child_1')} + partition of #{table_name('ref_parent')} for values with (remainder 1, modulus 3); + + create table #{table_name('ref_child_2')} + partition of #{table_name('ref_parent')} for values with (remainder 2, modulus 3); + + alter table #{table_name('parent')} add constraint fk foreign key (id) references #{table_name('ref_parent')} (id); + SQL + end + + describe '#child_foreign_keys' do + it 'is the child foreign keys of the partitioned parent fk' do + fk = described_class.by_constrained_table_name(table_name('parent')).first + children = fk.child_foreign_keys + expect(children.count).to eq(1) + expect(children.first.constrained_table_name).to eq(table_name('child')) + end + end + end + def schema_table_name(name) "public.#{table_name(name)}" end diff --git 
a/spec/lib/gitlab/database/postgres_partition_spec.rb b/spec/lib/gitlab/database/postgres_partition_spec.rb index 14a4d405621..48dbdbc7757 100644 --- a/spec/lib/gitlab/database/postgres_partition_spec.rb +++ b/spec/lib/gitlab/database/postgres_partition_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::PostgresPartition, type: :model do +RSpec.describe Gitlab::Database::PostgresPartition, type: :model, feature_category: :database do + let(:current_schema) { ActiveRecord::Base.connection.select_value("SELECT current_schema()") } let(:schema) { 'gitlab_partitions_dynamic' } let(:name) { '_test_partition_01' } let(:identifier) { "#{schema}.#{name}" } @@ -56,9 +57,20 @@ RSpec.describe Gitlab::Database::PostgresPartition, type: :model do expect(partitions.pluck(:name)).to eq([name, second_name]) end + it 'returns the partitions if the parent table schema is included in the table name' do + partitions = described_class.for_parent_table("#{current_schema}._test_partitioned_table") + + expect(partitions.count).to eq(2) + expect(partitions.pluck(:name)).to eq([name, second_name]) + end + it 'does not return partitions for tables not in the current schema' do expect(described_class.for_parent_table('_test_other_table').count).to eq(0) end + + it 'does not return partitions for tables if the schema is not the current' do + expect(described_class.for_parent_table('foo_bar._test_partitioned_table').count).to eq(0) + end end describe '#parent_identifier' do diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb index 3a92f35d585..6a0c4226db8 100644 --- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb @@ -57,10 +57,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana "for query accessing gitlab_main and unknown schema" => { model: ApplicationRecord, sql: "SELECT 1 FROM projects LEFT JOIN not_in_schema ON not_in_schema.project_id=projects.id", - expectations: { - gitlab_schemas: "gitlab_main,undefined_not_in_schema", - db_config_name: "main" - } + expect_error: + /Could not find gitlab schema for table not_in_schema/ } } end @@ -74,10 +72,14 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana allow(::Ci::ApplicationRecord.load_balancer).to receive(:configuration) .and_return(Gitlab::Database::LoadBalancing::Configuration.for_model(::Ci::ApplicationRecord)) - expect(described_class.schemas_metrics).to receive(:increment) - .with(expectations).and_call_original + if expect_error + expect { process_sql(model, sql) }.to raise_error(expect_error) + else + expect(described_class.schemas_metrics).to receive(:increment) + .with(expectations).and_call_original - process_sql(model, sql) + process_sql(model, sql) + end end end end diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb index d31be6cb883..e3ff5ab4779 100644 --- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection, query_analyzers: false, - feature_category: :pods do + feature_category: :cell 
do let(:analyzer) { described_class } # We keep only the GitlabSchemasValidateConnection analyzer running @@ -28,19 +28,19 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection model: ApplicationRecord, sql: "SELECT 1 FROM projects LEFT JOIN ci_builds ON ci_builds.project_id=projects.id", expect_error: /The query tried to access \["projects", "ci_builds"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } }, "for query accessing gitlab_ci and gitlab_main the gitlab_schemas is always ordered" => { model: ApplicationRecord, sql: "SELECT 1 FROM ci_builds LEFT JOIN projects ON ci_builds.project_id=projects.id", expect_error: /The query tried to access \["ci_builds", "projects"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } }, "for query accessing main table from CI database" => { model: Ci::ApplicationRecord, sql: "SELECT 1 FROM projects", expect_error: /The query tried to access \["projects"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } }, "for query accessing CI database" => { model: Ci::ApplicationRecord, @@ -51,13 +51,14 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection model: ::ApplicationRecord, sql: "SELECT 1 FROM ci_builds", expect_error: /The query tried to access \["ci_builds"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } }, "for query accessing unknown gitlab_schema" => { model: ::ApplicationRecord, sql: "SELECT 1 FROM new_table", - expect_error: /The query tried to access \["new_table"\] \(of undefined_new_table\)/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + expect_error: + /Could not find gitlab schema for table new_table/, + setup: -> (_) { skip_if_shared_database(:ci) } } } end @@ -77,7 +78,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection context "when analyzer is enabled for tests", :query_analyzers do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) end it "throws an error when trying to access a table that belongs to the gitlab_main schema from the ci database" do diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb index a4322689bf9..02bd6b51463 100644 --- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification, query_analyzers: false, - feature_category: :pods do + feature_category: :cell do let_it_be(:pipeline, refind: true) { create(:ci_pipeline) } let_it_be(:project, refind: true) { create(:project) } @@ -118,6 +118,18 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio end end + context 'when ci_pipelines are ignored for cross modification' do + it 'does not raise error' do + Project.transaction do + expect do + described_class.temporary_ignore_tables_in_transaction(%w[ci_pipelines], url: 'TODO') do + run_queries + end + end.not_to raise_error + end + end + end + context 'when data modification happens in nested transactions' do it 'raises error' do 
Project.transaction(requires_new: true) do @@ -209,27 +221,16 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio end end - context 'when some table with a defined schema and another table with undefined gitlab_schema is modified' do - it 'raises an error including including message about undefined schema' do - expect do - Project.transaction do - project.touch - project.connection.execute('UPDATE foo_bars_undefined_table SET a=1 WHERE id = -1') - end - end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/ - end - end - context 'when execution is rescued with StandardError' do it 'raises cross-database data modification exception' do expect do Project.transaction do project.touch - project.connection.execute('UPDATE foo_bars_undefined_table SET a=1 WHERE id = -1') + project.connection.execute('UPDATE ci_pipelines SET id=1 WHERE id = -1') end rescue StandardError # Ensures that standard rescue does not silence errors - end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/ + end.to raise_error /Cross-database data modification/ end end diff --git a/spec/lib/gitlab/database/reflection_spec.rb b/spec/lib/gitlab/database/reflection_spec.rb index 779bdbe50f0..641dd48be36 100644 --- a/spec/lib/gitlab/database/reflection_spec.rb +++ b/spec/lib/gitlab/database/reflection_spec.rb @@ -191,9 +191,15 @@ RSpec.describe Gitlab::Database::Reflection, feature_category: :database do expect(database.postgresql_minimum_supported_version?).to eq(false) end - it 'returns true when using PostgreSQL 12' do + it 'returns false when using PostgreSQL 12' do allow(database).to receive(:version).and_return('12') + expect(database.postgresql_minimum_supported_version?).to eq(false) + end + + it 'returns true when using PostgreSQL 13' do + allow(database).to receive(:version).and_return('13') + expect(database.postgresql_minimum_supported_version?).to eq(true) end end diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb index a8af9bb5a38..4d0e58b0937 100644 --- a/spec/lib/gitlab/database/reindexing_spec.rb +++ b/spec/lib/gitlab/database/reindexing_spec.rb @@ -71,7 +71,7 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t context 'when async FK validation is enabled' do it 'executes FK validation for each database prior to any reindexing actions' do - expect(Gitlab::Database::AsyncForeignKeys).to receive(:validate_pending_entries!).ordered.exactly(databases_count).times + expect(Gitlab::Database::AsyncConstraints).to receive(:validate_pending_entries!).ordered.exactly(databases_count).times expect(described_class).to receive(:automatic_reindexing).ordered.exactly(databases_count).times described_class.invoke @@ -82,7 +82,7 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t it 'does not execute FK validation' do stub_feature_flags(database_async_foreign_key_validation: false) - expect(Gitlab::Database::AsyncForeignKeys).not_to receive(:validate_pending_entries!) + expect(Gitlab::Database::AsyncConstraints).not_to receive(:validate_pending_entries!) 
described_class.invoke end diff --git a/spec/lib/gitlab/database/schema_validation/adapters/column_database_adapter_spec.rb b/spec/lib/gitlab/database/schema_validation/adapters/column_database_adapter_spec.rb new file mode 100644 index 00000000000..d81f5f3dbec --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/adapters/column_database_adapter_spec.rb @@ -0,0 +1,72 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Adapters::ColumnDatabaseAdapter, feature_category: :database do + subject(:adapter) { described_class.new(db_result) } + + let(:column_name) { 'email' } + let(:column_default) { "'no-reply@gitlab.com'::character varying" } + let(:not_null) { true } + let(:partition_key) { false } + let(:db_result) do + { + 'table_name' => 'projects', + 'column_name' => column_name, + 'data_type' => 'character varying', + 'column_default' => column_default, + 'not_null' => not_null, + 'partition_key' => partition_key + } + end + + describe '#name' do + it { expect(adapter.name).to eq('email') } + end + + describe '#table_name' do + it { expect(adapter.table_name).to eq('projects') } + end + + describe '#data_type' do + it { expect(adapter.data_type).to eq('character varying') } + end + + describe '#default' do + context "when there's no default value in the column" do + let(:column_default) { nil } + + it { expect(adapter.default).to be_nil } + end + + context 'when the column name is id' do + let(:column_name) { 'id' } + + it { expect(adapter.default).to be_nil } + end + + context 'when the column default includes nextval' do + let(:column_default) { "nextval('my_seq'::regclass)" } + + it { expect(adapter.default).to be_nil } + end + + it { expect(adapter.default).to eq("DEFAULT 'no-reply@gitlab.com'::character varying") } + end + + describe '#nullable' do + context 'when column is not null' do + it { expect(adapter.nullable).to eq('NOT NULL') } + end + + context 'when column is nullable' do + let(:not_null) { false } + + it { expect(adapter.nullable).to be_nil } + end + end + + describe '#partition_key?' 
do + it { expect(adapter.partition_key?).to be(false) } + end +end diff --git a/spec/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter_spec.rb b/spec/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter_spec.rb new file mode 100644 index 00000000000..64b59e65be6 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter_spec.rb @@ -0,0 +1,78 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Adapters::ColumnStructureSqlAdapter, feature_category: :database do + subject(:adapter) { described_class.new(table_name, column_def, partition_stmt) } + + let(:table_name) { 'test_table' } + let(:file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:table_stmts) { PgQuery.parse(File.read(file_path)).tree.stmts.filter_map { |s| s.stmt.create_stmt } } + let(:table) { table_stmts.find { |table| table.relation.relname == table_name } } + let(:partition_stmt) { table.partspec } + let(:column_stmts) { table.table_elts } + let(:column_def) { column_stmts.find { |col| col.column_def.colname == column_name }.column_def } + + where(:column_name, :data_type, :default_value, :nullable, :partition_key) do + [ + ['id', 'bigint', nil, 'NOT NULL', false], + ['integer_column', 'integer', nil, nil, false], + ['integer_with_default_column', 'integer', 'DEFAULT 1', nil, false], + ['smallint_with_default_column', 'smallint', 'DEFAULT 0', 'NOT NULL', false], + ['double_precision_with_default_column', 'double precision', 'DEFAULT 1.0', nil, false], + ['numeric_with_default_column', 'numeric', 'DEFAULT 1.0', 'NOT NULL', false], + ['boolean_with_default_colum', 'boolean', 'DEFAULT true', 'NOT NULL', false], + ['varying_with_default_column', 'character varying', "DEFAULT 'DEFAULT'::character varying", 'NOT NULL', false], + ['varying_with_limit_and_default_column', 'character varying(255)', "DEFAULT 'DEFAULT'::character varying", + nil, false], + ['text_with_default_column', 'text', "DEFAULT ''::text", 'NOT NULL', false], + ['array_with_default_column', 'character varying(255)[]', "DEFAULT '{one,two}'::character varying[]", + 'NOT NULL', false], + ['jsonb_with_default_column', 'jsonb', "DEFAULT '[]'::jsonb", 'NOT NULL', false], + ['timestamptz_with_default_column', 'timestamp(6) with time zone', "DEFAULT now()", nil, false], + ['timestamp_with_default_column', 'timestamp(6) without time zone', + "DEFAULT '2022-01-23 00:00:00+00'::timestamp without time zone", 'NOT NULL', false], + ['date_with_default_column', 'date', 'DEFAULT 2023-04-05', nil, false], + ['inet_with_default_column', 'inet', "DEFAULT '0.0.0.0'::inet", 'NOT NULL', false], + ['macaddr_with_default_column', 'macaddr', "DEFAULT '00-00-00-00-00-000'::macaddr", 'NOT NULL', false], + ['uuid_with_default_column', 'uuid', "DEFAULT '00000000-0000-0000-0000-000000000000'::uuid", 'NOT NULL', false], + ['partition_key', 'bigint', 'DEFAULT 1', 'NOT NULL', true], + ['created_at', 'timestamp with time zone', 'DEFAULT now()', 'NOT NULL', true] + ] + end + + with_them do + describe '#name' do + it { expect(adapter.name).to eq(column_name) } + end + + describe '#table_name' do + it { expect(adapter.table_name).to eq(table_name) } + end + + describe '#data_type' do + it { expect(adapter.data_type).to eq(data_type) } + end + + describe '#nullable' do + it { expect(adapter.nullable).to eq(nullable) } + end + + describe '#default' do + it { expect(adapter.default).to eq(default_value) } + end + + describe 
'#partition_key?' do + it { expect(adapter.partition_key?).to eq(partition_key) } + end + end + + context 'when the data type is not mapped' do + let(:column_name) { 'unmapped_column_type' } + let(:error_class) { Gitlab::Database::SchemaValidation::Adapters::UndefinedPGType } + + describe '#data_type' do + it { expect { adapter.data_type }.to raise_error(error_class) } + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/database_spec.rb b/spec/lib/gitlab/database/schema_validation/database_spec.rb index c0026f91b46..0b5f433b1c9 100644 --- a/spec/lib/gitlab/database/schema_validation/database_spec.rb +++ b/spec/lib/gitlab/database/schema_validation/database_spec.rb @@ -2,44 +2,92 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::SchemaValidation::Database, feature_category: :database do - let(:database_name) { 'main' } - let(:database_indexes) do - [['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))']] - end +RSpec.shared_examples 'database schema assertions for' do |fetch_by_name_method, exists_method, all_objects_method| + subject(:database) { described_class.new(connection) } - let(:query_result) { instance_double('ActiveRecord::Result', rows: database_indexes) } - let(:database_model) { Gitlab::Database.database_base_models[database_name] } + let(:database_model) { Gitlab::Database.database_base_models['main'] } let(:connection) { database_model.connection } - subject(:database) { described_class.new(connection) } - before do - allow(connection).to receive(:exec_query).and_return(query_result) + allow(connection).to receive(:select_rows).and_return(results) + allow(connection).to receive(:exec_query).and_return(results) end - describe '#fetch_index_by_name' do - context 'when index does not exist' do - it 'returns nil' do - index = database.fetch_index_by_name('non_existing_index') + describe "##{fetch_by_name_method}" do + it 'returns nil when schema object does not exists' do + expect(database.public_send(fetch_by_name_method, 'invalid-object-name')).to be_nil + end + + it 'returns the schema object by name' do + expect(database.public_send(fetch_by_name_method, valid_schema_object_name).name).to eq(valid_schema_object_name) + end + end + + describe "##{exists_method}" do + it 'returns true when schema object exists' do + expect(database.public_send(exists_method, valid_schema_object_name)).to be_truthy + end - expect(index).to be_nil - end + it 'returns false when schema object does not exists' do + expect(database.public_send(exists_method, 'invalid-object')).to be_falsey end + end - it 'returns index by name' do - index = database.fetch_index_by_name('index') + describe "##{all_objects_method}" do + it 'returns all the schema objects' do + schema_objects = database.public_send(all_objects_method) - expect(index.name).to eq('index') + expect(schema_objects).to all(be_a(schema_object)) + expect(schema_objects.map(&:name)).to eq([valid_schema_object_name]) end end +end + +RSpec.describe Gitlab::Database::SchemaValidation::Database, feature_category: :database do + context 'when having indexes' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index } + let(:valid_schema_object_name) { 'index' } + let(:results) do + [['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))']] + end - describe '#indexes' do - it 'returns indexes' do - indexes = database.indexes + include_examples 'database schema assertions for', 'fetch_index_by_name', 
'index_exists?', 'indexes' + end - expect(indexes).to all(be_a(Gitlab::Database::SchemaValidation::Index)) - expect(indexes.map(&:name)).to eq(['index']) + context 'when having triggers' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Trigger } + let(:valid_schema_object_name) { 'my_trigger' } + let(:results) do + [['my_trigger', 'CREATE TRIGGER my_trigger BEFORE INSERT ON todos FOR EACH ROW EXECUTE FUNCTION trigger()']] end + + include_examples 'database schema assertions for', 'fetch_trigger_by_name', 'trigger_exists?', 'triggers' + end + + context 'when having tables' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Table } + let(:valid_schema_object_name) { 'my_table' } + let(:results) do + [ + { + 'table_name' => 'my_table', + 'column_name' => 'id', + 'not_null' => true, + 'data_type' => 'bigint', + 'partition_key' => false, + 'column_default' => "nextval('audit_events_id_seq'::regclass)" + }, + { + 'table_name' => 'my_table', + 'column_name' => 'details', + 'not_null' => false, + 'data_type' => 'text', + 'partition_key' => false, + 'column_default' => nil + } + ] + end + + include_examples 'database schema assertions for', 'fetch_table_by_name', 'table_exists?', 'tables' end end diff --git a/spec/lib/gitlab/database/schema_validation/inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/inconsistency_spec.rb new file mode 100644 index 00000000000..a49ff8339a1 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/inconsistency_spec.rb @@ -0,0 +1,96 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Inconsistency, feature_category: :database do + let(:validator) { Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes } + + let(:database_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' } + let(:structure_sql_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (id)' } + + let(:structure_stmt) { PgQuery.parse(structure_sql_statement).tree.stmts.first.stmt.index_stmt } + let(:database_stmt) { PgQuery.parse(database_statement).tree.stmts.first.stmt.index_stmt } + + let(:structure_sql_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index.new(structure_stmt) } + let(:database_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index.new(database_stmt) } + + subject(:inconsistency) { described_class.new(validator, structure_sql_object, database_object) } + + describe '#object_name' do + it 'returns the index name' do + expect(inconsistency.object_name).to eq('index_name') + end + end + + describe '#diff' do + it 'returns a diff between the structure.sql and the database' do + expect(inconsistency.diff).to be_a(Diffy::Diff) + expect(inconsistency.diff.string1).to eq("#{structure_sql_statement}\n") + expect(inconsistency.diff.string2).to eq("#{database_statement}\n") + end + end + + describe '#error_message' do + it 'returns the error message' do + stub_const "#{validator}::ERROR_MESSAGE", 'error message %s' + + expect(inconsistency.error_message).to eq('error message index_name') + end + end + + describe '#type' do + it 'returns the type of the validator' do + expect(inconsistency.type).to eq('different_definition_indexes') + end + end + + describe '#table_name' do + it 'returns the table name' do + expect(inconsistency.table_name).to eq('achievements') + end + end + + describe '#object_type' do + it 'returns the structure sql object type' do + 
expect(inconsistency.object_type).to eq('Index') + end + + context 'when the structure sql object is not available' do + subject(:inconsistency) { described_class.new(validator, nil, database_object) } + + it 'returns the database object type' do + expect(inconsistency.object_type).to eq('Index') + end + end + end + + describe '#structure_sql_statement' do + it 'returns structure sql statement' do + expect(inconsistency.structure_sql_statement).to eq("#{structure_sql_statement}\n") + end + end + + describe '#database_statement' do + it 'returns database statement' do + expect(inconsistency.database_statement).to eq("#{database_statement}\n") + end + end + + describe '#inspect' do + let(:expected_output) do + <<~MSG + ------------------------------------------------------ + The index_name index has a different statement between structure.sql and database + Diff: + \e[31m-CREATE INDEX index_name ON public.achievements USING btree (id)\e[0m + \e[32m+CREATE INDEX index_name ON public.achievements USING btree (namespace_id)\e[0m + + ------------------------------------------------------ + MSG + end + + it 'prints the inconsistency message' do + expect(inconsistency.inspect).to eql(expected_output) + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/index_spec.rb b/spec/lib/gitlab/database/schema_validation/index_spec.rb deleted file mode 100644 index 297211d79ed..00000000000 --- a/spec/lib/gitlab/database/schema_validation/index_spec.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -RSpec.describe Gitlab::Database::SchemaValidation::Index, feature_category: :database do - let(:index_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' } - - let(:stmt) { PgQuery.parse(index_statement).tree.stmts.first.stmt.index_stmt } - - let(:index) { described_class.new(stmt) } - - describe '#name' do - it 'returns index name' do - expect(index.name).to eq('index_name') - end - end - - describe '#statement' do - it 'returns index statement' do - expect(index.statement).to eq(index_statement) - end - end -end diff --git a/spec/lib/gitlab/database/schema_validation/indexes_spec.rb b/spec/lib/gitlab/database/schema_validation/indexes_spec.rb deleted file mode 100644 index 4351031a4b4..00000000000 --- a/spec/lib/gitlab/database/schema_validation/indexes_spec.rb +++ /dev/null @@ -1,56 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::SchemaValidation::Indexes, feature_category: :database do - let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } - let(:database_indexes) do - [ - ['wrong_index', 'CREATE UNIQUE INDEX wrong_index ON public.table_name (column_name)'], - ['extra_index', 'CREATE INDEX extra_index ON public.table_name (column_name)'], - ['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))'] - ] - end - - let(:database_name) { 'main' } - - let(:database_model) { Gitlab::Database.database_base_models[database_name] } - - let(:connection) { database_model.connection } - - let(:query_result) { instance_double('ActiveRecord::Result', rows: database_indexes) } - - let(:database) { Gitlab::Database::SchemaValidation::Database.new(connection) } - let(:structure_file) { Gitlab::Database::SchemaValidation::StructureSql.new(structure_file_path) } - - subject(:schema_validation) { described_class.new(structure_file, database) } - - before do - allow(connection).to 
receive(:exec_query).and_return(query_result) - end - - describe '#missing_indexes' do - it 'returns missing indexes' do - missing_indexes = %w[ - missing_index - index_namespaces_public_groups_name_id - index_on_deploy_keys_id_and_type_and_public - index_users_on_public_email_excluding_null_and_empty - ] - - expect(schema_validation.missing_indexes).to match_array(missing_indexes) - end - end - - describe '#extra_indexes' do - it 'returns extra indexes' do - expect(schema_validation.extra_indexes).to match_array(['extra_index']) - end - end - - describe '#wrong_indexes' do - it 'returns wrong indexes' do - expect(schema_validation.wrong_indexes).to match_array(['wrong_index']) - end - end -end diff --git a/spec/lib/gitlab/database/schema_validation/runner_spec.rb b/spec/lib/gitlab/database/schema_validation/runner_spec.rb new file mode 100644 index 00000000000..f5d1c6ba31b --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/runner_spec.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Runner, feature_category: :database do + let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:connection) { ActiveRecord::Base.connection } + + let(:database) { Gitlab::Database::SchemaValidation::Database.new(connection) } + let(:structure_sql) { Gitlab::Database::SchemaValidation::StructureSql.new(structure_file_path, 'public') } + + describe '#execute' do + subject(:inconsistencies) { described_class.new(structure_sql, database).execute } + + it 'returns inconsistencies' do + expect(inconsistencies).not_to be_empty + end + + it 'execute all validators' do + all_validators = Gitlab::Database::SchemaValidation::Validators::BaseValidator.all_validators + + expect(all_validators).to all(receive(:new).with(structure_sql, database).and_call_original) + + inconsistencies + end + + context 'when validators are passed' do + subject(:inconsistencies) { described_class.new(structure_sql, database, validators: validators).execute } + + let(:class_name) { 'Gitlab::Database::SchemaValidation::Validators::ExtraIndexes' } + let(:inconsistency_class_name) { 'Gitlab::Database::SchemaValidation::Inconsistency' } + + let(:extra_indexes) { class_double(class_name) } + let(:instace_extra_index) { instance_double(class_name, execute: [inconsistency]) } + let(:inconsistency) { instance_double(inconsistency_class_name, object_name: 'test') } + + let(:validators) { [extra_indexes] } + + it 'only execute the validators passed' do + expect(extra_indexes).to receive(:new).with(structure_sql, database).and_return(instace_extra_index) + + Gitlab::Database::SchemaValidation::Validators::BaseValidator.all_validators.each do |validator| + expect(validator).not_to receive(:new).with(structure_sql, database) + end + + expect(inconsistencies.map(&:object_name)).to eql ['test'] + end + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb new file mode 100644 index 00000000000..7d6a279def9 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::SchemaInconsistency, type: :model, feature_category: :database do + it { is_expected.to be_a ApplicationRecord } + + describe 'associations' do + it { is_expected.to belong_to(:issue) } + end + + describe 
"Validations" do + it { is_expected.to validate_presence_of(:object_name) } + it { is_expected.to validate_presence_of(:valitador_name) } + it { is_expected.to validate_presence_of(:table_name) } + end +end diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/column_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/column_spec.rb new file mode 100644 index 00000000000..74bc5f43b50 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/schema_objects/column_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::Column, feature_category: :database do + subject(:column) { described_class.new(adapter) } + + let(:database_adapter) { 'Gitlab::Database::SchemaValidation::Adapters::ColumnDatabaseAdapter' } + let(:adapter) do + instance_double(database_adapter, name: 'id', table_name: 'projects', + data_type: 'bigint', default: nil, nullable: 'NOT NULL') + end + + describe '#name' do + it { expect(column.name).to eq('id') } + end + + describe '#table_name' do + it { expect(column.table_name).to eq('projects') } + end + + describe '#statement' do + it { expect(column.statement).to eq('id bigint NOT NULL') } + end +end diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/index_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/index_spec.rb new file mode 100644 index 00000000000..43d8fa38ec8 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/schema_objects/index_spec.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::Index, feature_category: :database do + let(:statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' } + let(:name) { 'index_name' } + let(:table_name) { 'achievements' } + + include_examples 'schema objects assertions for', 'index_stmt' +end diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/table_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/table_spec.rb new file mode 100644 index 00000000000..60ea9581517 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/schema_objects/table_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::Table, feature_category: :database do + subject(:table) { described_class.new(name, columns) } + + let(:name) { 'my_table' } + let(:column_class) { 'Gitlab::Database::SchemaValidation::SchemaObjects::Column' } + let(:columns) do + [ + instance_double(column_class, name: 'id', statement: 'id bigint NOT NULL', partition_key?: false), + instance_double(column_class, name: 'col', statement: 'col text', partition_key?: false), + instance_double(column_class, name: 'partition', statement: 'partition integer DEFAULT 1', partition_key?: true) + ] + end + + describe '#name' do + it { expect(table.name).to eq('my_table') } + end + + describe '#table_name' do + it { expect(table.table_name).to eq('my_table') } + end + + describe '#statement' do + it { expect(table.statement).to eq('CREATE TABLE my_table (id bigint NOT NULL, col text)') } + + it 'ignores the partition column' do + expect(table.statement).not_to include('partition integer DEFAULT 1') + end + end + + describe '#fetch_column_by_name' do + it { expect(table.fetch_column_by_name('col')).not_to be_nil } + + it { 
expect(table.fetch_column_by_name('invalid')).to be_nil } + end + + describe '#column_exists?' do + it { expect(table.column_exists?('col')).to eq(true) } + + it { expect(table.column_exists?('invalid')).to eq(false) } + end +end diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/trigger_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/trigger_spec.rb new file mode 100644 index 00000000000..3c2481dfae0 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/schema_objects/trigger_spec.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::Trigger, feature_category: :database do + let(:statement) { 'CREATE TRIGGER my_trigger BEFORE INSERT ON todos FOR EACH ROW EXECUTE FUNCTION trigger()' } + let(:name) { 'my_trigger' } + let(:table_name) { 'todos' } + + include_examples 'schema objects assertions for', 'create_trig_stmt' +end diff --git a/spec/lib/gitlab/database/schema_validation/structure_sql_spec.rb b/spec/lib/gitlab/database/schema_validation/structure_sql_spec.rb new file mode 100644 index 00000000000..b0c056ff5db --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/structure_sql_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.shared_examples 'structure sql schema assertions for' do |object_exists_method, all_objects_method| + subject(:structure_sql) { described_class.new(structure_file_path, schema_name) } + + let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:schema_name) { 'public' } + + describe "##{object_exists_method}" do + it 'returns true when schema object exists' do + expect(structure_sql.public_send(object_exists_method, valid_schema_object_name)).to be_truthy + end + + it 'returns false when schema object does not exists' do + expect(structure_sql.public_send(object_exists_method, 'invalid-object-name')).to be_falsey + end + end + + describe "##{all_objects_method}" do + it 'returns all the schema objects' do + schema_objects = structure_sql.public_send(all_objects_method) + + expect(schema_objects).to all(be_a(schema_object)) + expect(schema_objects.map(&:name)).to eq(expected_objects) + end + end +end + +RSpec.describe Gitlab::Database::SchemaValidation::StructureSql, feature_category: :database do + let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:schema_name) { 'public' } + + subject(:structure_sql) { described_class.new(structure_file_path, schema_name) } + + context 'when having indexes' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index } + let(:valid_schema_object_name) { 'index' } + let(:expected_objects) do + %w[missing_index wrong_index index index_namespaces_public_groups_name_id + index_on_deploy_keys_id_and_type_and_public index_users_on_public_email_excluding_null_and_empty] + end + + include_examples 'structure sql schema assertions for', 'index_exists?', 'indexes' + end + + context 'when having triggers' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Trigger } + let(:valid_schema_object_name) { 'trigger' } + let(:expected_objects) { %w[trigger wrong_trigger missing_trigger_1 projects_loose_fk_trigger] } + + include_examples 'structure sql schema assertions for', 'trigger_exists?', 'triggers' + end + + context 'when having tables' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Table } + 
let(:valid_schema_object_name) { 'test_table' } + let(:expected_objects) do + %w[test_table ci_project_mirrors wrong_table extra_table_columns missing_table missing_table_columns + operations_user_lists] + end + + include_examples 'structure sql schema assertions for', 'table_exists?', 'tables' + end +end diff --git a/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb new file mode 100644 index 00000000000..84db721fc2d --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb @@ -0,0 +1,82 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_category: :database do + describe '#execute' do + let(:validator) { Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes } + + let(:database_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' } + let(:structure_sql_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (id)' } + + let(:structure_stmt) { PgQuery.parse(structure_sql_statement).tree.stmts.first.stmt.index_stmt } + let(:database_stmt) { PgQuery.parse(database_statement).tree.stmts.first.stmt.index_stmt } + + let(:structure_sql_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index.new(structure_stmt) } + let(:database_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index.new(database_stmt) } + + let(:inconsistency) do + Gitlab::Database::SchemaValidation::Inconsistency.new(validator, structure_sql_object, database_object) + end + + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } + + subject(:execute) { described_class.new(inconsistency, project, user).execute } + + before do + stub_spam_services + end + + context 'when is not GitLab.com' do + it 'does not create a schema inconsistency record' do + allow(Gitlab).to receive(:com?).and_return(false) + + expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count } + end + end + + context 'when the issue creation fails' do + let(:issue_creation) { instance_double(Mutations::Issues::Create, resolve: { errors: 'error' }) } + + before do + allow(Mutations::Issues::Create).to receive(:new).and_return(issue_creation) + end + + it 'does not create a schema inconsistency record' do + allow(Gitlab).to receive(:com?).and_return(true) + + expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count } + end + end + + context 'when a new inconsistency is found' do + before do + project.add_developer(user) + end + + it 'creates a new schema inconsistency record' do + allow(Gitlab).to receive(:com?).and_return(true) + + expect { execute }.to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count } + end + end + + context 'when the schema inconsistency already exists' do + before do + project.add_developer(user) + end + + let!(:schema_inconsistency) do + create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements', + valitador_name: 'different_definition_indexes') + end + + it 'does not create a schema inconsistency record' do + allow(Gitlab).to receive(:com?).and_return(true) + + expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count } + end + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb 
b/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb new file mode 100644 index 00000000000..036ad6424f0 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::BaseValidator, feature_category: :database do + describe '.all_validators' do + subject(:all_validators) { described_class.all_validators } + + it 'returns an array of all validators' do + expect(all_validators).to eq([ + Gitlab::Database::SchemaValidation::Validators::ExtraTables, + Gitlab::Database::SchemaValidation::Validators::ExtraTableColumns, + Gitlab::Database::SchemaValidation::Validators::ExtraIndexes, + Gitlab::Database::SchemaValidation::Validators::ExtraTriggers, + Gitlab::Database::SchemaValidation::Validators::MissingTables, + Gitlab::Database::SchemaValidation::Validators::MissingTableColumns, + Gitlab::Database::SchemaValidation::Validators::MissingIndexes, + Gitlab::Database::SchemaValidation::Validators::MissingTriggers, + Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTables, + Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes, + Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTriggers + ]) + end + end + + describe '#execute' do + let(:structure_sql) { instance_double(Gitlab::Database::SchemaValidation::StructureSql) } + let(:database) { instance_double(Gitlab::Database::SchemaValidation::Database) } + + subject(:inconsistencies) { described_class.new(structure_sql, database).execute } + + it 'raises an exception' do + expect { inconsistencies }.to raise_error(NoMethodError) + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/different_definition_indexes_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/different_definition_indexes_spec.rb new file mode 100644 index 00000000000..b9744c86b80 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/different_definition_indexes_spec.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes, + feature_category: :database do + include_examples 'index validators', described_class, ['wrong_index'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/different_definition_tables_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/different_definition_tables_spec.rb new file mode 100644 index 00000000000..746418b757e --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/different_definition_tables_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTables, feature_category: :database do + include_examples 'table validators', described_class, ['wrong_table'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/different_definition_triggers_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/different_definition_triggers_spec.rb new file mode 100644 index 00000000000..4d065929708 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/different_definition_triggers_spec.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 
Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTriggers, + feature_category: :database do + include_examples 'trigger validators', described_class, ['wrong_trigger'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/extra_indexes_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/extra_indexes_spec.rb new file mode 100644 index 00000000000..842dbb42120 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/extra_indexes_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::ExtraIndexes, feature_category: :database do + include_examples 'index validators', described_class, ['extra_index'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/extra_table_columns_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/extra_table_columns_spec.rb new file mode 100644 index 00000000000..9d17a2fffa9 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/extra_table_columns_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::ExtraTableColumns, feature_category: :database do + include_examples 'table validators', described_class, ['extra_table_columns'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/extra_tables_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/extra_tables_spec.rb new file mode 100644 index 00000000000..edaf79e3c93 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/extra_tables_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::ExtraTables, feature_category: :database do + include_examples 'table validators', described_class, ['extra_table'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/extra_triggers_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/extra_triggers_spec.rb new file mode 100644 index 00000000000..d2e1c18a1ab --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/extra_triggers_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::ExtraTriggers, feature_category: :database do + include_examples 'trigger validators', described_class, ['extra_trigger'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/missing_indexes_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/missing_indexes_spec.rb new file mode 100644 index 00000000000..c402c3a2fa7 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/missing_indexes_spec.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::MissingIndexes, feature_category: :database do + missing_indexes = %w[ + missing_index + index_namespaces_public_groups_name_id + index_on_deploy_keys_id_and_type_and_public + index_users_on_public_email_excluding_null_and_empty + ] + + include_examples 'index validators', described_class, missing_indexes +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/missing_table_columns_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/missing_table_columns_spec.rb new file mode 100644 index 00000000000..de2956b4dd9 --- /dev/null +++ 
b/spec/lib/gitlab/database/schema_validation/validators/missing_table_columns_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::MissingTableColumns, feature_category: :database do + include_examples 'table validators', described_class, ['missing_table_columns'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/missing_tables_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/missing_tables_spec.rb new file mode 100644 index 00000000000..7c80923e860 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/missing_tables_spec.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::MissingTables, feature_category: :database do + missing_tables = %w[ci_project_mirrors missing_table operations_user_lists test_table] + + include_examples 'table validators', described_class, missing_tables +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/missing_triggers_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/missing_triggers_spec.rb new file mode 100644 index 00000000000..87bc3ded808 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/missing_triggers_spec.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::MissingTriggers, feature_category: :database do + missing_triggers = %w[missing_trigger_1 projects_loose_fk_trigger] + + include_examples 'trigger validators', described_class, missing_triggers +end diff --git a/spec/lib/gitlab/database/tables_locker_spec.rb b/spec/lib/gitlab/database/tables_locker_spec.rb index d74f455eaad..aaafe27f7ca 100644 --- a/spec/lib/gitlab/database/tables_locker_spec.rb +++ b/spec/lib/gitlab/database/tables_locker_spec.rb @@ -2,20 +2,42 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base, :delete, :silence_stdout, - :suppress_gitlab_schemas_validate_connection, feature_category: :pods do - let(:detached_partition_table) { '_test_gitlab_main_part_20220101' } - let(:lock_writes_manager) do - instance_double(Gitlab::Database::LockWritesManager, lock_writes: nil, unlock_writes: nil) +RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate_connection, :silence_stdout, + feature_category: :cell do + let(:default_lock_writes_manager) do + instance_double( + Gitlab::Database::LockWritesManager, + lock_writes: { action: 'any action' }, + unlock_writes: { action: 'unlocked' } + ) end before do - allow(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager) + allow(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(default_lock_writes_manager) + # Limiting the scope of the tests to a subset of the database tables + allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return({ + 'application_setttings' => :gitlab_main_clusterwide, + 'projects' => :gitlab_main, + 'security_findings' => :gitlab_main, + 'ci_builds' => :gitlab_ci, + 'ci_jobs' => :gitlab_ci, + 'loose_foreign_keys_deleted_records' => :gitlab_shared, + 'ar_internal_metadata' => :gitlab_internal + }) end before(:all) do + create_partition_sql = <<~SQL + CREATE TABLE IF NOT EXISTS #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.security_findings_test_partition + PARTITION OF 
security_findings + FOR VALUES IN (0) + SQL + + ApplicationRecord.connection.execute(create_partition_sql) + Ci::ApplicationRecord.connection.execute(create_partition_sql) + create_detached_partition_sql = <<~SQL - CREATE TABLE IF NOT EXISTS gitlab_partitions_dynamic._test_gitlab_main_part_20220101 ( + CREATE TABLE IF NOT EXISTS #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_gitlab_main_part_202201 ( id bigserial primary key not null ) SQL @@ -29,35 +51,89 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base drop_after: Time.current ) end + Gitlab::Database::SharedModel.using_connection(Ci::ApplicationRecord.connection) do + Postgresql::DetachedPartition.create!( + table_name: '_test_gitlab_main_part_20220101', + drop_after: Time.current + ) + end end - after(:all) do - drop_detached_partition_sql = <<~SQL - DROP TABLE IF EXISTS gitlab_partitions_dynamic._test_gitlab_main_part_20220101 - SQL + shared_examples "lock tables" do |gitlab_schema, database_name| + let(:connection) { Gitlab::Database.database_base_models[database_name].connection } + let(:tables_to_lock) do + Gitlab::Database::GitlabSchema + .tables_to_schema.filter_map { |table_name, schema| table_name if schema == gitlab_schema } + end - ApplicationRecord.connection.execute(drop_detached_partition_sql) - Ci::ApplicationRecord.connection.execute(drop_detached_partition_sql) + it "locks table in schema #{gitlab_schema} and database #{database_name}" do + expect(tables_to_lock).not_to be_empty - Gitlab::Database::SharedModel.using_connection(ApplicationRecord.connection) do - Postgresql::DetachedPartition.delete_all + tables_to_lock.each do |table_name| + lock_writes_manager = instance_double(Gitlab::Database::LockWritesManager, lock_writes: nil) + + expect(Gitlab::Database::LockWritesManager).to receive(:new).with( + table_name: table_name, + connection: connection, + database_name: database_name, + with_retries: true, + logger: anything, + dry_run: anything + ).once.and_return(lock_writes_manager) + expect(lock_writes_manager).to receive(:lock_writes).once + end + + subject + end + + it 'returns list of actions' do + expect(subject).to include({ action: 'any action' }) end end - shared_examples "lock tables" do |table_schema, database_name| - let(:table_name) do + shared_examples "unlock tables" do |gitlab_schema, database_name| + let(:connection) { Gitlab::Database.database_base_models[database_name].connection } + + let(:tables_to_unlock) do Gitlab::Database::GitlabSchema - .tables_to_schema.filter_map { |table_name, schema| table_name if schema == table_schema } - .first + .tables_to_schema.filter_map { |table_name, schema| table_name if schema == gitlab_schema } + end + + it "unlocks table in schema #{gitlab_schema} and database #{database_name}" do + expect(tables_to_unlock).not_to be_empty + + tables_to_unlock.each do |table_name| + lock_writes_manager = instance_double(Gitlab::Database::LockWritesManager, unlock_writes: nil) + + expect(Gitlab::Database::LockWritesManager).to receive(:new).with( + table_name: table_name, + connection: anything, + database_name: database_name, + with_retries: true, + logger: anything, + dry_run: anything + ).once.and_return(lock_writes_manager) + expect(lock_writes_manager).to receive(:unlock_writes) + end + + subject end - let(:database) { database_name } + it 'returns list of actions' do + expect(subject).to include({ action: 'unlocked' }) + end + end + + shared_examples "lock partitions" do |partition_identifier, database_name| + let(:connection) { 
Gitlab::Database.database_base_models[database_name].connection } + + it 'locks the partition' do + lock_writes_manager = instance_double(Gitlab::Database::LockWritesManager, lock_writes: nil) - it "locks table in schema #{table_schema} and database #{database_name}" do expect(Gitlab::Database::LockWritesManager).to receive(:new).with( - table_name: table_name, - connection: anything, - database_name: database, + table_name: partition_identifier, + connection: connection, + database_name: database_name, with_retries: true, logger: anything, dry_run: anything @@ -68,20 +144,16 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base end end - shared_examples "unlock tables" do |table_schema, database_name| - let(:table_name) do - Gitlab::Database::GitlabSchema - .tables_to_schema.filter_map { |table_name, schema| table_name if schema == table_schema } - .first - end + shared_examples "unlock partitions" do |partition_identifier, database_name| + let(:connection) { Gitlab::Database.database_base_models[database_name].connection } - let(:database) { database_name } + it 'unlocks the partition' do + lock_writes_manager = instance_double(Gitlab::Database::LockWritesManager, unlock_writes: nil) - it "unlocks table in schema #{table_schema} and database #{database_name}" do expect(Gitlab::Database::LockWritesManager).to receive(:new).with( - table_name: table_name, - connection: anything, - database_name: database, + table_name: partition_identifier, + connection: connection, + database_name: database_name, with_retries: true, logger: anything, dry_run: anything @@ -94,31 +166,35 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base context 'when running on single database' do before do - skip_if_multiple_databases_are_setup(:ci) + skip_if_database_exists(:ci) end describe '#lock_writes' do subject { described_class.new.lock_writes } - it 'does not call Gitlab::Database::LockWritesManager.lock_writes' do - expect(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager) - expect(lock_writes_manager).not_to receive(:lock_writes) + it 'does not lock any table' do + expect(Gitlab::Database::LockWritesManager).to receive(:new) + .with(any_args).and_return(default_lock_writes_manager) + expect(default_lock_writes_manager).not_to receive(:lock_writes) subject end - include_examples "unlock tables", :gitlab_main, 'main' - include_examples "unlock tables", :gitlab_ci, 'ci' - include_examples "unlock tables", :gitlab_shared, 'main' - include_examples "unlock tables", :gitlab_internal, 'main' + it_behaves_like 'unlock tables', :gitlab_main, 'main' + it_behaves_like 'unlock tables', :gitlab_ci, 'main' + it_behaves_like 'unlock tables', :gitlab_main_clusterwide, 'main' + it_behaves_like 'unlock tables', :gitlab_shared, 'main' + it_behaves_like 'unlock tables', :gitlab_internal, 'main' end describe '#unlock_writes' do subject { described_class.new.lock_writes } it 'does call Gitlab::Database::LockWritesManager.unlock_writes' do - expect(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager) - expect(lock_writes_manager).to receive(:unlock_writes) + expect(Gitlab::Database::LockWritesManager).to receive(:new) + .with(any_args).and_return(default_lock_writes_manager) + expect(default_lock_writes_manager).to receive(:unlock_writes) + expect(default_lock_writes_manager).not_to receive(:lock_writes) subject end @@ -127,49 +203,67 @@ RSpec.describe 
Gitlab::Database::TablesLocker, :reestablished_active_record_base context 'when running on multiple databases' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) end describe '#lock_writes' do subject { described_class.new.lock_writes } - include_examples "lock tables", :gitlab_ci, 'main' - include_examples "lock tables", :gitlab_main, 'ci' - - include_examples "unlock tables", :gitlab_main, 'main' - include_examples "unlock tables", :gitlab_ci, 'ci' - include_examples "unlock tables", :gitlab_shared, 'main' - include_examples "unlock tables", :gitlab_shared, 'ci' - include_examples "unlock tables", :gitlab_internal, 'main' - include_examples "unlock tables", :gitlab_internal, 'ci' + it_behaves_like 'lock tables', :gitlab_ci, 'main' + it_behaves_like 'lock tables', :gitlab_main, 'ci' + it_behaves_like 'lock tables', :gitlab_main_clusterwide, 'ci' + + it_behaves_like 'unlock tables', :gitlab_main_clusterwide, 'main' + it_behaves_like 'unlock tables', :gitlab_main, 'main' + it_behaves_like 'unlock tables', :gitlab_ci, 'ci' + it_behaves_like 'unlock tables', :gitlab_shared, 'main' + it_behaves_like 'unlock tables', :gitlab_shared, 'ci' + it_behaves_like 'unlock tables', :gitlab_internal, 'main' + it_behaves_like 'unlock tables', :gitlab_internal, 'ci' + + gitlab_main_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.security_findings_test_partition" + it_behaves_like 'unlock partitions', gitlab_main_partition, 'main' + it_behaves_like 'lock partitions', gitlab_main_partition, 'ci' + + gitlab_main_detached_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_gitlab_main_part_20220101" + it_behaves_like 'unlock partitions', gitlab_main_detached_partition, 'main' + it_behaves_like 'lock partitions', gitlab_main_detached_partition, 'ci' end describe '#unlock_writes' do subject { described_class.new.unlock_writes } - include_examples "unlock tables", :gitlab_ci, 'main' - include_examples "unlock tables", :gitlab_main, 'ci' - include_examples "unlock tables", :gitlab_main, 'main' - include_examples "unlock tables", :gitlab_ci, 'ci' - include_examples "unlock tables", :gitlab_shared, 'main' - include_examples "unlock tables", :gitlab_shared, 'ci' - include_examples "unlock tables", :gitlab_internal, 'main' - include_examples "unlock tables", :gitlab_internal, 'ci' + it_behaves_like "unlock tables", :gitlab_ci, 'main' + it_behaves_like "unlock tables", :gitlab_main, 'ci' + it_behaves_like "unlock tables", :gitlab_main, 'main' + it_behaves_like "unlock tables", :gitlab_ci, 'ci' + it_behaves_like "unlock tables", :gitlab_shared, 'main' + it_behaves_like "unlock tables", :gitlab_shared, 'ci' + it_behaves_like "unlock tables", :gitlab_internal, 'main' + it_behaves_like "unlock tables", :gitlab_internal, 'ci' + + gitlab_main_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.security_findings_test_partition" + it_behaves_like 'unlock partitions', gitlab_main_partition, 'main' + it_behaves_like 'unlock partitions', gitlab_main_partition, 'ci' + + gitlab_main_detached_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_gitlab_main_part_20220101" + it_behaves_like 'unlock partitions', gitlab_main_detached_partition, 'main' + it_behaves_like 'unlock partitions', gitlab_main_detached_partition, 'ci' end context 'when running in dry_run mode' do subject { described_class.new(dry_run: true).lock_writes } - it 'passes dry_run flag to LockManger' do + it 'passes dry_run flag to LockWritesManager' do 
        expect(Gitlab::Database::LockWritesManager).to receive(:new).with(
-          table_name: 'users',
+          table_name: 'security_findings',
           connection: anything,
           database_name: 'ci',
           with_retries: true,
           logger: anything,
           dry_run: true
-        ).and_return(lock_writes_manager)
-        expect(lock_writes_manager).to receive(:lock_writes)
+        ).and_return(default_lock_writes_manager)
+        expect(default_lock_writes_manager).to receive(:lock_writes)
 
        subject
      end
@@ -185,8 +279,9 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base
      end
 
      it 'does not lock any tables if the ci database is shared with main database' do
-        expect(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager)
-        expect(lock_writes_manager).not_to receive(:lock_writes)
+        expect(Gitlab::Database::LockWritesManager).to receive(:new)
+          .with(any_args).and_return(default_lock_writes_manager)
+        expect(default_lock_writes_manager).not_to receive(:lock_writes)
 
        subject
      end
@@ -220,7 +315,3 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base
    end
  end
 end
-
-def number_of_triggers(connection)
-  connection.select_value("SELECT count(*) FROM information_schema.triggers")
-end
diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb
index 3bb2f4e982c..ef76c9b8da3 100644
--- a/spec/lib/gitlab/database/tables_truncate_spec.rb
+++ b/spec/lib/gitlab/database/tables_truncate_spec.rb
@@ -3,7 +3,7 @@
 require 'spec_helper'
 
 RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_base,
-  :suppress_gitlab_schemas_validate_connection, feature_category: :pods do
+  :suppress_gitlab_schemas_validate_connection, feature_category: :cell do
   include MigrationsHelpers
 
   let(:min_batch_size) { 1 }
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
   end
 
   before do
-    skip_if_multiple_databases_not_setup(:ci)
+    skip_if_shared_database(:ci)
 
     # Creating some test tables on the main database
     main_tables_sql = <<~SQL
@@ -79,8 +79,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
       ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201;
     SQL
 
-    main_connection.execute(main_tables_sql)
-    ci_connection.execute(main_tables_sql)
+    execute_on_each_database(main_tables_sql)
 
     ci_tables_sql = <<~SQL
       CREATE TABLE _test_gitlab_ci_items (id serial NOT NULL PRIMARY KEY);
@@ -92,15 +91,13 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
      );
    SQL
 
-    main_connection.execute(ci_tables_sql)
-    ci_connection.execute(ci_tables_sql)
+    execute_on_each_database(ci_tables_sql)
 
    internal_tables_sql = <<~SQL
      CREATE TABLE _test_gitlab_shared_items (id serial NOT NULL PRIMARY KEY);
    SQL
 
-    main_connection.execute(internal_tables_sql)
-    ci_connection.execute(internal_tables_sql)
+    execute_on_each_database(internal_tables_sql)
 
    # Filling the tables
    5.times do |i|
@@ -156,7 +153,9 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
          "_test_gitlab_ci_items" => :gitlab_ci,
          "_test_gitlab_ci_references" => :gitlab_ci,
          "_test_gitlab_shared_items" => :gitlab_shared,
-          "_test_gitlab_geo_items" => :gitlab_geo
+          "_test_gitlab_geo_items" => :gitlab_geo,
+          "detached_partitions" => :gitlab_shared,
+          "postgres_partitions" => :gitlab_shared
        }
      )
@@ -314,8 +313,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
   context 'when running with multiple shared databases' do
     before do
       skip_if_multiple_databases_not_setup(:ci)
-      ci_db_config = Ci::ApplicationRecord.connection_db_config
-      allow(::Gitlab::Database).to receive(:db_config_share_with).with(ci_db_config).and_return('main')
+      skip_if_database_exists(:ci)
     end
 
     it 'raises an error when truncating the main database that it is a single database setup' do
diff --git a/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb b/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb
index 5b68f9a3757..2725b22ca9d 100644
--- a/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb
+++ b/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb
@@ -2,7 +2,7 @@
 
 require 'spec_helper'
 
-RSpec.describe Gitlab::Database::TransactionTimeoutSettings, feature_category: :pods do
+RSpec.describe Gitlab::Database::TransactionTimeoutSettings, feature_category: :cell do
   let(:connection) { ActiveRecord::Base.connection }
 
   subject { described_class.new(connection) }
diff --git a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
index 9ccae754a92..82bba31193b 100644
--- a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
@@ -61,12 +61,12 @@ RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction, feature_cate
 
   context 'lock_fiber' do
     it 'acquires lock successfully' do
-      check_exclusive_lock_query = """
+      check_exclusive_lock_query = <<~QUERY
        SELECT 1
        FROM pg_locks l
        JOIN pg_class t ON l.relation = t.oid
        WHERE t.relkind = 'r' AND l.mode = 'ExclusiveLock' AND t.relname = '#{Project.table_name}'
-      """
+      QUERY
 
      expect(connection.execute(check_exclusive_lock_query).to_a).to be_present
    end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 7fe6362634b..7e0435c815b 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -61,12 +61,12 @@ RSpec.describe Gitlab::Database::WithLockRetries, feature_category: :database do
 
   context 'lock_fiber' do
     it 'acquires lock successfully' do
-      check_exclusive_lock_query = """
+      check_exclusive_lock_query = <<~QUERY
        SELECT 1
        FROM pg_locks l
        JOIN pg_class t ON l.relation = t.oid
        WHERE t.relkind = 'r' AND l.mode = 'ExclusiveLock' AND t.relname = '#{Project.table_name}'
-      """
+      QUERY
 
      expect(connection.execute(check_exclusive_lock_query).to_a).to be_present
    end
-- 
cgit v1.2.3