Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-12-21 15:15:58 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2023-12-21 15:15:58 +0300
commit2779809e45970e7660521b94dbebcf24ed00d60d (patch)
tree98df2ede3cb2bdcb42955e5c4684dd70a8a79c81 /spec
parent4ecd816dcbbf2c3a83087ea1add13f087530e9eb (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--spec/db/schema_spec.rb1
-rw-r--r--spec/factories/ci/pipeline_chat_data.rb9
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb67
-rw-r--r--spec/lib/gitlab/ci/config/external/context_spec.rb85
-rw-r--r--spec/lib/gitlab/ci/config/external/file/remote_spec.rb34
-rw-r--r--spec/lib/gitlab/database/migration_helpers/v2_spec.rb83
-rw-r--r--spec/migrations/20231218092401_queue_backfill_partition_id_ci_pipeline_chat_data_spec.rb56
-rw-r--r--spec/models/ci/pipeline_chat_data_spec.rb27
8 files changed, 358 insertions, 4 deletions
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 7e3f2a3b61e..9236c166b68 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -48,6 +48,7 @@ RSpec.describe 'Database schema', feature_category: :database do
chat_teams: %w[team_id],
ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id auto_canceled_by_partition_id],
ci_namespace_monthly_usages: %w[namespace_id],
+ ci_pipeline_chat_data: %w[partition_id],
ci_pipeline_variables: %w[partition_id],
ci_pipelines: %w[partition_id],
ci_runner_projects: %w[runner_id],
diff --git a/spec/factories/ci/pipeline_chat_data.rb b/spec/factories/ci/pipeline_chat_data.rb
new file mode 100644
index 00000000000..1f61c98f542
--- /dev/null
+++ b/spec/factories/ci/pipeline_chat_data.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_pipeline_chat_data, class: 'Ci::PipelineChatData' do
+ pipeline factory: :ci_empty_pipeline
+ chat_name
+ response_url { "https://response.com" }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb
new file mode 100644
index 00000000000..ad1900ab6a6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineChatData,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_chat_data_table) { table(:ci_pipeline_chat_data, database: :ci) }
+ let!(:pipeline1) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline2) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:invalid_ci_pipeline_chat_data) do
+ ci_pipeline_chat_data_table.create!(
+ id: 1,
+ pipeline_id: pipeline1.id,
+ chat_name_id: 1,
+ response_url: '',
+ partition_id: pipeline1.partition_id
+ )
+ end
+
+ let!(:valid_ci_pipeline_chat_data) do
+ ci_pipeline_chat_data_table.create!(
+ id: 2,
+ pipeline_id: pipeline2.id,
+ chat_name_id: 2,
+ response_url: '',
+ partition_id: pipeline2.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_chat_data_table.minimum(:id),
+ end_id: ci_pipeline_chat_data_table.maximum(:id),
+ batch_table: :ci_pipeline_chat_data,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_chat_data.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ pipeline1.update!(partition_id: 101)
+ end
+
+      it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to change { invalid_ci_pipeline_chat_data.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb
index 9ac72ebbac8..3409fc53d19 100644
--- a/spec/lib/gitlab/ci/config/external/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/context_spec.rb
@@ -159,10 +159,14 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
shared_examples 'a mutated context' do
let(:mutated) { subject.mutate(new_attributes) }
+ let(:lazy_response) { double('lazy_response') }
before do
+ allow(lazy_response).to receive(:execute).and_return(lazy_response)
+
subject.expandset << :a_file
subject.set_deadline(15.seconds)
+ subject.execute_remote_parallel_request(lazy_response)
end
it { expect(mutated).not_to eq(subject) }
@@ -170,8 +174,9 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
it { expect(mutated).to have_attributes(new_attributes) }
it { expect(mutated.pipeline).to eq(subject.pipeline) }
it { expect(mutated.expandset).to eq(subject.expandset) }
- it { expect(mutated.execution_deadline).to eq(mutated.execution_deadline) }
- it { expect(mutated.logger).to eq(mutated.logger) }
+ it { expect(mutated.execution_deadline).to eq(subject.execution_deadline) }
+ it { expect(mutated.logger).to eq(subject.logger) }
+ it { expect(mutated.parallel_requests).to eq(subject.parallel_requests) }
end
context 'with attributes' do
@@ -212,4 +217,80 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
end
end
end
+
+ describe '#execute_remote_parallel_request' do
+ let(:lazy_response1) { double('lazy_response', wait: true, complete?: complete1) }
+ let(:lazy_response2) { double('lazy_response') }
+
+ let(:complete1) { false }
+
+ before do
+ allow(lazy_response1).to receive(:execute).and_return(lazy_response1)
+ allow(lazy_response2).to receive(:execute).and_return(lazy_response2)
+ end
+
+ context 'when the queue is empty' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 2)
+ end
+
+ it 'adds the new lazy response to the queue' do
+ expect { subject.execute_remote_parallel_request(lazy_response1) }
+ .to change { subject.parallel_requests }
+ .from([])
+ .to([lazy_response1])
+ end
+ end
+
+ context 'when there is a lazy response in the queue' do
+ before do
+ subject.execute_remote_parallel_request(lazy_response1)
+ end
+
+ context 'when there is a free slot in the queue' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 2)
+ end
+
+ it 'adds the new lazy response to the queue' do
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response1, lazy_response2])
+ end
+ end
+
+ context 'when the queue is full' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 1)
+ end
+
+ context 'when the first lazy response in the queue is complete' do
+ let(:complete1) { true }
+
+ it 'removes the completed lazy response and adds the new one to the queue' do
+ expect(lazy_response1).not_to receive(:wait)
+
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response2])
+ end
+ end
+
+ context 'when the first lazy response in the queue is not complete' do
+ let(:complete1) { false }
+
+ it 'waits for the first lazy response to complete and then adds the new one to the queue' do
+ expect(lazy_response1).to receive(:wait)
+
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response1, lazy_response2])
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
index 7293e640112..adca9e750d0 100644
--- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
@@ -157,6 +157,40 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
it_behaves_like "#content"
end
+ describe '#preload_content' do
+ context 'when the parallel request queue is full' do
+ let(:location1) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.secret_file1.yml' }
+ let(:location2) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.secret_file2.yml' }
+
+ before do
+ # Makes the parallel queue full easily
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 1)
+
+ # Adding a failing promise to the queue
+ promise = Concurrent::Promise.new do
+ sleep 1.1
+ raise Timeout::Error
+ end
+
+ context.execute_remote_parallel_request(
+ Gitlab::HTTP_V2::LazyResponse.new(promise, location1, {}, nil)
+ )
+
+ stub_full_request(location2).to_return(body: remote_file_content)
+ end
+
+ it 'waits for the queue' do
+ file2 = described_class.new({ remote: location2 }, context)
+
+ start_at = Time.current
+ file2.preload_content
+ end_at = Time.current
+
+ expect(end_at - start_at).to be > 1
+ end
+ end
+ end
+
describe "#error_message" do
subject(:error_message) do
Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([remote_file])
diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
index 8b653e2d89d..afcec5ea214 100644
--- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
+RSpec.describe Gitlab::Database::MigrationHelpers::V2, feature_category: :database do
include Database::TriggerHelpers
include Database::TableSchemaHelpers
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
context 'when the batch column does exist' do
it 'passes it when creating the column' do
expect(migration).to receive(:create_column_from)
- .with(:_test_table, existing_column, added_column, type: nil, batch_column_name: :status)
+ .with(:_test_table, existing_column, added_column, type: nil, batch_column_name: :status, type_cast_function: nil)
.and_call_original
migration.public_send(operation, :_test_table, :original, :renamed, batch_column_name: :status)
@@ -495,4 +495,83 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
end
end
end
+
+ describe '#change_column_type_concurrently' do
+ let(:table_name) { :_test_change_column_type_concurrently }
+
+ before do
+ migration.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{table_name};
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ user_id bigint,
+ name character varying
+ );
+ /* at least one record for batching update */
+ INSERT INTO #{table_name} (id, user_id, name)
+ VALUES (1, 9, '{ \"lucky_number\": 8 }')
+ SQL
+ end
+
+ it 'adds a column of the new type and triggers to keep these two columns in sync' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text)
+ end
+ expect(recorder.log).to include(/ALTER TABLE "_test_change_column_type_concurrently" ADD "name_for_type_change" text/)
+ expect(recorder.log).to include(/BEGIN\n IF NEW."name" IS NOT DISTINCT FROM NULL AND NEW."name_for_type_change" IS DISTINCT FROM NULL THEN\n NEW."name" = NEW."name_for_type_change";\n END IF;\n\n IF NEW."name_for_type_change" IS NOT DISTINCT FROM NULL AND NEW."name" IS DISTINCT FROM NULL THEN\n NEW."name_for_type_change" = NEW."name";\n END IF;\n\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/BEGIN\n NEW."name" := NEW."name_for_type_change";\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/BEGIN\n NEW."name_for_type_change" := NEW."name";\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/ON "_test_change_column_type_concurrently"\nFOR EACH ROW\sEXECUTE FUNCTION/m)
+ expect(recorder.log).to include(/UPDATE .* WHERE "_test_change_column_type_concurrently"."id" >= \d+/)
+ end
+
+ context 'with batch column name' do
+ it 'updates the new column using the batch column' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text, batch_column_name: :user_id)
+ end
+ expect(recorder.log).to include(/UPDATE .* WHERE "_test_change_column_type_concurrently"."user_id" >= \d+/)
+ end
+ end
+
+ context 'with type cast function' do
+ it 'updates the new column with casting the value to the given type' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text, type_cast_function: 'JSON')
+ end
+ expect(recorder.log).to include(/SET "name_for_type_change" = JSON\("_test_change_column_type_concurrently"\."name"\)/m)
+ end
+ end
+ end
+
+ describe '#undo_change_column_type_concurrently' do
+ let(:table_name) { :_test_undo_change_column_type_concurrently }
+
+ before do
+ migration.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{table_name};
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ user_id bigint,
+ name character varying
+ );
+ /* at least one record for batching update */
+ INSERT INTO #{table_name} (id, user_id, name)
+ VALUES (1, 9, 'For every young')
+ SQL
+ end
+
+ it 'undoes the column type change' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ migration.change_column_type_concurrently(table_name, :name, :text)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.undo_change_column_type_concurrently(table_name, :name)
+ end
+ expect(recorder.log).to include(/DROP TRIGGER IF EXISTS .+ON "_test_undo_change_column_type_concurrently"/m)
+ expect(recorder.log).to include(/ALTER TABLE "_test_undo_change_column_type_concurrently" DROP COLUMN "name_for_type_change"/)
+ end
+ end
end
diff --git a/spec/migrations/20231218092401_queue_backfill_partition_id_ci_pipeline_chat_data_spec.rb b/spec/migrations/20231218092401_queue_backfill_partition_id_ci_pipeline_chat_data_spec.rb
new file mode 100644
index 00000000000..bd99dd9998a
--- /dev/null
+++ b/spec/migrations/20231218092401_queue_backfill_partition_id_ci_pipeline_chat_data_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillPartitionIdCiPipelineChatData, migration: :gitlab_ci, feature_category: :continuous_integration do
+ let!(:batched_migrations) { table(:batched_background_migrations) }
+ let!(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ context 'with migration present' do
+ let!(:ci_backfill_partition_id_ci_pipeline_chat_data_migration) do
+ batched_migrations.create!(
+ job_class_name: 'BackfillPartitionIdCiPipelineChatData',
+ table_name: :ci_pipeline_chat_data,
+ column_name: :id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 100,
+ gitlab_schema: :gitlab_ci,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+
+ it 'schedules background jobs for each batch of ci_pipeline_chat_data' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_pipeline_chat_data,
+ column_name: :id,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_chat_data_spec.rb b/spec/models/ci/pipeline_chat_data_spec.rb
new file mode 100644
index 00000000000..4c9dc7edd88
--- /dev/null
+++ b/spec/models/ci/pipeline_chat_data_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineChatData, type: :model, feature_category: :continuous_integration do
+ it { is_expected.to belong_to(:chat_name) }
+ it { is_expected.to belong_to(:pipeline) }
+
+ it { is_expected.to validate_presence_of(:pipeline_id) }
+ it { is_expected.to validate_presence_of(:chat_name_id) }
+ it { is_expected.to validate_presence_of(:response_url) }
+
+ describe 'partitioning', :ci_partitionable do
+ include Ci::PartitioningHelpers
+
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:pipeline_chat_data) { create(:ci_pipeline_chat_data, pipeline: pipeline) }
+
+ before do
+ stub_current_partition_id
+ end
+
+ it 'assigns the same partition id as the one that pipeline has' do
+ expect(pipeline_chat_data.partition_id).to eq(ci_testing_partition_id)
+ end
+ end
+end