Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
Diffstat (limited to 'spec')
-rw-r--r--spec/factories/ci/runners.rb3
-rw-r--r--spec/factories/users.rb2
-rw-r--r--spec/frontend/editor/schema/ci/ci_schema_spec.js14
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_job_failure.yml (renamed from spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml)1
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml4
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_job_failure.yml (renamed from spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml)1
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml3
-rw-r--r--spec/graphql/resolvers/ci/group_runners_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/project_runners_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runners_resolver_spec.rb10
-rw-r--r--spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb84
-rw-r--r--spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb2
-rw-r--r--spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb224
-rw-r--r--spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb6
-rw-r--r--spec/lib/gitlab/runtime_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb7
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb2
-rw-r--r--spec/migrations/20231212135235_queue_backfill_vs_code_settings_version_spec.rb26
-rw-r--r--spec/models/ci/pipeline_metadata_spec.rb2
-rw-r--r--spec/models/ci/pipeline_spec.rb32
-rw-r--r--spec/models/ci/runner_spec.rb2
-rw-r--r--spec/models/project_authorizations/changes_spec.rb112
-rw-r--r--spec/models/user_spec.rb21
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb158
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb183
-rw-r--r--spec/services/ci/cancel_pipeline_service_spec.rb76
-rw-r--r--spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb2
-rw-r--r--spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb209
-rw-r--r--spec/services/members/create_service_spec.rb26
-rw-r--r--spec/support/finder_collection.rb2
-rw-r--r--spec/support/helpers/dns_helpers.rb13
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb1
-rw-r--r--spec/support/shared_examples/redis/redis_shared_examples.rb61
-rw-r--r--spec/support/sidekiq.rb10
-rw-r--r--spec/support/sidekiq_middleware.rb9
41 files changed, 1062 insertions, 272 deletions
diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb
index 2d67a4c0e80..63e8cec82e6 100644
--- a/spec/factories/ci/runners.rb
+++ b/spec/factories/ci/runners.rb
@@ -14,6 +14,7 @@ FactoryBot.define do
groups { [] }
projects { [] }
token_expires_at { nil }
+ creator { nil }
end
after(:build) do |runner, evaluator|
@@ -24,6 +25,8 @@ FactoryBot.define do
evaluator.groups.each do |group|
runner.runner_namespaces << build(:ci_runner_namespace, runner: runner, namespace: group)
end
+
+ runner.creator = evaluator.creator if evaluator.creator
end
after(:create) do |runner, evaluator|
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 03dbe04b337..7ade859dcf2 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -20,8 +20,6 @@ FactoryBot.define do
true
end
- assign_ns &&= Feature.enabled?(:create_personal_ns_outside_model, Feature.current_request)
-
user.assign_personal_namespace if assign_ns
end
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index 7986509074e..949cf1367ff 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -38,8 +38,8 @@ import SecretsYaml from './yaml_tests/positive_tests/secrets.yml';
import ServicesYaml from './yaml_tests/positive_tests/services.yml';
import NeedsParallelMatrixYaml from './yaml_tests/positive_tests/needs_parallel_matrix.yml';
import ScriptYaml from './yaml_tests/positive_tests/script.yml';
-import AutoCancelPipelineOnJobFailureAllYaml from './yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml';
-import AutoCancelPipelineOnJobFailureNoneYaml from './yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml';
+import WorkflowAutoCancelOnJobFailureYaml from './yaml_tests/positive_tests/workflow/auto_cancel/on_job_failure.yml';
+import WorkflowAutoCancelOnNewCommitYaml from './yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml';
// YAML NEGATIVE TEST
import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
@@ -66,7 +66,8 @@ import NeedsParallelMatrixNumericYaml from './yaml_tests/negative_tests/needs/pa
import NeedsParallelMatrixWrongParallelValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml';
import NeedsParallelMatrixWrongMatrixValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml';
import ScriptNegativeYaml from './yaml_tests/negative_tests/script.yml';
-import AutoCancelPipelineNegativeYaml from './yaml_tests/negative_tests/auto_cancel_pipeline.yml';
+import WorkflowAutoCancelOnJobFailureNegativeYaml from './yaml_tests/negative_tests/workflow/auto_cancel/on_job_failure.yml';
+import WorkflowAutoCancelOnNewCommitNegativeYaml from './yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml';
const ajv = new Ajv({
strictTypes: false,
@@ -110,8 +111,8 @@ describe('positive tests', () => {
SecretsYaml,
NeedsParallelMatrixYaml,
ScriptYaml,
- AutoCancelPipelineOnJobFailureAllYaml,
- AutoCancelPipelineOnJobFailureNoneYaml,
+ WorkflowAutoCancelOnJobFailureYaml,
+ WorkflowAutoCancelOnNewCommitYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
@@ -157,7 +158,8 @@ describe('negative tests', () => {
NeedsParallelMatrixWrongParallelValueYaml,
NeedsParallelMatrixWrongMatrixValueYaml,
ScriptNegativeYaml,
- AutoCancelPipelineNegativeYaml,
+ WorkflowAutoCancelOnJobFailureNegativeYaml,
+ WorkflowAutoCancelOnNewCommitNegativeYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_job_failure.yml
index 0ba3e5632e3..2bf9effe1be 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_job_failure.yml
@@ -1,4 +1,3 @@
-# invalid workflow:auto-cancel:on-job-failure
workflow:
auto_cancel:
on_job_failure: unexpected_value
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml
new file mode 100644
index 00000000000..371662efd24
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml
@@ -0,0 +1,3 @@
+workflow:
+ auto_cancel:
+ on_new_commit: unexpected_value
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml
deleted file mode 100644
index b99eb50e962..00000000000
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-# valid workflow:auto-cancel:on-job-failure
-workflow:
- auto_cancel:
- on_job_failure: none
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_job_failure.yml
index bf84ff16f42..79d18f40721 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_job_failure.yml
@@ -1,4 +1,3 @@
-# valid workflow:auto-cancel:on-job-failure
workflow:
auto_cancel:
on_job_failure: all
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml
new file mode 100644
index 00000000000..a1641878e4d
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml
@@ -0,0 +1,3 @@
+workflow:
+ auto_cancel:
+ on_new_commit: conservative
diff --git a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
index d1eec0baeea..d1726c8da6c 100644
--- a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :fleet_vis
status_status: 'active',
type_type: :group_type,
tag_name: ['active_runner'],
- preload: false,
+ preload: {},
search: 'abc',
sort: 'contacted_asc',
membership: :descendants,
diff --git a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
index 85b55521174..59ba7d4200c 100644
--- a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :fleet_v
status_status: 'active',
type_type: :group_type,
tag_name: ['active_runner'],
- preload: false,
+ preload: {},
search: 'abc',
sort: 'contacted_asc',
project: project
diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
index 85a90924384..a0239a6ff34 100644
--- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
upgrade_status: 'recommended',
type_type: :instance_type,
tag_name: ['active_runner'],
- preload: false,
+ preload: {},
search: 'abc',
sort: 'contacted_asc',
creator_id: '1',
@@ -125,7 +125,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
let(:expected_params) do
{
active: false,
- preload: false
+ preload: {}
}
end
@@ -145,7 +145,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
let(:expected_params) do
{
active: false,
- preload: false
+ preload: {}
}
end
@@ -163,7 +163,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
end
let(:expected_params) do
- { preload: false }
+ { preload: {} }
end
it 'calls RunnersFinder with expected arguments' do
@@ -181,7 +181,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
let(:expected_params) do
{
- preload: false,
+ preload: {},
version_prefix: 'a.b'
}
end
diff --git a/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb
new file mode 100644
index 00000000000..725cd7f4bca
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillVsCodeSettingsVersion, schema: 20231212135235, feature_category: :web_ide do
+ let(:vs_code_settings) { table(:vs_code_settings) }
+
+ let(:users) { table(:users) }
+
+ let(:user) do
+ users.create!(
+ email: "test1@example.com",
+ username: "test1",
+ notification_email: "test@example.com",
+ name: "test",
+ state: "active",
+ projects_limit: 10)
+ end
+
+ let(:persistent_settings) { VsCode::Settings::SETTINGS_TYPES.filter { |type| type != 'machines' } }
+
+ subject(:migration) do
+ described_class.new(
+ start_id: vs_code_settings.first.id,
+ end_id: vs_code_settings.last.id,
+ batch_table: :vs_code_settings,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ end
+
+ describe "#perform" do
+ context 'when it finds vs_code_setting rows with version that is nil or zero' do
+ let(:settings) do
+ persistent_settings.each_with_index.map do |type, index|
+ vs_code_settings.create!(user_id: user.id,
+ setting_type: type,
+ content: '{}',
+ uuid: SecureRandom.uuid,
+ version: index.odd? ? nil : 0)
+ end
+ end
+
+ it 'sets version field with default value for setting type' do
+ settings.each do |setting|
+ expect(setting.version).to eq(nil).or eq(0)
+ end
+
+ migration.perform
+
+ settings.each do |setting|
+ expect(setting.reload.version)
+ .to eq(described_class::VsCodeSetting::DEFAULT_SETTING_VERSIONS[setting.setting_type])
+ end
+ end
+ end
+
+ context 'when it finds vs_code_setting rows with version that is not nil or zero' do
+ let(:settings) do
+ persistent_settings.map do |type|
+ vs_code_settings.create!(user_id: user.id,
+ setting_type: type,
+ content: '{}',
+ uuid: SecureRandom.uuid,
+ version: 1)
+ end
+ end
+
+ it 'does not set version field' do
+ settings.each do |setting|
+ expect(setting.version).to eq(1)
+ end
+
+ migration.perform
+
+ settings.each do |setting|
+ expect(setting.reload.version).to eq(1)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
index bdd66cc00a1..764908ee040 100644
--- a/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::AutoCancel, feature_category: :pipelin
it 'returns errors' do
expect(config.errors)
- .to include('auto cancel on new commit must be one of: conservative, interruptible, disabled')
+ .to include('auto cancel on new commit must be one of: conservative, interruptible, none')
end
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
index ad92d90e253..4dd29e1fb15 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
-require 'set'
+require 'set' # rubocop:disable Lint/RedundantRequireStatement -- Ruby 3.1 and earlier needs this. Drop this line after Ruby 3.2+ is only supported.
MOCK_LINE = Struct.new(:text, :type, :index, :old_pos, :new_pos)
diff --git a/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb b/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
new file mode 100644
index 00000000000..eca75d93c80
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+require 'support/helpers/rails_helpers'
+
+RSpec.describe Gitlab::Instrumentation::RedisClientMiddleware, :request_store, feature_category: :scalability do
+ using RSpec::Parameterized::TableSyntax
+ include RedisHelpers
+
+ let_it_be(:redis_store_class) { define_helper_redis_store_class }
+ let_it_be(:redis_client) { RedisClient.new(redis_store_class.redis_client_params) }
+
+ before do
+ redis_client.call("flushdb")
+ end
+
+ describe 'read and write' do
+ where(:setup, :command, :expect_write, :expect_read) do
+ # The response is 'OK', the request size is the combined size of array
+ # elements. Exercise counting of a status reply.
+ [] | [:set, 'foo', 'bar'] | (3 + 3 + 3) | 2
+
+ # The response is 1001, so 4 bytes. Exercise counting an integer reply.
+ [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | (4 + 6) | 4
+
+ # Exercise counting empty multi bulk reply. Returns an empty hash `{}`
+ [] | [:hgetall, 'foobar'] | (7 + 6) | 2
+
+ # Hgetall response length is combined length of keys and values in the
+ # hash. Exercises counting of a multi bulk reply
+ # Returns `{"field"=>"hello world"}`, 5 for field, 11 for hello world, 8 for {, }, 4 "s, =, >
+ [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | (7 + 6) | (5 + 11 + 8)
+
+ # Exercise counting of a bulk reply
+ [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | (3 + 3) | (3 * 100)
+
+ # Nested array response: [['foo', 0.0], ['bar', 1.0]]. Returns scores as float.
+ [[:zadd, 'myset', 0, 'foo'],
+ [:zadd, 'myset', 1, 'bar']] | [:zrange, 'myset', 0, -1, 'withscores'] | (6 + 5 + 1 + 2 + 10) | (3 + 3 + 3 + 3)
+ end
+
+ with_them do
+ it 'counts bytes read and written' do
+ setup.each { |cmd| redis_client.call(*cmd) }
+ RequestStore.clear!
+ redis_client.call(*command)
+
+ expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
+ expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
+ end
+ end
+ end
+
+ describe 'counting' do
+ let(:instrumentation_class) { redis_store_class.instrumentation_class }
+
+ it 'counts successful requests' do
+ expect(instrumentation_class).to receive(:instance_count_request).with(1).and_call_original
+
+ redis_client.call(:get, 'foobar')
+ end
+
+ it 'counts successful pipelined requests' do
+ expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original
+ expect(instrumentation_class).to receive(:instance_count_pipelined_request).with(2).and_call_original
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:get, '{foobar}baz')
+ end
+ end
+
+ context 'when encountering exceptions' do
+ before do
+ allow(redis_client.instance_variable_get(:@raw_connection)).to receive(:call).and_raise(
+ RedisClient::ConnectionError, 'Connection was closed or lost')
+ end
+
+ it 'counts exception' do
+ expect(instrumentation_class).to receive(:instance_count_exception)
+ .with(instance_of(RedisClient::ConnectionError)).and_call_original
+ expect(instrumentation_class).to receive(:log_exception)
+ .with(instance_of(RedisClient::ConnectionError)).and_call_original
+ expect(instrumentation_class).to receive(:instance_count_request).and_call_original
+
+ expect do
+ redis_client.call(:auth, 'foo', 'bar')
+ end.to raise_error(RedisClient::Error)
+ end
+ end
+
+ context 'in production environment' do
+ before do
+ stub_rails_env('production') # to avoid raising CrossSlotError
+ end
+
+ it 'counts disallowed cross-slot requests' do
+ expect(instrumentation_class).to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ redis_client.call(:mget, 'foo', 'bar')
+ end
+
+ it 'does not count allowed cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis_client.call(:mget, 'foo', 'bar')
+ end
+ end
+
+ it 'does not count allowed non-cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis_client.call(:mget, 'bar')
+ end
+ end
+
+ it 'skips count for non-cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ redis_client.call(:mget, '{foo}bar', '{foo}baz')
+ end
+ end
+
+ context 'without active RequestStore' do
+ before do
+ ::RequestStore.end!
+ end
+
+ it 'still runs cross-slot validation' do
+ expect do
+ redis_client.call('mget', 'foo', 'bar')
+ end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
+ end
+ end
+ end
+
+ describe 'latency' do
+ let(:instrumentation_class) { redis_store_class.instrumentation_class }
+
+ describe 'commands in the apdex' do
+ where(:command) do
+ [
+ [[:get, 'foobar']],
+ [%w[GET foobar]]
+ ]
+ end
+
+ with_them do
+ it 'measures requests we want in the apdex' do
+ expect(instrumentation_class).to receive(:instance_observe_duration).with(a_value > 0)
+ .and_call_original
+
+ redis_client.call(*command)
+ end
+ end
+
+ context 'with pipelined commands' do
+ it 'measures requests that do not have blocking commands' do
+ expect(instrumentation_class).to receive(:instance_observe_duration).twice.with(a_value > 0)
+ .and_call_original
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:get, '{foobar}baz')
+ end
+ end
+
+ it 'raises error when keys are not from the same slot' do
+ expect do
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, 'foo')
+ pipeline.call(:get, 'bar')
+ end
+ end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
+ end
+ end
+ end
+
+ describe 'commands not in the apdex' do
+ where(:setup, :command) do
+ [['rpush', 'foobar', 1]] | ['brpop', 'foobar', 0]
+ [['rpush', 'foobar', 1]] | ['blpop', 'foobar', 0]
+ [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
+ [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
+ [['zadd', 'foobar', 1, 'a']] | ['bzpopmin', 'foobar', 0]
+ [['zadd', 'foobar', 1, 'a']] | ['bzpopmax', 'foobar', 0]
+ [['xadd', 'mystream', 1, 'myfield', 'mydata']] | ['xread', 'block', 1, 'streams', 'mystream', '0-0']
+ [['xadd', 'foobar', 1, 'myfield', 'mydata'],
+ ['xgroup', 'create', 'foobar', 'mygroup',
+ 0]] | ['xreadgroup', 'group', 'mygroup', 'myconsumer', 'block', 1, 'streams', 'foobar', '0-0']
+ [] | ['command']
+ end
+
+ with_them do
+ it 'skips requests we do not want in the apdex' do
+ setup.each { |cmd| redis_client.call(*cmd) }
+
+ expect(instrumentation_class).not_to receive(:instance_observe_duration)
+
+ redis_client.call(*command)
+ end
+ end
+
+ context 'with pipelined commands' do
+ it 'skips requests that have blocking commands' do
+ expect(instrumentation_class).not_to receive(:instance_observe_duration)
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:rpush, '{foobar}baz', 1)
+ pipeline.call(:brpop, '{foobar}baz', 0)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
index 68dd784fb7e..1c62f5679d0 100644
--- a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
before do
allow(Gitlab::Metrics::System).to receive(:monotonic_time)
- .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq[:timeout] + 2)
+ .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq.default_configuration[:timeout] + 2)
allow(Process).to receive(:kill)
allow(::Sidekiq).to receive(:logger).and_return(logger)
allow(logger).to receive(:warn)
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
let(:signal_params) do
[
[:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
- [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2]
+ [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2]
]
end
@@ -95,7 +95,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
let(:signal_params) do
[
[:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
- [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2],
+ [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2],
[:KILL, kill_pid, 'hard shut down', nil]
]
end
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 05bcdf2fc96..bd5914c9df8 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -127,10 +127,10 @@ RSpec.describe Gitlab::Runtime, feature_category: :cloud_connector do
before do
stub_const('::Sidekiq', sidekiq_type)
allow(sidekiq_type).to receive(:server?).and_return(true)
- allow(sidekiq_type).to receive(:[]).with(:concurrency).and_return(2)
+ allow(sidekiq_type).to receive(:default_configuration).and_return({ concurrency: 2 })
end
- it_behaves_like "valid runtime", :sidekiq, 5
+ it_behaves_like "valid runtime", :sidekiq, 2
it 'identifies as an application runtime' do
expect(described_class.application?).to be true
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index 5885151ecb5..f741fd8fae9 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -186,7 +186,8 @@ RSpec.describe Gitlab::SidekiqConfig do
allow(::Gitlab::SidekiqConfig::WorkerRouter)
.to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
- allow(Sidekiq).to receive(:[]).with(:queues).and_return(%w[default background_migration])
+ allow(Sidekiq).to receive_message_chain(:default_configuration, :queues)
+ .and_return(%w[default background_migration])
mappings = described_class.current_worker_queue_mappings
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 2e07fa100e8..b1a8a9f4da3 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -492,7 +492,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'completed_at' => current_utc_time.to_i }
end
- subject { described_class.new }
+ subject { described_class.new(Sidekiq.logger) }
it 'update payload correctly' do
travel_to(current_utc_time) do
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 9cf9901007c..e1662903fa4 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
describe '.initialize_process_metrics' do
it 'sets concurrency metrics' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)
described_class.initialize_process_metrics
end
@@ -122,7 +122,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
end
it 'sets the concurrency metric' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)
described_class.initialize_process_metrics
end
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index bf379d9cb0d..96d4042b1e6 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
let(:migrator) { described_class.new(mappings) }
let(:set_after) do
- Sidekiq.redis { |c| c.zrange(set_name, 0, -1, with_scores: true) }
+ Sidekiq.redis { |c| c.call("ZRANGE", set_name, 0, -1, "WITHSCORES") }
.map { |item, score| [Gitlab::Json.load(item), score] }
end
@@ -226,8 +226,9 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
let(:logger) { nil }
def list_queues
- queues = Sidekiq.redis do |conn|
- conn.scan_each(match: "queue:*").to_a
+ queues = []
+ Sidekiq.redis do |conn|
+ conn.scan("MATCH", "queue:*") { |key| queues << key }
end
queues.uniq.map { |queue| queue.split(':', 2).last }
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index 55e3885d257..ecdab2651a2 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -174,7 +174,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues,
context 'when both multi-store feature flags are off' do
def with_redis(&block)
- Sidekiq.redis(&block)
+ Gitlab::Redis::Queues.with(&block)
end
before do
diff --git a/spec/migrations/20231212135235_queue_backfill_vs_code_settings_version_spec.rb b/spec/migrations/20231212135235_queue_backfill_vs_code_settings_version_spec.rb
new file mode 100644
index 00000000000..e3e08720950
--- /dev/null
+++ b/spec/migrations/20231212135235_queue_backfill_vs_code_settings_version_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillVsCodeSettingsVersion, feature_category: :web_ide do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vs_code_settings,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_metadata_spec.rb b/spec/models/ci/pipeline_metadata_spec.rb
index 1a426118063..c114c0e945e 100644
--- a/spec/models/ci/pipeline_metadata_spec.rb
+++ b/spec/models/ci/pipeline_metadata_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Ci::PipelineMetadata, feature_category: :pipeline_composition do
is_expected.to define_enum_for(
:auto_cancel_on_new_commit
).with_values(
- conservative: 0, interruptible: 1, disabled: 2
+ conservative: 0, interruptible: 1, none: 2
).with_prefix
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index e075ea232d3..52c3792ac93 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -5727,4 +5727,36 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
end
+
+ describe '#auto_cancel_on_new_commit' do
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
+
+ subject(:auto_cancel_on_new_commit) { pipeline.auto_cancel_on_new_commit }
+
+ context 'when pipeline_metadata is not present' do
+ it { is_expected.to eq('conservative') }
+ end
+
+ context 'when pipeline_metadata is present' do
+ before_all do
+ create(:ci_pipeline_metadata, project: pipeline.project, pipeline: pipeline)
+ end
+
+ context 'when auto_cancel_on_new_commit is nil' do
+ before do
+ pipeline.pipeline_metadata.auto_cancel_on_new_commit = nil
+ end
+
+ it { is_expected.to eq('conservative') }
+ end
+
+ context 'when auto_cancel_on_new_commit is a valid value' do
+ before do
+ pipeline.pipeline_metadata.auto_cancel_on_new_commit = 'interruptible'
+ end
+
+ it { is_expected.to eq('interruptible') }
+ end
+ end
+ end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 8c4d5fef24d..d4f7db3bddd 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -532,7 +532,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
let_it_be(:runner3) { create(:ci_runner, creator_id: 1) }
let_it_be(:runner4) { create(:ci_runner, creator_id: nil) }
- it 'returns runners with creator_id \'1\'' do
+ it "returns runners with creator_id '1'" do
is_expected.to contain_exactly(runner2, runner3)
end
end
diff --git a/spec/models/project_authorizations/changes_spec.rb b/spec/models/project_authorizations/changes_spec.rb
index d6ccfccbcbe..714144841fb 100644
--- a/spec/models/project_authorizations/changes_spec.rb
+++ b/spec/models/project_authorizations/changes_spec.rb
@@ -28,36 +28,49 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
end
shared_examples_for 'publishes AuthorizationsChangedEvent' do
- it 'publishes a AuthorizationsChangedEvent event with project id' do
- project_ids.each do |project_id|
- project_data = { project_id: project_id }
- project_event = instance_double('::ProjectAuthorizations::AuthorizationsChangedEvent', data: project_data)
+ it 'does not publish an AuthorizationsChangedEvent event' do
+ expect(::Gitlab::EventStore).not_to receive(:publish)
+ .with(an_instance_of(::ProjectAuthorizations::AuthorizationsChangedEvent))
- allow(::ProjectAuthorizations::AuthorizationsChangedEvent).to receive(:new)
- .with(data: project_data)
- .and_return(project_event)
+ apply_project_authorization_changes
+ end
- allow(::Gitlab::EventStore).to receive(:publish)
- expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
+ context 'when feature flag "add_policy_approvers_to_rules" is disabled' do
+ before do
+ stub_feature_flags(add_policy_approvers_to_rules: false)
end
- apply_project_authorization_changes
+ it 'publishes an AuthorizationsChangedEvent event with project id' do
+ allow(::Gitlab::EventStore).to receive(:publish)
+ project_ids.each do |project_id|
+ project_data = { project_id: project_id }
+ project_event = instance_double('::ProjectAuthorizations::AuthorizationsChangedEvent', data: project_data)
+
+ allow(::ProjectAuthorizations::AuthorizationsChangedEvent).to receive(:new)
+ .with(data: project_data)
+ .and_return(project_event)
+
+ expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
+ end
+
+ apply_project_authorization_changes
+ end
end
end
shared_examples_for 'publishes AuthorizationsRemovedEvent' do
it 'publishes a AuthorizationsRemovedEvent event with project id' do
- project_ids.each do |project_id|
+ allow(::Gitlab::EventStore).to receive(:publish_group)
+ project_events = project_ids.map do |project_id|
project_data = { project_id: project_id, user_ids: user_ids }
project_event = instance_double('::ProjectAuthorizations::AuthorizationsRemovedEvent', data: project_data)
allow(::ProjectAuthorizations::AuthorizationsRemovedEvent).to receive(:new)
.with(data: project_data)
.and_return(project_event)
-
- allow(::Gitlab::EventStore).to receive(:publish)
- expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
+ project_event
end
+ expect(::Gitlab::EventStore).to receive(:publish_group).with(project_events)
apply_project_authorization_changes
end
@@ -69,7 +82,43 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it 'does not publish a AuthorizationsRemovedEvent event' do
expect(::Gitlab::EventStore).not_to(
- receive(:publish).with(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
+ receive(:publish_group).with(
+ array_including(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
+ )
+ )
+
+ apply_project_authorization_changes
+ end
+ end
+ end
+
+ shared_examples_for 'publishes AuthorizationsAddedEvent' do
+ it 'publishes an AuthorizationsAddedEvent event with project id' do
+ allow(::Gitlab::EventStore).to receive(:publish_group)
+ project_events = project_ids.map do |project_id|
+ project_data = { project_id: project_id, user_ids: user_ids }
+ project_event = instance_double('::ProjectAuthorizations::AuthorizationsAddedEvent', data: project_data)
+
+ allow(::ProjectAuthorizations::AuthorizationsAddedEvent).to receive(:new)
+ .with(data: project_data)
+ .and_return(project_event)
+ project_event
+ end
+ expect(::Gitlab::EventStore).to receive(:publish_group).with(project_events)
+
+ apply_project_authorization_changes
+ end
+
+ context 'when feature flag "add_policy_approvers_to_rules" is disabled' do
+ before do
+ stub_feature_flags(add_policy_approvers_to_rules: false)
+ end
+
+ it 'does not publish an AuthorizationsAddedEvent event' do
+ expect(::Gitlab::EventStore).not_to(
+ receive(:publish_group).with(array_including(
+ an_instance_of(::ProjectAuthorizations::AuthorizationsAddedEvent))
+ )
)
apply_project_authorization_changes
@@ -88,8 +137,23 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
shared_examples_for 'does not publish AuthorizationsRemovedEvent' do
it 'does not publish a AuthorizationsRemovedEvent event' do
- expect(::Gitlab::EventStore).not_to receive(:publish)
- .with(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
+ expect(::Gitlab::EventStore).not_to(
+ receive(:publish_group).with(
+ array_including(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
+ )
+ )
+
+ apply_project_authorization_changes
+ end
+ end
+
+ shared_examples_for 'does not publish AuthorizationsAddedEvent' do
+ it 'does not publish an AuthorizationsAddedEvent event' do
+ expect(::Gitlab::EventStore).not_to(
+ receive(:publish_group).with(
+ array_including(an_instance_of(::ProjectAuthorizations::AuthorizationsAddedEvent))
+ )
+ )
apply_project_authorization_changes
end
@@ -101,6 +165,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
let(:project_ids) { [project_1.id, project_2.id, project_3.id] }
+ let(:user_ids) { [user.id] }
let(:authorizations_to_add) do
[
@@ -155,6 +220,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsAddedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
context 'when the GitLab installation does not have a replica database configured' do
@@ -166,6 +232,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsAddedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
end
@@ -178,6 +245,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsAddedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
end
@@ -242,6 +310,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
context 'when the GitLab installation does not have a replica database configured' do
before do
@@ -253,6 +322,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
end
@@ -265,6 +335,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
context 'when the user_ids list is empty' do
@@ -273,6 +344,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not removes project authorizations of the users in the current project'
it_behaves_like 'does not publish AuthorizationsChangedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
context 'when the user_ids list is nil' do
@@ -281,6 +353,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not removes project authorizations of the users in the current project'
it_behaves_like 'does not publish AuthorizationsChangedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
end
@@ -344,6 +417,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
context 'when the GitLab installation does not have a replica database configured' do
before do
@@ -355,6 +429,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
end
@@ -367,6 +442,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
context 'when the project_ids list is empty' do
@@ -375,6 +451,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not removes any project authorizations from the current user'
it_behaves_like 'does not publish AuthorizationsChangedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
context 'when the user_ids list is nil' do
@@ -383,6 +460,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not removes any project authorizations from the current user'
it_behaves_like 'does not publish AuthorizationsChangedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 68fdbeba84d..6d3e9058f91 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -5798,27 +5798,6 @@ RSpec.describe User, feature_category: :user_profile do
expect(user.namespace).to be_nil
end
-
- context 'when create_personal_ns_outside_model feature flag is disabled' do
- before do
- stub_feature_flags(create_personal_ns_outside_model: false)
- end
-
- it 'creates the namespace' do
- expect(user.namespace).to be_nil
-
- user.save!
-
- expect(user.namespace).to be_present
- expect(user.namespace).to be_kind_of(Namespaces::UserNamespace)
- end
-
- it 'creates the namespace setting' do
- user.save!
-
- expect(user.namespace.namespace_settings).to be_persisted
- end
- end
end
context 'for an existing user' do
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 8262640b283..1b6948d0380 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -876,107 +876,95 @@ RSpec.describe 'Query.runner(id)', :freeze_time, feature_category: :fleet_visibi
end
describe 'Query limits' do
- def runner_query(runner)
- <<~SINGLE
- runner(id: "#{runner.to_global_id}") {
- #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
- createdBy {
- id
- username
- webPath
- webUrl
- }
- groups {
- nodes {
- id
- path
- fullPath
- webUrl
- }
- }
- projects {
- nodes {
- id
- path
- fullPath
- webUrl
- }
- }
- ownerProject {
- id
- path
- fullPath
- webUrl
- }
+ let_it_be(:user2) { another_admin }
+ let_it_be(:user3) { create(:user) }
+ let_it_be(:tag_list) { %w[n_plus_1_test some_tag] }
+ let_it_be(:args) do
+ { current_user: user, token: { personal_access_token: create(:personal_access_token, user: user) } }
+ end
+
+ let_it_be(:runner1) { create(:ci_runner, tag_list: tag_list, creator: user) }
+ let_it_be(:runner2) do
+ create(:ci_runner, :group, groups: [group], tag_list: tag_list, creator: user)
+ end
+
+ let_it_be(:runner3) do
+ create(:ci_runner, :project, projects: [project1], tag_list: tag_list, creator: user)
+ end
+
+ let(:single_discrete_runners_query) do
+ multiple_discrete_runners_query([])
+ end
+
+ let(:runner_fragment) do
+ <<~QUERY
+ #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
+ createdBy {
+ id
+ username
+ webPath
+ webUrl
}
- SINGLE
+ QUERY
end
- let(:active_project_runner2) { create(:ci_runner, :project) }
- let(:active_group_runner2) { create(:ci_runner, :group) }
+ # Exclude fields that are already hardcoded above (or tested separately),
+ # and also some fields from deeper objects which are problematic:
+ # - createdBy: Known N+1 issues, but only on exotic fields which we don't normally use
+ # - ownerProject.pipeline: Needs arguments (iid or sha)
+ # - project.productAnalyticsState: Can be requested only for 1 Project(s) at a time.
+ let(:excluded_fields) { %w[createdBy jobs pipeline productAnalyticsState] }
+
+ it 'avoids N+1 queries', :use_sql_query_cache do
+ discrete_runners_control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post_graphql(single_discrete_runners_query, **args)
+ end
+
+ additional_runners = setup_additional_records
+
+ expect do
+ post_graphql(multiple_discrete_runners_query(additional_runners), **args)
- # Exclude fields that are already hardcoded above
- let(:excluded_fields) { %w[createdBy jobs groups projects ownerProject] }
+ raise StandardError, flattened_errors if graphql_errors # Ensure any error in query causes test to fail
+ end.not_to exceed_query_limit(discrete_runners_control)
+ end
- let(:single_query) do
+ def runner_query(runner, nr)
<<~QUERY
- {
- instance_runner1: #{runner_query(active_instance_runner)}
- group_runner1: #{runner_query(active_group_runner)}
- project_runner1: #{runner_query(active_project_runner)}
+ runner#{nr}: runner(id: "#{runner.to_global_id}") {
+ #{runner_fragment}
}
QUERY
end
- let(:double_query) do
+ def multiple_discrete_runners_query(additional_runners)
<<~QUERY
{
- instance_runner1: #{runner_query(active_instance_runner)}
- instance_runner2: #{runner_query(inactive_instance_runner)}
- group_runner1: #{runner_query(active_group_runner)}
- group_runner2: #{runner_query(active_group_runner2)}
- project_runner1: #{runner_query(active_project_runner)}
- project_runner2: #{runner_query(active_project_runner2)}
+ #{runner_query(runner1, 1)}
+ #{runner_query(runner2, 2)}
+ #{runner_query(runner3, 3)}
+ #{additional_runners.each_with_index.map { |r, i| runner_query(r, 4 + i) }.join("\n")}
}
QUERY
end
- it 'does not execute more queries per runner', :aggregate_failures, quarantine: "https://gitlab.com/gitlab-org/gitlab/-/issues/391442" do
- # warm-up license cache and so on:
- personal_access_token = create(:personal_access_token, user: user)
- args = { current_user: user, token: { personal_access_token: personal_access_token } }
- post_graphql(double_query, **args)
-
- control = ActiveRecord::QueryRecorder.new { post_graphql(single_query, **args) }
-
- personal_access_token = create(:personal_access_token, user: another_admin)
- args = { current_user: another_admin, token: { personal_access_token: personal_access_token } }
- expect { post_graphql(double_query, **args) }.not_to exceed_query_limit(control)
-
- expect(graphql_data.count).to eq 6
- expect(graphql_data).to match(
- a_hash_including(
- 'instance_runner1' => a_graphql_entity_for(active_instance_runner),
- 'instance_runner2' => a_graphql_entity_for(inactive_instance_runner),
- 'group_runner1' => a_graphql_entity_for(
- active_group_runner,
- groups: { 'nodes' => contain_exactly(a_graphql_entity_for(group)) }
- ),
- 'group_runner2' => a_graphql_entity_for(
- active_group_runner2,
- groups: { 'nodes' => active_group_runner2.groups.map { |g| a_graphql_entity_for(g) } }
- ),
- 'project_runner1' => a_graphql_entity_for(
- active_project_runner,
- projects: { 'nodes' => active_project_runner.projects.map { |p| a_graphql_entity_for(p) } },
- owner_project: a_graphql_entity_for(active_project_runner.projects[0])
- ),
- 'project_runner2' => a_graphql_entity_for(
- active_project_runner2,
- projects: { 'nodes' => active_project_runner2.projects.map { |p| a_graphql_entity_for(p) } },
- owner_project: a_graphql_entity_for(active_project_runner2.projects[0])
- )
- ))
+ def setup_additional_records
+ # Add more runners (including owned by other users)
+ runner4 = create(:ci_runner, tag_list: tag_list + %w[tag1 tag2], creator: user2)
+ runner5 = create(:ci_runner, :group, groups: [create(:group)], tag_list: tag_list + %w[tag2 tag3], creator: user3)
+ # Add one more project to runner
+ runner3.assign_to(create(:project))
+
+ # Add more runner managers (including to existing runners)
+ runner_manager1 = create(:ci_runner_machine, runner: runner1)
+ create(:ci_runner_machine, runner: runner1)
+ create(:ci_runner_machine, runner: runner2, system_xid: runner_manager1.system_xid)
+ create(:ci_runner_machine, runner: runner3)
+ create(:ci_runner_machine, runner: runner4, version: '16.4.1')
+ create(:ci_runner_machine, runner: runner5, version: '16.4.0', system_xid: runner_manager1.system_xid)
+ create(:ci_runner_machine, runner: runner3)
+
+ [runner4, runner5]
end
end
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
index 0fe14bef778..189106fae7b 100644
--- a/spec/requests/api/graphql/ci/runners_spec.rb
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -18,22 +18,34 @@ RSpec.describe 'Query.runners', feature_category: :fleet_visibility do
let(:fields) do
<<~QUERY
nodes {
- #{all_graphql_fields_for('CiRunner', excluded: %w[createdBy ownerProject])}
- createdBy {
- username
- webPath
- webUrl
- }
- ownerProject {
- id
- path
- fullPath
- webUrl
- }
+ #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
}
QUERY
end
+ let(:query) do
+ %(
+ query {
+ runners {
+ #{fields}
+ }
+ }
+ )
+ end
+
+ # Exclude fields from deeper objects which are problematic:
+ # - ownerProject.pipeline: Needs arguments (iid or sha)
+ # - project.productAnalyticsState: Can be requested only for 1 Project(s) at a time.
+ let(:excluded_fields) { %w[pipeline productAnalyticsState] }
+
+ it 'returns expected runners' do
+ post_graphql(query, current_user: current_user)
+
+ expect(runners_graphql_data['nodes']).to contain_exactly(
+ *Ci::Runner.all.map { |expected_runner| a_graphql_entity_for(expected_runner) }
+ )
+ end
+
context 'with filters' do
shared_examples 'a working graphql query returning expected runners' do
it_behaves_like 'a working graphql query' do
@@ -49,31 +61,6 @@ RSpec.describe 'Query.runners', feature_category: :fleet_visibility do
*Array(expected_runners).map { |expected_runner| a_graphql_entity_for(expected_runner) }
)
end
-
- it 'does not execute more queries per runner', :aggregate_failures do
- # warm-up license cache and so on:
- personal_access_token = create(:personal_access_token, user: current_user)
- args = { current_user: current_user, token: { personal_access_token: personal_access_token } }
- post_graphql(query, **args)
- expect(graphql_data_at(:runners, :nodes)).not_to be_empty
-
- admin2 = create(:admin)
- personal_access_token = create(:personal_access_token, user: admin2)
- args = { current_user: admin2, token: { personal_access_token: personal_access_token } }
- control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) }
-
- runner2 = create(:ci_runner, :instance, version: '14.0.0', tag_list: %w[tag5 tag6], creator: admin2)
- runner3 = create(:ci_runner, :project, version: '14.0.1', projects: [project], tag_list: %w[tag3 tag8],
- creator: current_user)
-
- create(:ci_build, :failed, runner: runner2)
- create(:ci_runner_machine, runner: runner2, version: '16.4.1')
-
- create(:ci_build, :failed, runner: runner3)
- create(:ci_runner_machine, runner: runner3, version: '16.4.0')
-
- expect { post_graphql(query, **args) }.not_to exceed_query_limit(control)
- end
end
context 'when filtered on type and status' do
@@ -179,52 +166,88 @@ RSpec.describe 'Query.runners', feature_category: :fleet_visibility do
end
end
end
+ end
- context 'without filters' do
- context 'with managers requested for multiple runners' do
- let(:fields) do
- <<~QUERY
- nodes {
- managers {
- nodes {
- #{all_graphql_fields_for('CiRunnerManager', max_depth: 1)}
- }
- }
- }
- QUERY
- end
+ describe 'Runner query limits' do
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:user3) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:tag_list) { %w[n_plus_1_test some_tag] }
+ let_it_be(:args) do
+ { current_user: user, token: { personal_access_token: create(:personal_access_token, user: user) } }
+ end
- let(:query) do
- %(
- query {
- runners {
- #{fields}
- }
- }
- )
- end
+ let_it_be(:runner1) { create(:ci_runner, tag_list: tag_list, creator: user) }
+ let_it_be(:runner2) do
+ create(:ci_runner, :group, groups: [group], tag_list: tag_list, creator: user)
+ end
- it 'does not execute more queries per runner', :aggregate_failures do
- # warm-up license cache and so on:
- personal_access_token = create(:personal_access_token, user: current_user)
- args = { current_user: current_user, token: { personal_access_token: personal_access_token } }
- post_graphql(query, **args)
- expect(graphql_data_at(:runners, :nodes)).not_to be_empty
-
- admin2 = create(:admin)
- personal_access_token = create(:personal_access_token, user: admin2)
- args = { current_user: admin2, token: { personal_access_token: personal_access_token } }
- control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) }
-
- create(:ci_runner, :instance, :with_runner_manager, version: '14.0.0', tag_list: %w[tag5 tag6],
- creator: admin2)
- create(:ci_runner, :project, :with_runner_manager, version: '14.0.1', projects: [project],
- tag_list: %w[tag3 tag8],
- creator: current_user)
-
- expect { post_graphql(query, **args) }.not_to exceed_query_limit(control)
- end
- end
+ let_it_be(:runner3) do
+ create(:ci_runner, :project, projects: [project], tag_list: tag_list, creator: user)
+ end
+
+ let(:runner_fragment) do
+ <<~QUERY
+ #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
+ createdBy {
+ id
+ username
+ webPath
+ webUrl
+ }
+ QUERY
+ end
+
+ # Exclude fields that are already hardcoded above (or tested separately),
+ # and also some fields from deeper objects which are problematic:
+ # - createdBy: Known N+1 issues, but only on exotic fields which we don't normally use
+ # - ownerProject.pipeline: Needs arguments (iid or sha)
+ # - project.productAnalyticsState: Can be requested only for 1 Project(s) at a time.
+ let(:excluded_fields) { %w[createdBy jobs pipeline productAnalyticsState] }
+
+ let(:runners_query) do
+ <<~QUERY
+ {
+ runners {
+ nodes { #{runner_fragment} }
+ }
+ }
+ QUERY
+ end
+
+ it 'avoids N+1 queries', :use_sql_query_cache do
+ personal_access_token = create(:personal_access_token, user: user)
+ args = { current_user: user, token: { personal_access_token: personal_access_token } }
+
+ runners_control = ActiveRecord::QueryRecorder.new(skip_cached: false) { post_graphql(runners_query, **args) }
+
+ setup_additional_records
+
+ expect { post_graphql(runners_query, **args) }.not_to exceed_query_limit(runners_control)
+ end
+
+ def setup_additional_records
+ # Add more runners (including owned by other users)
+ runner4 = create(:ci_runner, tag_list: tag_list + %w[tag1 tag2], creator: user2)
+ runner5 = create(:ci_runner, :group, groups: [create(:group)], tag_list: tag_list + %w[tag2 tag3], creator: user3)
+ # Add one more project to runner
+ runner3.assign_to(create(:project))
+
+ # Add more runner managers (including to existing runners)
+ runner_manager1 = create(:ci_runner_machine, runner: runner1)
+ create(:ci_runner_machine, runner: runner1)
+ create(:ci_runner_machine, runner: runner2, system_xid: runner_manager1.system_xid)
+ create(:ci_runner_machine, runner: runner3)
+ create(:ci_runner_machine, runner: runner4, version: '16.4.1')
+ create(:ci_runner_machine, runner: runner5, version: '16.4.0', system_xid: runner_manager1.system_xid)
+ create(:ci_runner_machine, runner: runner3)
+
+ create(:ci_build, :failed, runner: runner4)
+ create(:ci_build, :failed, runner: runner5)
+
+ [runner4, runner5]
end
end
diff --git a/spec/services/ci/cancel_pipeline_service_spec.rb b/spec/services/ci/cancel_pipeline_service_spec.rb
index 256d2db1ed2..6051485c4df 100644
--- a/spec/services/ci/cancel_pipeline_service_spec.rb
+++ b/spec/services/ci/cancel_pipeline_service_spec.rb
@@ -13,12 +13,14 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
current_user: current_user,
cascade_to_children: cascade_to_children,
auto_canceled_by_pipeline: auto_canceled_by_pipeline,
- execute_async: execute_async)
+ execute_async: execute_async,
+ safe_cancellation: safe_cancellation)
end
let(:cascade_to_children) { true }
let(:auto_canceled_by_pipeline) { nil }
let(:execute_async) { true }
+ let(:safe_cancellation) { false }
shared_examples 'force_execute' do
context 'when pipeline is not cancelable' do
@@ -30,9 +32,14 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
context 'when pipeline is cancelable' do
before do
- create(:ci_build, :running, pipeline: pipeline)
- create(:ci_build, :created, pipeline: pipeline)
- create(:ci_build, :success, pipeline: pipeline)
+ create(:ci_build, :running, pipeline: pipeline, name: 'build1')
+ create(:ci_build, :created, pipeline: pipeline, name: 'build2')
+ create(:ci_build, :success, pipeline: pipeline, name: 'build3')
+ create(:ci_build, :pending, :interruptible, pipeline: pipeline, name: 'build4')
+
+ create(:ci_bridge, :running, pipeline: pipeline, name: 'bridge1')
+ create(:ci_bridge, :running, :interruptible, pipeline: pipeline, name: 'bridge2')
+ create(:ci_bridge, :success, :interruptible, pipeline: pipeline, name: 'bridge3')
end
it 'logs the event' do
@@ -55,7 +62,15 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
it 'cancels all cancelable jobs' do
expect(response).to be_success
- expect(pipeline.all_jobs.pluck(:status)).to match_array(%w[canceled canceled success])
+ expect(pipeline.all_jobs.pluck(:name, :status)).to match_array([
+ %w[build1 canceled],
+ %w[build2 canceled],
+ %w[build3 success],
+ %w[build4 canceled],
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success]
+ ])
end
context 'when auto_canceled_by_pipeline is provided' do
@@ -74,6 +89,28 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
end
end
+ context 'when cascade_to_children: false and safe_cancellation: true' do
+ # We are testing the `safe_cancellation: true` case with only `cascade_to_children: false`
+ # because `safe_cancellation` is passed as `true` only when `cascade_to_children` is `false`
+ # from `CancelRedundantPipelinesService`.
+
+ let(:cascade_to_children) { false }
+ let(:safe_cancellation) { true }
+
+ it 'cancels only interruptible jobs' do
+ expect(response).to be_success
+ expect(pipeline.all_jobs.pluck(:name, :status)).to match_array([
+ %w[build1 running],
+ %w[build2 created],
+ %w[build3 success],
+ %w[build4 canceled],
+ %w[bridge1 running],
+ %w[bridge2 canceled],
+ %w[bridge3 success]
+ ])
+ end
+ end
+
context 'when pipeline has child pipelines' do
let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
let!(:child_job) { create(:ci_build, :running, pipeline: child_pipeline) }
@@ -81,8 +118,8 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
let!(:grandchild_job) { create(:ci_build, :running, pipeline: grandchild_pipeline) }
before do
- child_pipeline.source_bridge.update!(status: :running)
- grandchild_pipeline.source_bridge.update!(status: :running)
+ child_pipeline.source_bridge.update!(name: 'child_pipeline_bridge', status: :running)
+ grandchild_pipeline.source_bridge.update!(name: 'grandchild_pipeline_bridge', status: :running)
end
context 'when execute_async: false' do
@@ -91,8 +128,15 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
it 'cancels the bridge jobs and child jobs' do
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
- expect(child_pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
+ expect(child_pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[grandchild_pipeline_bridge canceled]
+ ])
expect(child_job.reload).to be_canceled
expect(grandchild_job.reload).to be_canceled
end
@@ -110,7 +154,12 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
end
end
@@ -124,7 +173,12 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
expect(child_job.reload).to be_running
end
end
diff --git a/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb b/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
index 851c6f8fbea..3ad6164bd01 100644
--- a/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
+++ b/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'creates a pipeline with errors' do
expect(pipeline).to be_persisted
expect(pipeline.errors.full_messages).to include(
- 'workflow:auto_cancel on new commit must be one of: conservative, interruptible, disabled')
+ 'workflow:auto_cancel on new commit must be one of: conservative, interruptible, none')
end
end
end
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index 0d83187f9e4..7b5eef92f53 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
project.update!(auto_cancel_pending_pipelines: 'enabled')
end
- it 'cancels only previous interruptible builds' do
+ it 'cancels only previous non started builds' do
execute
expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
@@ -153,6 +153,36 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
end
+
+ context 'when the child pipeline auto_cancel_on_new_commit is `interruptible`' do
+ before do
+ child_pipeline.create_pipeline_metadata!(
+ project: child_pipeline.project, auto_cancel_on_new_commit: 'interruptible'
+ )
+ end
+
+ it 'cancels interruptible child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'success')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+ end
+ end
+ end
end
context 'when the child pipeline has non-interruptible non-started job' do
@@ -227,6 +257,37 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+
context 'when there are trigger jobs' do
before do
create(:ci_bridge, :created, pipeline: prev_pipeline)
@@ -246,6 +307,152 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
+ context 'when auto_cancel_on_new_commit is `interruptible`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'interruptible'
+ )
+ end
+
+ it 'cancels only interruptible jobs' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'created')
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'cancels non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'still cancels only interruptible jobs' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'canceled', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly(
+ 'created', 'success', 'running', 'failed', 'success'
+ )
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is `none`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'none'
+ )
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is `conservative`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'conservative'
+ )
+ end
+
+ it 'cancels only previous non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'cancels only previous non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is an invalid value' do
+ before do
+ allow(prev_pipeline).to receive(:auto_cancel_on_new_commit).and_return('invalid')
+ relation = Ci::Pipeline.id_in(prev_pipeline.id)
+ allow(relation).to receive(:each).and_yield(prev_pipeline)
+ allow(Ci::Pipeline).to receive(:id_in).and_return(relation)
+ end
+
+ it 'raises an error' do
+ expect { execute }.to raise_error(ArgumentError, 'Unknown auto_cancel_on_new_commit value: invalid')
+ end
+ end
+
it 'does not cancel future pipelines' do
expect(prev_pipeline.id).to be < pipeline.id
expect(build_statuses(pipeline)).to contain_exactly('pending')
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index af151f93dc7..c08b40e9528 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -119,14 +119,34 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
before do
# validations will fail because we try to invite them to the project as a guest
source.group.add_developer(member)
+ allow(Gitlab::EventStore).to receive(:publish)
end
- it 'triggers the members added and authorizations changed events' do
+ it 'triggers the authorizations changed events' do
expect(Gitlab::EventStore)
- .to receive(:publish)
- .with(an_instance_of(ProjectAuthorizations::AuthorizationsChangedEvent))
+ .to receive(:publish_group)
+ .with(array_including(an_instance_of(ProjectAuthorizations::AuthorizationsAddedEvent)))
.and_call_original
+ execute_service
+ end
+
+ context 'when feature flag "add_policy_approvers_to_rules" is disabled' do
+ before do
+ stub_feature_flags(add_policy_approvers_to_rules: false)
+ end
+
+ it 'triggers the authorizations changed event' do
+ expect(Gitlab::EventStore)
+ .to receive(:publish)
+ .with(an_instance_of(ProjectAuthorizations::AuthorizationsChangedEvent))
+ .and_call_original
+
+ execute_service
+ end
+ end
+
+ it 'triggers the members added event' do
expect(Gitlab::EventStore)
.to receive(:publish)
.with(an_instance_of(Members::MembersAddedEvent))
diff --git a/spec/support/finder_collection.rb b/spec/support/finder_collection.rb
index 494dd4bdca1..93363943449 100644
--- a/spec/support/finder_collection.rb
+++ b/spec/support/finder_collection.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'set'
+require 'set' # rubocop:disable Lint/RedundantRequireStatement -- Ruby 3.1 and earlier needs this. Drop this line after Ruby 3.2+ is only supported.
module Support
# Ensure that finders' `execute` method always returns
diff --git a/spec/support/helpers/dns_helpers.rb b/spec/support/helpers/dns_helpers.rb
index be26c80d217..e673e36adbd 100644
--- a/spec/support/helpers/dns_helpers.rb
+++ b/spec/support/helpers/dns_helpers.rb
@@ -6,6 +6,7 @@ module DnsHelpers
stub_invalid_dns!
permit_local_dns!
permit_postgresql!
+ permit_redis!
end
def permit_dns!
@@ -53,6 +54,18 @@ module DnsHelpers
ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).map(&:host).compact.uniq
end
+ def permit_redis!
+ # https://github.com/redis-rb/redis-client/blob/v0.11.2/lib/redis_client/ruby_connection.rb#L51 uses Socket.tcp that
+ # calls Addrinfo.getaddrinfo internally.
+ hosts = Gitlab::Redis::ALL_CLASSES.map do |redis_instance|
+ redis_instance.redis_client_params[:host]
+ end.uniq.compact
+
+ hosts.each do |host|
+ allow(Addrinfo).to receive(:getaddrinfo).with(host, anything, nil, :STREAM, anything, anything, any_args).and_call_original
+ end
+ end
+
def stub_resolver(stubbed_lookups = {})
resolver = instance_double('Resolv::DNS')
allow(resolver).to receive(:timeouts=)
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index 69c20a00c5a..060976eba2d 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -93,8 +93,6 @@ RSpec.shared_context 'structured_logger' do
end
before do
- allow(Sidekiq).to receive(:logger).and_return(logger)
-
allow(subject).to receive(:current_time).and_return(timestamp.to_f)
allow(Process).to receive(:clock_gettime).with(Process::CLOCK_REALTIME, :float_second)
@@ -103,7 +101,7 @@ RSpec.shared_context 'structured_logger' do
.and_return(clock_thread_cputime_start, clock_thread_cputime_end)
end
- subject { described_class.new }
+ subject { described_class.new(logger) }
def call_subject(job, queue)
# This structured logger strongly depends on execution of `InstrumentationLogger`
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
index 85ee3ed4183..d541dee438e 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
@@ -55,6 +55,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_mem_total_bytes, anything, {}, :all).and_return(sidekiq_mem_total_bytes)
allow(concurrency_metric).to receive(:set)
+ allow(completion_seconds_metric).to receive(:get)
end
end
diff --git a/spec/support/shared_examples/redis/redis_shared_examples.rb b/spec/support/shared_examples/redis/redis_shared_examples.rb
index 4929a753829..1f7834a4d7c 100644
--- a/spec/support/shared_examples/redis/redis_shared_examples.rb
+++ b/spec/support/shared_examples/redis/redis_shared_examples.rb
@@ -86,6 +86,67 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
+ describe '.redis_client_params' do
+ # .redis_client_params wraps over `.redis_store_options` by modifying its outputs
+ # to be compatible with `RedisClient`. We test for compatibility in this block while
+ # the contents of redis_store_options are tested in the `.params` block.
+
+ subject { described_class.new(rails_env).redis_client_params }
+
+ let(:rails_env) { 'development' }
+ let(:config_file_name) { config_old_format_socket }
+
+ shared_examples 'instrumentation_class in custom key' do
+ it 'moves instrumentation class into custom' do
+ expect(subject[:custom][:instrumentation_class]).to eq(described_class.store_name)
+ expect(subject[:instrumentation_class]).to be_nil
+ end
+ end
+
+ context 'when url is host based' do
+ context 'with old format' do
+ let(:config_file_name) { config_old_format_host }
+
+ it 'does not raise ArgumentError for invalid keywords' do
+ expect { RedisClient.config(**subject) }.not_to raise_error
+ end
+
+ it_behaves_like 'instrumentation_class in custom key'
+ end
+
+ context 'with new format' do
+ let(:config_file_name) { config_new_format_host }
+
+ where(:rails_env, :host) do
+ [
+ %w[development development-host],
+ %w[test test-host],
+ %w[production production-host]
+ ]
+ end
+
+ with_them do
+ it 'does not raise ArgumentError for invalid keywords in SentinelConfig' do
+ expect(subject[:name]).to eq(host)
+ expect { RedisClient.sentinel(**subject) }.not_to raise_error
+ end
+
+ it_behaves_like 'instrumentation_class in custom key'
+ end
+ end
+ end
+
+ context 'when url contains unix socket reference' do
+ let(:config_file_name) { config_old_format_socket }
+
+ it 'does not raise ArgumentError for invalid keywords' do
+ expect { RedisClient.config(**subject) }.not_to raise_error
+ end
+
+ it_behaves_like 'instrumentation_class in custom key'
+ end
+ end
+
describe '.params' do
subject { described_class.new(rails_env).params }
diff --git a/spec/support/sidekiq.rb b/spec/support/sidekiq.rb
index b25f39c5e74..6c354c780b2 100644
--- a/spec/support/sidekiq.rb
+++ b/spec/support/sidekiq.rb
@@ -1,13 +1,19 @@
# frozen_string_literal: true
RSpec.configure do |config|
- def gitlab_sidekiq_inline(&block)
+ def gitlab_sidekiq_inline
# We need to cleanup the queues before running jobs in specs because the
# middleware might have written to redis
redis_queues_cleanup!
redis_queues_metadata_cleanup!
- Sidekiq::Testing.inline!(&block)
+
+ # Scoped inline! is thread-safe which breaks capybara specs
+ # see https://github.com/sidekiq/sidekiq/issues/6069
+ Sidekiq::Testing.inline!
+
+ yield
ensure
+ Sidekiq::Testing.fake! # fake is the default so we reset it to that
redis_queues_cleanup!
redis_queues_metadata_cleanup!
end
diff --git a/spec/support/sidekiq_middleware.rb b/spec/support/sidekiq_middleware.rb
index f4d90ff5151..cbd6163d46b 100644
--- a/spec/support/sidekiq_middleware.rb
+++ b/spec/support/sidekiq_middleware.rb
@@ -6,15 +6,6 @@ require 'sidekiq/testing'
module SidekiqMiddleware
def with_sidekiq_server_middleware(&block)
Sidekiq::Testing.server_middleware.clear
-
- if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('6.5.12')
- raise 'New version of sidekiq detected, please remove this line'
- end
-
- # This line is a workaround for a Sidekiq bug that is already fixed in v7.0.0
- # https://github.com/mperham/sidekiq/commit/1b83a152786ed382f07fff12d2608534f1e3c922
- Sidekiq::Testing.server_middleware.instance_variable_set(:@config, Sidekiq)
-
Sidekiq::Testing.server_middleware(&block)
ensure
Sidekiq::Testing.server_middleware.clear