Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-10-20 11:43:02 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-10-20 11:43:02 +0300
commitd9ab72d6080f594d0b3cae15f14b3ef2c6c638cb (patch)
tree2341ef426af70ad1e289c38036737e04b0aa5007 /spec/services/projects
parentd6e514dd13db8947884cd58fe2a9c2a063400a9b (diff)
Add latest changes from gitlab-org/gitlab@14-4-stable-eev14.4.0-rc42
Diffstat (limited to 'spec/services/projects')
-rw-r--r--spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb133
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb541
-rw-r--r--spec/services/projects/create_service_spec.rb40
-rw-r--r--spec/services/projects/destroy_service_spec.rb109
-rw-r--r--spec/services/projects/group_links/update_service_spec.rb90
-rw-r--r--spec/services/projects/import_service_spec.rb6
-rw-r--r--spec/services/projects/move_access_service_spec.rb2
-rw-r--r--spec/services/projects/operations/update_service_spec.rb8
-rw-r--r--spec/services/projects/participants_service_spec.rb146
-rw-r--r--spec/services/projects/transfer_service_spec.rb185
-rw-r--r--spec/services/projects/update_pages_service_spec.rb16
-rw-r--r--spec/services/projects/update_service_spec.rb62
12 files changed, 891 insertions, 447 deletions
diff --git a/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb b/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb
new file mode 100644
index 00000000000..dfe2ff9e57c
--- /dev/null
+++ b/spec/services/projects/container_repository/cache_tags_created_at_service_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Projects::ContainerRepository::CacheTagsCreatedAtService, :clean_gitlab_redis_cache do
+ let_it_be(:dummy_tag_class) { Struct.new(:name, :created_at) }
+ let_it_be(:repository) { create(:container_repository) }
+
+ let(:tags) { create_tags(5) }
+ let(:service) { described_class.new(repository) }
+
+ shared_examples 'not interacting with redis' do
+ it 'does not interact with redis' do
+ expect(::Gitlab::Redis::Cache).not_to receive(:with)
+
+ subject
+ end
+ end
+
+ describe '#populate' do
+ subject { service.populate(tags) }
+
+ context 'with tags' do
+ it 'gets values from redis' do
+ expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original
+
+ expect(subject).to eq(0)
+
+ tags.each { |t| expect(t.created_at).to eq(nil) }
+ end
+
+ context 'with cached values' do
+ let(:cached_tags) { tags.first(2) }
+
+ before do
+ ::Gitlab::Redis::Cache.with do |redis|
+ cached_tags.each do |tag|
+ redis.set(cache_key(tag), rfc3339(10.days.ago))
+ end
+ end
+ end
+
+ it 'gets values from redis' do
+ expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original
+
+ expect(subject).to eq(2)
+
+ cached_tags.each { |t| expect(t.created_at).not_to eq(nil) }
+ (tags - cached_tags).each { |t| expect(t.created_at).to eq(nil) }
+ end
+ end
+ end
+
+ context 'with no tags' do
+ let(:tags) { [] }
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ describe '#insert' do
+ let(:max_ttl) { 90.days }
+
+ subject { service.insert(tags, max_ttl) }
+
+ context 'with tags' do
+ let(:tag) { tags.first }
+ let(:ttl) { 90.days - 3.days }
+
+ before do
+ travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0))
+
+ tag.created_at = DateTime.rfc3339(3.days.ago.rfc3339)
+ end
+
+ after do
+ travel_back
+ end
+
+ it 'inserts values in redis' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect(redis)
+ .to receive(:set)
+ .with(cache_key(tag), rfc3339(tag.created_at), ex: ttl.to_i)
+ .and_call_original
+ end
+
+ subject
+ end
+
+ context 'with some of them already cached' do
+ let(:tag) { tags.first }
+
+ before do
+ ::Gitlab::Redis::Cache.with do |redis|
+ redis.set(cache_key(tag), rfc3339(10.days.ago))
+ end
+ service.populate(tags)
+ end
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ context 'with no tags' do
+ let(:tags) { [] }
+
+ it_behaves_like 'not interacting with redis'
+ end
+
+ context 'with no expires_in' do
+ let(:max_ttl) { nil }
+
+ it_behaves_like 'not interacting with redis'
+ end
+ end
+
+ def create_tags(size)
+ Array.new(size) do |i|
+ dummy_tag_class.new("Tag #{i}", nil)
+ end
+ end
+
+ def cache_key(tag)
+ "container_repository:{#{repository.id}}:tag:#{tag.name}:created_at"
+ end
+
+ def rfc3339(date_time)
+ # DateTime rfc3339 is different ActiveSupport::TimeWithZone rfc3339
+ # The caching will use DateTime rfc3339
+ DateTime.rfc3339(date_time.rfc3339).rfc3339
+ end
+end
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index eed22416868..289bbf4540e 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
-RSpec.describe Projects::ContainerRepository::CleanupTagsService do
+RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_redis_cache do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :private) }
- let_it_be(:repository) { create(:container_repository, :root, project: project) }
- let(:service) { described_class.new(project, user, params) }
+ let(:repository) { create(:container_repository, :root, project: project) }
+ let(:service) { described_class.new(repository, user, params) }
let(:tags) { %w[latest A Ba Bb C D E] }
before do
@@ -39,291 +39,442 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
end
describe '#execute' do
- subject { service.execute(repository) }
+ subject { service.execute }
- context 'when no params are specified' do
- let(:params) { {} }
+ shared_examples 'reading and removing tags' do |caching_enabled: true|
+ context 'when no params are specified' do
+ let(:params) { {} }
- it 'does not remove anything' do
- expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:execute)
+ it 'does not remove anything' do
+ expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
+ .not_to receive(:execute)
+ expect_no_caching
- is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
- end
- end
-
- context 'when regex matching everything is specified' do
- shared_examples 'removes all matches' do
- it 'does remove all tags except latest' do
- expect_delete(%w(A Ba Bb C D E))
-
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
+ is_expected.to eq(expected_service_response(before_truncate_size: 0, after_truncate_size: 0, before_delete_size: 0))
end
end
- let(:params) do
- { 'name_regex_delete' => '.*' }
- end
+ context 'when regex matching everything is specified' do
+ shared_examples 'removes all matches' do
+ it 'does remove all tags except latest' do
+ expect_no_caching
- it_behaves_like 'removes all matches'
+ expect_delete(%w(A Ba Bb C D E))
+
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C D E)))
+ end
+ end
- context 'with deprecated name_regex param' do
let(:params) do
- { 'name_regex' => '.*' }
+ { 'name_regex_delete' => '.*' }
end
it_behaves_like 'removes all matches'
+
+ context 'with deprecated name_regex param' do
+ let(:params) do
+ { 'name_regex' => '.*' }
+ end
+
+ it_behaves_like 'removes all matches'
+ end
end
- end
- context 'with invalid regular expressions' do
- RSpec.shared_examples 'handling an invalid regex' do
- it 'keeps all tags' do
- expect(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:new)
- subject
+ context 'with invalid regular expressions' do
+ shared_examples 'handling an invalid regex' do
+ it 'keeps all tags' do
+ expect_no_caching
+
+ expect(Projects::ContainerRepository::DeleteTagsService)
+ .not_to receive(:new)
+
+ subject
+ end
+
+ it { is_expected.to eq(status: :error, message: 'invalid regex') }
+
+ it 'calls error tracking service' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
+
+ subject
+ end
end
- it { is_expected.to eq(status: :error, message: 'invalid regex') }
+ context 'when name_regex_delete is invalid' do
+ let(:params) { { 'name_regex_delete' => '*test*' } }
+
+ it_behaves_like 'handling an invalid regex'
+ end
- it 'calls error tracking service' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
+ context 'when name_regex is invalid' do
+ let(:params) { { 'name_regex' => '*test*' } }
- subject
+ it_behaves_like 'handling an invalid regex'
end
- end
- context 'when name_regex_delete is invalid' do
- let(:params) { { 'name_regex_delete' => '*test*' } }
+ context 'when name_regex_keep is invalid' do
+ let(:params) { { 'name_regex_keep' => '*test*' } }
- it_behaves_like 'handling an invalid regex'
+ it_behaves_like 'handling an invalid regex'
+ end
end
- context 'when name_regex is invalid' do
- let(:params) { { 'name_regex' => '*test*' } }
+ context 'when delete regex matching specific tags is used' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D' }
+ end
- it_behaves_like 'handling an invalid regex'
- end
+ it 'does remove C and D' do
+ expect_delete(%w(C D))
- context 'when name_regex_keep is invalid' do
- let(:params) { { 'name_regex_keep' => '*test*' } }
+ expect_no_caching
- it_behaves_like 'handling an invalid regex'
- end
- end
+ is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
+ end
- context 'when delete regex matching specific tags is used' do
- let(:params) do
- { 'name_regex_delete' => 'C|D' }
- end
+ context 'with overriding allow regex' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D',
+ 'name_regex_keep' => 'C' }
+ end
- it 'does remove C and D' do
- expect_delete(%w(C D))
+ it 'does not remove C' do
+ expect_delete(%w(D))
- is_expected.to eq(expected_service_response(deleted: %w(C D), before_truncate_size: 2, after_truncate_size: 2, before_delete_size: 2))
- end
+ expect_no_caching
- context 'with overriding allow regex' do
- let(:params) do
- { 'name_regex_delete' => 'C|D',
- 'name_regex_keep' => 'C' }
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ end
end
- it 'does not remove C' do
- expect_delete(%w(D))
+ context 'with name_regex_delete overriding deprecated name_regex' do
+ let(:params) do
+ { 'name_regex' => 'C|D',
+ 'name_regex_delete' => 'D' }
+ end
+
+ it 'does not remove C' do
+ expect_delete(%w(D))
+
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ end
end
end
- context 'with name_regex_delete overriding deprecated name_regex' do
+ context 'with allow regex value' do
let(:params) do
- { 'name_regex' => 'C|D',
- 'name_regex_delete' => 'D' }
+ { 'name_regex_delete' => '.*',
+ 'name_regex_keep' => 'B.*' }
end
- it 'does not remove C' do
- expect_delete(%w(D))
+ it 'does not remove B*' do
+ expect_delete(%w(A C D E))
+
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(D), before_truncate_size: 1, after_truncate_size: 1, before_delete_size: 1))
+ is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
end
end
- end
- context 'with allow regex value' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'name_regex_keep' => 'B.*' }
- end
+ context 'when keeping only N tags' do
+ let(:params) do
+ { 'name_regex' => 'A|B.*|C',
+ 'keep_n' => 1 }
+ end
- it 'does not remove B*' do
- expect_delete(%w(A C D E))
+ it 'sorts tags by date' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(A C D E), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
- end
+ expect_no_caching
- context 'when keeping only N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C',
- 'keep_n' => 1 }
+ expect(service).to receive(:order_by_date).and_call_original
+
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
+ end
end
- it 'sorts tags by date' do
- expect_delete(%w(Bb Ba C))
+ context 'when not keeping N tags' do
+ let(:params) do
+ { 'name_regex' => 'A|B.*|C' }
+ end
+
+ it 'does not sort tags by date' do
+ expect_delete(%w(A Ba Bb C))
- expect(service).to receive(:order_by_date).and_call_original
+ expect_no_caching
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 3))
- end
- end
+ expect(service).not_to receive(:order_by_date)
- context 'when not keeping N tags' do
- let(:params) do
- { 'name_regex' => 'A|B.*|C' }
+ is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
+ end
end
- it 'does not sort tags by date' do
- expect_delete(%w(A Ba Bb C))
+ context 'when removing keeping only 3' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 3 }
+ end
- expect(service).not_to receive(:order_by_date)
+ it 'does remove B* and C as they are the oldest' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(A Ba Bb C), before_truncate_size: 4, after_truncate_size: 4, before_delete_size: 4))
- end
- end
+ expect_no_caching
- context 'when removing keeping only 3' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 3 }
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C as they are the oldest' do
- expect_delete(%w(Bb Ba C))
+ context 'when removing older than 1 day' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'older_than' => '1 day' }
+ end
+
+ it 'does remove B* and C as they are older than 1 day' do
+ expect_delete(%w(Ba Bb C))
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
- end
+ expect_no_caching
- context 'when removing older than 1 day' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'older_than' => '1 day' }
+ is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C as they are older than 1 day' do
- expect_delete(%w(Ba Bb C))
+ context 'when combining all parameters' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
+ end
+
+ it 'does remove B* and C' do
+ expect_delete(%w(Bb Ba C))
- is_expected.to eq(expected_service_response(deleted: %w(Ba Bb C), before_delete_size: 3))
- end
- end
+ expect_no_caching
- context 'when combining all parameters' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day' }
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'does remove B* and C' do
- expect_delete(%w(Bb Ba C))
+ context 'when running a container_expiration_policy' do
+ let(:user) { nil }
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
- end
- end
+ context 'with valid container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true }
+ end
- context 'when running a container_expiration_policy' do
- let(:user) { nil }
+ it 'succeeds without a user' do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- context 'with valid container_expiration_policy param' do
- let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true }
+ caching_enabled ? expect_caching : expect_no_caching
+
+ is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ end
end
- it 'succeeds without a user' do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ context 'without container_expiration_policy param' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day' }
+ end
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ it 'fails' do
+ is_expected.to eq(status: :error, message: 'access denied')
+ end
end
end
- context 'without container_expiration_policy param' do
+ context 'truncating the tags list' do
let(:params) do
- { 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day' }
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1
+ }
+ end
+
+ shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
+ it 'returns the response' do
+ expect_no_caching
+
+ result = subject
+
+ service_response = expected_service_response(
+ status: status,
+ original_size: original_size,
+ before_truncate_size: before_truncate_size,
+ after_truncate_size: after_truncate_size,
+ before_delete_size: before_delete_size,
+ deleted: nil
+ )
+
+ expect(result).to eq(service_response)
+ end
+ end
+
+ where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
+ false | 10 | :success | :success | false
+ false | 10 | :error | :error | false
+ false | 3 | :success | :success | false
+ false | 3 | :error | :error | false
+ false | 0 | :success | :success | false
+ false | 0 | :error | :error | false
+ true | 10 | :success | :success | false
+ true | 10 | :error | :error | false
+ true | 3 | :success | :error | true
+ true | 3 | :error | :error | true
+ true | 0 | :success | :success | false
+ true | 0 | :error | :error | false
end
- it 'fails' do
- is_expected.to eq(status: :error, message: 'access denied')
+ with_them do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
+ stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
+ allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
+ expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ end
+ end
+
+ original_size = 7
+ keep_n = 1
+
+ it_behaves_like(
+ 'returning the response',
+ status: params[:expected_status],
+ original_size: original_size,
+ before_truncate_size: original_size - keep_n,
+ after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
+ before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
+ )
end
end
end
- context 'truncating the tags list' do
+ context 'caching' do
let(:params) do
{
'name_regex_delete' => '.*',
- 'keep_n' => 1
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true
+ }
+ end
+
+ let(:tags_and_created_ats) do
+ {
+ 'A' => 1.hour.ago,
+ 'Ba' => 5.days.ago,
+ 'Bb' => 5.days.ago,
+ 'C' => 1.month.ago,
+ 'D' => nil,
+ 'E' => nil
}
end
- shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
- it 'returns the response' do
- result = subject
+ let(:cacheable_tags) { tags_and_created_ats.reject { |_, value| value.nil? } }
- service_response = expected_service_response(
- status: status,
- original_size: original_size,
- before_truncate_size: before_truncate_size,
- after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size,
- deleted: nil
- )
+ before do
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0))
+ # We froze time so we need to set the created_at stubs again
+ stub_digest_config('sha256:configA', 1.hour.ago)
+ stub_digest_config('sha256:configB', 5.days.ago)
+ stub_digest_config('sha256:configC', 1.month.ago)
+ end
- expect(result).to eq(service_response)
- end
+ after do
+ travel_back
end
- where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
- false | 10 | :success | :success | false
- false | 10 | :error | :error | false
- false | 3 | :success | :success | false
- false | 3 | :error | :error | false
- false | 0 | :success | :success | false
- false | 0 | :error | :error | false
- true | 10 | :success | :success | false
- true | 10 | :error | :error | false
- true | 3 | :success | :error | true
- true | 3 | :error | :error | true
- true | 0 | :success | :success | false
- true | 0 | :error | :error | false
+ it 'caches the created_at values' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect_mget(redis, tags_and_created_ats.keys)
+
+ expect_set(redis, cacheable_tags)
+ end
+
+ expect(subject).to include(cached_tags_count: 0)
end
- with_them do
+ context 'with cached values' do
before do
- stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
- stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
- allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
- expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ ::Gitlab::Redis::Cache.with do |redis|
+ redis.set(cache_key('C'), rfc3339(1.month.ago))
+ end
+ end
+
+ it 'uses them' do
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect_mget(redis, tags_and_created_ats.keys)
+
+ # because C is already in cache, it should not be cached again
+ expect_set(redis, cacheable_tags.except('C'))
+ end
+
+ # We will ping the container registry for all tags *except* for C because it's cached
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configA").and_call_original
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configB").twice.and_call_original
+ expect(ContainerRegistry::Blob).not_to receive(:new).with(repository, "digest" => "sha256:configC")
+ expect(ContainerRegistry::Blob).to receive(:new).with(repository, "digest" => "sha256:configD").and_call_original
+
+ expect(subject).to include(cached_tags_count: 1)
+ end
+ end
+
+ def expect_mget(redis, keys)
+ expect(redis).to receive(:mget).with(keys.map(&method(:cache_key))).and_call_original
+ end
+
+ def expect_set(redis, tags)
+ tags.each do |tag_name, created_at|
+ ex = 1.day.seconds - (Time.zone.now - created_at).seconds
+ if ex > 0
+ expect(redis).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex.to_i)
end
end
+ end
+
+ def cache_key(tag_name)
+ "container_repository:{#{repository.id}}:tag:#{tag_name}:created_at"
+ end
+
+ def rfc3339(date_time)
+ # DateTime rfc3339 is different ActiveSupport::TimeWithZone rfc3339
+ # The caching will use DateTime rfc3339
+ DateTime.rfc3339(date_time.rfc3339).rfc3339
+ end
+ end
+
+ context 'with container_registry_expiration_policies_caching enabled for the project' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: project)
+ end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: true
+ end
- original_size = 7
- keep_n = 1
+ context 'with container_registry_expiration_policies_caching disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: false)
+ end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: false
+ end
- it_behaves_like(
- 'returning the response',
- status: params[:expected_status],
- original_size: original_size,
- before_truncate_size: original_size - keep_n,
- after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
- before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
- )
+ context 'with container_registry_expiration_policies_caching not enabled for the project' do
+ let_it_be(:another_project) { create(:project) }
+
+ before do
+ stub_feature_flags(container_registry_expiration_policies_caching: another_project)
end
+
+ it_behaves_like 'reading and removing tags', caching_enabled: false
end
end
@@ -368,7 +519,19 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
original_size: original_size,
before_truncate_size: before_truncate_size,
after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size
+ before_delete_size: before_delete_size,
+ cached_tags_count: 0
}.compact.merge(deleted_size: deleted&.size)
end
+
+ def expect_no_caching
+ expect(::Gitlab::Redis::Cache).not_to receive(:with)
+ end
+
+ def expect_caching
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:mget).and_call_original
+ expect(redis).to receive(:set).and_call_original
+ end
+ end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index e15d9341fd1..d7c43ac676e 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -622,6 +622,22 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
+ context 'when SAST initialization is requested' do
+ let(:project) { create_project(user, opts) }
+
+ before do
+ opts[:initialize_with_sast] = '1'
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
+ it 'creates a commit for SAST', :aggregate_failures do
+ expect(project.repository.commit_count).to be(1)
+ expect(project.repository.commit.message).to eq(
+ 'Configure SAST in `.gitlab-ci.yml`, creating this file if it does not already exist'
+ )
+ end
+ end
+
describe 'create integration for the project' do
subject(:project) { create_project(user, opts) }
@@ -823,25 +839,23 @@ RSpec.describe Projects::CreateService, '#execute' do
let_it_be(:user) { create :user }
context 'when parent group is present' do
- let_it_be(:group) do
+ let_it_be(:group, reload: true) do
create(:group) do |group|
group.add_owner(user)
end
end
before do
- allow_next_found_instance_of(Group) do |group|
- allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
- end
+ group.update_shared_runners_setting!(shared_runners_setting)
user.refresh_authorized_projects # Ensure cache is warm
end
context 'default value based on parent group setting' do
where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
- 'enabled' | nil | true
- 'disabled_with_override' | nil | false
- 'disabled_and_unoverridable' | nil | false
+ Namespace::SR_ENABLED | nil | true
+ Namespace::SR_DISABLED_WITH_OVERRIDE | nil | false
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | nil | false
end
with_them do
@@ -858,11 +872,11 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'parent group is present and allows desired config' do
where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
- 'enabled' | true | true
- 'enabled' | false | false
- 'disabled_with_override' | false | false
- 'disabled_with_override' | true | true
- 'disabled_and_unoverridable' | false | false
+ Namespace::SR_ENABLED | true | true
+ Namespace::SR_ENABLED | false | false
+ Namespace::SR_DISABLED_WITH_OVERRIDE | false | false
+ Namespace::SR_DISABLED_WITH_OVERRIDE | true | true
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | false | false
end
with_them do
@@ -878,7 +892,7 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'parent group is present and disallows desired config' do
where(:shared_runners_setting, :desired_config_for_new_project) do
- 'disabled_and_unoverridable' | true
+ Namespace::SR_DISABLED_AND_UNOVERRIDABLE | true
end
with_them do
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 27688d8c966..9bdd9800fcc 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -39,26 +39,64 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
let!(:job_variables) { create(:ci_job_variable, job: build) }
let!(:report_result) { create(:ci_build_report_result, build: build) }
let!(:pending_state) { create(:ci_build_pending_state, build: build) }
+ let!(:pipeline_artifact) { create(:ci_pipeline_artifact, pipeline: pipeline) }
- it 'deletes build related records' do
- expect { destroy_project(project, user, {}) }.to change { Ci::Build.count }.by(-1)
+ it 'deletes build and pipeline related records' do
+ expect { destroy_project(project, user, {}) }
+ .to change { Ci::Build.count }.by(-1)
.and change { Ci::BuildTraceChunk.count }.by(-1)
.and change { Ci::JobArtifact.count }.by(-2)
+ .and change { Ci::DeletedObject.count }.by(2)
+ .and change { Ci::PipelineArtifact.count }.by(-1)
.and change { Ci::JobVariable.count }.by(-1)
.and change { Ci::BuildPendingState.count }.by(-1)
.and change { Ci::BuildReportResult.count }.by(-1)
.and change { Ci::BuildRunnerSession.count }.by(-1)
+ .and change { Ci::Pipeline.count }.by(-1)
end
- it 'avoids N+1 queries', skip: 'skipped until fixed in https://gitlab.com/gitlab-org/gitlab/-/issues/24644' do
- recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+ context 'with abort_deleted_project_pipelines disabled' do
+ stub_feature_flags(abort_deleted_project_pipelines: false)
- project = create(:project, :repository, namespace: user.namespace)
- pipeline = create(:ci_pipeline, project: project)
- builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
- create_list(:ci_build_trace_chunk, 3, build: builds[0])
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
- expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create(:ci_pipeline_artifact, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction disabled' do
+ stub_feature_flags(ci_optimize_project_records_destruction: false)
+
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction and abort_deleted_project_pipelines enabled' do
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
end
it_behaves_like 'deleting the project'
@@ -95,24 +133,63 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
context 'with abort_deleted_project_pipelines feature disabled' do
- it 'does not cancel project ci pipelines' do
+ before do
stub_feature_flags(abort_deleted_project_pipelines: false)
+ end
+ it 'does not bulk-fail project ci pipelines' do
expect(::Ci::AbortPipelinesService).not_to receive(:new)
destroy_project(project, user, {})
end
+
+ it 'does not destroy CI records via DestroyPipelineService' do
+ expect(::Ci::DestroyPipelineService).not_to receive(:new)
+
+ destroy_project(project, user, {})
+ end
end
context 'with abort_deleted_project_pipelines feature enabled' do
- it 'performs cancel for project ci pipelines' do
- stub_feature_flags(abort_deleted_project_pipelines: true)
- pipelines = build_list(:ci_pipeline, 3, :running)
- allow(project).to receive(:all_pipelines).and_return(pipelines)
+ let!(:pipelines) { create_list(:ci_pipeline, 3, :running, project: project) }
+ let(:destroy_pipeline_service) { double('DestroyPipelineService', execute: nil) }
- expect(::Ci::AbortPipelinesService).to receive_message_chain(:new, :execute).with(pipelines, :project_deleted)
+ context 'with ci_optimize_project_records_destruction disabled' do
+ before do
+ stub_feature_flags(ci_optimize_project_records_destruction: false)
+ end
- destroy_project(project, user, {})
+ it 'bulk-fails project ci pipelines' do
+ expect(::Ci::AbortPipelinesService)
+ .to receive_message_chain(:new, :execute)
+ .with(project.all_pipelines, :project_deleted)
+
+ destroy_project(project, user, {})
+ end
+
+ it 'does not destroy CI records via DestroyPipelineService' do
+ expect(::Ci::DestroyPipelineService).not_to receive(:new)
+
+ destroy_project(project, user, {})
+ end
+ end
+
+ context 'with ci_optimize_project_records_destruction enabled' do
+ it 'executes DestroyPipelineService for project ci pipelines' do
+ allow(::Ci::DestroyPipelineService).to receive(:new).and_return(destroy_pipeline_service)
+
+ expect(::Ci::AbortPipelinesService)
+ .to receive_message_chain(:new, :execute)
+ .with(project.all_pipelines, :project_deleted)
+
+ pipelines.each do |pipeline|
+ expect(destroy_pipeline_service)
+ .to receive(:execute)
+ .with(pipeline)
+ end
+
+ destroy_project(project, user, {})
+ end
end
end
diff --git a/spec/services/projects/group_links/update_service_spec.rb b/spec/services/projects/group_links/update_service_spec.rb
index 4a38fb0c7d9..ff1618c3bbe 100644
--- a/spec/services/projects/group_links/update_service_spec.rb
+++ b/spec/services/projects/group_links/update_service_spec.rb
@@ -34,86 +34,40 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute' do
end
context 'project authorizations update' do
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: true)
- end
-
- it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
- .to receive(:perform_async).with(link.project.id)
-
- subject
- end
-
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- [[user.id]],
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- subject
- end
-
- it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
- group.add_maintainer(user)
-
- expect { subject }.to(
- change { Ability.allowed?(user, :create_release, project) }
- .from(true).to(false))
- end
- end
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(link.project.id)
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: false)
- end
+ subject
+ end
- it 'calls UserProjectAccessChangedService to update project authorizations' do
- expect_next_instance_of(UserProjectAccessChangedService, [user.id]) do |service|
- expect(service).to receive(:execute)
- end
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- subject
- end
+ subject
+ end
- it 'updates project authorizations of users who had access to the project via the group share' do
- group.add_maintainer(user)
+ it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
+ group.add_maintainer(user)
- expect { subject }.to(
- change { Ability.allowed?(user, :create_release, project) }
- .from(true).to(false))
- end
+ expect { subject }.to(
+ change { Ability.allowed?(user, :create_release, project) }
+ .from(true).to(false))
end
end
context 'with only param not requiring authorization refresh' do
let(:group_link_params) { { expires_at: Date.tomorrow } }
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: true)
- end
-
- it 'does not perform any project authorizations update using `AuthorizedProjectUpdate::ProjectRecalculateWorker`' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
- context 'when the feature flag `specialized_worker_for_project_share_update_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_share_update_auth_recalculation: false)
- end
-
- it 'does not perform any project authorizations update using `UserProjectAccessChangedService`' do
- expect(UserProjectAccessChangedService).not_to receive(:new)
+ it 'does not perform any project authorizations update using `AuthorizedProjectUpdate::ProjectRecalculateWorker`' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).not_to receive(:perform_async)
- subject
- end
+ subject
end
end
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index 92e18b6cb46..1d63f72ec38 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -86,6 +86,12 @@ RSpec.describe Projects::ImportService do
end
context 'with a Github repository' do
+ it 'tracks the start of import' do
+ expect(Gitlab::GithubImport::ParallelImporter).to receive(:track_start_import)
+
+ subject.execute
+ end
+
it 'succeeds if repository import was scheduled' do
expect_any_instance_of(Gitlab::GithubImport::ParallelImporter)
.to receive(:execute)
diff --git a/spec/services/projects/move_access_service_spec.rb b/spec/services/projects/move_access_service_spec.rb
index 90167ffebed..45e10c3ca84 100644
--- a/spec/services/projects/move_access_service_spec.rb
+++ b/spec/services/projects/move_access_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Projects::MoveAccessService do
describe '#execute' do
shared_examples 'move the accesses' do
- it do
+ it 'moves the accesses', :sidekiq_inline do
expect(project_with_access.project_members.count).to eq 4
expect(project_with_access.project_group_links.count).to eq 3
expect(project_with_access.authorized_users.count).to eq 4
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index a71fafb2121..b64f2d1e7d6 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -294,10 +294,10 @@ RSpec.describe Projects::Operations::UpdateService do
end
context 'without setting' do
- it 'does not create a setting' do
- expect(result[:status]).to eq(:error)
-
- expect(project.reload.error_tracking_setting).to be_nil
+ it 'creates setting with default values' do
+ expect(result[:status]).to eq(:success)
+ expect(project.error_tracking_setting.enabled).to be_truthy
+ expect(project.error_tracking_setting.integrated).to be_truthy
end
end
end
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index b84e28314f2..eab7228307a 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -104,104 +104,116 @@ RSpec.describe Projects::ParticipantsService do
describe '#project_members' do
subject(:usernames) { service.project_members.map { |member| member[:username] } }
- context 'when there is a project in group namespace' do
- let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
+ shared_examples 'return project members' do
+ context 'when there is a project in group namespace' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
- let(:service) { described_class.new(public_project, create(:user)) }
+ let(:service) { described_class.new(public_project, create(:user)) }
- before do
- public_group.add_owner(public_group_owner)
- end
+ before do
+ public_group.add_owner(public_group_owner)
+ end
- it 'returns members of a group' do
- expect(usernames).to include(public_group_owner.username)
+ it 'returns members of a group' do
+ expect(usernames).to include(public_group_owner.username)
+ end
end
- end
-
- context 'when there is a private group and a public project' do
- let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:private_group) { create(:group, :private, :nested) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:project_issue) { create(:issue, project: public_project)}
+ context 'when there is a private group and a public project' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:private_group) { create(:group, :private, :nested) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- let_it_be(:public_group_owner) { create(:user) }
- let_it_be(:private_group_member) { create(:user) }
- let_it_be(:public_project_maintainer) { create(:user) }
- let_it_be(:private_group_owner) { create(:user) }
+ let_it_be(:project_issue) { create(:issue, project: public_project)}
- let_it_be(:group_ancestor_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:private_group_member) { create(:user) }
+ let_it_be(:public_project_maintainer) { create(:user) }
+ let_it_be(:private_group_owner) { create(:user) }
- before_all do
- public_group.add_owner public_group_owner
- private_group.add_developer private_group_member
- public_project.add_maintainer public_project_maintainer
+ let_it_be(:group_ancestor_owner) { create(:user) }
- private_group.add_owner private_group_owner
- private_group.parent.add_owner group_ancestor_owner
- end
-
- context 'when the private group is invited to the public project' do
before_all do
- create(:project_group_link, group: private_group, project: public_project)
- end
+ public_group.add_owner public_group_owner
+ private_group.add_developer private_group_member
+ public_project.add_maintainer public_project_maintainer
- context 'when a user who is outside the public project and the private group is signed in' do
- let(:service) { described_class.new(public_project, create(:user)) }
+ private_group.add_owner private_group_owner
+ private_group.parent.add_owner group_ancestor_owner
+ end
- it 'does not return the private group' do
- expect(usernames).not_to include(private_group.name)
+ context 'when the private group is invited to the public project' do
+ before_all do
+ create(:project_group_link, group: private_group, project: public_project)
end
- it 'does not return private group members' do
- expect(usernames).not_to include(private_group_member.username)
- end
+ context 'when a user who is outside the public project and the private group is signed in' do
+ let(:service) { described_class.new(public_project, create(:user)) }
- it 'returns the project maintainer' do
- expect(usernames).to include(public_project_maintainer.username)
- end
+ it 'does not return the private group' do
+ expect(usernames).not_to include(private_group.name)
+ end
- it 'returns project members from an invited public group' do
- invited_public_group = create(:group, :public)
- invited_public_group.add_owner create(:user)
+ it 'does not return private group members' do
+ expect(usernames).not_to include(private_group_member.username)
+ end
- create(:project_group_link, group: invited_public_group, project: public_project)
+ it 'returns the project maintainer' do
+ expect(usernames).to include(public_project_maintainer.username)
+ end
- expect(usernames).to include(invited_public_group.users.first.username)
- end
+ it 'returns project members from an invited public group' do
+ invited_public_group = create(:group, :public)
+ invited_public_group.add_owner create(:user)
- it 'does not return ancestors of the private group' do
- expect(usernames).not_to include(group_ancestor_owner.username)
- end
- end
+ create(:project_group_link, group: invited_public_group, project: public_project)
- context 'when private group owner is signed in' do
- let(:service) { described_class.new(public_project, private_group_owner) }
+ expect(usernames).to include(invited_public_group.users.first.username)
+ end
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
+ it 'does not return ancestors of the private group' do
+ expect(usernames).not_to include(group_ancestor_owner.username)
+ end
end
- it 'returns ancestors of the the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
- end
- end
+ context 'when private group owner is signed in' do
+ let(:service) { described_class.new(public_project, private_group_owner) }
- context 'when the namespace owner of the public project is signed in' do
- let(:service) { described_class.new(public_project, public_group_owner) }
+ it 'returns private group members' do
+ expect(usernames).to include(private_group_member.username)
+ end
- it 'returns private group members' do
- expect(usernames).to include(private_group_member.username)
+ it 'returns ancestors of the private group' do
+ expect(usernames).to include(group_ancestor_owner.username)
+ end
end
- it 'does not return members of the ancestral groups of the private group' do
- expect(usernames).to include(group_ancestor_owner.username)
+ context 'when the namespace owner of the public project is signed in' do
+ let(:service) { described_class.new(public_project, public_group_owner) }
+
+ it 'returns private group members' do
+ expect(usernames).to include(private_group_member.username)
+ end
+
+ it 'does not return members of the ancestral groups of the private group' do
+ expect(usernames).to include(group_ancestor_owner.username)
+ end
end
end
end
end
+
+ it_behaves_like 'return project members'
+
+ context 'when feature flag :linear_participants_service_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_participants_service_ancestor_scopes: false)
+ end
+
+ it_behaves_like 'return project members'
+ end
end
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index d96573e26af..b539b01066e 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -64,6 +64,33 @@ RSpec.describe Projects::TransferService do
expect(transfer_result).to be_truthy
expect(project.namespace).to eq(group)
end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be_truthy
+
+ project_namespace_in_sync(group)
+ end
+
+ context 'when project is transferred to a deeper nested group' do
+ let(:parent_group) { create(:group) }
+ let(:sub_group) { create(:group, parent: parent_group) }
+ let(:sub_sub_group) { create(:group, parent: sub_group) }
+ let(:group) { sub_sub_group }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be_truthy
+
+ project_namespace_in_sync(sub_sub_group)
+ end
+ end
+ end
end
context 'when transfer succeeds' do
@@ -143,6 +170,28 @@ RSpec.describe Projects::TransferService do
end
end
end
+
+ context 'when project has pending builds' do
+ let!(:other_project) { create(:project) }
+ let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
+ let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
+
+ before do
+ group.reload
+ end
+
+ it 'updates pending builds for the project', :aggregate_failures do
+ execute_transfer
+
+ pending_build.reload
+ unrelated_pending_build.reload
+
+ expect(pending_build.namespace_id).to eq(group.id)
+ expect(pending_build.namespace_traversal_ids).to eq(group.traversal_ids)
+ expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
+ expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
+ end
+ end
end
context 'when transfer fails' do
@@ -203,6 +252,34 @@ RSpec.describe Projects::TransferService do
shard_name: project.repository_storage
)
end
+
+ context 'when project has pending builds' do
+ let!(:other_project) { create(:project) }
+ let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
+ let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
+
+ it 'does not update pending builds for the project', :aggregate_failures do
+ attempt_project_transfer
+
+ pending_build.reload
+ unrelated_pending_build.reload
+
+ expect(pending_build.namespace_id).to eq(project.namespace_id)
+ expect(pending_build.namespace_traversal_ids).to eq(project.namespace.traversal_ids)
+ expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
+ expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
+ end
+ end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ attempt_project_transfer
+
+ project_namespace_in_sync(user.namespace)
+ end
+ end
end
context 'namespace -> no namespace' do
@@ -215,6 +292,18 @@ RSpec.describe Projects::TransferService do
expect(project.namespace).to eq(user.namespace)
expect(project.errors.messages[:new_namespace].first).to eq 'Please select a new namespace for your project.'
end
+
+ context 'when project has an associated project namespace' do
+ let!(:project_namespace) { create(:project_namespace, project: project) }
+
+ it 'keeps project namespace in sync with project' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to be false
+
+ project_namespace_in_sync(user.namespace)
+ end
+ end
end
context 'disallow transferring of project with tags' do
@@ -369,28 +458,23 @@ RSpec.describe Projects::TransferService do
using RSpec::Parameterized::TableSyntax
where(:project_shared_runners_enabled, :shared_runners_setting, :expected_shared_runners_enabled) do
- true | 'disabled_and_unoverridable' | false
- false | 'disabled_and_unoverridable' | false
- true | 'disabled_with_override' | true
- false | 'disabled_with_override' | false
- true | 'enabled' | true
- false | 'enabled' | false
+ true | :disabled_and_unoverridable | false
+ false | :disabled_and_unoverridable | false
+ true | :disabled_with_override | true
+ false | :disabled_with_override | false
+ true | :shared_runners_enabled | true
+ false | :shared_runners_enabled | false
end
with_them do
let(:project) { create(:project, :public, :repository, namespace: user.namespace, shared_runners_enabled: project_shared_runners_enabled) }
- let(:group) { create(:group) }
+ let(:group) { create(:group, shared_runners_setting) }
- before do
+ it 'updates shared runners based on the parent group' do
group.add_owner(user)
- expect_next_found_instance_of(Group) do |group|
- expect(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
- end
- execute_transfer
- end
+ expect(execute_transfer).to eq(true)
- it 'updates shared runners based on the parent group' do
expect(project.shared_runners_enabled).to eq(expected_shared_runners_enabled)
end
end
@@ -478,58 +562,30 @@ RSpec.describe Projects::TransferService do
group.add_owner(user)
end
- context 'when the feature flag `specialized_worker_for_project_transfer_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_transfer_auth_recalculation: true)
- end
-
- it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
- .to receive(:perform_async).with(project.id)
-
- execute_transfer
- end
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(project.id)
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
- user_ids = [user.id, member_of_old_group.id, member_of_new_group.id].map { |id| [id] }
-
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- user_ids,
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- subject
- end
-
- it 'refreshes the permissions of the members of the old and new namespace', :sidekiq_inline do
- expect { execute_transfer }
- .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
- .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
- end
+ execute_transfer
end
- context 'when the feature flag `specialized_worker_for_project_transfer_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(specialized_worker_for_project_transfer_auth_recalculation: false)
- end
-
- it 'calls UserProjectAccessChangedService to update project authorizations' do
- user_ids = [user.id, member_of_old_group.id, member_of_new_group.id]
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ user_ids = [user.id, member_of_old_group.id, member_of_new_group.id].map { |id| [id] }
- expect_next_instance_of(UserProjectAccessChangedService, user_ids) do |service|
- expect(service).to receive(:execute)
- end
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ user_ids,
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- execute_transfer
- end
+ subject
+ end
- it 'refreshes the permissions of the members of the old and new namespace' do
- expect { execute_transfer }
- .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
- .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
- end
+ it 'refreshes the permissions of the members of the old and new namespace', :sidekiq_inline do
+ expect { execute_transfer }
+ .to change { member_of_old_group.authorized_projects.include?(project) }.from(true).to(false)
+ .and change { member_of_new_group.authorized_projects.include?(project) }.from(false).to(true)
end
end
@@ -643,4 +699,13 @@ RSpec.describe Projects::TransferService do
def rugged_config
rugged_repo(project.repository).config
end
+
+ def project_namespace_in_sync(group)
+ project.reload
+ expect(project.namespace).to eq(group)
+ expect(project.project_namespace.visibility_level).to eq(project.visibility_level)
+ expect(project.project_namespace.path).to eq(project.path)
+ expect(project.project_namespace.parent).to eq(project.namespace)
+ expect(project.project_namespace.traversal_ids).to eq([*project.namespace.traversal_ids, project.project_namespace.id])
+ end
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 6d0b75e0c95..5810024a1ef 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -173,14 +173,6 @@ RSpec.describe Projects::UpdatePagesService do
include_examples 'successfully deploys'
- context 'when pages_smart_check_outdated_sha feature flag is disabled' do
- before do
- stub_feature_flags(pages_smart_check_outdated_sha: false)
- end
-
- include_examples 'fails with outdated reference message'
- end
-
context 'when old deployment present' do
before do
old_build = create(:ci_build, pipeline: old_pipeline, ref: 'HEAD')
@@ -189,14 +181,6 @@ RSpec.describe Projects::UpdatePagesService do
end
include_examples 'successfully deploys'
-
- context 'when pages_smart_check_outdated_sha feature flag is disabled' do
- before do
- stub_feature_flags(pages_smart_check_outdated_sha: false)
- end
-
- include_examples 'fails with outdated reference message'
- end
end
context 'when newer deployment present' do
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 115f3098185..4923ef169e8 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -374,7 +374,7 @@ RSpec.describe Projects::UpdateService do
expect(result).to eq({
status: :error,
- message: "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'."
+ message: "Name can contain only letters, digits, emojis, '_', '.', '+', dashes, or spaces. It must start with a letter, digit, emoji, or '_'."
})
end
end
@@ -441,26 +441,62 @@ RSpec.describe Projects::UpdateService do
end
end
- context 'when updating #shared_runners', :https_pages_enabled do
- let!(:pending_build) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
+ context 'when updating runners settings' do
+ let(:settings) do
+ { instance_runners_enabled: true, namespace_traversal_ids: [123] }
+ end
- subject(:call_service) do
- update_project(project, admin, shared_runners_enabled: shared_runners_enabled)
+ let!(:pending_build) do
+ create(:ci_pending_build, project: project, **settings)
+ end
+
+ context 'when project has shared runners enabled' do
+ let(:project) { create(:project, shared_runners_enabled: true) }
+
+ it 'updates builds queue when shared runners get disabled' do
+ expect { update_project(project, admin, shared_runners_enabled: false) }
+ .to change { pending_build.reload.instance_runners_enabled }.to(false)
+
+ expect(pending_build.reload.instance_runners_enabled).to be false
+ end
+ end
+
+ context 'when project has shared runners disabled' do
+ let(:project) { create(:project, shared_runners_enabled: false) }
+
+ it 'updates builds queue when shared runners get enabled' do
+ expect { update_project(project, admin, shared_runners_enabled: true) }
+ .to not_change { pending_build.reload.instance_runners_enabled }
+
+ expect(pending_build.reload.instance_runners_enabled).to be true
+ end
end
- context 'when shared runners is toggled' do
- let(:shared_runners_enabled) { false }
+ context 'when project has group runners enabled' do
+ let(:project) { create(:project, group_runners_enabled: true) }
+
+ before do
+ project.ci_cd_settings.update!(group_runners_enabled: true)
+ end
+
+ it 'updates builds queue when group runners get disabled' do
+ update_project(project, admin, group_runners_enabled: false)
- it 'updates ci pending builds' do
- expect { call_service }.to change { pending_build.reload.instance_runners_enabled }.to(false)
+ expect(pending_build.reload.namespace_traversal_ids).to be_empty
end
end
- context 'when shared runners is not toggled' do
- let(:shared_runners_enabled) { true }
+ context 'when project has group runners disabled' do
+ let(:project) { create(:project, :in_subgroup, group_runners_enabled: false) }
+
+ before do
+ project.reload.ci_cd_settings.update!(group_runners_enabled: false)
+ end
+
+ it 'updates builds queue when group runners get enabled' do
+ update_project(project, admin, group_runners_enabled: true)
- it 'updates ci pending builds' do
- expect { call_service }.to not_change { pending_build.reload.instance_runners_enabled }
+ expect(pending_build.reload.namespace_traversal_ids).to include(project.namespace.id)
end
end
end