gitlab.com/gitlab-org/gitlab-foss.git

Diffstat (limited to 'spec/services/projects')
-rw-r--r--  spec/services/projects/autocomplete_service_spec.rb | 34
-rw-r--r--  spec/services/projects/container_repository/cleanup_tags_service_spec.rb | 394
-rw-r--r--  spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb | 4
-rw-r--r--  spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb | 370
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 14
-rw-r--r--  spec/services/projects/import_service_spec.rb | 9
-rw-r--r--  spec/services/projects/update_repository_storage_service_spec.rb | 5
-rw-r--r--  spec/services/projects/update_service_spec.rb | 53
8 files changed, 537 insertions(+), 346 deletions(-)
diff --git a/spec/services/projects/autocomplete_service_spec.rb b/spec/services/projects/autocomplete_service_spec.rb
index 54a21d2f22b..bc95a1f3c8b 100644
--- a/spec/services/projects/autocomplete_service_spec.rb
+++ b/spec/services/projects/autocomplete_service_spec.rb
@@ -154,23 +154,49 @@ RSpec.describe Projects::AutocompleteService do
let_it_be(:project) { create(:project, group: group) }
let_it_be(:contact_1) { create(:contact, group: group) }
let_it_be(:contact_2) { create(:contact, group: group) }
+ let_it_be(:contact_3) { create(:contact, :inactive, group: group) }
- subject { described_class.new(project, user).contacts.as_json }
+ let(:issue) { nil }
+
+ subject { described_class.new(project, user).contacts(issue).as_json }
before do
group.add_developer(user)
end
- it 'returns contact data correctly' do
+ it 'returns CRM contacts from group' do
expected_contacts = [
{ 'id' => contact_1.id, 'email' => contact_1.email,
- 'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name },
+ 'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name, 'state' => contact_1.state },
{ 'id' => contact_2.id, 'email' => contact_2.email,
- 'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name }
+ 'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name, 'state' => contact_2.state },
+ { 'id' => contact_3.id, 'email' => contact_3.email,
+ 'first_name' => contact_3.first_name, 'last_name' => contact_3.last_name, 'state' => contact_3.state }
]
expect(subject).to match_array(expected_contacts)
end
+
+ context 'some contacts are already assigned to the issue' do
+ let(:issue) { create(:issue, project: project) }
+
+ before do
+ issue.customer_relations_contacts << [contact_2, contact_3]
+ end
+
+ it 'marks already assigned contacts as set' do
+ expected_contacts = [
+ { 'id' => contact_1.id, 'email' => contact_1.email,
+ 'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name, 'state' => contact_1.state, 'set' => false },
+ { 'id' => contact_2.id, 'email' => contact_2.email,
+ 'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name, 'state' => contact_2.state, 'set' => true },
+ { 'id' => contact_3.id, 'email' => contact_3.email,
+ 'first_name' => contact_3.first_name, 'last_name' => contact_3.last_name, 'state' => contact_3.state, 'set' => true }
+ ]
+
+ expect(subject).to match_array(expected_contacts)
+ end
+ end
end
describe '#labels_as_hash' do
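
Note: the updated examples above expect each contact hash to carry a 'state' field and, when an issue is passed, a 'set' flag marking contacts already assigned to that issue. The following is an illustrative sketch of that payload shape only; the helper name and structure are assumptions, not the service's actual implementation.

# Illustrative sketch -- not taken from Projects::AutocompleteService itself.
def serialize_contacts(contacts, issue: nil)
  assigned_ids = issue ? issue.customer_relations_contacts.map(&:id) : []

  contacts.map do |contact|
    hash = {
      'id' => contact.id,
      'email' => contact.email,
      'first_name' => contact.first_name,
      'last_name' => contact.last_name,
      'state' => contact.state
    }
    # 'set' is only present when autocomplete is scoped to an issue
    hash['set'] = assigned_ids.include?(contact.id) if issue
    hash
  end
end
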
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 2008de195ab..8311c4e4d9b 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -2,372 +2,134 @@
require 'spec_helper'
-RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_redis_cache do
- using RSpec::Parameterized::TableSyntax
+RSpec.describe Projects::ContainerRepository::CleanupTagsService do
+ let_it_be_with_reload(:container_repository) { create(:container_repository) }
+ let_it_be(:user) { container_repository.project.owner }
- include_context 'for a cleanup tags service'
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project, reload: true) { create(:project, :private) }
-
- let(:repository) { create(:container_repository, :root, project: project) }
- let(:service) { described_class.new(container_repository: repository, current_user: user, params: params) }
- let(:tags) { %w[latest A Ba Bb C D E] }
+ let(:params) { {} }
+ let(:extra_params) { {} }
+ let(:service) { described_class.new(container_repository: container_repository, current_user: user, params: params.merge(extra_params)) }
before do
- project.add_maintainer(user) if user
-
stub_container_registry_config(enabled: true)
-
- stub_container_registry_tags(
- repository: repository.path,
- tags: tags
- )
-
- stub_tag_digest('latest', 'sha256:configA')
- stub_tag_digest('A', 'sha256:configA')
- stub_tag_digest('Ba', 'sha256:configB')
- stub_tag_digest('Bb', 'sha256:configB')
- stub_tag_digest('C', 'sha256:configC')
- stub_tag_digest('D', 'sha256:configD')
- stub_tag_digest('E', nil)
-
- stub_digest_config('sha256:configA', 1.hour.ago)
- stub_digest_config('sha256:configB', 5.days.ago)
- stub_digest_config('sha256:configC', 1.month.ago)
- stub_digest_config('sha256:configD', nil)
end
describe '#execute' do
subject { service.execute }
- it_behaves_like 'handling invalid params',
- service_response_extra: {
- before_truncate_size: 0,
- after_truncate_size: 0,
- before_delete_size: 0,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when regex matching everything is specified',
- delete_expectations: [%w(A Ba Bb C D E)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 6,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when delete regex matching specific tags is used',
- service_response_extra: {
- before_truncate_size: 2,
- after_truncate_size: 2,
- before_delete_size: 2,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex',
- service_response_extra: {
- before_truncate_size: 1,
- after_truncate_size: 1,
- before_delete_size: 1,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'with allow regex value',
- delete_expectations: [%w(A C D E)],
- service_response_extra: {
- before_truncate_size: 4,
- after_truncate_size: 4,
- before_delete_size: 4,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when keeping only N tags',
- delete_expectations: [%w(Bb Ba C)],
- service_response_extra: {
- before_truncate_size: 4,
- after_truncate_size: 4,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when not keeping N tags',
- delete_expectations: [%w(A Ba Bb C)],
- service_response_extra: {
- before_truncate_size: 4,
- after_truncate_size: 4,
- before_delete_size: 4,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when removing keeping only 3',
- delete_expectations: [%w(Bb Ba C)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when removing older than 1 day',
- delete_expectations: [%w(Ba Bb C)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when combining all parameters',
- delete_expectations: [%w(Bb Ba C)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when running a container_expiration_policy',
- delete_expectations: [%w(Bb Ba C)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- context 'when running a container_expiration_policy with caching' do
- let(:user) { nil }
- let(:params) do
- {
- 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true
- }
- end
-
- it 'expects caching to be used' do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- expect_caching
-
- subject
- end
-
- context 'when setting set to false' do
- before do
- stub_application_setting(container_registry_expiration_policies_caching: false)
- end
-
- it 'does not use caching' do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- expect_no_caching
+ shared_examples 'returning error message' do |message|
+ it "returns error #{message}" do
+ expect(::Projects::ContainerRepository::Gitlab::CleanupTagsService).not_to receive(:new)
+ expect(::Projects::ContainerRepository::ThirdParty::CleanupTagsService).not_to receive(:new)
+ expect(service).not_to receive(:log_info)
- subject
- end
+ expect(subject).to eq(status: :error, message: message)
end
end
- context 'truncating the tags list' do
- let(:params) do
- {
- 'name_regex_delete' => '.*',
- 'keep_n' => 1
- }
- end
-
- shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
- it 'returns the response' do
- expect_no_caching
+ shared_examples 'handling invalid regular expressions' do
+ shared_examples 'handling invalid regex' do
+ it_behaves_like 'returning error message', 'invalid regex'
- result = subject
+ it 'calls error tracking service' do
+ expect(::Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
- service_response = expected_service_response(
- status: status,
- original_size: original_size,
- deleted: nil
- ).merge(
- before_truncate_size: before_truncate_size,
- after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size,
- cached_tags_count: 0
- )
-
- expect(result).to eq(service_response)
+ subject
end
end
- where(:max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
- 10 | :success | :success | false
- 10 | :error | :error | false
- 3 | :success | :error | true
- 3 | :error | :error | true
- 0 | :success | :success | false
- 0 | :error | :error | false
- end
+ context 'when name_regex_delete is invalid' do
+ let(:extra_params) { { 'name_regex_delete' => '*test*' } }
- with_them do
- before do
- stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
- allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
- expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
- end
- end
-
- original_size = 7
- keep_n = 1
-
- it_behaves_like(
- 'returning the response',
- status: params[:expected_status],
- original_size: original_size,
- before_truncate_size: original_size - keep_n,
- after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
- before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
- )
+ it_behaves_like 'handling invalid regex'
end
- end
- context 'caching', :freeze_time do
- let(:params) do
- {
- 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true
- }
- end
+ context 'when name_regex is invalid' do
+ let(:extra_params) { { 'name_regex' => '*test*' } }
- let(:tags_and_created_ats) do
- {
- 'A' => 1.hour.ago,
- 'Ba' => 5.days.ago,
- 'Bb' => 5.days.ago,
- 'C' => 1.month.ago,
- 'D' => nil,
- 'E' => nil
- }
+ it_behaves_like 'handling invalid regex'
end
- let(:cacheable_tags) { tags_and_created_ats.reject { |_, value| value.nil? } }
+ context 'when name_regex_keep is invalid' do
+ let(:extra_params) { { 'name_regex_keep' => '*test*' } }
- before do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- # We froze time so we need to set the created_at stubs again
- stub_digest_config('sha256:configA', 1.hour.ago)
- stub_digest_config('sha256:configB', 5.days.ago)
- stub_digest_config('sha256:configC', 1.month.ago)
+ it_behaves_like 'handling invalid regex'
end
+ end
- it 'caches the created_at values' do
- expect_mget(tags_and_created_ats.keys)
- expect_set(cacheable_tags)
-
- expect(subject).to include(cached_tags_count: 0)
+ shared_examples 'handling all types of container repositories' do
+ shared_examples 'calling service' do |service_class, extra_log_data: {}|
+ let(:service_double) { instance_double(service_class.to_s) }
+
+ it "uses cleanup tags service #{service_class}" do
+ expect(service_class).to receive(:new).with(container_repository: container_repository, current_user: user, params: params).and_return(service_double)
+ expect(service_double).to receive(:execute).and_return('return value')
+ expect(service).to receive(:log_info)
+ .with(
+ {
+ container_repository_id: container_repository.id,
+ container_repository_path: container_repository.path,
+ project_id: container_repository.project.id
+ }.merge(extra_log_data))
+ expect(subject).to eq('return value')
+ end
end
- context 'with cached values' do
+ context 'with a migrated repository' do
before do
- ::Gitlab::Redis::Cache.with do |redis|
- redis.set(cache_key('C'), rfc3339(1.month.ago))
- end
+ container_repository.update_column(:migration_state, :import_done)
end
- it 'uses them' do
- expect_mget(tags_and_created_ats.keys)
-
- # because C is already in cache, it should not be cached again
- expect_set(cacheable_tags.except('C'))
-
- # We will ping the container registry for all tags *except* for C because it's cached
- expect(ContainerRegistry::Blob).to receive(:new).with(repository, { "digest" => "sha256:configA" }).and_call_original
- expect(ContainerRegistry::Blob).to receive(:new).with(repository, { "digest" => "sha256:configB" }).twice.and_call_original
- expect(ContainerRegistry::Blob).not_to receive(:new).with(repository, { "digest" => "sha256:configC" })
- expect(ContainerRegistry::Blob).to receive(:new).with(repository, { "digest" => "sha256:configD" }).and_call_original
-
- expect(subject).to include(cached_tags_count: 1)
- end
- end
+ context 'supporting the gitlab api' do
+ before do
+ allow(container_repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(true)
+ end
- def expect_mget(keys)
- Gitlab::Redis::Cache.with do |redis|
- expect(redis).to receive(:mget).with(keys.map(&method(:cache_key))).and_call_original
+ it_behaves_like 'calling service', ::Projects::ContainerRepository::Gitlab::CleanupTagsService, extra_log_data: { gitlab_cleanup_tags_service: true }
end
- end
-
- def expect_set(tags)
- selected_tags = tags.map do |tag_name, created_at|
- ex = 1.day.seconds - (Time.zone.now - created_at).seconds
- [tag_name, created_at, ex.to_i] if ex.positive?
- end.compact
-
- return if selected_tags.count.zero?
- Gitlab::Redis::Cache.with do |redis|
- expect(redis).to receive(:pipelined).and_call_original
-
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- selected_tags.each do |tag_name, created_at, ex|
- expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex)
- end
+ context 'not supporting the gitlab api' do
+ before do
+ allow(container_repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(false)
end
+
+ it_behaves_like 'calling service', ::Projects::ContainerRepository::ThirdParty::CleanupTagsService, extra_log_data: { third_party_cleanup_tags_service: true }
end
end
- def cache_key(tag_name)
- "container_repository:{#{repository.id}}:tag:#{tag_name}:created_at"
- end
+ context 'with a non migrated repository' do
+ before do
+ container_repository.update_column(:migration_state, :default)
+ container_repository.update!(created_at: ContainerRepository::MIGRATION_PHASE_1_ENDED_AT - 1.week)
+ end
- def rfc3339(date_time)
- # DateTime rfc3339 is different ActiveSupport::TimeWithZone rfc3339
- # The caching will use DateTime rfc3339
- DateTime.rfc3339(date_time.rfc3339).rfc3339
+ it_behaves_like 'calling service', ::Projects::ContainerRepository::ThirdParty::CleanupTagsService, extra_log_data: { third_party_cleanup_tags_service: true }
end
end
- end
- private
+ context 'with valid user' do
+ it_behaves_like 'handling invalid regular expressions'
+ it_behaves_like 'handling all types of container repositories'
+ end
- def stub_tag_digest(tag, digest)
- allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:repository_tag_digest)
- .with(repository.path, tag) { digest }
+ context 'for container expiration policy' do
+ let(:user) { nil }
+ let(:params) { { 'container_expiration_policy' => true } }
- allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:repository_manifest)
- .with(repository.path, tag) do
- { 'config' => { 'digest' => digest } } if digest
+ it_behaves_like 'handling invalid regular expressions'
+ it_behaves_like 'handling all types of container repositories'
end
- end
- def stub_digest_config(digest, created_at)
- allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:blob)
- .with(repository.path, digest, nil) do
- { 'created' => created_at.to_datetime.rfc3339 }.to_json if created_at
+ context 'with not allowed user' do
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'returning error message', 'access denied'
end
- end
- def expect_caching
- ::Gitlab::Redis::Cache.with do |redis|
- expect(redis).to receive(:mget).and_call_original
- expect(redis).to receive(:pipelined).and_call_original
+ context 'with no user' do
+ let(:user) { nil }
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- expect(pipeline).to receive(:set).and_call_original
- end
+ it_behaves_like 'returning error message', 'access denied'
end
end
end
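
Note: taken together, the rewritten examples above exercise a thin dispatcher: depending on the repository's migration state and whether the registry client supports the GitLab API, the service delegates to either the Gitlab or the ThirdParty cleanup implementation. A minimal sketch of that decision follows; the predicate is an assumed simplification driven by the spec contexts, not the service's actual code.

# Assumed simplification of the delegation the specs above drive.
def cleanup_tags_service_class(container_repository)
  migrated = container_repository.migration_state == 'import_done'
  gitlab_api = container_repository.gitlab_api_client.supports_gitlab_api?

  if migrated && gitlab_api
    ::Projects::ContainerRepository::Gitlab::CleanupTagsService
  else
    ::Projects::ContainerRepository::ThirdParty::CleanupTagsService
  end
end
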
diff --git a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
index d2cdb667659..59827ea035e 100644
--- a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
@@ -46,8 +46,6 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
context 'with several tags pages' do
let(:tags_page_size) { 2 }
- it_behaves_like 'handling invalid params'
-
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]]
@@ -105,8 +103,6 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
context 'with a single tags page' do
let(:tags_page_size) { 1000 }
- it_behaves_like 'handling invalid params'
-
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A Ba Bb C D E]]
diff --git a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
new file mode 100644
index 00000000000..2d034d577ac
--- /dev/null
+++ b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
@@ -0,0 +1,370 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :clean_gitlab_redis_cache do
+ using RSpec::Parameterized::TableSyntax
+
+ include_context 'for a cleanup tags service'
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :private) }
+
+ let(:repository) { create(:container_repository, :root, project: project) }
+ let(:service) { described_class.new(container_repository: repository, current_user: user, params: params) }
+ let(:tags) { %w[latest A Ba Bb C D E] }
+
+ before do
+ project.add_maintainer(user) if user
+
+ stub_container_registry_config(enabled: true)
+
+ stub_container_registry_tags(
+ repository: repository.path,
+ tags: tags
+ )
+
+ stub_tag_digest('latest', 'sha256:configA')
+ stub_tag_digest('A', 'sha256:configA')
+ stub_tag_digest('Ba', 'sha256:configB')
+ stub_tag_digest('Bb', 'sha256:configB')
+ stub_tag_digest('C', 'sha256:configC')
+ stub_tag_digest('D', 'sha256:configD')
+ stub_tag_digest('E', nil)
+
+ stub_digest_config('sha256:configA', 1.hour.ago)
+ stub_digest_config('sha256:configB', 5.days.ago)
+ stub_digest_config('sha256:configC', 1.month.ago)
+ stub_digest_config('sha256:configD', nil)
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ it_behaves_like 'when regex matching everything is specified',
+ delete_expectations: [%w[A Ba Bb C D E]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 6,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when delete regex matching specific tags is used',
+ service_response_extra: {
+ before_truncate_size: 2,
+ after_truncate_size: 2,
+ before_delete_size: 2,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex',
+ service_response_extra: {
+ before_truncate_size: 1,
+ after_truncate_size: 1,
+ before_delete_size: 1,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'with allow regex value',
+ delete_expectations: [%w[A C D E]],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 4,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when keeping only N tags',
+ delete_expectations: [%w[Bb Ba C]],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when not keeping N tags',
+ delete_expectations: [%w[A Ba Bb C]],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 4,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when removing keeping only 3',
+ delete_expectations: [%w[Bb Ba C]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when removing older than 1 day',
+ delete_expectations: [%w[Ba Bb C]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when combining all parameters',
+ delete_expectations: [%w[Bb Ba C]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when running a container_expiration_policy',
+ delete_expectations: [%w[Bb Ba C]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ context 'when running a container_expiration_policy with caching' do
+ let(:user) { nil }
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true
+ }
+ end
+
+ it 'expects caching to be used' do
+ expect_delete(%w[Bb Ba C], container_expiration_policy: true)
+ expect_caching
+
+ subject
+ end
+
+ context 'when setting set to false' do
+ before do
+ stub_application_setting(container_registry_expiration_policies_caching: false)
+ end
+
+ it 'does not use caching' do
+ expect_delete(%w[Bb Ba C], container_expiration_policy: true)
+ expect_no_caching
+
+ subject
+ end
+ end
+ end
+
+ context 'when truncating the tags list' do
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1
+ }
+ end
+
+ shared_examples 'returning the response' do
+ |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
+ it 'returns the response' do
+ expect_no_caching
+
+ result = subject
+
+ service_response = expected_service_response(
+ status: status,
+ original_size: original_size,
+ deleted: nil
+ ).merge(
+ before_truncate_size: before_truncate_size,
+ after_truncate_size: after_truncate_size,
+ before_delete_size: before_delete_size,
+ cached_tags_count: 0
+ )
+
+ expect(result).to eq(service_response)
+ end
+ end
+
+ where(:max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
+ 10 | :success | :success | false
+ 10 | :error | :error | false
+ 3 | :success | :error | true
+ 3 | :error | :error | true
+ 0 | :success | :success | false
+ 0 | :error | :error | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
+ allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
+ allow(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ end
+ end
+
+ original_size = 7
+ keep_n = 1
+
+ it_behaves_like(
+ 'returning the response',
+ status: params[:expected_status],
+ original_size: original_size,
+ before_truncate_size: original_size - keep_n,
+ after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
+ # one tag is filtered out with older_than filter
+ before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1
+ )
+ end
+ end
+
+ context 'with caching', :freeze_time do
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true
+ }
+ end
+
+ let(:tags_and_created_ats) do
+ {
+ 'A' => 1.hour.ago,
+ 'Ba' => 5.days.ago,
+ 'Bb' => 5.days.ago,
+ 'C' => 1.month.ago,
+ 'D' => nil,
+ 'E' => nil
+ }
+ end
+
+ let(:cacheable_tags) { tags_and_created_ats.reject { |_, value| value.nil? } }
+
+ before do
+ expect_delete(%w[Bb Ba C], container_expiration_policy: true)
+ # We froze time so we need to set the created_at stubs again
+ stub_digest_config('sha256:configA', 1.hour.ago)
+ stub_digest_config('sha256:configB', 5.days.ago)
+ stub_digest_config('sha256:configC', 1.month.ago)
+ end
+
+ it 'caches the created_at values' do
+ expect_mget(tags_and_created_ats.keys)
+ expect_set(cacheable_tags)
+
+ expect(subject).to include(cached_tags_count: 0)
+ end
+
+ context 'with cached values' do
+ before do
+ ::Gitlab::Redis::Cache.with do |redis|
+ redis.set(cache_key('C'), rfc3339(1.month.ago))
+ end
+ end
+
+ it 'uses them' do
+ expect_mget(tags_and_created_ats.keys)
+
+ # because C is already in cache, it should not be cached again
+ expect_set(cacheable_tags.except('C'))
+
+ # We will ping the container registry for all tags *except* for C because it's cached
+ expect(ContainerRegistry::Blob)
+ .to receive(:new).with(repository, { "digest" => "sha256:configA" }).and_call_original
+ expect(ContainerRegistry::Blob)
+ .to receive(:new).with(repository, { "digest" => "sha256:configB" }).twice.and_call_original
+ expect(ContainerRegistry::Blob).not_to receive(:new).with(repository, { "digest" => "sha256:configC" })
+ expect(ContainerRegistry::Blob)
+ .to receive(:new).with(repository, { "digest" => "sha256:configD" }).and_call_original
+
+ expect(subject).to include(cached_tags_count: 1)
+ end
+ end
+
+ def expect_mget(keys)
+ Gitlab::Redis::Cache.with do |redis|
+ parameters = keys.map { |k| cache_key(k) }
+ expect(redis).to receive(:mget).with(parameters).and_call_original
+ end
+ end
+
+ def expect_set(tags)
+ selected_tags = tags.map do |tag_name, created_at|
+ ex = 1.day.seconds - (Time.zone.now - created_at).seconds
+ [tag_name, created_at, ex.to_i] if ex.positive?
+ end.compact
+
+ return if selected_tags.count.zero?
+
+ Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:pipelined).and_call_original
+
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ selected_tags.each do |tag_name, created_at, ex|
+ expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex)
+ end
+ end
+ end
+ end
+
+ def cache_key(tag_name)
+ "container_repository:{#{repository.id}}:tag:#{tag_name}:created_at"
+ end
+
+ def rfc3339(date_time)
+ # DateTime#rfc3339 is different from ActiveSupport::TimeWithZone#rfc3339.
+ # The caching will use DateTime#rfc3339.
+ DateTime.rfc3339(date_time.rfc3339).rfc3339
+ end
+ end
+ end
+
+ private
+
+ def stub_tag_digest(tag, digest)
+ allow(repository.client)
+ .to receive(:repository_tag_digest)
+ .with(repository.path, tag) { digest }
+
+ allow(repository.client)
+ .to receive(:repository_manifest)
+ .with(repository.path, tag) do
+ { 'config' => { 'digest' => digest } } if digest
+ end
+ end
+
+ def stub_digest_config(digest, created_at)
+ allow(repository.client)
+ .to receive(:blob)
+ .with(repository.path, digest, nil) do
+ { 'created' => created_at.to_datetime.rfc3339 }.to_json if created_at
+ end
+ end
+
+ def expect_caching
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:mget).and_call_original
+ expect(redis).to receive(:pipelined).and_call_original
+
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ expect(pipeline).to receive(:set).and_call_original
+ end
+ end
+ end
+end
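
Note: the caching examples in this new spec pin down a Redis layout for tag creation timestamps: values are read in bulk with MGET, and only tags still inside the older_than window are written back with a matching TTL. Below is a rough sketch of that write path, assuming the Rails/ActiveSupport environment the spec runs in; the method name is hypothetical, while the key format and TTL arithmetic come from the assertions above.

# Hypothetical helper mirroring the key format and TTL arithmetic asserted above.
def cache_tag_created_at(redis, repository_id, tag_name, created_at, older_than: 1.day)
  key = "container_repository:{#{repository_id}}:tag:#{tag_name}:created_at"
  ttl = older_than.seconds - (Time.zone.now - created_at).seconds

  redis.set(key, created_at.to_datetime.rfc3339, ex: ttl.to_i) if ttl.positive?
end
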
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 8269dbebccb..f7f02769f6a 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -146,20 +146,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect { destroy_project(project, user, {}) }.to change(MergeRequestDiff, :count).by(-1)
expect { another_project_mr.reload }.not_to raise_error
end
-
- context 'when extract_mr_diff_deletions feature flag is disabled' do
- before do
- stub_feature_flags(extract_mr_diff_deletions: false)
- end
-
- it 'also deletes merge request diffs' do
- merge_request_diffs = merge_request.merge_request_diffs
- expect(merge_request_diffs.size).to eq(1)
-
- expect { destroy_project(project, user, {}) }.to change(MergeRequestDiff, :count).by(-1)
- expect { another_project_mr.reload }.not_to raise_error
- end
- end
end
it_behaves_like 'deleting the project'
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index ab9f99f893d..6dc72948541 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -276,6 +276,15 @@ RSpec.describe Projects::ImportService do
expect(result[:status]).to eq :error
expect(result[:message]).to include('Only allowed ports are 80, 443')
end
+
+ it 'fails with file scheme' do
+ project.import_url = "file:///tmp/dir.git"
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to include('Only allowed schemes are http, https')
+ end
end
it_behaves_like 'measurable service' do
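
Note: the added example complements the existing port check: import URLs must use http or https, and only ports 80 and 443 are accepted. A generic illustration of that kind of validation follows; this is not GitLab's actual URL-blocking API, and the constant names and error wording are assumptions.

require 'uri'

ALLOWED_IMPORT_SCHEMES = %w[http https].freeze
ALLOWED_IMPORT_PORTS = [80, 443].freeze

# Hypothetical stand-in for the scheme/port validation the spec exercises.
def validate_import_url!(url)
  uri = URI.parse(url)

  unless ALLOWED_IMPORT_SCHEMES.include?(uri.scheme)
    raise ArgumentError, "Only allowed schemes are #{ALLOWED_IMPORT_SCHEMES.join(', ')}"
  end

  unless uri.port.nil? || ALLOWED_IMPORT_PORTS.include?(uri.port)
    raise ArgumentError, "Only allowed ports are #{ALLOWED_IMPORT_PORTS.join(', ')}"
  end
end
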
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index 17d01a57221..ee8f7fb2ef2 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -37,10 +37,6 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
context 'when the move succeeds' do
it 'moves the repository to the new storage and unmarks the repository as read-only' do
- old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- project.repository.path_to_repo
- end
-
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
@@ -53,7 +49,6 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
expect(result).to be_success
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('test_second_storage')
- expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
expect(project.project_repository.shard_name).to eq('test_second_storage')
end
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 85d3e99109d..7d8951bf111 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -11,10 +11,27 @@ RSpec.describe Projects::UpdateService do
create(:project, creator: user, namespace: user.namespace)
end
+ shared_examples 'publishing Projects::ProjectAttributesChangedEvent' do |params:, attributes:|
+ it "publishes Projects::ProjectAttributesChangedEvent" do
+ expect { update_project(project, user, params) }
+ .to publish_event(Projects::ProjectAttributesChangedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id,
+ attributes: attributes
+ )
+ end
+ end
+
describe '#execute' do
let(:admin) { create(:admin) }
context 'when changing visibility level' do
+ it_behaves_like 'publishing Projects::ProjectAttributesChangedEvent',
+ params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL },
+ attributes: %w[updated_at visibility_level]
+
context 'when visibility_level changes to INTERNAL' do
it 'updates the project to internal' do
expect(TodosDestroyer::ProjectPrivateWorker).not_to receive(:perform_in)
@@ -290,7 +307,7 @@ RSpec.describe Projects::UpdateService do
context 'when we update project but not enabling a wiki' do
it 'does not try to create an empty wiki' do
- TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
+ project.wiki.repository.raw.remove
result = update_project(project, user, { name: 'test1' })
@@ -311,7 +328,7 @@ RSpec.describe Projects::UpdateService do
context 'when enabling a wiki' do
it 'creates a wiki' do
project.project_feature.update!(wiki_access_level: ProjectFeature::DISABLED)
- TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
+ project.wiki.repository.raw.remove
result = update_project(project, user, project_feature_attributes: { wiki_access_level: ProjectFeature::ENABLED })
@@ -323,7 +340,7 @@ RSpec.describe Projects::UpdateService do
it 'logs an error and creates a metric when wiki can not be created' do
project.project_feature.update!(wiki_access_level: ProjectFeature::DISABLED)
- expect_any_instance_of(ProjectWiki).to receive(:wiki).and_raise(Wiki::CouldNotCreateWikiError)
+ expect_any_instance_of(ProjectWiki).to receive(:create_wiki_repository).and_raise(Wiki::CouldNotCreateWikiError)
expect_any_instance_of(described_class).to receive(:log_error).with("Could not create wiki for #{project.full_name}")
counter = double(:counter)
@@ -348,7 +365,37 @@ RSpec.describe Projects::UpdateService do
end
end
+ context 'when changes project features' do
+ # Using some sample features for testing.
+ # Not using all the features because some of them must be enabled/disabled together
+ %w[issues wiki forking].each do |feature_name|
+ let(:feature) { "#{feature_name}_access_level" }
+ let(:params) do
+ { project_feature_attributes: { feature => ProjectFeature::ENABLED } }
+ end
+
+ before do
+ project.project_feature.update!(feature => ProjectFeature::DISABLED)
+ end
+
+ it 'publishes Projects::ProjectFeaturesChangedEvent' do
+ expect { update_project(project, user, params) }
+ .to publish_event(Projects::ProjectFeaturesChangedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id,
+ features: ["updated_at", feature]
+ )
+ end
+ end
+ end
+
context 'when archiving a project' do
+ it_behaves_like 'publishing Projects::ProjectAttributesChangedEvent',
+ params: { archived: true },
+ attributes: %w[updated_at archived]
+
it 'publishes a ProjectArchivedEvent' do
expect { update_project(project, user, archived: true) }
.to publish_event(Projects::ProjectArchivedEvent)