gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com> 2022-06-07 21:09:27 +0300
committer GitLab Bot <gitlab-bot@gitlab.com> 2022-06-07 21:09:27 +0300
commit    5cda8c8a420399ca9687c4a981fefd50ce5a1fdd (patch)
tree      6050d7517a36798c9586e153df20a0696c5fcd4f /spec/services
parent    7bbc731c75d0b8bf7c74ba77d521266d2ed0a1fc (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec/services')
 spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb |  10
 spec/services/ci/job_artifacts/destroy_batch_service_spec.rb       | 112
 spec/services/web_hook_service_spec.rb                             | 192
 3 files changed, 262 insertions(+), 52 deletions(-)
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index 1c6963e4a31..4f7663d7996 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -99,6 +99,16 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
expect { subject }.not_to change { artifact.file.exists? }
end
end
+
+ context 'when the project that the artifact belongs to is undergoing stats refresh' do
+ before do
+ create(:project_build_artifacts_size_refresh, :pending, project: artifact.project)
+ end
+
+ it 'does not destroy job artifact' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ end
+ end
end
context 'when artifact is locked' do
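The new context above pairs a :pending project_build_artifacts_size_refresh record with a negative change matcher to assert that expired artifacts survive while their project's build-artifacts size is being recalculated. The service-side guard is not part of this diff; what follows is a minimal sketch of the pattern under test, where refresh_in_progress? is a hypothetical predicate standing in for whatever check the real service performs:

    # Hypothetical sketch only, not the GitLab implementation.
    # `refresh_in_progress?` stands in for the real check against
    # pending/running project_build_artifacts_size_refresh records.
    class DestroyAllExpiredService
      def initialize(expired_artifacts)
        @expired_artifacts = expired_artifacts
      end

      def execute
        @expired_artifacts.each do |artifact|
          # Deleting mid-refresh would leave the recalculated
          # build-artifacts size stale, so keep the artifact.
          next if artifact.project.refresh_in_progress?

          artifact.destroy!
        end
      end
    end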
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 0bb062e6994..3a04a3af03e 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -4,7 +4,14 @@ require 'spec_helper'
RSpec.describe Ci::JobArtifacts::DestroyBatchService do
let(:artifacts) { Ci::JobArtifact.where(id: [artifact_with_file.id, artifact_without_file.id, trace_artifact.id]) }
- let(:service) { described_class.new(artifacts, pick_up_at: Time.current) }
+ let(:skip_projects_on_refresh) { false }
+ let(:service) do
+ described_class.new(
+ artifacts,
+ pick_up_at: Time.current,
+ skip_projects_on_refresh: skip_projects_on_refresh
+ )
+ end
let_it_be(:artifact_with_file, refind: true) do
create(:ci_job_artifact, :zip)
@@ -76,18 +83,101 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
create(:project_build_artifacts_size_refresh, :running, project: artifact_under_refresh_2.project)
end
- it 'logs the artifacts undergoing refresh and continues with the delete', :aggregate_failures do
- expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
- method: 'Ci::JobArtifacts::DestroyBatchService#execute',
- project_id: artifact_under_refresh_1.project.id
- ).once
+ shared_examples 'avoiding N+1 queries' do
+ let!(:control_artifact_on_refresh) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:control_artifact_non_refresh) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:other_artifact_on_refresh) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:other_artifact_on_refresh_2) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:other_artifact_non_refresh) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:control_artifacts) do
+ Ci::JobArtifact.where(
+ id: [
+ control_artifact_on_refresh.id,
+ control_artifact_non_refresh.id
+ ]
+ )
+ end
+
+ let!(:artifacts) do
+ Ci::JobArtifact.where(
+ id: [
+ other_artifact_on_refresh.id,
+ other_artifact_on_refresh_2.id,
+ other_artifact_non_refresh.id
+ ]
+ )
+ end
+
+ let(:control_service) do
+ described_class.new(
+ control_artifacts,
+ pick_up_at: Time.current,
+ skip_projects_on_refresh: skip_projects_on_refresh
+ )
+ end
+
+ before do
+ create(:project_build_artifacts_size_refresh, :pending, project: control_artifact_on_refresh.project)
+ create(:project_build_artifacts_size_refresh, :pending, project: other_artifact_on_refresh.project)
+ create(:project_build_artifacts_size_refresh, :pending, project: other_artifact_on_refresh_2.project)
+ end
+
+ it 'does not make multiple queries when fetching multiple project refresh records' do
+ control = ActiveRecord::QueryRecorder.new { control_service.execute }
+
+ expect { subject }.not_to exceed_query_limit(control)
+ end
+ end
+
+ context 'and skip_projects_on_refresh is set to false (default)' do
+ it 'logs the projects undergoing refresh and continues with the delete', :aggregate_failures do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_id: artifact_under_refresh_1.project.id
+ ).once
- expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
- method: 'Ci::JobArtifacts::DestroyBatchService#execute',
- project_id: artifact_under_refresh_2.project.id
- ).once
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_id: artifact_under_refresh_2.project.id
+ ).once
+
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-4)
+ end
+
+ it_behaves_like 'avoiding N+1 queries'
+ end
+
+ context 'and skip_projects_on_refresh is set to true' do
+ let(:skip_projects_on_refresh) { true }
+
+ it 'logs the projects undergoing refresh and excludes the artifacts from deletion', :aggregate_failures do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_skipped_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_ids: match_array([artifact_under_refresh_1.project.id, artifact_under_refresh_2.project.id])
+ )
+
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ expect(Ci::JobArtifact.where(id: artifact_under_refresh_1.id)).to exist
+ expect(Ci::JobArtifact.where(id: artifact_under_refresh_2.id)).to exist
+ expect(Ci::JobArtifact.where(id: artifact_under_refresh_3.id)).to exist
+ end
- expect { subject }.to change { Ci::JobArtifact.count }.by(-4)
+ it_behaves_like 'avoiding N+1 queries'
end
end
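The 'avoiding N+1 queries' shared example above uses GitLab's ActiveRecord::QueryRecorder spec helper together with the exceed_query_limit matcher: it records the query count for a two-artifact control run, then asserts that a three-artifact run issues no additional queries. Below is a standalone sketch of the same control-count technique using plain ActiveSupport::Notifications; service_for, control_artifacts, and larger_artifact_set are hypothetical names for this sketch:

    require 'active_support'
    require 'active_support/notifications'

    # Count SQL statements issued inside the block, skipping schema
    # lookups; roughly what the QueryRecorder helper does here.
    def count_queries
      count = 0
      counter = ->(*_args, payload) { count += 1 unless payload[:name] == 'SCHEMA' }
      ActiveSupport::Notifications.subscribed(counter, 'sql.active_record') { yield }
      count
    end

    control = count_queries { service_for(control_artifacts).execute }
    actual  = count_queries { service_for(larger_artifact_set).execute }

    # With batched lookups the count stays flat as the input grows;
    # an N+1 would make `actual` scale with the number of artifacts.
    raise "N+1 regression: #{actual} > #{control}" if actual > control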
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index b99bc860523..9f3093d64f3 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state do
include StubRequests
+ let(:ellipsis) { '…' }
let_it_be(:project) { create(:project) }
let_it_be_with_reload(:project_hook) { create(:project_hook, project: project) }
@@ -268,6 +269,20 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
end
context 'execution logging' do
+ let(:default_log_data) do
+ {
+ trigger: 'push_hooks',
+ url: project_hook.url,
+ request_headers: headers,
+ request_data: data,
+ response_body: 'Success',
+ response_headers: {},
+ response_status: 200,
+ execution_duration: be > 0,
+ internal_error_message: nil
+ }
+ end
+
context 'with success' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
@@ -280,7 +295,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
expect(::WebHooks::LogExecutionWorker).not_to receive(:perform_async)
expect(::WebHooks::LogExecutionService)
.to receive(:new)
- .with(hook: project_hook, log_data: Hash, response_category: :ok)
+ .with(hook: project_hook, log_data: default_log_data, response_category: :ok)
.and_return(double(execute: nil))
service_instance.execute
@@ -291,17 +306,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
- hash_including(
- trigger: 'push_hooks',
- url: project_hook.url,
- request_headers: headers,
- request_data: data,
- response_body: 'Success',
- response_headers: {},
- response_status: 200,
- execution_duration: be > 0,
- internal_error_message: nil
- ),
+ hash_including(default_log_data),
:ok,
nil
)
@@ -328,15 +333,10 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
.with(
project_hook.id,
hash_including(
- trigger: 'push_hooks',
- url: project_hook.url,
- request_headers: headers,
- request_data: data,
- response_body: 'Bad request',
- response_headers: {},
- response_status: 400,
- execution_duration: be > 0,
- internal_error_message: nil
+ default_log_data.merge(
+ response_body: 'Bad request',
+ response_status: 400
+ )
),
:failed,
nil
@@ -356,15 +356,11 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
.with(
project_hook.id,
hash_including(
- trigger: 'push_hooks',
- url: project_hook.url,
- request_headers: headers,
- request_data: data,
- response_body: '',
- response_headers: {},
- response_status: 'internal error',
- execution_duration: be > 0,
- internal_error_message: 'Some HTTP Post error'
+ default_log_data.merge(
+ response_body: '',
+ response_status: 'internal error',
+ internal_error_message: 'Some HTTP Post error'
+ )
),
:error,
nil
@@ -383,17 +379,86 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
- hash_including(
- trigger: 'push_hooks',
- url: project_hook.url,
- request_headers: headers,
- request_data: data,
- response_body: '',
- response_headers: {},
- response_status: 200,
- execution_duration: be > 0,
- internal_error_message: nil
- ),
+ hash_including(default_log_data.merge(response_body: '')),
+ :ok,
+ nil
+ )
+
+ service_instance.execute
+ end
+ end
+
+ context 'with oversize response body' do
+ let(:oversize_body) { 'a' * (described_class::RESPONSE_BODY_SIZE_LIMIT + 1) }
+ let(:stripped_body) { 'a' * (described_class::RESPONSE_BODY_SIZE_LIMIT - ellipsis.bytesize) + ellipsis }
+
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: oversize_body)
+ end
+
+ it 'queues LogExecutionWorker with stripped response_body' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data.merge(response_body: stripped_body)),
+ :ok,
+ nil
+ )
+
+ service_instance.execute
+ end
+ end
+
+ context 'with a massive number of headers' do
+ let(:response_headers) do
+ (1..described_class::RESPONSE_HEADERS_COUNT_LIMIT + 1).to_a.to_h do |num|
+ ["header-#{num}", SecureRandom.hex(num)]
+ end
+ end
+
+ let(:expected_response_headers) do
+ (1..described_class::RESPONSE_HEADERS_COUNT_LIMIT).to_a.to_h do |num|
+ # Capitalized
+ ["Header-#{num}", response_headers["header-#{num}"]]
+ end
+ end
+
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(
+ status: 200, body: 'Success', headers: response_headers
+ )
+ end
+
+ it 'queues LogExecutionWorker with a limited number of headers' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data.merge(response_headers: expected_response_headers)),
+ :ok,
+ nil
+ )
+
+ service_instance.execute
+ end
+ end
+
+ context 'with oversize header' do
+ let(:oversize_header) { 'a' * (described_class::RESPONSE_HEADERS_SIZE_LIMIT + 1) }
+ let(:stripped_header) { 'a' * (described_class::RESPONSE_HEADERS_SIZE_LIMIT - ellipsis.bytesize) + ellipsis }
+ let(:response_headers) { { 'oversized-header' => oversize_header } }
+ let(:expected_response_headers) { { 'Oversized-Header' => stripped_header } }
+
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(
+ status: 200, body: 'Success', headers: response_headers
+ )
+ end
+
+ it 'queues LogExecutionWorker with stripped header value' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data.merge(response_headers: expected_response_headers)),
:ok,
nil
)
@@ -401,6 +466,51 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
service_instance.execute
end
end
+
+ context 'with log data exceeding Sidekiq limit' do
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
+ end
+
+ it 'queues LogExecutionWorker with request_data overridden in the second attempt' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data),
+ :ok,
+ nil
+ )
+ .and_raise(
+ Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(WebHooks::LogExecutionWorker, 100, 50)
+ )
+ .ordered
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data.merge(request_data: WebHookLog::OVERSIZE_REQUEST_DATA)),
+ :ok,
+ nil
+ )
+ .and_call_original
+ .ordered
+
+ service_instance.execute
+ end
+
+ context 'when the new log data still exceeds the limit' do
+ before do
+ allow(WebHooks::LogExecutionWorker).to receive(:perform_async).and_raise(
+ Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(WebHooks::LogExecutionWorker, 100, 50)
+ )
+ end
+
+ it 'raises an exception' do
+ expect do
+ service_instance.execute
+ end.to raise_error(Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError)
+ end
+ end
+ end
end
end
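Two behaviours in the web_hook_service_spec.rb changes are worth calling out. The oversize body and header contexts expect values cut to a byte budget with a trailing ellipsis; the arithmetic in the specs ('a' * (LIMIT - ellipsis.bytesize) + ellipsis) implies truncation along these lines, sketched here under the assumption of ASCII input, which is all the specs generate:

    ELLIPSIS = '…'

    # Trim `value` so the result, ellipsis included, fits within
    # `limit` bytes. byteslice is only safe for ASCII input, which
    # matches the 'a' * n strings these specs use.
    def strip_to_limit(value, limit)
      return value if value.bytesize <= limit

      value.byteslice(0, limit - ELLIPSIS.bytesize) + ELLIPSIS
    end

The final context pins down the Sidekiq size-limit fallback: the first perform_async raises ExceedLimitError, the service retries once with request_data replaced by WebHookLog::OVERSIZE_REQUEST_DATA, and a second failure propagates to the caller. A minimal sketch of that retry-with-placeholder flow; the real WebHookService source is not part of this diff:

    # Sketch of the behaviour the spec asserts, not the actual
    # WebHookService implementation.
    def queue_log_execution(hook_id, log_data, category)
      WebHooks::LogExecutionWorker.perform_async(hook_id, log_data, category, nil)
    rescue Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError
      # Swap the oversized request payload for a fixed marker and
      # retry once; if the slimmed job still exceeds the limit, the
      # second ExceedLimitError propagates to the caller.
      WebHooks::LogExecutionWorker.perform_async(
        hook_id,
        log_data.merge(request_data: WebHookLog::OVERSIZE_REQUEST_DATA),
        category,
        nil
      )
    end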