| author | GitLab Bot <gitlab-bot@gitlab.com> | 2022-06-07 21:09:27 +0300 |
|---|---|---|
| committer | GitLab Bot <gitlab-bot@gitlab.com> | 2022-06-07 21:09:27 +0300 |
| commit | 5cda8c8a420399ca9687c4a981fefd50ce5a1fdd (patch) | |
| tree | 6050d7517a36798c9586e153df20a0696c5fcd4f /spec | |
| parent | 7bbc731c75d0b8bf7c74ba77d521266d2ed0a1fc (diff) | |
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
10 files changed, 394 insertions, 66 deletions
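A common thread in the specs below is how webhook execution logs cap oversize payloads: response bodies and header values are stripped to a byte limit with a trailing ellipsis, and request data too large to enqueue is replaced by `WebHookLog::OVERSIZE_REQUEST_DATA`. As a minimal, self-contained sketch of the truncation contract the specs assert (the `strip_oversize` helper and the limit values here are illustrative assumptions, not GitLab's implementation; only the ellipsis arithmetic mirrors the expectations in `web_hook_service_spec.rb`):

```ruby
# Illustrative only: cap a string at a byte limit, ending it with an ellipsis,
# matching the shape the specs assert ('a' * limit becomes
# 'a' * (limit - ellipsis.bytesize) + ellipsis).
ELLIPSIS = '…' # 3 bytes in UTF-8

def strip_oversize(value, byte_limit)
  return value if value.bytesize <= byte_limit

  # Reserve room for the ellipsis, then append it.
  value.byteslice(0, byte_limit - ELLIPSIS.bytesize) + ELLIPSIS
end

strip_oversize('a' * 8, 16)  # => "aaaaaaaa" (under the limit, unchanged)
strip_oversize('a' * 20, 16) # => "aaaaaaaaaaaaa…" (13 'a's + 3-byte ellipsis = 16 bytes)
```

The service under test also limits response headers by count and by value size; see the `RESPONSE_HEADERS_COUNT_LIMIT` and `RESPONSE_HEADERS_SIZE_LIMIT` contexts in the `web_hook_service_spec.rb` hunk further down.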
diff --git a/spec/features/admin/admin_hook_logs_spec.rb b/spec/features/admin/admin_hook_logs_spec.rb index fd51fd71fea..6caf2b24555 100644 --- a/spec/features/admin/admin_hook_logs_spec.rb +++ b/spec/features/admin/admin_hook_logs_spec.rb @@ -41,4 +41,18 @@ RSpec.describe 'Admin::HookLogs' do expect(page).to have_current_path(edit_admin_hook_path(system_hook), ignore_query: true) end + + context 'response data is too large' do + let(:hook_log) { create(:web_hook_log, web_hook: system_hook, request_data: WebHookLog::OVERSIZE_REQUEST_DATA) } + + it 'shows request data as too large and disables retry function' do + visit(admin_hook_hook_log_path(system_hook, hook_log)) + + expect(page).to have_content('Request data is too large') + expect(page).not_to have_button( + _('Resent request'), + disabled: true, class: 'has-tooltip', title: _("Request data is too large") + ) + end + end end diff --git a/spec/features/projects/hook_logs/user_reads_log_spec.rb b/spec/features/projects/hook_logs/user_reads_log_spec.rb index 8513a9374d1..9b7ec14c36f 100644 --- a/spec/features/projects/hook_logs/user_reads_log_spec.rb +++ b/spec/features/projects/hook_logs/user_reads_log_spec.rb @@ -3,21 +3,80 @@ require 'spec_helper' RSpec.describe 'Hook logs' do - let(:web_hook_log) { create(:web_hook_log, response_body: '<script>') } - let(:project) { web_hook_log.web_hook.project } + let(:project) { create(:project) } + let(:project_hook) { create(:project_hook, project: project) } + let(:web_hook_log) { create(:web_hook_log, web_hook: project_hook, response_body: 'Hello World') } let(:user) { create(:user) } before do + web_hook_log project.add_maintainer(user) sign_in(user) end - it 'user reads log without getting XSS' do - visit( - project_hook_hook_log_path( - project, web_hook_log.web_hook, web_hook_log)) + it 'shows list of hook logs' do + visit edit_project_hook_path(project, project_hook) - expect(page).to have_content('<script>') + expect(page).to have_content('Recent events') + expect(page).to have_link('View details', href: project_hook_hook_log_path(project, project_hook, web_hook_log)) + end + + it 'shows hook log details' do + visit edit_project_hook_path(project, project_hook) + click_link 'View details' + + expect(page).to have_content("POST #{web_hook_log.url}") + expect(page).to have_content(web_hook_log.response_body) + expect(page).to have_content('Resend Request') + end + + it 'retries hook log' do + WebMock.stub_request(:post, project_hook.url) + + visit edit_project_hook_path(project, project_hook) + click_link 'View details' + click_link 'Resend Request' + + expect(page).to have_current_path(edit_project_hook_path(project, project_hook), ignore_query: true) + end + + context 'request gets internal error' do + let(:web_hook_log) { create(:web_hook_log, web_hook: project_hook, internal_error_message: 'Some error') } + + it 'shows hook log details with internal error message' do + visit edit_project_hook_path(project, project_hook) + click_link 'View details' + + expect(page).to have_content("POST #{web_hook_log.url}") + expect(page).to have_content(web_hook_log.internal_error_message) + expect(page).to have_content('Resend Request') + end + end + + context 'response body contains XSS string' do + let(:web_hook_log) { create(:web_hook_log, web_hook: project_hook, response_body: '<script>') } + + it 'displays log without getting XSS' do + visit(project_hook_hook_log_path(project, project_hook, web_hook_log)) + + expect(page).to have_content('<script>') + end + end + + context 'response data is 
too large' do + let(:web_hook_log) do + create(:web_hook_log, web_hook: project_hook, request_data: WebHookLog::OVERSIZE_REQUEST_DATA) + end + + it 'shows request data as too large and disables retry function' do + visit(project_hook_hook_log_path(project, project_hook, web_hook_log)) + + expect(page).to have_content('Request data is too large') + expect(page).not_to have_button( + _('Resent request'), + disabled: true, class: 'has-tooltip', title: _("Request data is too large") + ) + end end end diff --git a/spec/frontend/access_tokens/components/access_token_table_app_spec.js b/spec/frontend/access_tokens/components/access_token_table_app_spec.js index 827bc1a6a4d..b45abe418e4 100644 --- a/spec/frontend/access_tokens/components/access_token_table_app_spec.js +++ b/spec/frontend/access_tokens/components/access_token_table_app_spec.js @@ -1,7 +1,8 @@ -import { GlTable } from '@gitlab/ui'; +import { GlPagination, GlTable } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; import { nextTick } from 'vue'; import AccessTokenTableApp from '~/access_tokens/components/access_token_table_app.vue'; +import { EVENT_SUCCESS, PAGE_SIZE } from '~/access_tokens/components/constants'; import { __, s__, sprintf } from '~/locale'; import DomElementListener from '~/vue_shared/components/dom_element_listener.vue'; @@ -57,13 +58,14 @@ describe('~/access_tokens/components/access_token_table_app', () => { const triggerSuccess = async (activeAccessTokens = defaultActiveAccessTokens) => { wrapper .findComponent(DomElementListener) - .vm.$emit('ajax:success', { detail: [{ active_access_tokens: activeAccessTokens }] }); + .vm.$emit(EVENT_SUCCESS, { detail: [{ active_access_tokens: activeAccessTokens }] }); await nextTick(); }; const findTable = () => wrapper.findComponent(GlTable); const findHeaders = () => findTable().findAll('th > :first-child'); const findCells = () => findTable().findAll('td'); + const findPagination = () => wrapper.findComponent(GlPagination); afterEach(() => { wrapper?.destroy(); @@ -225,4 +227,15 @@ describe('~/access_tokens/components/access_token_table_app', () => { expect(cells.at(3).text()).not.toBe('Never'); expect(cells.at(10).text()).toBe('Never'); }); + + it('should show the pagination component when needed', async () => { + createComponent(); + expect(findPagination().exists()).toBe(false); + + await triggerSuccess(Array(PAGE_SIZE).fill(defaultActiveAccessTokens[0])); + expect(findPagination().exists()).toBe(false); + + await triggerSuccess(Array(PAGE_SIZE + 1).fill(defaultActiveAccessTokens[0])); + expect(findPagination().exists()).toBe(true); + }); }); diff --git a/spec/frontend/access_tokens/components/new_access_token_app_spec.js b/spec/frontend/access_tokens/components/new_access_token_app_spec.js index 25b3eba6587..0fdd77ef6f2 100644 --- a/spec/frontend/access_tokens/components/new_access_token_app_spec.js +++ b/spec/frontend/access_tokens/components/new_access_token_app_spec.js @@ -3,6 +3,7 @@ import { nextTick } from 'vue'; import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import { mountExtended } from 'helpers/vue_test_utils_helper'; import NewAccessTokenApp from '~/access_tokens/components/new_access_token_app.vue'; +import { EVENT_ERROR, EVENT_SUCCESS, FORM_SELECTOR } from '~/access_tokens/components/constants'; import { createAlert, VARIANT_INFO } from '~/flash'; import { __, sprintf } from '~/locale'; import DomElementListener from '~/vue_shared/components/dom_element_listener.vue'; @@ -22,20 +23,18 @@ 
describe('~/access_tokens/components/new_access_token_app', () => { }; const triggerSuccess = async (newToken = 'new token') => { - wrapper - .find(DomElementListener) - .vm.$emit('ajax:success', { detail: [{ new_token: newToken }] }); + wrapper.find(DomElementListener).vm.$emit(EVENT_SUCCESS, { detail: [{ new_token: newToken }] }); await nextTick(); }; const triggerError = async (errors = ['1', '2']) => { - wrapper.find(DomElementListener).vm.$emit('ajax:error', { detail: [{ errors }] }); + wrapper.find(DomElementListener).vm.$emit(EVENT_ERROR, { detail: [{ errors }] }); await nextTick(); }; beforeEach(() => { // NewAccessTokenApp observes a form element - setHTMLFixture('<form id="js-new-access-token-form"><input type="submit"/></form>'); + setHTMLFixture(`<form id="${FORM_SELECTOR}"><input type="submit"/></form>`); createComponent(); }); diff --git a/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb b/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb index 6dbfd5804d7..ce05d5b11c7 100644 --- a/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb +++ b/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb @@ -44,4 +44,24 @@ RSpec.describe Gitlab::ProjectStatsRefreshConflictsLogger do described_class.warn_request_rejected_during_stats_refresh(project_id) end end + + describe '.warn_skipped_artifact_deletion_during_stats_refresh' do + it 'logs a warning about artifacts being excluded from deletion while the project is undergoing stats refresh' do + project_ids = [12, 34] + method = 'Foo#action' + + expect(Gitlab::AppLogger).to receive(:warn).with( + hash_including( + message: 'Skipped deleting artifacts undergoing refresh', + method: method, + project_ids: match_array(project_ids), + 'correlation_id' => an_instance_of(String), + 'meta.feature_category' => 'test', + 'meta.caller_id' => 'caller' + ) + ) + + described_class.warn_skipped_artifact_deletion_during_stats_refresh(project_ids: project_ids, method: method) + end + end end diff --git a/spec/models/users/callout_spec.rb b/spec/models/users/callout_spec.rb index 293f0279e79..14f555863ec 100644 --- a/spec/models/users/callout_spec.rb +++ b/spec/models/users/callout_spec.rb @@ -11,4 +11,16 @@ RSpec.describe Users::Callout do it { is_expected.to validate_presence_of(:feature_name) } it { is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id).ignoring_case_sensitivity } end + + describe 'scopes' do + describe '.with_feature_name' do + let_it_be(:feature_name) { described_class.feature_names.keys.last } + let_it_be(:user_callouts_for_feature_name) { create_list(:callout, 2, feature_name: feature_name) } + let_it_be(:another_user_callout) { create(:callout, feature_name: described_class.feature_names.each_key.first) } + + it 'returns user callouts for the given feature name only' do + expect(described_class.with_feature_name(feature_name)).to eq(user_callouts_for_feature_name) + end + end + end end diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb index 1c6963e4a31..4f7663d7996 100644 --- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb +++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb @@ -99,6 +99,16 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s expect { subject }.not_to change { artifact.file.exists? 
} end end + + context 'when the project in which the arfifact belongs to is undergoing stats refresh' do + before do + create(:project_build_artifacts_size_refresh, :pending, project: artifact.project) + end + + it 'does not destroy job artifact' do + expect { subject }.not_to change { Ci::JobArtifact.count } + end + end end context 'when artifact is locked' do diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb index 0bb062e6994..3a04a3af03e 100644 --- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb +++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb @@ -4,7 +4,14 @@ require 'spec_helper' RSpec.describe Ci::JobArtifacts::DestroyBatchService do let(:artifacts) { Ci::JobArtifact.where(id: [artifact_with_file.id, artifact_without_file.id, trace_artifact.id]) } - let(:service) { described_class.new(artifacts, pick_up_at: Time.current) } + let(:skip_projects_on_refresh) { false } + let(:service) do + described_class.new( + artifacts, + pick_up_at: Time.current, + skip_projects_on_refresh: skip_projects_on_refresh + ) + end let_it_be(:artifact_with_file, refind: true) do create(:ci_job_artifact, :zip) @@ -76,18 +83,101 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do create(:project_build_artifacts_size_refresh, :running, project: artifact_under_refresh_2.project) end - it 'logs the artifacts undergoing refresh and continues with the delete', :aggregate_failures do - expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with( - method: 'Ci::JobArtifacts::DestroyBatchService#execute', - project_id: artifact_under_refresh_1.project.id - ).once + shared_examples 'avoiding N+1 queries' do + let!(:control_artifact_on_refresh) do + create(:ci_job_artifact, :zip) + end + + let!(:control_artifact_non_refresh) do + create(:ci_job_artifact, :zip) + end + + let!(:other_artifact_on_refresh) do + create(:ci_job_artifact, :zip) + end + + let!(:other_artifact_on_refresh_2) do + create(:ci_job_artifact, :zip) + end + + let!(:other_artifact_non_refresh) do + create(:ci_job_artifact, :zip) + end + + let!(:control_artifacts) do + Ci::JobArtifact.where( + id: [ + control_artifact_on_refresh.id, + control_artifact_non_refresh.id + ] + ) + end + + let!(:artifacts) do + Ci::JobArtifact.where( + id: [ + other_artifact_on_refresh.id, + other_artifact_on_refresh_2.id, + other_artifact_non_refresh.id + ] + ) + end + + let(:control_service) do + described_class.new( + control_artifacts, + pick_up_at: Time.current, + skip_projects_on_refresh: skip_projects_on_refresh + ) + end + + before do + create(:project_build_artifacts_size_refresh, :pending, project: control_artifact_on_refresh.project) + create(:project_build_artifacts_size_refresh, :pending, project: other_artifact_on_refresh.project) + create(:project_build_artifacts_size_refresh, :pending, project: other_artifact_on_refresh_2.project) + end + + it 'does not make multiple queries when fetching multiple project refresh records' do + control = ActiveRecord::QueryRecorder.new { control_service.execute } + + expect { subject }.not_to exceed_query_limit(control) + end + end + + context 'and skip_projects_on_refresh is set to false (default)' do + it 'logs the projects undergoing refresh and continues with the delete', :aggregate_failures do + expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with( + method: 
'Ci::JobArtifacts::DestroyBatchService#execute', + project_id: artifact_under_refresh_1.project.id + ).once - expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with( - method: 'Ci::JobArtifacts::DestroyBatchService#execute', - project_id: artifact_under_refresh_2.project.id - ).once + expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with( + method: 'Ci::JobArtifacts::DestroyBatchService#execute', + project_id: artifact_under_refresh_2.project.id + ).once + + expect { subject }.to change { Ci::JobArtifact.count }.by(-4) + end + + it_behaves_like 'avoiding N+1 queries' + end + + context 'and skip_projects_on_refresh is set to true' do + let(:skip_projects_on_refresh) { true } + + it 'logs the projects undergoing refresh and excludes the artifacts from deletion', :aggregate_failures do + expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_skipped_artifact_deletion_during_stats_refresh).with( + method: 'Ci::JobArtifacts::DestroyBatchService#execute', + project_ids: match_array([artifact_under_refresh_1.project.id, artifact_under_refresh_2.project.id]) + ) + + expect { subject }.to change { Ci::JobArtifact.count }.by(-1) + expect(Ci::JobArtifact.where(id: artifact_under_refresh_1.id)).to exist + expect(Ci::JobArtifact.where(id: artifact_under_refresh_2.id)).to exist + expect(Ci::JobArtifact.where(id: artifact_under_refresh_3.id)).to exist + end - expect { subject }.to change { Ci::JobArtifact.count }.by(-4) + it_behaves_like 'avoiding N+1 queries' end end diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb index b99bc860523..9f3093d64f3 100644 --- a/spec/services/web_hook_service_spec.rb +++ b/spec/services/web_hook_service_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state do include StubRequests + let(:ellipsis) { '…' } let_it_be(:project) { create(:project) } let_it_be_with_reload(:project_hook) { create(:project_hook, project: project) } @@ -268,6 +269,20 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state end context 'execution logging' do + let(:default_log_data) do + { + trigger: 'push_hooks', + url: project_hook.url, + request_headers: headers, + request_data: data, + response_body: 'Success', + response_headers: {}, + response_status: 200, + execution_duration: be > 0, + internal_error_message: nil + } + end + context 'with success' do before do stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success') @@ -280,7 +295,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state expect(::WebHooks::LogExecutionWorker).not_to receive(:perform_async) expect(::WebHooks::LogExecutionService) .to receive(:new) - .with(hook: project_hook, log_data: Hash, response_category: :ok) + .with(hook: project_hook, log_data: default_log_data, response_category: :ok) .and_return(double(execute: nil)) service_instance.execute @@ -291,17 +306,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state expect(WebHooks::LogExecutionWorker).to receive(:perform_async) .with( project_hook.id, - hash_including( - trigger: 'push_hooks', - url: project_hook.url, - request_headers: headers, - request_data: data, - response_body: 'Success', - response_headers: {}, - response_status: 200, - execution_duration: be > 0, - internal_error_message: nil - ), + 
hash_including(default_log_data), :ok, nil ) @@ -328,15 +333,10 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state .with( project_hook.id, hash_including( - trigger: 'push_hooks', - url: project_hook.url, - request_headers: headers, - request_data: data, - response_body: 'Bad request', - response_headers: {}, - response_status: 400, - execution_duration: be > 0, - internal_error_message: nil + default_log_data.merge( + response_body: 'Bad request', + response_status: 400 + ) ), :failed, nil @@ -356,15 +356,11 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state .with( project_hook.id, hash_including( - trigger: 'push_hooks', - url: project_hook.url, - request_headers: headers, - request_data: data, - response_body: '', - response_headers: {}, - response_status: 'internal error', - execution_duration: be > 0, - internal_error_message: 'Some HTTP Post error' + default_log_data.merge( + response_body: '', + response_status: 'internal error', + internal_error_message: 'Some HTTP Post error' + ) ), :error, nil @@ -383,17 +379,86 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state expect(WebHooks::LogExecutionWorker).to receive(:perform_async) .with( project_hook.id, - hash_including( - trigger: 'push_hooks', - url: project_hook.url, - request_headers: headers, - request_data: data, - response_body: '', - response_headers: {}, - response_status: 200, - execution_duration: be > 0, - internal_error_message: nil - ), + hash_including(default_log_data.merge(response_body: '')), + :ok, + nil + ) + + service_instance.execute + end + end + + context 'with oversize response body' do + let(:oversize_body) { 'a' * (described_class::RESPONSE_BODY_SIZE_LIMIT + 1) } + let(:stripped_body) { 'a' * (described_class::RESPONSE_BODY_SIZE_LIMIT - ellipsis.bytesize) + ellipsis } + + before do + stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: oversize_body) + end + + it 'queues LogExecutionWorker with stripped response_body' do + expect(WebHooks::LogExecutionWorker).to receive(:perform_async) + .with( + project_hook.id, + hash_including(default_log_data.merge(response_body: stripped_body)), + :ok, + nil + ) + + service_instance.execute + end + end + + context 'with massive amount of headers' do + let(:response_headers) do + (1..described_class::RESPONSE_HEADERS_COUNT_LIMIT + 1).to_a.to_h do |num| + ["header-#{num}", SecureRandom.hex(num)] + end + end + + let(:expected_response_headers) do + (1..described_class::RESPONSE_HEADERS_COUNT_LIMIT).to_a.to_h do |num| + # Capitalized + ["Header-#{num}", response_headers["header-#{num}"]] + end + end + + before do + stub_full_request(project_hook.url, method: :post).to_return( + status: 200, body: 'Success', headers: response_headers + ) + end + + it 'queues LogExecutionWorker with limited amount of headers' do + expect(WebHooks::LogExecutionWorker).to receive(:perform_async) + .with( + project_hook.id, + hash_including(default_log_data.merge(response_headers: expected_response_headers)), + :ok, + nil + ) + + service_instance.execute + end + end + + context 'with oversize header' do + let(:oversize_header) { 'a' * (described_class::RESPONSE_HEADERS_SIZE_LIMIT + 1) } + let(:stripped_header) { 'a' * (described_class::RESPONSE_HEADERS_SIZE_LIMIT - ellipsis.bytesize) + ellipsis } + let(:response_headers) { { 'oversized-header' => oversize_header } } + let(:expected_response_headers) { { 'Oversized-Header' => stripped_header } } + + before do + 
stub_full_request(project_hook.url, method: :post).to_return( + status: 200, body: 'Success', headers: response_headers + ) + end + + it 'queues LogExecutionWorker with stripped header value' do + expect(WebHooks::LogExecutionWorker).to receive(:perform_async) + .with( + project_hook.id, + hash_including(default_log_data.merge(response_headers: expected_response_headers)), :ok, nil ) @@ -401,6 +466,51 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state service_instance.execute end end + + context 'with log data exceeding Sidekiq limit' do + before do + stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success') + end + + it 'queues LogExecutionWorker with request_data overrided in the second attempt' do + expect(WebHooks::LogExecutionWorker).to receive(:perform_async) + .with( + project_hook.id, + hash_including(default_log_data), + :ok, + nil + ) + .and_raise( + Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(WebHooks::LogExecutionWorker, 100, 50) + ) + .ordered + expect(WebHooks::LogExecutionWorker).to receive(:perform_async) + .with( + project_hook.id, + hash_including(default_log_data.merge(request_data: WebHookLog::OVERSIZE_REQUEST_DATA)), + :ok, + nil + ) + .and_call_original + .ordered + + service_instance.execute + end + + context 'new log data still exceeds limit' do + before do + allow(WebHooks::LogExecutionWorker).to receive(:perform_async).and_raise( + Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(WebHooks::LogExecutionWorker, 100, 50) + ) + end + + it 'raises an exception' do + expect do + service_instance.execute + end.to raise_error(Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError) + end + end + end end end diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb index 330afa6fbd4..fb8ff23f8d8 100644 --- a/spec/workers/every_sidekiq_worker_spec.rb +++ b/spec/workers/every_sidekiq_worker_spec.rb @@ -319,6 +319,7 @@ RSpec.describe 'Every Sidekiq worker' do 'JiraConnect::SyncMergeRequestWorker' => 3, 'JiraConnect::SyncProjectWorker' => 3, 'LdapGroupSyncWorker' => 3, + 'Licenses::ResetSubmitLicenseUsageDataBannerWorker' => 13, 'MailScheduler::IssueDueWorker' => 3, 'MailScheduler::NotificationServiceWorker' => 3, 'MembersDestroyer::UnassignIssuablesWorker' => 3, |