gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2023-08-03 12:09:20 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2023-08-03 12:09:20 +0300
commit     3d8459c18b7a20d9142359bb9334b467e774eb36
tree       109d881c19a27cdae131bf5e82f1216401d9df2b /spec
parent     e0a415ccb7a7e59c7a6c16841bdd1668d2ef0be5
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--  spec/frontend/projects/settings_service_desk/components/custom_email_spec.js                                        |  94
-rw-r--r--  spec/frontend/projects/settings_service_desk/components/mock_data.js                                                |   9
-rw-r--r--  spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js                                   |  47
-rw-r--r--  spec/graphql/types/commit_type_spec.rb                                                                               |   2
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_without_pipeline_artifacts_size_job_spec.rb | 113
-rw-r--r--  spec/migrations/20230719083202_backfill_project_statistics_storage_size_without_pipeline_artifacts_size_spec.rb     |  44
-rw-r--r--  spec/support/helpers/migrations_helpers/project_statistics_helper.rb                                                |  37
-rw-r--r--  spec/support/shared_contexts/lib/gitlab/background_migration/backfill_project_statistics.rb                         | 106
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb                                                                           |   1
9 files changed, 451 insertions(+), 2 deletions(-)
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js
new file mode 100644
index 00000000000..f167d2e9d6e
--- /dev/null
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_spec.js
@@ -0,0 +1,94 @@
+import { nextTick } from 'vue';
+import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { HTTP_STATUS_OK, HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
+import CustomEmail from '~/projects/settings_service_desk/components/custom_email.vue';
+import {
+ FEEDBACK_ISSUE_URL,
+ I18N_GENERIC_ERROR,
+} from '~/projects/settings_service_desk/custom_email_constants';
+import { MOCK_CUSTOM_EMAIL_EMPTY } from './mock_data';
+
+describe('CustomEmail', () => {
+ let axiosMock;
+ let wrapper;
+
+ const defaultProps = {
+ incomingEmail: 'incoming@example.com',
+ customEmailEndpoint: '/flightjs/Flight/-/service_desk/custom_email',
+ };
+
+ const createWrapper = (props = {}) => {
+ wrapper = extendedWrapper(
+ mount(CustomEmail, {
+ propsData: { ...defaultProps, ...props },
+ }),
+ );
+ };
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findFeedbackLink = () => wrapper.findByTestId('feedback-link');
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ it('displays link to feedback issue', () => {
+ createWrapper();
+
+ expect(findFeedbackLink().attributes('href')).toEqual(FEEDBACK_ISSUE_URL);
+ });
+
+ describe('when initial resource loading returns no configured custom email', () => {
+ beforeEach(() => {
+ axiosMock
+ .onGet(defaultProps.customEmailEndpoint)
+ .reply(HTTP_STATUS_OK, MOCK_CUSTOM_EMAIL_EMPTY);
+
+ createWrapper();
+ });
+
+ it('displays loading icon while fetching data', async () => {
+ // while loading
+ expect(findLoadingIcon().exists()).toBe(true);
+ await waitForPromises();
+ // loading completed
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('when initial resource loading returns 404', () => {
+ beforeEach(async () => {
+ axiosMock.onGet(defaultProps.customEmailEndpoint).reply(HTTP_STATUS_NOT_FOUND);
+
+ createWrapper();
+ await waitForPromises();
+ });
+
+ it('displays error alert with correct text', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(I18N_GENERIC_ERROR);
+ });
+
+ it('dismissing the alert removes it', async () => {
+ expect(findAlert().exists()).toBe(true);
+
+ findAlert().vm.$emit('dismiss');
+
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings_service_desk/components/mock_data.js b/spec/frontend/projects/settings_service_desk/components/mock_data.js
index 934778ff601..ea88a6cfccd 100644
--- a/spec/frontend/projects/settings_service_desk/components/mock_data.js
+++ b/spec/frontend/projects/settings_service_desk/components/mock_data.js
@@ -6,3 +6,12 @@ export const TEMPLATES = [
{ name: 'Security release', project_id: 1 },
],
];
+
+export const MOCK_CUSTOM_EMAIL_EMPTY = {
+ custom_email: null,
+ custom_email_enabled: false,
+ custom_email_verification_state: null,
+ custom_email_verification_error: null,
+ custom_email_smtp_address: null,
+ error_message: null,
+};
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index b84d1c9c0aa..60f0efd9195 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -6,6 +6,7 @@ import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ServiceDeskRoot from '~/projects/settings_service_desk/components/service_desk_root.vue';
import ServiceDeskSetting from '~/projects/settings_service_desk/components/service_desk_setting.vue';
+import CustomEmail from '~/projects/settings_service_desk/components/custom_email.vue';
describe('ServiceDeskRoot', () => {
let axiosMock;
@@ -25,6 +26,10 @@ describe('ServiceDeskRoot', () => {
selectedFileTemplateProjectId: 42,
templates: ['Bug', 'Documentation'],
publicProject: false,
+ customEmailEndpoint: '/gitlab-org/gitlab-test/-/service_desk/custom_email',
+ glFeatures: {
+ serviceDeskCustomEmail: true,
+ },
};
const getAlertText = () => wrapper.findComponent(GlAlert).text();
@@ -186,4 +191,46 @@ describe('ServiceDeskRoot', () => {
});
});
});
+
+ describe('CustomEmail component', () => {
+ it('is rendered', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.findComponent(CustomEmail).exists()).toBe(true);
+ expect(wrapper.findComponent(CustomEmail).props()).toEqual({
+ incomingEmail: provideData.initialIncomingEmail,
+ customEmailEndpoint: provideData.customEmailEndpoint,
+ });
+ });
+
+ describe('when Service Desk is disabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ initialIsEnabled: false });
+ });
+
+ it('is not rendered', () => {
+ expect(wrapper.findComponent(CustomEmail).exists()).toBe(false);
+ });
+ });
+
+ describe('when issue tracker is disabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ isIssueTrackerEnabled: false });
+ });
+
+ it('is not rendered', () => {
+ expect(wrapper.findComponent(CustomEmail).exists()).toBe(false);
+ });
+ });
+
+ describe('when feature flag service_desk_custom_email is disabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ glFeatures: { serviceDeskCustomEmail: false } });
+ });
+
+ it('is not rendered', () => {
+ expect(wrapper.findComponent(CustomEmail).exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/graphql/types/commit_type_spec.rb b/spec/graphql/types/commit_type_spec.rb
index 3912b0905e3..6af5ea04dd2 100644
--- a/spec/graphql/types/commit_type_spec.rb
+++ b/spec/graphql/types/commit_type_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe GitlabSchema.types['Commit'] do
expect(described_class).to have_graphql_fields(
:id, :sha, :short_id, :title, :full_title, :full_title_html, :description, :description_html, :message, :title_html, :authored_date,
:author_name, :author_email, :author_gravatar, :author, :diffs, :web_url, :web_path,
- :pipelines, :signature_html, :signature
+ :pipelines, :signature_html, :signature, :committer_name, :committer_email, :committed_date
)
end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_without_pipeline_artifacts_size_job_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_without_pipeline_artifacts_size_job_spec.rb
new file mode 100644
index 00000000000..c85636f4998
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_without_pipeline_artifacts_size_job_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectStatisticsStorageSizeWithoutPipelineArtifactsSizeJob,
+ schema: 20230719083202,
+ feature_category: :consumables_cost_management do
+ include MigrationHelpers::ProjectStatisticsHelper
+
+ include_context 'when backfilling project statistics'
+
+ let(:default_pipeline_artifacts_size) { 5 }
+ let(:default_stats) do
+ {
+ repository_size: 1,
+ wiki_size: 1,
+ lfs_objects_size: 1,
+ build_artifacts_size: 1,
+ packages_size: 1,
+ snippets_size: 1,
+ uploads_size: 1,
+ pipeline_artifacts_size: default_pipeline_artifacts_size,
+ storage_size: default_storage_size
+ }
+ end
+
+ describe '#filter_batch' do
+ it 'filters out project_statistics with no artifacts size' do
+ project_statistics = generate_records(default_projects, project_statistics_table, default_stats)
+ project_statistics_table.create!(
+ project_id: proj5.id,
+ namespace_id: proj5.namespace_id,
+ repository_size: 1,
+ wiki_size: 1,
+ lfs_objects_size: 1,
+ build_artifacts_size: 1,
+ packages_size: 1,
+ snippets_size: 1,
+ pipeline_artifacts_size: 0,
+ uploads_size: 1,
+ storage_size: 7
+ )
+
+ expected = project_statistics.map(&:id)
+ actual = migration.filter_batch(project_statistics_table).pluck(:id)
+
+ expect(actual).to match_array(expected)
+ end
+ end
+
+ describe '#perform' do
+ subject(:perform_migration) { migration.perform }
+
+ context 'when project_statistics backfill runs' do
+ before do
+ generate_records(default_projects, project_statistics_table, default_stats)
+ end
+
+ context 'when storage_size includes pipeline_artifacts_size' do
+ it 'removes pipeline_artifacts_size from storage_size' do
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ expect(project_statistics_table.pluck(:storage_size).uniq).to match_array([default_storage_size])
+
+ perform_migration
+
+ expect(project_statistics_table.pluck(:storage_size).uniq).to match_array(
+ [default_storage_size - default_pipeline_artifacts_size]
+ )
+ expect(::Namespaces::ScheduleAggregationWorker).to have_received(:perform_async).exactly(4).times
+ end
+ end
+
+ context 'when storage_size does not include default_pipeline_artifacts_size' do
+ it 'does not update the record' do
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ proj_stat = project_statistics_table.last
+ expect(proj_stat.storage_size).to eq(default_storage_size)
+ proj_stat.storage_size = default_storage_size - default_pipeline_artifacts_size
+ proj_stat.save!
+
+ perform_migration
+
+ expect(project_statistics_table.pluck(:storage_size).uniq).to match_array(
+ [default_storage_size - default_pipeline_artifacts_size]
+ )
+ expect(::Namespaces::ScheduleAggregationWorker).to have_received(:perform_async).exactly(3).times
+ end
+ end
+ end
+
+ it 'coerces a null wiki_size to 0' do
+ project_statistics = create_project_stats(projects, namespaces, default_stats, { wiki_size: nil })
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ migration = create_migration(end_id: project_statistics.project_id)
+
+ migration.perform
+
+ project_statistics.reload
+ expect(project_statistics.storage_size).to eq(6)
+ end
+
+ it 'coerces a null snippets_size to 0' do
+ project_statistics = create_project_stats(projects, namespaces, default_stats, { snippets_size: nil })
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ migration = create_migration(end_id: project_statistics.project_id)
+
+ migration.perform
+
+ project_statistics.reload
+ expect(project_statistics.storage_size).to eq(6)
+ end
+ end
+end
diff --git a/spec/migrations/20230719083202_backfill_project_statistics_storage_size_without_pipeline_artifacts_size_spec.rb b/spec/migrations/20230719083202_backfill_project_statistics_storage_size_without_pipeline_artifacts_size_spec.rb
new file mode 100644
index 00000000000..c3183a5da1b
--- /dev/null
+++ b/spec/migrations/20230719083202_backfill_project_statistics_storage_size_without_pipeline_artifacts_size_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillProjectStatisticsStorageSizeWithoutPipelineArtifactsSize, feature_category: :consumables_cost_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'does not schedule background jobs when Gitlab.org_or_com? is false' do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ allow(Gitlab).to receive(:org_or_com?).and_return(false)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+ end
+ end
+
+ it 'schedules a new batched migration' do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ allow(Gitlab).to receive(:org_or_com?).and_return(true)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :project_statistics,
+ column_name: :project_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/support/helpers/migrations_helpers/project_statistics_helper.rb b/spec/support/helpers/migrations_helpers/project_statistics_helper.rb
new file mode 100644
index 00000000000..4e7d83a38ac
--- /dev/null
+++ b/spec/support/helpers/migrations_helpers/project_statistics_helper.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module MigrationHelpers
+ module ProjectStatisticsHelper
+ def generate_records(projects, table, values = {})
+ projects.map do |proj|
+ table.create!(
+ values.merge({
+ project_id: proj.id,
+ namespace_id: proj.namespace_id
+ })
+ )
+ end
+ end
+
+ def create_migration(end_id:)
+ described_class.new(start_id: 1, end_id: end_id,
+ batch_table: 'project_statistics', batch_column: 'project_id',
+ sub_batch_size: 1_000, pause_ms: 0,
+ connection: ApplicationRecord.connection)
+ end
+
+ def create_project_stats(project_table, namespace, default_stats, override_stats = {})
+ stats = default_stats.merge(override_stats)
+
+ group = namespace.create!(name: 'group_a', path: 'group-a', type: 'Group')
+ project_namespace = namespace.create!(name: 'project_a', path: 'project_a', type: 'Project', parent_id: group.id)
+ proj = project_table.create!(name: 'project_a', path: 'project-a', namespace_id: group.id,
+ project_namespace_id: project_namespace.id)
+ project_statistics_table.create!(
+ project_id: proj.id,
+ namespace_id: group.id,
+ **stats
+ )
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/lib/gitlab/background_migration/backfill_project_statistics.rb b/spec/support/shared_contexts/lib/gitlab/background_migration/backfill_project_statistics.rb
new file mode 100644
index 00000000000..1b835e1392d
--- /dev/null
+++ b/spec/support/shared_contexts/lib/gitlab/background_migration/backfill_project_statistics.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'when backfilling project statistics' do
+ let!(:namespaces) { table(:namespaces) }
+ let!(:project_statistics_table) { table(:project_statistics) }
+ let!(:projects) { table(:projects) }
+ let!(:count_of_columns) { ProjectStatistics::STORAGE_SIZE_COMPONENTS.count }
+ let(:default_storage_size) { 12 }
+
+ let!(:root_group) do
+ namespaces.create!(name: 'root-group', path: 'root-group', type: 'Group') do |new_group|
+ new_group.update!(traversal_ids: [new_group.id])
+ end
+ end
+
+ let!(:group) do
+ namespaces.create!(name: 'group', path: 'group', parent_id: root_group.id, type: 'Group') do |new_group|
+ new_group.update!(traversal_ids: [root_group.id, new_group.id])
+ end
+ end
+
+ let!(:sub_group) do
+ namespaces.create!(name: 'subgroup', path: 'subgroup', parent_id: group.id, type: 'Group') do |new_group|
+ new_group.update!(traversal_ids: [root_group.id, group.id, new_group.id])
+ end
+ end
+
+ let!(:namespace1) do
+ namespaces.create!(
+ name: 'namespace1', type: 'Group', path: 'space1'
+ )
+ end
+
+ let!(:proj_namespace1) do
+ namespaces.create!(
+ name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id
+ )
+ end
+
+ let!(:proj_namespace2) do
+ namespaces.create!(
+ name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace1.id
+ )
+ end
+
+ let!(:proj_namespace3) do
+ namespaces.create!(
+ name: 'proj3', path: 'proj3', type: 'Project', parent_id: sub_group.id
+ )
+ end
+
+ let!(:proj_namespace4) do
+ namespaces.create!(
+ name: 'proj4', path: 'proj4', type: 'Project', parent_id: sub_group.id
+ )
+ end
+
+ let!(:proj_namespace5) do
+ namespaces.create!(
+ name: 'proj5', path: 'proj5', type: 'Project', parent_id: sub_group.id
+ )
+ end
+
+ let!(:proj1) do
+ projects.create!(
+ name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id
+ )
+ end
+
+ let!(:proj2) do
+ projects.create!(
+ name: 'proj2', path: 'proj2', namespace_id: namespace1.id, project_namespace_id: proj_namespace2.id
+ )
+ end
+
+ let!(:proj3) do
+ projects.create!(
+ name: 'proj3', path: 'proj3', namespace_id: sub_group.id, project_namespace_id: proj_namespace3.id
+ )
+ end
+
+ let!(:proj4) do
+ projects.create!(
+ name: 'proj4', path: 'proj4', namespace_id: sub_group.id, project_namespace_id: proj_namespace4.id
+ )
+ end
+
+ let!(:proj5) do
+ projects.create!(
+ name: 'proj5', path: 'proj5', namespace_id: sub_group.id, project_namespace_id: proj_namespace5.id
+ )
+ end
+
+ let(:migration) do
+ described_class.new(start_id: 1, end_id: proj4.id,
+ batch_table: 'project_statistics', batch_column: 'project_id',
+ sub_batch_size: 1_000, pause_ms: 0,
+ connection: ApplicationRecord.connection)
+ end
+
+ let(:default_projects) do
+ [
+ proj1, proj2, proj3, proj4
+ ]
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 38959b6d764..3cd030e678d 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -398,7 +398,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'PipelineProcessWorker' => 3,
'PostReceive' => 3,
'ProcessCommitWorker' => 3,
- 'ProductAnalytics::InitializeAnalyticsWorker' => 3,
'ProductAnalytics::InitializeSnowplowProductAnalyticsWorker' => 1,
'ProjectCacheWorker' => 3,
'ProjectDestroyWorker' => 3,