gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec')
-rw-r--r--  spec/frontend/admin/users/components/user_actions_spec.js | 138
-rw-r--r--  spec/frontend/admin/users/components/user_date_spec.js | 34
-rw-r--r--  spec/frontend/admin/users/components/users_table_spec.js | 26
-rw-r--r--  spec/frontend/lib/utils/common_utils_spec.js | 8
-rw-r--r--  spec/lib/gitlab/import_export/design_repo_restorer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/fork_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/importer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/import_export/repo_restorer_spec.rb | 4
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 14
-rw-r--r--  spec/serializers/pipeline_entity_spec.rb | 207
-rw-r--r--  spec/services/ci/pipeline_artifacts/create_quality_report_service_spec.rb | 62
-rw-r--r--  spec/services/pages/migrate_from_legacy_storage_service_spec.rb | 92
-rw-r--r--  spec/tasks/gitlab/pages_rake_spec.rb | 60
-rw-r--r--  spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb | 40
14 files changed, 527 insertions(+), 166 deletions(-)
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
new file mode 100644
index 00000000000..78bc37233c2
--- /dev/null
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -0,0 +1,138 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlDropdownDivider } from '@gitlab/ui';
+import AdminUserActions from '~/admin/users/components/user_actions.vue';
+import { generateUserPaths } from '~/admin/users/utils';
+
+import { users, paths } from '../mock_data';
+
+const BLOCK = 'block';
+const EDIT = 'edit';
+const LDAP = 'ldapBlocked';
+const DELETE = 'delete';
+const DELETE_WITH_CONTRIBUTIONS = 'deleteWithContributions';
+
+describe('AdminUserActions component', () => {
+ let wrapper;
+ const user = users[0];
+ const userPaths = generateUserPaths(paths, user.username);
+
+ const findEditButton = () => wrapper.find('[data-testid="edit"]');
+ const findActionsDropdown = () => wrapper.find('[data-testid="actions"]');
+ const findDropdownDivider = () => wrapper.find(GlDropdownDivider);
+
+ const initComponent = ({ actions = [] } = {}) => {
+ wrapper = shallowMount(AdminUserActions, {
+ propsData: {
+ user: {
+ ...user,
+ actions,
+ },
+ paths,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('edit button', () => {
+ describe('when the user has an edit action attached', () => {
+ beforeEach(() => {
+ initComponent({ actions: [EDIT] });
+ });
+
+ it('renders the edit button linking to the user edit path', () => {
+ expect(findEditButton().exists()).toBe(true);
+ expect(findEditButton().attributes('href')).toBe(userPaths.edit);
+ });
+ });
+
+ describe('when there is no edit action attached to the user', () => {
+ beforeEach(() => {
+ initComponent({ actions: [] });
+ });
+
+ it('does not render the edit button linking to the user edit path', () => {
+ expect(findEditButton().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('actions dropdown', () => {
+ describe('when there are actions', () => {
+ const actions = [EDIT, BLOCK];
+
+ beforeEach(() => {
+ initComponent({ actions });
+ });
+
+ it('renders the actions dropdown', () => {
+ expect(findActionsDropdown().exists()).toBe(true);
+ });
+
+ it.each(actions)('renders a dropdown item for %s', (action) => {
+ const dropdownAction = wrapper.find(`[data-testid="${action}"]`);
+ expect(dropdownAction.exists()).toBe(true);
+ expect(dropdownAction.attributes('href')).toBe(userPaths[action]);
+ });
+
+ describe('when there is a LDAP action', () => {
+ beforeEach(() => {
+ initComponent({ actions: [LDAP] });
+ });
+
+ it('renders the LDAP dropdown item without a link', () => {
+ const dropdownAction = wrapper.find(`[data-testid="${LDAP}"]`);
+ expect(dropdownAction.exists()).toBe(true);
+ expect(dropdownAction.attributes('href')).toBe(undefined);
+ });
+ });
+
+ describe('when there is a delete action', () => {
+ const deleteActions = [DELETE, DELETE_WITH_CONTRIBUTIONS];
+
+ beforeEach(() => {
+ initComponent({ actions: [BLOCK, ...deleteActions] });
+ });
+
+ it('renders a dropdown divider', () => {
+ expect(findDropdownDivider().exists()).toBe(true);
+ });
+
+ it('only renders delete dropdown items for actions containing the word "delete"', () => {
+ const { length } = wrapper.findAll(`[data-testid*="delete-"]`);
+ expect(length).toBe(deleteActions.length);
+ });
+
+ it.each(deleteActions)('renders a delete dropdown item for %s', (action) => {
+ const deleteAction = wrapper.find(`[data-testid="delete-${action}"]`);
+ expect(deleteAction.exists()).toBe(true);
+ expect(deleteAction.attributes('href')).toBe(userPaths[action]);
+ });
+ });
+
+ describe('when there are no delete actions', () => {
+ it('does not render a dropdown divider', () => {
+ expect(findDropdownDivider().exists()).toBe(false);
+ });
+
+ it('does not render a delete dropdown item', () => {
+ const anyDeleteAction = wrapper.find(`[data-testid*="delete-"]`);
+ expect(anyDeleteAction.exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('when there are no actions', () => {
+ beforeEach(() => {
+ initComponent({ actions: [] });
+ });
+
+ it('does not render the actions dropdown', () => {
+ expect(findActionsDropdown().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/users/components/user_date_spec.js b/spec/frontend/admin/users/components/user_date_spec.js
new file mode 100644
index 00000000000..6428b10059b
--- /dev/null
+++ b/spec/frontend/admin/users/components/user_date_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+
+import UserDate from '~/admin/users/components/user_date.vue';
+import { users } from '../mock_data';
+
+const mockDate = users[0].createdAt;
+
+describe('UserDate component', () => {
+ let wrapper;
+
+ const initComponent = (props = {}) => {
+ wrapper = shallowMount(UserDate, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it.each`
+ date | output
+ ${mockDate} | ${'13 Nov, 2020'}
+ ${null} | ${'Never'}
+ ${undefined} | ${'Never'}
+ `('renders $date as $output', ({ date, output }) => {
+ initComponent({ date });
+
+ expect(wrapper.text()).toBe(output);
+ });
+});
diff --git a/spec/frontend/admin/users/components/users_table_spec.js b/spec/frontend/admin/users/components/users_table_spec.js
index b79d2d4d39d..ac48542cec5 100644
--- a/spec/frontend/admin/users/components/users_table_spec.js
+++ b/spec/frontend/admin/users/components/users_table_spec.js
@@ -3,6 +3,9 @@ import { mount } from '@vue/test-utils';
import AdminUsersTable from '~/admin/users/components/users_table.vue';
import AdminUserAvatar from '~/admin/users/components/user_avatar.vue';
+import AdminUserDate from '~/admin/users/components/user_date.vue';
+import AdminUserActions from '~/admin/users/components/user_actions.vue';
+
import { users, paths } from '../mock_data';
describe('AdminUsersTable component', () => {
@@ -39,18 +42,21 @@ describe('AdminUsersTable component', () => {
initComponent();
});
- it.each`
- key | label
- ${'name'} | ${'Name'}
- ${'projectsCount'} | ${'Projects'}
- ${'createdAt'} | ${'Created on'}
- ${'lastActivityOn'} | ${'Last activity'}
- `('renders users.$key in column $label', ({ key, label }) => {
- expect(getCellByLabel(0, label).text()).toContain(`${user[key]}`);
+ it('renders the projects count', () => {
+ expect(getCellByLabel(0, 'Projects').text()).toContain(`${user.projectsCount}`);
});
- it('renders an AdminUserAvatar component', () => {
- expect(getCellByLabel(0, 'Name').find(AdminUserAvatar).exists()).toBe(true);
+ it('renders the user actions', () => {
+ expect(wrapper.find(AdminUserActions).exists()).toBe(true);
+ });
+
+ it.each`
+ component | label
+ ${AdminUserAvatar} | ${'Name'}
+ ${AdminUserDate} | ${'Created on'}
+ ${AdminUserDate} | ${'Last activity'}
+ `('renders the component for column $label', ({ component, label }) => {
+ expect(getCellByLabel(0, label).find(component).exists()).toBe(true);
});
});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 90222f0f718..18be88a0b8b 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -1045,4 +1045,12 @@ describe('common_utils', () => {
expect(commonUtils.getDashPath('/some/url')).toEqual(null);
});
});
+
+ describe('convertArrayToCamelCase', () => {
+ it('returns a new array with snake_case string elements converted to camelCase', () => {
+ const result = commonUtils.convertArrayToCamelCase(['hello', 'hello_world']);
+
+ expect(result).toEqual(['hello', 'helloWorld']);
+ });
+ });
});
diff --git a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
index a7fd741834b..6680f4e7a03 100644
--- a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::ImportExport::DesignRepoRestorer do
let(:restorer) do
described_class.new(path_to_bundle: bundle_path,
shared: shared,
- project: project)
+ importable: project)
end
before do
diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb
index 62b3c0f95e3..65c28a8b8a2 100644
--- a/spec/lib/gitlab/import_export/fork_spec.rb
+++ b/spec/lib/gitlab/import_export/fork_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'forked project import' do
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) }
let(:repo_restorer) do
- Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: bundle_path, shared: shared, project: project)
+ Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: bundle_path, shared: shared, importable: project)
end
let!(:merge_request) do
diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb
index 75db3167ebc..20f0f6af6f3 100644
--- a/spec/lib/gitlab/import_export/importer_spec.rb
+++ b/spec/lib/gitlab/import_export/importer_spec.rb
@@ -69,8 +69,8 @@ RSpec.describe Gitlab::ImportExport::Importer do
repo_path = File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename)
restorer = double(Gitlab::ImportExport::RepoRestorer)
- expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).with(path_to_bundle: repo_path, shared: shared, project: project).and_return(restorer)
- expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).with(path_to_bundle: wiki_repo_path, shared: shared, project: ProjectWiki.new(project)).and_return(restorer)
+ expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).with(path_to_bundle: repo_path, shared: shared, importable: project).and_return(restorer)
+ expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).with(path_to_bundle: wiki_repo_path, shared: shared, importable: ProjectWiki.new(project)).and_return(restorer)
expect(Gitlab::ImportExport::RepoRestorer).to receive(:new).and_call_original
expect(restorer).to receive(:restore).and_return(true).twice
diff --git a/spec/lib/gitlab/import_export/repo_restorer_spec.rb b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
index ceb34893069..fe43a23e242 100644
--- a/spec/lib/gitlab/import_export/repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
let(:bundler) { Gitlab::ImportExport::RepoSaver.new(exportable: project_with_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) }
- subject { described_class.new(path_to_bundle: bundle_path, shared: shared, project: project) }
+ subject { described_class.new(path_to_bundle: bundle_path, shared: shared, importable: project) }
after do
Gitlab::Shell.new.remove_repository(project.repository_storage, project.disk_path)
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
let(:bundler) { Gitlab::ImportExport::WikiRepoSaver.new(exportable: project_with_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.wiki_repo_bundle_filename) }
- subject { described_class.new(path_to_bundle: bundle_path, shared: shared, project: ProjectWiki.new(project)) }
+ subject { described_class.new(path_to_bundle: bundle_path, shared: shared, importable: ProjectWiki.new(project)) }
after do
Gitlab::Shell.new.remove_repository(project.wiki.repository_storage, project.wiki.disk_path)
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index af6d7ab4250..79eb016bc41 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -3522,7 +3522,19 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'when pipeline status is success' do
let(:pipeline) { create(:ci_pipeline, :success, project: project) }
- it { expect(subject).to be_truthy }
+ it 'can generate a codequality report' do
+ expect(subject).to be_truthy
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(codequality_mr_diff: false)
+ end
+
+ it 'cannot generate a codequality report' do
+ expect(subject).to be_falsey
+ end
+ end
end
end
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index d7cd13edec8..61dbcaae77d 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -7,18 +7,8 @@ RSpec.describe PipelineEntity do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- let(:request) { double('request') }
-
- before do
- stub_not_protect_default_branch
-
- allow(request).to receive(:current_user).and_return(user)
- allow(request).to receive(:project).and_return(project)
- end
-
- let(:entity) do
- described_class.represent(pipeline, request: request)
- end
+ let(:request) { double('request', current_user: user) }
+ let(:entity) { described_class.represent(pipeline, request: request) }
describe '#as_json' do
subject { entity.as_json }
@@ -54,70 +44,72 @@ RSpec.describe PipelineEntity do
end
end
- context 'when pipeline is retryable' do
- let(:project) { create(:project) }
-
- let(:pipeline) do
- create(:ci_pipeline, status: :success, project: project)
- end
-
+ context 'when default branch not protected' do
before do
- create(:ci_build, :failed, pipeline: pipeline)
+ stub_not_protect_default_branch
end
- it 'does not serialize stage builds' do
- subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
- expect(stage).not_to include(:groups, :latest_statuses, :retries)
+ context 'when pipeline is retryable' do
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, status: :success, project: project)
end
- end
- context 'user has ability to retry pipeline' do
before do
- project.add_developer(user)
- end
-
- it 'contains retry path' do
- expect(subject[:retry_path]).to be_present
+ create(:ci_build, :failed, pipeline: pipeline)
end
- end
- context 'user does not have ability to retry pipeline' do
- it 'does not contain retry path' do
- expect(subject).not_to have_key(:retry_path)
+ it 'does not serialize stage builds' do
+ subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
+ expect(stage).not_to include(:groups, :latest_statuses, :retries)
+ end
end
- end
- end
- context 'when pipeline is cancelable' do
- let(:project) { create(:project) }
+ context 'user has ability to retry pipeline' do
+ before do
+ project.add_developer(user)
+ end
- let(:pipeline) do
- create(:ci_pipeline, status: :running, project: project)
- end
+ it 'contains retry path' do
+ expect(subject[:retry_path]).to be_present
+ end
+ end
- before do
- create(:ci_build, :pending, pipeline: pipeline)
+ context 'user does not have ability to retry pipeline' do
+ it 'does not contain retry path' do
+ expect(subject).not_to have_key(:retry_path)
+ end
+ end
end
- it 'does not serialize stage builds' do
- subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
- expect(stage).not_to include(:groups, :latest_statuses, :retries)
+ context 'when pipeline is cancelable' do
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, status: :running, project: project)
end
- end
- context 'user has ability to cancel pipeline' do
before do
- project.add_developer(user)
+ create(:ci_build, :pending, pipeline: pipeline)
end
- it 'contains cancel path' do
- expect(subject[:cancel_path]).to be_present
+ it 'does not serialize stage builds' do
+ subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
+ expect(stage).not_to include(:groups, :latest_statuses, :retries)
+ end
end
- end
- context 'user does not have ability to cancel pipeline' do
- it 'does not contain cancel path' do
- expect(subject).not_to have_key(:cancel_path)
+ context 'user has ability to cancel pipeline' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'contains cancel path' do
+ expect(subject[:cancel_path]).to be_present
+ end
+ end
+
+ context 'user does not have ability to cancel pipeline' do
+ it 'does not contain cancel path' do
+ expect(subject).not_to have_key(:cancel_path)
+ end
end
end
end
@@ -133,7 +125,6 @@ RSpec.describe PipelineEntity do
end
context 'user does not have ability to delete pipeline' do
- let(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
it 'does not contain delete path' do
@@ -167,79 +158,85 @@ RSpec.describe PipelineEntity do
end
end
- context 'when pipeline is detached merge request pipeline' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:project) { merge_request.target_project }
- let(:pipeline) { merge_request.pipelines_for_merge_request.first }
-
- it 'makes detached flag true' do
- expect(subject[:flags][:detached_merge_request_pipeline]).to be_truthy
+ context 'when request has a project' do
+ before do
+ allow(request).to receive(:project).and_return(project)
end
- it 'does not expose source sha and target sha' do
- expect(subject[:source_sha]).to be_nil
- expect(subject[:target_sha]).to be_nil
- end
+ context 'when pipeline is detached merge request pipeline' do
+ let_it_be(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let(:project) { merge_request.target_project }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
- context 'when user is a developer' do
- before do
- project.add_developer(user)
+ it 'makes detached flag true' do
+ expect(subject[:flags][:detached_merge_request_pipeline]).to be_truthy
end
- it 'has merge request information' do
- expect(subject[:merge_request][:iid]).to eq(merge_request.iid)
+ it 'does not expose source sha and target sha' do
+ expect(subject[:source_sha]).to be_nil
+ expect(subject[:target_sha]).to be_nil
+ end
+
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'has merge request information' do
+ expect(subject[:merge_request][:iid]).to eq(merge_request.iid)
- expect(project_merge_request_path(project, merge_request))
- .to include(subject[:merge_request][:path])
+ expect(project_merge_request_path(project, merge_request))
+ .to include(subject[:merge_request][:path])
- expect(subject[:merge_request][:title]).to eq(merge_request.title)
+ expect(subject[:merge_request][:title]).to eq(merge_request.title)
- expect(subject[:merge_request][:source_branch])
- .to eq(merge_request.source_branch)
+ expect(subject[:merge_request][:source_branch])
+ .to eq(merge_request.source_branch)
- expect(project_commits_path(project, merge_request.source_branch))
- .to include(subject[:merge_request][:source_branch_path])
+ expect(project_commits_path(project, merge_request.source_branch))
+ .to include(subject[:merge_request][:source_branch_path])
- expect(subject[:merge_request][:target_branch])
- .to eq(merge_request.target_branch)
+ expect(subject[:merge_request][:target_branch])
+ .to eq(merge_request.target_branch)
- expect(project_commits_path(project, merge_request.target_branch))
- .to include(subject[:merge_request][:target_branch_path])
+ expect(project_commits_path(project, merge_request.target_branch))
+ .to include(subject[:merge_request][:target_branch_path])
+ end
end
- end
- context 'when user is an external user' do
- it 'has no merge request information' do
- expect(subject[:merge_request]).to be_nil
+ context 'when user is an external user' do
+ it 'has no merge request information' do
+ expect(subject[:merge_request]).to be_nil
+ end
end
end
- end
- context 'when pipeline is merge request pipeline' do
- let(:merge_request) { create(:merge_request, :with_merge_request_pipeline, merge_sha: 'abc') }
- let(:project) { merge_request.target_project }
- let(:pipeline) { merge_request.pipelines_for_merge_request.first }
+ context 'when pipeline is merge request pipeline' do
+ let_it_be(:merge_request) { create(:merge_request, :with_merge_request_pipeline, merge_sha: 'abc') }
+ let(:project) { merge_request.target_project }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
- it 'makes detached flag false' do
- expect(subject[:flags][:detached_merge_request_pipeline]).to be_falsy
- end
+ it 'makes detached flag false' do
+ expect(subject[:flags][:detached_merge_request_pipeline]).to be_falsy
+ end
- it 'makes atached flag true' do
- expect(subject[:flags][:merge_request_pipeline]).to be_truthy
- end
+ it 'makes attached flag true' do
+ expect(subject[:flags][:merge_request_pipeline]).to be_truthy
+ end
- it 'exposes source sha and target sha' do
- expect(subject[:source_sha]).to be_present
- expect(subject[:target_sha]).to be_present
- end
+ it 'exposes source sha and target sha' do
+ expect(subject[:source_sha]).to be_present
+ expect(subject[:target_sha]).to be_present
+ end
- it 'exposes merge request event type' do
- expect(subject[:merge_request_event_type]).to be_present
+ it 'exposes merge request event type' do
+ expect(subject[:merge_request_event_type]).to be_present
+ end
end
end
context 'when pipeline has failed builds' do
- let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, user: user) }
let_it_be(:build) { create(:ci_build, :success, pipeline: pipeline) }
let_it_be(:failed_1) { create(:ci_build, :failed, pipeline: pipeline) }
let_it_be(:failed_2) { create(:ci_build, :failed, pipeline: pipeline) }
diff --git a/spec/services/ci/pipeline_artifacts/create_quality_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/create_quality_report_service_spec.rb
new file mode 100644
index 00000000000..6cb88d1402e
--- /dev/null
+++ b/spec/services/ci/pipeline_artifacts/create_quality_report_service_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::PipelineArtifacts::CreateQualityReportService do
+ describe '#execute' do
+ subject(:pipeline_artifact) { described_class.new.execute(pipeline) }
+
+ context 'when pipeline has codequality reports' do
+ let(:project) { create(:project, :repository) }
+
+ describe 'pipeline completed status' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :result) do
+ :success | 1
+ :failed | 1
+ :canceled | 1
+ :skipped | 1
+ end
+
+ with_them do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, status: status, project: project) }
+
+ it 'creates a pipeline artifact' do
+ expect { pipeline_artifact }.to change(Ci::PipelineArtifact, :count).by(result)
+ end
+
+ it 'persists the default file name' do
+ expect(pipeline_artifact.file.filename).to eq('code_quality.json')
+ end
+
+ it 'sets expire_at to 1 week' do
+ freeze_time do
+ expect(pipeline_artifact.expire_at).to eq(1.week.from_now)
+ end
+ end
+ end
+ end
+
+ context 'when pipeline artifact has already been created' do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
+
+ it 'does not persist the same artifact twice' do
+ 2.times { described_class.new.execute(pipeline) }
+
+ expect(Ci::PipelineArtifact.count).to eq(1)
+ end
+ end
+ end
+
+ context 'when pipeline is not completed and codequality report does not exist' do
+ let(:pipeline) { create(:ci_pipeline, :running) }
+
+ it 'does not persist data' do
+ pipeline_artifact
+
+ expect(Ci::PipelineArtifact.count).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
new file mode 100644
index 00000000000..5d335143719
--- /dev/null
+++ b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::MigrateFromLegacyStorageService do
+ let(:service) { described_class.new(Rails.logger, 3, 10) }
+
+ it 'does not try to migrate pages if pages are not deployed' do
+ expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+
+ expect(service.execute).to eq(migrated: 0, errored: 0)
+ end
+
+ it 'uses multiple threads' do
+ projects = create_list(:project, 20)
+ projects.each do |project|
+ project.mark_pages_as_deployed
+
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+ end
+
+ service = described_class.new(Rails.logger, 3, 2)
+
+ threads = Concurrent::Set.new
+
+ expect(service).to receive(:migrate_project).exactly(20).times.and_wrap_original do |m, *args|
+ threads.add(Thread.current)
+
+ # sleep to be 100% certain that one thread can't consume the whole queue on its own
+ # the test works without it, but we want to avoid making it flaky
+ sleep(0.01)
+
+ m.call(*args)
+ end
+
+ expect(service.execute).to eq(migrated: 20, errored: 0)
+ expect(threads.length).to eq(3)
+ end
+
+ context 'when pages are marked as deployed' do
+ let(:project) { create(:project) }
+
+ before do
+ project.mark_pages_as_deployed
+ end
+
+ context 'when pages directory does not exist' do
+ it 'tries to migrate the project, but does not crash' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ expect(service.execute).to eq(migrated: 0, errored: 1)
+ end
+ end
+
+ context 'when pages directory exists on disk' do
+ before do
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+ end
+
+ it 'migrates pages projects without deployments' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ expect do
+ expect(service.execute).to eq(migrated: 1, errored: 0)
+ end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil)
+ end
+
+ context 'when a deployment already exists for the project' do
+ before do
+ deployment = create(:pages_deployment, project: project)
+ project.set_first_pages_deployment!(deployment)
+ end
+
+ it 'does not try to migrate project' do
+ expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+
+ expect(service.execute).to eq(migrated: 0, errored: 0)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/pages_rake_spec.rb b/spec/tasks/gitlab/pages_rake_spec.rb
index 76808f52890..9c26d3d73c8 100644
--- a/spec/tasks/gitlab/pages_rake_spec.rb
+++ b/spec/tasks/gitlab/pages_rake_spec.rb
@@ -9,59 +9,31 @@ RSpec.describe 'gitlab:pages:migrate_legacy_storage rake task' do
subject { run_rake_task('gitlab:pages:migrate_legacy_storage') }
- let(:project) { create(:project) }
-
- it 'does not try to migrate pages if pages are not deployed' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+ it 'calls migration service' do
+ expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything, 3, 10) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
subject
end
- context 'when pages are marked as deployed' do
- before do
- project.mark_pages_as_deployed
- end
-
- context 'when pages directory does not exist' do
- it 'tries to migrate the project, but does not crash' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
- expect(service).to receive(:execute).and_call_original
- end
+ it 'uses PAGES_MIGRATION_THREADS environment variable' do
+ stub_env('PAGES_MIGRATION_THREADS', '5')
- subject
- end
+ expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything, 5, 10) do |service|
+ expect(service).to receive(:execute).and_call_original
end
- context 'when pages directory exists on disk' do
- before do
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
- end
-
- it 'migrates pages projects without deployments' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- expect do
- subject
- end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil)
- end
-
- context 'when deployed already exists for the project' do
- before do
- deployment = create(:pages_deployment, project: project)
- project.set_first_pages_deployment!(deployment)
- end
+ subject
+ end
- it 'does not try to migrate project' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+ it 'uses PAGES_MIGRATION_BATCH_SIZE environment variable' do
+ stub_env('PAGES_MIGRATION_BATCH_SIZE', '100')
- subject
- end
- end
+ expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything, 3, 100) do |service|
+ expect(service).to receive(:execute).and_call_original
end
+
+ subject
end
end
diff --git a/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
new file mode 100644
index 00000000000..6755c93ae47
--- /dev/null
+++ b/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::PipelineArtifacts::CreateQualityReportWorker do
+ describe '#perform' do
+ subject { described_class.new.perform(pipeline_id) }
+
+ context 'when pipeline exists' do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports) }
+ let(:pipeline_id) { pipeline.id }
+
+ it 'calls pipeline codequality report service' do
+ expect_next_instance_of(::Ci::PipelineArtifacts::CreateQualityReportService) do |quality_report_service|
+ expect(quality_report_service).to receive(:execute)
+ end
+
+ subject
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { pipeline_id }
+
+ it 'creates a pipeline artifact' do
+ expect { subject }.to change { pipeline.pipeline_artifacts.count }.by(1)
+ end
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ let(:pipeline_id) { non_existing_record_id }
+
+ it 'does not call pipeline codequality report service' do
+ expect(Ci::PipelineArtifacts::CreateQualityReportService).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+end