Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-12-03 15:10:23 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-12-03 15:10:23 +0300
commit5f0d27d131aced1a53e8cbc7db023d9f947f8a1a (patch)
tree7007c07fc37c95638f3e71c1902dcd055db1d8ca /spec
parentcc8ea69201e2e4d020018c43efeb993c44cd8a71 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--spec/features/admin/admin_deploy_keys_spec.rb134
-rw-r--r--spec/finders/group_descendants_finder_spec.rb345
-rw-r--r--spec/frontend/crm/contacts_root_spec.js129
-rw-r--r--spec/frontend/crm/mock_data.js29
-rw-r--r--spec/frontend/crm/new_contact_form_spec.js108
-rw-r--r--spec/frontend/editor/helpers.js2
-rw-r--r--spec/frontend/editor/source_editor_instance_spec.js2
-rw-r--r--spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js4
-rw-r--r--spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb127
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb77
-rw-r--r--spec/lib/bulk_imports/groups/stage_spec.rb6
-rw-r--r--spec/lib/gitlab/etag_caching/store_spec.rb14
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb12
-rw-r--r--spec/models/bulk_imports/entity_spec.rb10
-rw-r--r--spec/models/ci/pipeline_spec.rb26
-rw-r--r--spec/models/commit_status_spec.rb35
-rw-r--r--spec/services/bulk_imports/uploads_export_service_spec.rb24
-rw-r--r--spec/services/ci/expire_pipeline_cache_service_spec.rb54
-rw-r--r--spec/services/concerns/audit_event_save_type_spec.rb28
21 files changed, 820 insertions, 390 deletions
diff --git a/spec/features/admin/admin_deploy_keys_spec.rb b/spec/features/admin/admin_deploy_keys_spec.rb
index 53caf0fac33..9b74aa2ac5a 100644
--- a/spec/features/admin/admin_deploy_keys_spec.rb
+++ b/spec/features/admin/admin_deploy_keys_spec.rb
@@ -3,101 +3,125 @@
require 'spec_helper'
RSpec.describe 'admin deploy keys' do
+ include Spec::Support::Helpers::ModalHelpers
+
let_it_be(:admin) { create(:admin) }
let!(:deploy_key) { create(:deploy_key, public: true) }
let!(:another_deploy_key) { create(:another_deploy_key, public: true) }
before do
- stub_feature_flags(admin_deploy_keys_vue: false)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
end
- it 'show all public deploy keys' do
- visit admin_deploy_keys_path
+ shared_examples 'renders deploy keys correctly' do
+ it 'show all public deploy keys' do
+ visit admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).to have_content(deploy_key.title)
- expect(page).to have_content(another_deploy_key.title)
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).to have_content(deploy_key.title)
+ expect(page).to have_content(another_deploy_key.title)
+ end
end
- end
- it 'shows all the projects the deploy key has write access' do
- write_key = create(:deploy_keys_project, :write_access, deploy_key: deploy_key)
+ it 'shows all the projects the deploy key has write access' do
+ write_key = create(:deploy_keys_project, :write_access, deploy_key: deploy_key)
- visit admin_deploy_keys_path
+ visit admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).to have_content(write_key.project.full_name)
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).to have_content(write_key.project.full_name)
+ end
end
- end
- describe 'create a new deploy key' do
- let(:new_ssh_key) { attributes_for(:key)[:key] }
+ describe 'create a new deploy key' do
+ let(:new_ssh_key) { attributes_for(:key)[:key] }
- before do
- visit admin_deploy_keys_path
- click_link 'New deploy key'
- end
+ before do
+ visit admin_deploy_keys_path
+ click_link 'New deploy key'
+ end
- it 'creates a new deploy key' do
- fill_in 'deploy_key_title', with: 'laptop'
- fill_in 'deploy_key_key', with: new_ssh_key
- click_button 'Create'
+ it 'creates a new deploy key' do
+ fill_in 'deploy_key_title', with: 'laptop'
+ fill_in 'deploy_key_key', with: new_ssh_key
+ click_button 'Create'
- expect(current_path).to eq admin_deploy_keys_path
+ expect(current_path).to eq admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).to have_content('laptop')
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).to have_content('laptop')
+ end
end
end
- end
- describe 'update an existing deploy key' do
- before do
- visit admin_deploy_keys_path
- find('tr', text: deploy_key.title).click_link('Edit')
- end
+ describe 'update an existing deploy key' do
+ before do
+ visit admin_deploy_keys_path
+ page.within('tr', text: deploy_key.title) do
+ click_link(_('Edit deploy key'))
+ end
+ end
- it 'updates an existing deploy key' do
- fill_in 'deploy_key_title', with: 'new-title'
- click_button 'Save changes'
+ it 'updates an existing deploy key' do
+ fill_in 'deploy_key_title', with: 'new-title'
+ click_button 'Save changes'
- expect(current_path).to eq admin_deploy_keys_path
+ expect(current_path).to eq admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).to have_content('new-title')
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).to have_content('new-title')
+ end
end
end
end
- describe 'remove an existing deploy key' do
- before do
- visit admin_deploy_keys_path
- end
+ context 'when `admin_deploy_keys_vue` feature flag is enabled', :js do
+ it_behaves_like 'renders deploy keys correctly'
- it 'removes an existing deploy key' do
- find('tr', text: deploy_key.title).click_link('Remove')
+ describe 'remove an existing deploy key' do
+ before do
+ visit admin_deploy_keys_path
+ end
- expect(current_path).to eq admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).not_to have_content(deploy_key.title)
+ it 'removes an existing deploy key' do
+ accept_gl_confirm('Are you sure you want to delete this deploy key?', button_text: 'Delete') do
+ page.within('tr', text: deploy_key.title) do
+ click_button _('Delete deploy key')
+ end
+ end
+
+ expect(current_path).to eq admin_deploy_keys_path
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).not_to have_content(deploy_key.title)
+ end
end
end
end
- context 'when `admin_deploy_keys_vue` feature flag is enabled', :js do
+ context 'when `admin_deploy_keys_vue` feature flag is disabled' do
before do
- stub_feature_flags(admin_deploy_keys_vue: true)
-
- visit admin_deploy_keys_path
+ stub_feature_flags(admin_deploy_keys_vue: false)
end
- it 'renders the Vue app', :aggregate_failures do
- expect(page).to have_content('Public deploy keys')
- expect(page).to have_selector('[data-testid="deploy-keys-list"]')
- expect(page).to have_link('New deploy key', href: new_admin_deploy_key_path)
+ it_behaves_like 'renders deploy keys correctly'
+
+ describe 'remove an existing deploy key' do
+ before do
+ visit admin_deploy_keys_path
+ end
+
+ it 'removes an existing deploy key' do
+ page.within('tr', text: deploy_key.title) do
+ click_link _('Remove deploy key')
+ end
+
+ expect(current_path).to eq admin_deploy_keys_path
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).not_to have_content(deploy_key.title)
+ end
+ end
end
end
end
diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb
index 01c6eb05907..f6b87f7eeab 100644
--- a/spec/finders/group_descendants_finder_spec.rb
+++ b/spec/finders/group_descendants_finder_spec.rb
@@ -4,7 +4,12 @@ require 'spec_helper'
RSpec.describe GroupDescendantsFinder do
let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
+
+ let_it_be_with_reload(:group) do
+ create(:group).tap do |g|
+ g.add_owner(user)
+ end
+ end
let(:params) { {} }
@@ -12,254 +17,262 @@ RSpec.describe GroupDescendantsFinder do
described_class.new(current_user: user, parent_group: group, params: params)
end
- before do
- group.add_owner(user)
- end
-
- describe '#has_children?' do
- it 'is true when there are projects' do
- create(:project, namespace: group)
-
- expect(finder.has_children?).to be_truthy
- end
-
- context 'when there are subgroups' do
+ shared_examples 'group descentants finder examples' do
+ describe '#has_children?' do
it 'is true when there are projects' do
- create(:group, parent: group)
+ create(:project, namespace: group)
expect(finder.has_children?).to be_truthy
end
- end
- end
- describe '#execute' do
- it 'includes projects' do
- project = create(:project, namespace: group)
+ context 'when there are subgroups' do
+ it 'is true when there are projects' do
+ create(:group, parent: group)
- expect(finder.execute).to contain_exactly(project)
+ expect(finder.has_children?).to be_truthy
+ end
+ end
end
- context 'when archived is `true`' do
- let(:params) { { archived: 'true' } }
-
- it 'includes archived projects' do
- archived_project = create(:project, namespace: group, archived: true)
+ describe '#execute' do
+ it 'includes projects' do
project = create(:project, namespace: group)
- expect(finder.execute).to contain_exactly(archived_project, project)
+ expect(finder.execute).to contain_exactly(project)
end
- end
- context 'when archived is `only`' do
- let(:params) { { archived: 'only' } }
+ context 'when archived is `true`' do
+ let(:params) { { archived: 'true' } }
- it 'includes only archived projects' do
- archived_project = create(:project, namespace: group, archived: true)
- _project = create(:project, namespace: group)
+ it 'includes archived projects' do
+ archived_project = create(:project, namespace: group, archived: true)
+ project = create(:project, namespace: group)
- expect(finder.execute).to contain_exactly(archived_project)
+ expect(finder.execute).to contain_exactly(archived_project, project)
+ end
end
- end
- it 'does not include archived projects' do
- _archived_project = create(:project, :archived, namespace: group)
+ context 'when archived is `only`' do
+ let(:params) { { archived: 'only' } }
- expect(finder.execute).to be_empty
- end
+ it 'includes only archived projects' do
+ archived_project = create(:project, namespace: group, archived: true)
+ _project = create(:project, namespace: group)
- context 'with a filter' do
- let(:params) { { filter: 'test' } }
+ expect(finder.execute).to contain_exactly(archived_project)
+ end
+ end
- it 'includes only projects matching the filter' do
- _other_project = create(:project, namespace: group)
- matching_project = create(:project, namespace: group, name: 'testproject')
+ it 'does not include archived projects' do
+ _archived_project = create(:project, :archived, namespace: group)
- expect(finder.execute).to contain_exactly(matching_project)
+ expect(finder.execute).to be_empty
end
- end
- it 'sorts elements by name as default' do
- project1 = create(:project, namespace: group, name: 'z')
- project2 = create(:project, namespace: group, name: 'a')
+ context 'with a filter' do
+ let(:params) { { filter: 'test' } }
- expect(subject.execute).to eq([project2, project1])
- end
+ it 'includes only projects matching the filter' do
+ _other_project = create(:project, namespace: group)
+ matching_project = create(:project, namespace: group, name: 'testproject')
- context 'sorting by name' do
- let!(:project1) { create(:project, namespace: group, name: 'a', path: 'project-a') }
- let!(:project2) { create(:project, namespace: group, name: 'z', path: 'project-z') }
- let(:params) do
- {
- sort: 'name_asc'
- }
+ expect(finder.execute).to contain_exactly(matching_project)
+ end
end
- it 'sorts elements by name' do
- expect(subject.execute).to eq(
- [
- project1,
- project2
- ]
- )
+ it 'sorts elements by name as default' do
+ project1 = create(:project, namespace: group, name: 'z')
+ project2 = create(:project, namespace: group, name: 'a')
+
+ expect(subject.execute).to match_array([project2, project1])
end
- context 'with nested groups' do
- let!(:subgroup1) { create(:group, parent: group, name: 'a', path: 'sub-a') }
- let!(:subgroup2) { create(:group, parent: group, name: 'z', path: 'sub-z') }
+ context 'sorting by name' do
+ let!(:project1) { create(:project, namespace: group, name: 'a', path: 'project-a') }
+ let!(:project2) { create(:project, namespace: group, name: 'z', path: 'project-z') }
+ let(:params) do
+ {
+ sort: 'name_asc'
+ }
+ end
it 'sorts elements by name' do
expect(subject.execute).to eq(
[
- subgroup1,
- subgroup2,
project1,
project2
]
)
end
- end
- end
-
- it 'does not include projects shared with the group' do
- project = create(:project, namespace: group)
- other_project = create(:project)
- other_project.project_group_links.create!(group: group,
- group_access: Gitlab::Access::MAINTAINER)
- expect(finder.execute).to contain_exactly(project)
- end
- end
+ context 'with nested groups' do
+ let!(:subgroup1) { create(:group, parent: group, name: 'a', path: 'sub-a') }
+ let!(:subgroup2) { create(:group, parent: group, name: 'z', path: 'sub-z') }
+
+ it 'sorts elements by name' do
+ expect(subject.execute).to eq(
+ [
+ subgroup1,
+ subgroup2,
+ project1,
+ project2
+ ]
+ )
+ end
+ end
+ end
- context 'with shared groups' do
- let_it_be(:other_group) { create(:group) }
- let_it_be(:shared_group_link) do
- create(:group_group_link,
- shared_group: group,
- shared_with_group: other_group)
- end
+ it 'does not include projects shared with the group' do
+ project = create(:project, namespace: group)
+ other_project = create(:project)
+ other_project.project_group_links.create!(group: group,
+ group_access: Gitlab::Access::MAINTAINER)
- context 'without common ancestor' do
- it { expect(finder.execute).to be_empty }
+ expect(finder.execute).to contain_exactly(project)
+ end
end
- context 'with common ancestor' do
- let_it_be(:common_ancestor) { create(:group) }
- let_it_be(:other_group) { create(:group, parent: common_ancestor) }
- let_it_be(:group) { create(:group, parent: common_ancestor) }
+ context 'with shared groups' do
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:shared_group_link) do
+ create(:group_group_link,
+ shared_group: group,
+ shared_with_group: other_group)
+ end
- context 'querying under the common ancestor' do
+ context 'without common ancestor' do
it { expect(finder.execute).to be_empty }
end
- context 'querying the common ancestor' do
- subject(:finder) do
- described_class.new(current_user: user, parent_group: common_ancestor, params: params)
+ context 'with common ancestor' do
+ let_it_be(:common_ancestor) { create(:group) }
+ let_it_be(:other_group) { create(:group, parent: common_ancestor) }
+ let_it_be(:group) { create(:group, parent: common_ancestor) }
+
+ context 'querying under the common ancestor' do
+ it { expect(finder.execute).to be_empty }
end
- it 'contains shared subgroups' do
- expect(finder.execute).to contain_exactly(group, other_group)
+ context 'querying the common ancestor' do
+ subject(:finder) do
+ described_class.new(current_user: user, parent_group: common_ancestor, params: params)
+ end
+
+ it 'contains shared subgroups' do
+ expect(finder.execute).to contain_exactly(group, other_group)
+ end
end
end
end
- end
- context 'with nested groups' do
- let!(:project) { create(:project, namespace: group) }
- let!(:subgroup) { create(:group, :private, parent: group) }
+ context 'with nested groups' do
+ let!(:project) { create(:project, namespace: group) }
+ let!(:subgroup) { create(:group, :private, parent: group) }
- describe '#execute' do
- it 'contains projects and subgroups' do
- expect(finder.execute).to contain_exactly(subgroup, project)
- end
+ describe '#execute' do
+ it 'contains projects and subgroups' do
+ expect(finder.execute).to contain_exactly(subgroup, project)
+ end
- it 'does not include subgroups the user does not have access to' do
- subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ it 'does not include subgroups the user does not have access to' do
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- public_subgroup = create(:group, :public, parent: group, path: 'public-group')
- other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group')
- other_user = create(:user)
- other_subgroup.add_developer(other_user)
+ public_subgroup = create(:group, :public, parent: group, path: 'public-group')
+ other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group')
+ other_user = create(:user)
+ other_subgroup.add_developer(other_user)
- finder = described_class.new(current_user: other_user, parent_group: group)
+ finder = described_class.new(current_user: other_user, parent_group: group)
- expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup)
- end
+ expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup)
+ end
- it 'only includes public groups when no user is given' do
- public_subgroup = create(:group, :public, parent: group)
- _private_subgroup = create(:group, :private, parent: group)
+ it 'only includes public groups when no user is given' do
+ public_subgroup = create(:group, :public, parent: group)
+ _private_subgroup = create(:group, :private, parent: group)
- finder = described_class.new(current_user: nil, parent_group: group)
+ finder = described_class.new(current_user: nil, parent_group: group)
- expect(finder.execute).to contain_exactly(public_subgroup)
- end
+ expect(finder.execute).to contain_exactly(public_subgroup)
+ end
- context 'when archived is `true`' do
- let(:params) { { archived: 'true' } }
+ context 'when archived is `true`' do
+ let(:params) { { archived: 'true' } }
- it 'includes archived projects in the count of subgroups' do
- create(:project, namespace: subgroup, archived: true)
+ it 'includes archived projects in the count of subgroups' do
+ create(:project, namespace: subgroup, archived: true)
- expect(finder.execute.first.preloaded_project_count).to eq(1)
+ expect(finder.execute.first.preloaded_project_count).to eq(1)
+ end
end
- end
- context 'with a filter' do
- let(:params) { { filter: 'test' } }
+ context 'with a filter' do
+ let(:params) { { filter: 'test' } }
- it 'contains only matching projects and subgroups' do
- matching_project = create(:project, namespace: group, name: 'Testproject')
- matching_subgroup = create(:group, name: 'testgroup', parent: group)
+ it 'contains only matching projects and subgroups' do
+ matching_project = create(:project, namespace: group, name: 'Testproject')
+ matching_subgroup = create(:group, name: 'testgroup', parent: group)
- expect(finder.execute).to contain_exactly(matching_subgroup, matching_project)
- end
+ expect(finder.execute).to contain_exactly(matching_subgroup, matching_project)
+ end
- it 'does not include subgroups the user does not have access to' do
- _invisible_subgroup = create(:group, :private, parent: group, name: 'test1')
- other_subgroup = create(:group, :private, parent: group, name: 'test2')
- public_subgroup = create(:group, :public, parent: group, name: 'test3')
- other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4')
- other_user = create(:user)
- other_subgroup.add_developer(other_user)
+ it 'does not include subgroups the user does not have access to' do
+ _invisible_subgroup = create(:group, :private, parent: group, name: 'test1')
+ other_subgroup = create(:group, :private, parent: group, name: 'test2')
+ public_subgroup = create(:group, :public, parent: group, name: 'test3')
+ other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4')
+ other_user = create(:user)
+ other_subgroup.add_developer(other_user)
- finder = described_class.new(current_user: other_user,
- parent_group: group,
- params: params)
+ finder = described_class.new(current_user: other_user,
+ parent_group: group,
+ params: params)
- expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup)
- end
+ expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup)
+ end
- context 'with matching children' do
- it 'includes a group that has a subgroup matching the query and its parent' do
- matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup)
+ context 'with matching children' do
+ it 'includes a group that has a subgroup matching the query and its parent' do
+ matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup)
- expect(finder.execute).to contain_exactly(subgroup, matching_subgroup)
- end
+ expect(finder.execute).to contain_exactly(subgroup, matching_subgroup)
+ end
- it 'includes the parent of a matching project' do
- matching_project = create(:project, namespace: subgroup, name: 'Testproject')
+ it 'includes the parent of a matching project' do
+ matching_project = create(:project, namespace: subgroup, name: 'Testproject')
- expect(finder.execute).to contain_exactly(subgroup, matching_project)
- end
+ expect(finder.execute).to contain_exactly(subgroup, matching_project)
+ end
- context 'with a small page size' do
- let(:params) { { filter: 'test', per_page: 1 } }
+ context 'with a small page size' do
+ let(:params) { { filter: 'test', per_page: 1 } }
- it 'contains all the ancestors of a matching subgroup regardless the page size' do
- subgroup = create(:group, :private, parent: group)
- matching = create(:group, :private, name: 'testgroup', parent: subgroup)
+ it 'contains all the ancestors of a matching subgroup regardless the page size' do
+ subgroup = create(:group, :private, parent: group)
+ matching = create(:group, :private, name: 'testgroup', parent: subgroup)
- expect(finder.execute).to contain_exactly(subgroup, matching)
+ expect(finder.execute).to contain_exactly(subgroup, matching)
+ end
end
- end
- it 'does not include the parent itself' do
- group.update!(name: 'test')
+ it 'does not include the parent itself' do
+ group.update!(name: 'test')
- expect(finder.execute).not_to include(group)
+ expect(finder.execute).not_to include(group)
+ end
end
end
end
end
end
+
+ it_behaves_like 'group descentants finder examples'
+
+ context 'when feature flag :linear_group_descendants_finder is disabled' do
+ before do
+ stub_feature_flags(linear_group_descendants_finder: false)
+ end
+
+ it_behaves_like 'group descentants finder examples'
+ end
end
diff --git a/spec/frontend/crm/contacts_root_spec.js b/spec/frontend/crm/contacts_root_spec.js
index fec1e924da3..c7410d13365 100644
--- a/spec/frontend/crm/contacts_root_spec.js
+++ b/spec/frontend/crm/contacts_root_spec.js
@@ -1,40 +1,62 @@
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import VueRouter from 'vue-router';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
import ContactsRoot from '~/crm/components/contacts_root.vue';
+import NewContactForm from '~/crm/components/new_contact_form.vue';
import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql';
import { getGroupContactsQueryResponse } from './mock_data';
-jest.mock('~/flash');
-
describe('Customer relations contacts root app', () => {
Vue.use(VueApollo);
+ Vue.use(VueRouter);
let wrapper;
let fakeApollo;
+ let router;
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findRowByName = (rowName) => wrapper.findAllByRole('row', { name: rowName });
const findIssuesLinks = () => wrapper.findAllByTestId('issues-link');
+ const findNewContactButton = () => wrapper.findByTestId('new-contact-button');
+ const findNewContactForm = () => wrapper.findComponent(NewContactForm);
+ const findError = () => wrapper.findComponent(GlAlert);
const successQueryHandler = jest.fn().mockResolvedValue(getGroupContactsQueryResponse);
+ const basePath = '/groups/flightjs/-/crm/contacts';
+
const mountComponent = ({
queryHandler = successQueryHandler,
mountFunction = shallowMountExtended,
+ canAdminCrmContact = true,
} = {}) => {
fakeApollo = createMockApollo([[getGroupContactsQuery, queryHandler]]);
wrapper = mountFunction(ContactsRoot, {
- provide: { groupFullPath: 'flightjs', groupIssuesPath: '/issues' },
+ router,
+ provide: {
+ groupFullPath: 'flightjs',
+ groupIssuesPath: '/issues',
+ groupId: 26,
+ canAdminCrmContact,
+ },
apolloProvider: fakeApollo,
});
};
+ beforeEach(() => {
+ router = new VueRouter({
+ base: basePath,
+ mode: 'history',
+ routes: [],
+ });
+ });
+
afterEach(() => {
wrapper.destroy();
fakeApollo = null;
+ router = null;
});
it('should render loading spinner', () => {
@@ -43,23 +65,94 @@ describe('Customer relations contacts root app', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
- it('should render error message on reject', async () => {
- mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
- await waitForPromises();
+ describe('new contact button', () => {
+ it('should exist when user has permission', () => {
+ mountComponent();
+
+ expect(findNewContactButton().exists()).toBe(true);
+ });
+
+ it('should not exist when user has no permission', () => {
+ mountComponent({ canAdminCrmContact: false });
+
+ expect(findNewContactButton().exists()).toBe(false);
+ });
+ });
+
+ describe('new contact form', () => {
+ it('should not exist by default', async () => {
+ mountComponent();
+ await waitForPromises();
+
+ expect(findNewContactForm().exists()).toBe(false);
+ });
+
+ it('should exist when user clicks new contact button', async () => {
+ mountComponent();
+
+ findNewContactButton().vm.$emit('click');
+ await waitForPromises();
+
+ expect(findNewContactForm().exists()).toBe(true);
+ });
+
+ it('should exist when user navigates directly to /new', async () => {
+ router.replace({ path: '/new' });
+ mountComponent();
+ await waitForPromises();
+
+ expect(findNewContactForm().exists()).toBe(true);
+ });
+
+ it('should not exist when form emits close', async () => {
+ router.replace({ path: '/new' });
+ mountComponent();
+
+ findNewContactForm().vm.$emit('close');
+ await waitForPromises();
+
+ expect(findNewContactForm().exists()).toBe(false);
+ });
+ });
+
+ describe('error', () => {
+ it('should exist on reject', async () => {
+ mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
+ await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(findError().exists()).toBe(true);
+ });
+
+ it('should exist when new contact form emits error', async () => {
+ router.replace({ path: '/new' });
+ mountComponent();
+
+ findNewContactForm().vm.$emit('error');
+ await waitForPromises();
+
+ expect(findError().exists()).toBe(true);
+ });
});
- it('renders correct results', async () => {
- mountComponent({ mountFunction: mountExtended });
- await waitForPromises();
+ describe('on successful load', () => {
+ it('should not render error', async () => {
+ mountComponent();
+ await waitForPromises();
- expect(findRowByName(/Marty/i)).toHaveLength(1);
- expect(findRowByName(/George/i)).toHaveLength(1);
- expect(findRowByName(/jd@gitlab.com/i)).toHaveLength(1);
+ expect(findError().exists()).toBe(false);
+ });
+
+ it('renders correct results', async () => {
+ mountComponent({ mountFunction: mountExtended });
+ await waitForPromises();
- const issueLink = findIssuesLinks().at(0);
- expect(issueLink.exists()).toBe(true);
- expect(issueLink.attributes('href')).toBe('/issues?scope=all&state=opened&crm_contact_id=16');
+ expect(findRowByName(/Marty/i)).toHaveLength(1);
+ expect(findRowByName(/George/i)).toHaveLength(1);
+ expect(findRowByName(/jd@gitlab.com/i)).toHaveLength(1);
+
+ const issueLink = findIssuesLinks().at(0);
+ expect(issueLink.exists()).toBe(true);
+ expect(issueLink.attributes('href')).toBe('/issues?scope=all&state=opened&crm_contact_id=16');
+ });
});
});
diff --git a/spec/frontend/crm/mock_data.js b/spec/frontend/crm/mock_data.js
index 4197621aaa6..e784ac3764d 100644
--- a/spec/frontend/crm/mock_data.js
+++ b/spec/frontend/crm/mock_data.js
@@ -40,7 +40,6 @@ export const getGroupContactsQueryResponse = {
organization: null,
},
],
- __typename: 'CustomerRelationsContactConnection',
},
},
},
@@ -79,3 +78,31 @@ export const getGroupOrganizationsQueryResponse = {
},
},
};
+
+export const createContactMutationResponse = {
+ data: {
+ customerRelationsContactCreate: {
+ __typeName: 'CustomerRelationsContactCreatePayload',
+ contact: {
+ __typename: 'CustomerRelationsContact',
+ id: 'gid://gitlab/CustomerRelations::Contact/1',
+ firstName: 'A',
+ lastName: 'B',
+ email: 'C',
+ phone: null,
+ description: null,
+ organization: null,
+ },
+ errors: [],
+ },
+ },
+};
+
+export const createContactMutationErrorResponse = {
+ data: {
+ customerRelationsContactCreate: {
+ contact: null,
+ errors: ['Phone is invalid.'],
+ },
+ },
+};
diff --git a/spec/frontend/crm/new_contact_form_spec.js b/spec/frontend/crm/new_contact_form_spec.js
new file mode 100644
index 00000000000..681c0539536
--- /dev/null
+++ b/spec/frontend/crm/new_contact_form_spec.js
@@ -0,0 +1,108 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import NewContactForm from '~/crm/components/new_contact_form.vue';
+import createContactMutation from '~/crm/components/queries/create_contact.mutation.graphql';
+import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql';
+import {
+ createContactMutationErrorResponse,
+ createContactMutationResponse,
+ getGroupContactsQueryResponse,
+} from './mock_data';
+
+describe('Customer relations contacts root app', () => {
+ Vue.use(VueApollo);
+ let wrapper;
+ let fakeApollo;
+ let queryHandler;
+
+ const findCreateNewContactButton = () => wrapper.findByTestId('create-new-contact-button');
+ const findCancelButton = () => wrapper.findByTestId('cancel-button');
+ const findForm = () => wrapper.find('form');
+
+ const mountComponent = ({ mountFunction = shallowMountExtended } = {}) => {
+ fakeApollo = createMockApollo([[createContactMutation, queryHandler]]);
+ fakeApollo.clients.defaultClient.cache.writeQuery({
+ query: getGroupContactsQuery,
+ variables: { groupFullPath: 'flightjs' },
+ data: getGroupContactsQueryResponse.data,
+ });
+ wrapper = mountFunction(NewContactForm, {
+ provide: { groupId: 26, groupFullPath: 'flightjs' },
+ apolloProvider: fakeApollo,
+ });
+ };
+
+ beforeEach(() => {
+ queryHandler = jest.fn().mockResolvedValue(createContactMutationResponse);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ describe('Create new contact button', () => {
+ it('should be disabled by default', () => {
+ mountComponent();
+
+ expect(findCreateNewContactButton().attributes('disabled')).toBeTruthy();
+ });
+
+ it('should not be disabled when first, last and email have values', async () => {
+ mountComponent();
+
+ wrapper.find('#contact-first-name').vm.$emit('input', 'A');
+ wrapper.find('#contact-last-name').vm.$emit('input', 'B');
+ wrapper.find('#contact-email').vm.$emit('input', 'C');
+ await waitForPromises();
+
+ expect(findCreateNewContactButton().attributes('disabled')).toBeFalsy();
+ });
+ });
+
+ it("should emit 'close' when cancel button is clicked", () => {
+ mountComponent();
+
+ findCancelButton().vm.$emit('click');
+
+ expect(wrapper.emitted().close).toBeTruthy();
+ });
+
+ describe('when query is successful', () => {
+ it("should emit 'close'", async () => {
+ mountComponent();
+
+ findForm().trigger('submit');
+ await waitForPromises();
+
+ expect(wrapper.emitted().close).toBeTruthy();
+ });
+ });
+
+ describe('when query fails', () => {
+ it('should emit error on reject', async () => {
+ queryHandler = jest.fn().mockRejectedValue('ERROR');
+ mountComponent();
+
+ findForm().trigger('submit');
+ await waitForPromises();
+
+ expect(wrapper.emitted().error).toBeTruthy();
+ });
+
+ it('should emit error on error response', async () => {
+ queryHandler = jest.fn().mockResolvedValue(createContactMutationErrorResponse);
+ mountComponent();
+
+ findForm().trigger('submit');
+ await waitForPromises();
+
+ expect(wrapper.emitted().error[0][0]).toEqual(
+ createContactMutationErrorResponse.data.customerRelationsContactCreate.errors,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/editor/helpers.js b/spec/frontend/editor/helpers.js
index c77be4f8c58..e4942c36f6c 100644
--- a/spec/frontend/editor/helpers.js
+++ b/spec/frontend/editor/helpers.js
@@ -31,7 +31,7 @@ export const SEConstExt = () => {
export function SEWithSetupExt() {
return {
- onSetup: (setupOptions = {}, instance) => {
+ onSetup: (instance, setupOptions = {}) => {
if (setupOptions && !Array.isArray(setupOptions)) {
Object.entries(setupOptions).forEach(([key, value]) => {
Object.assign(instance, {
diff --git a/spec/frontend/editor/source_editor_instance_spec.js b/spec/frontend/editor/source_editor_instance_spec.js
index a46eea4c4cd..38844b6cafe 100644
--- a/spec/frontend/editor/source_editor_instance_spec.js
+++ b/spec/frontend/editor/source_editor_instance_spec.js
@@ -424,7 +424,7 @@ describe('Source Editor Instance', () => {
definition: MyFullExtWithCallbacks,
setupOptions: defSetupOptions,
});
- expect(onSetup).toHaveBeenCalledWith(defSetupOptions, seInstance);
+ expect(onSetup).toHaveBeenCalledWith(seInstance, defSetupOptions);
expect(onUse).toHaveBeenCalledWith(seInstance);
});
diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
index 5fc0880b09e..ae19ed9ab02 100644
--- a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
+++ b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
@@ -1,4 +1,4 @@
-import { GlTable, GlLink } from '@gitlab/ui';
+import { GlTableLite, GlLink } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
import CiLintResults from '~/pipeline_editor/components/lint/ci_lint_results.vue';
@@ -24,7 +24,7 @@ describe('CI Lint Results', () => {
});
};
- const findTable = () => wrapper.find(GlTable);
+ const findTable = () => wrapper.find(GlTableLite);
const findByTestId = (selector) => () => wrapper.find(`[data-testid="ci-lint-${selector}"]`);
const findAllByTestId = (selector) => () =>
wrapper.findAll(`[data-testid="ci-lint-${selector}"]`);
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index a3cc866a406..0f6238e10dc 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:project) { create(:project) }
- let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- let_it_be(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
- let_it_be(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+ let_it_be(:group) { create(:group) }
+
+ let(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
+ let(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject(:pipeline) { described_class.new(context) }
@@ -24,57 +25,101 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
end
- describe '#run' do
- it 'imports uploads into destination portable and removes tmpdir' do
- allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
- allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+ shared_examples 'uploads import' do
+ describe '#run' do
+ before do
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+ end
- pipeline.run
+ it 'imports uploads into destination portable and removes tmpdir' do
+ pipeline.run
- expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
+ expect(portable.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
- expect(Dir.exist?(tmpdir)).to eq(false)
- end
- end
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
- describe '#extract' do
- it 'downloads & extracts upload paths' do
- allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
- expect(pipeline).to receive(:untar_zxf)
- file_download_service = instance_double("BulkImports::FileDownloadService")
+ context 'when importing avatar' do
+ let(:uploads_dir_path) { File.join(tmpdir, 'avatar') }
- expect(BulkImports::FileDownloadService)
- .to receive(:new)
- .with(
- configuration: context.configuration,
- relative_url: "/projects/test/export_relations/download?relation=uploads",
- dir: tmpdir,
- filename: 'uploads.tar.gz')
- .and_return(file_download_service)
+ it 'imports avatar' do
+ FileUtils.touch(File.join(uploads_dir_path, 'avatar.png'))
- expect(file_download_service).to receive(:execute)
+ expect_next_instance_of(entity.update_service) do |service|
+ expect(service).to receive(:execute)
+ end
- extracted_data = pipeline.extract(context)
+ pipeline.run
+ end
- expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
- end
- end
+ context 'when something goes wrong' do
+ it 'raises exception' do
+ allow_next_instance_of(entity.update_service) do |service|
+ allow(service).to receive(:execute).and_return(nil)
+ end
+
+ pipeline.run
- describe '#load' do
- it 'creates a file upload' do
- expect { pipeline.load(context, upload_file_path) }.to change { project.uploads.count }.by(1)
+ expect(entity.failures.first.exception_class).to include('AvatarLoadingError')
+ end
+ end
+ end
end
- context 'when dynamic path is nil' do
- it 'returns' do
- expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { project.uploads.count }
+ describe '#extract' do
+ it 'downloads & extracts upload paths' do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ expect(pipeline).to receive(:untar_zxf)
+ file_download_service = instance_double("BulkImports::FileDownloadService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=uploads",
+ dir: tmpdir,
+ filename: 'uploads.tar.gz')
+ .and_return(file_download_service)
+
+ expect(file_download_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
end
end
- context 'when path is a directory' do
- it 'returns' do
- expect { pipeline.load(context, uploads_dir_path) }.not_to change { project.uploads.count }
+ describe '#load' do
+ it 'creates a file upload' do
+ expect { pipeline.load(context, upload_file_path) }.to change { portable.uploads.count }.by(1)
+ end
+
+ context 'when dynamic path is nil' do
+ it 'returns' do
+ expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { portable.uploads.count }
+ end
+ end
+
+ context 'when path is a directory' do
+ it 'returns' do
+ expect { pipeline.load(context, uploads_dir_path) }.not_to change { portable.uploads.count }
+ end
end
end
end
+
+ context 'when importing to group' do
+ let(:portable) { group }
+ let(:entity) { create(:bulk_import_entity, :group_entity, group: group, source_full_path: 'test') }
+
+ include_examples 'uploads import'
+ end
+
+ context 'when importing to project' do
+ let(:portable) { project }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
+
+ include_examples 'uploads import'
+ end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
deleted file mode 100644
index c68284aa580..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
-
- let_it_be(:entity) do
- create(
- :bulk_import_entity,
- group: group,
- bulk_import: bulk_import,
- source_full_path: 'source/full/path',
- destination_name: 'My Destination Group',
- destination_namespace: group.full_path
- )
- end
-
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- subject { described_class.new(context) }
-
- describe '#run' do
- it 'updates the group avatar' do
- avatar_path = 'spec/fixtures/dk.png'
- stub_file_download(
- avatar_path,
- configuration: context.configuration,
- relative_url: "/groups/source%2Ffull%2Fpath/avatar",
- dir: an_instance_of(String),
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
- )
-
- expect { subject.run }.to change(context.group, :avatar)
-
- expect(context.group.avatar.filename).to eq(File.basename(avatar_path))
- end
-
- it 'raises an error when the avatar upload fails' do
- avatar_path = 'spec/fixtures/aosp_manifest.xml'
- stub_file_download(
- avatar_path,
- configuration: context.configuration,
- relative_url: "/groups/source%2Ffull%2Fpath/avatar",
- dir: an_instance_of(String),
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
- )
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:error)
- .with(
- bulk_import_id: context.bulk_import.id,
- bulk_import_entity_id: context.entity.id,
- bulk_import_entity_type: context.entity.source_type,
- context_extra: context.extra,
- exception_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline::GroupAvatarLoadingError",
- exception_message: "Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon",
- pipeline_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline",
- pipeline_step: :loader
- )
- end
-
- expect { subject.run }.to change(BulkImports::Failure, :count)
- end
- end
-
- def stub_file_download(filepath = 'file/path.png', **params)
- expect_next_instance_of(BulkImports::FileDownloadService, params.presence) do |downloader|
- expect(downloader).to receive(:execute).and_return(filepath)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index a7acd661282..55a8e40f480 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -8,13 +8,13 @@ RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Common::Pipelines::MilestonesPipeline],
[1, BulkImports::Common::Pipelines::BadgesPipeline],
- [2, BulkImports::Common::Pipelines::BoardsPipeline]
+ [2, BulkImports::Common::Pipelines::BoardsPipeline],
+ [2, BulkImports::Common::Pipelines::UploadsPipeline]
]
end
@@ -24,7 +24,7 @@ RSpec.describe BulkImports::Groups::Stage do
describe '.pipelines' do
it 'list all the pipelines with their stage number, ordered by stage' do
- expect(described_class.new(bulk_import).pipelines & pipelines).to eq(pipelines)
+ expect(described_class.new(bulk_import).pipelines & pipelines).to contain_exactly(*pipelines)
expect(described_class.new(bulk_import).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
end
diff --git a/spec/lib/gitlab/etag_caching/store_spec.rb b/spec/lib/gitlab/etag_caching/store_spec.rb
index 46195e64715..6188a3fc8b3 100644
--- a/spec/lib/gitlab/etag_caching/store_spec.rb
+++ b/spec/lib/gitlab/etag_caching/store_spec.rb
@@ -80,5 +80,19 @@ RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_shared_state do
expect(store.get(key)).to eq(etag)
end
end
+
+ context 'with multiple keys' do
+ let(:keys) { ['/my-group/my-project/builds/234.json', '/api/graphql:pipelines/id/5'] }
+
+ it 'stores and returns multiple values' do
+ etags = store.touch(*keys)
+
+ expect(etags.size).to eq(keys.size)
+
+ keys.each_with_index do |key, i|
+ expect(store.get(key)).to eq(etags[i])
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb
new file mode 100644
index 00000000000..c9bc101374f
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowConfiguredToGitlabCollectorMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'for collector_hostname option' do
+ where(:collector_hostname, :expected_value) do
+ 'snowplow.trx.gitlab.net' | true
+ 'foo.bar.something.net' | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(snowplow_collector_hostname: collector_hostname)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb
new file mode 100644
index 00000000000..1e0cdd650fa
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowEnabledMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'for snowplow enabled option' do
+ where(:snowplow_enabled, :expected_value) do
+ true | true
+ false | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(snowplow_enabled: snowplow_enabled)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index f6d3047609e..c3c35279fe8 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -1045,6 +1045,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe ".system_usage_data_settings" do
let(:prometheus_client) { double(Gitlab::PrometheusClient) }
+ let(:snowplow_gitlab_host?) { Gitlab::CurrentSettings.snowplow_collector_hostname == 'snowplow.trx.gitlab.net' }
before do
allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04')
@@ -1089,6 +1090,17 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'gathers user_cap_feature_enabled' do
expect(subject[:settings][:user_cap_feature_enabled]).to eq(Gitlab::CurrentSettings.new_user_signups_cap)
end
+
+ context 'snowplow stats' do
+ before do
+ stub_feature_flags(usage_data_instrumentation: false)
+ end
+
+ it 'gathers snowplow stats' do
+ expect(subject[:settings][:snowplow_enabled]).to eq(Gitlab::CurrentSettings.snowplow_enabled?)
+ expect(subject[:settings][:snowplow_configured_to_gitlab_collector]).to eq(snowplow_gitlab_host?)
+ end
+ end
end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 90cb0185b9c..e5bbac62dcc 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -298,4 +298,14 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(entity.wikis_url_path).to eq("/groups/#{entity.encoded_source_full_path}/wikis")
end
end
+
+ describe '#update_service' do
+ it 'returns correct update service class' do
+ group_entity = build(:bulk_import_entity)
+ project_entity = build(:bulk_import_entity, :project_entity)
+
+ expect(group_entity.update_service).to eq(::Groups::UpdateService)
+ expect(project_entity.update_service).to eq(::Projects::UpdateService)
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 841608b96fe..3b6d756a738 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -1503,10 +1503,30 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe 'pipeline caching' do
- it 'performs ExpirePipelinesCacheWorker' do
- expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
+ context 'when expire_job_and_pipeline_cache_synchronously is enabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true)
+ end
- pipeline.cancel
+ it 'executes Ci::ExpirePipelineCacheService' do
+ expect_next_instance_of(Ci::ExpirePipelineCacheService) do |service|
+ expect(service).to receive(:execute).with(pipeline)
+ end
+
+ pipeline.cancel
+ end
+ end
+
+ context 'when expire_job_and_pipeline_cache_synchronously is disabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false)
+ end
+
+ it 'performs ExpirePipelinesCacheWorker' do
+ expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
+
+ pipeline.cancel
+ end
end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index a4d4d0a58ff..d675b0e7221 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -46,10 +46,28 @@ RSpec.describe CommitStatus do
describe 'status state machine' do
let!(:commit_status) { create(:commit_status, :running, project: project) }
- it 'invalidates the cache after a transition' do
- expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
+ context 'when expire_job_and_pipeline_cache_synchronously is enabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true)
+ end
+
+ it 'invalidates the cache after a transition' do
+ expect(commit_status).to receive(:expire_etag_cache!)
+
+ commit_status.success!
+ end
+ end
+
+ context 'when expire_job_and_pipeline_cache_synchronously is disabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false)
+ end
+
+ it 'invalidates the cache after a transition' do
+ expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
- commit_status.success!
+ commit_status.success!
+ end
end
describe 'transitioning to running' do
@@ -949,4 +967,15 @@ RSpec.describe CommitStatus do
described_class.bulk_insert_tags!(statuses, tag_list_by_build)
end
end
+
+ describe '#expire_etag_cache!' do
+ it 'expires the etag cache' do
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |etag_store|
+ job_path = Gitlab::Routing.url_helpers.project_build_path(project, commit_status.id, format: :json)
+ expect(etag_store).to receive(:touch).with(job_path)
+ end
+
+ commit_status.expire_etag_cache!
+ end
+ end
end
diff --git a/spec/services/bulk_imports/uploads_export_service_spec.rb b/spec/services/bulk_imports/uploads_export_service_spec.rb
new file mode 100644
index 00000000000..39bcacfdc5e
--- /dev/null
+++ b/spec/services/bulk_imports/uploads_export_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::UploadsExportService do
+ let_it_be(:project) { create(:project, avatar: fixture_file_upload('spec/fixtures/rails_sample.png', 'image/png')) }
+ let_it_be(:upload) { create(:upload, :with_file, :issuable_upload, uploader: FileUploader, model: project) }
+ let_it_be(:export_path) { Dir.mktmpdir }
+
+ subject(:service) { described_class.new(project, export_path) }
+
+ after do
+ FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
+ end
+
+ describe '#execute' do
+ it 'exports project uploads and avatar' do
+ subject.execute
+
+ expect(File.exist?(File.join(export_path, 'avatar', 'rails_sample.png'))).to eq(true)
+ expect(File.exist?(File.join(export_path, upload.secret, upload.retrieve_uploader.filename))).to eq(true)
+ end
+ end
+end
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index bc975938365..8cfe756faf3 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -18,14 +18,14 @@ RSpec.describe Ci::ExpirePipelineCacheService do
graphql_pipeline_sha_path = "/api/graphql:pipelines/sha/#{pipeline.sha}"
graphql_project_on_demand_scan_counts_path = "/api/graphql:on_demand_scan/counts/#{project.full_path}"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- expect(store).to receive(:touch).with(pipelines_path)
- expect(store).to receive(:touch).with(new_mr_pipelines_path)
- expect(store).to receive(:touch).with(pipeline_path)
- expect(store).to receive(:touch).with(graphql_pipeline_path)
- expect(store).to receive(:touch).with(graphql_pipeline_sha_path)
- expect(store).to receive(:touch).with(graphql_project_on_demand_scan_counts_path)
- end
+ expect_touched_etag_caching_paths(
+ pipelines_path,
+ new_mr_pipelines_path,
+ pipeline_path,
+ graphql_pipeline_path,
+ graphql_pipeline_sha_path,
+ graphql_project_on_demand_scan_counts_path
+ )
subject.execute(pipeline)
end
@@ -37,9 +37,10 @@ RSpec.describe Ci::ExpirePipelineCacheService do
merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"
- allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_widget_path)
+ expect_touched_etag_caching_paths(
+ merge_request_pipelines_path,
+ merge_request_widget_path
+ )
subject.execute(merge_request.all_pipelines.last)
end
@@ -78,10 +79,7 @@ RSpec.describe Ci::ExpirePipelineCacheService do
it 'updates the cache of dependent pipeline' do
dependent_pipeline_path = "/#{source.source_project.full_path}/-/pipelines/#{source.source_pipeline.id}.json"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- allow(store).to receive(:touch)
- expect(store).to receive(:touch).with(dependent_pipeline_path)
- end
+ expect_touched_etag_caching_paths(dependent_pipeline_path)
subject.execute(pipeline)
end
@@ -94,13 +92,31 @@ RSpec.describe Ci::ExpirePipelineCacheService do
it 'updates the cache of dependent pipeline' do
dependent_pipeline_path = "/#{source.project.full_path}/-/pipelines/#{source.pipeline.id}.json"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- allow(store).to receive(:touch)
- expect(store).to receive(:touch).with(dependent_pipeline_path)
- end
+ expect_touched_etag_caching_paths(dependent_pipeline_path)
subject.execute(pipeline)
end
end
+
+ it 'does not do N+1 queries' do
+ subject.execute(pipeline)
+
+ control = ActiveRecord::QueryRecorder.new { subject.execute(pipeline) }
+
+ create(:ci_sources_pipeline, pipeline: pipeline)
+ create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline))
+
+ expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count)
+ end
+ end
+
+ def expect_touched_etag_caching_paths(*paths)
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
+ expect(store).to receive(:touch).and_wrap_original do |m, *args|
+ expect(args).to include(*paths)
+
+ m.call(*args)
+ end
+ end
end
end
diff --git a/spec/services/concerns/audit_event_save_type_spec.rb b/spec/services/concerns/audit_event_save_type_spec.rb
new file mode 100644
index 00000000000..fbaebd9f85c
--- /dev/null
+++ b/spec/services/concerns/audit_event_save_type_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuditEventSaveType do
+ subject(:target) { Object.new.extend(described_class) }
+
+ describe '#should_save_database? and #should_save_stream?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:query_method, :query_param, :result) do
+ :should_save_stream? | :stream | true
+ :should_save_stream? | :database_and_stream | true
+ :should_save_database? | :database | true
+ :should_save_database? | :database_and_stream | true
+ :should_save_stream? | :database | false
+ :should_save_stream? | nil | false
+ :should_save_database? | :stream | false
+ :should_save_database? | nil | false
+ end
+
+ with_them do
+ it 'returns corresponding results according to the query_method and query_param' do
+ expect(target.send(query_method, query_param)).to eq result
+ end
+ end
+ end
+end