
gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-02-22 15:14:09 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-02-22 15:14:09 +0300
commit    59f160b0cf3ca52fc25f827e57d0dc1273a50521 (patch)
tree      6c3d25e025f1dc60bc56fe8f49c133fa119e078b /spec
parent    932d504aaadc03b978eccad962a12be93f84be47 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--  spec/commands/sidekiq_cluster/cli_spec.rb                                          213
-rw-r--r--  spec/controllers/projects/forks_controller_spec.rb                                   9
-rw-r--r--  spec/features/projects/fork_spec.rb                                                195
-rw-r--r--  spec/frontend/pages/projects/forks/new/components/app_spec.js                       13
-rw-r--r--  spec/frontend/pages/projects/forks/new/components/fork_form_spec.js                 28
-rw-r--r--  spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js     73
-rw-r--r--  spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js         123
-rw-r--r--  spec/lib/atlassian/jira_connect_spec.rb                                             29
-rw-r--r--  spec/lib/gitlab/process_supervisor_spec.rb                                         127
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb                        3
-rw-r--r--  spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb       51
-rw-r--r--  spec/models/merge_request_spec.rb                                                   23
-rw-r--r--  spec/serializers/fork_namespace_entity_spec.rb                                      22
-rw-r--r--  spec/services/merge_requests/create_service_spec.rb                                 23
-rw-r--r--  spec/services/work_items/update_service_spec.rb                                      4
15 files changed, 339 insertions(+), 597 deletions(-)
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index 15b738cacd1..6baaa98eff9 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -5,8 +5,11 @@ require 'rspec-parameterized'
require_relative '../../support/stub_settings_source'
require_relative '../../../sidekiq_cluster/cli'
+require_relative '../../support/helpers/next_instance_of'
RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubocop:disable RSpec/FilePath
+ include NextInstanceOf
+
let(:cli) { described_class.new('/dev/null') }
let(:timeout) { Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS }
let(:default_options) do
@@ -61,9 +64,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
context 'with arguments' do
before do
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
+ allow(Gitlab::ProcessManagement).to receive(:write_pid)
+ allow_next_instance_of(Gitlab::ProcessSupervisor) { |it| allow(it).to receive(:supervise) }
end
it 'starts the Sidekiq workers' do
@@ -81,7 +83,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
.to receive(:worker_queues).and_return(worker_queues)
expect(Gitlab::SidekiqCluster)
- .to receive(:start).with([worker_queues], default_options)
+ .to receive(:start).with([worker_queues], default_options).and_return([])
cli.run(%w(*))
end
@@ -135,6 +137,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
it 'when given', 'starts Sidekiq workers with given timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options.merge(timeout: 10))
+ .and_return([])
cli.run(%w(foo --timeout 10))
end
@@ -142,6 +145,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
it 'when not given', 'starts Sidekiq workers with default timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options.merge(timeout: Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
+ .and_return([])
cli.run(%w(foo))
end
@@ -257,7 +261,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
.to receive(:worker_queues).and_return(worker_queues)
expect(Gitlab::SidekiqCluster)
- .to receive(:start).with([worker_queues], default_options)
+ .to receive(:start).with([worker_queues], default_options).and_return([])
cli.run(%w(--queue-selector *))
end
@@ -292,16 +296,15 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
context 'starting the server' do
context 'without --dryrun' do
+ before do
+ allow(Gitlab::SidekiqCluster).to receive(:start).and_return([])
+ allow(Gitlab::ProcessManagement).to receive(:write_pid)
+ allow_next_instance_of(Gitlab::ProcessSupervisor) { |it| allow(it).to receive(:supervise) }
+ end
+
context 'when there are no sidekiq_health_checks settings set' do
let(:sidekiq_exporter_enabled) { true }
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:fork)
@@ -312,13 +315,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
context 'when the sidekiq_exporter.port setting is not set' do
let(:sidekiq_exporter_enabled) { true }
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:fork)
@@ -342,13 +338,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
}
end
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:fork)
@@ -368,13 +357,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
}
end
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:fork)
@@ -397,13 +379,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
with_them do
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
specify do
if start_metrics_server
expect(MetricsServer).to receive(:fork).with('sidekiq', metrics_dir: metrics_dir, wipe_metrics_dir: true, reset_signals: trapped_signals)
@@ -415,6 +390,23 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
end
end
+
+ context 'when a PID is specified' do
+ it 'writes the PID to a file' do
+ expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null')
+
+ cli.option_parser.parse!(%w(-P /dev/null))
+ cli.run(%w(foo))
+ end
+ end
+
+ context 'when no PID is specified' do
+ it 'does not write a PID' do
+ expect(Gitlab::ProcessManagement).not_to receive(:write_pid)
+
+ cli.run(%w(foo))
+ end
+ end
end
context 'with --dryrun set' do
@@ -427,130 +419,55 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
end
end
-
- context 'supervising the server' do
- let(:sidekiq_exporter_enabled) { true }
- let(:sidekiq_health_checks_port) { '3907' }
-
- before do
- allow(cli).to receive(:sleep).with(a_kind_of(Numeric))
- allow(MetricsServer).to receive(:fork).and_return(99)
- cli.start_metrics_server
- end
-
- it 'stops the metrics server when one of the processes has been terminated' do
- allow(Gitlab::ProcessManagement).to receive(:process_died?).and_return(false)
- allow(Gitlab::ProcessManagement).to receive(:all_alive?).with(an_instance_of(Array)).and_return(false)
- allow(Gitlab::ProcessManagement).to receive(:signal_processes).with(an_instance_of(Array), :TERM)
-
- expect(Process).to receive(:kill).with(:TERM, 99)
-
- cli.start_loop
- end
-
- it 'starts the metrics server when it is down' do
- allow(Gitlab::ProcessManagement).to receive(:process_died?).and_return(true)
- allow(Gitlab::ProcessManagement).to receive(:all_alive?).with(an_instance_of(Array)).and_return(false)
- allow(cli).to receive(:stop_metrics_server)
-
- expect(MetricsServer).to receive(:fork).with(
- 'sidekiq', metrics_dir: metrics_dir, wipe_metrics_dir: false, reset_signals: trapped_signals
- )
-
- cli.start_loop
- end
- end
- end
- end
-
- describe '#write_pid' do
- context 'when a PID is specified' do
- it 'writes the PID to a file' do
- expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null')
-
- cli.option_parser.parse!(%w(-P /dev/null))
- cli.write_pid
- end
end
- context 'when no PID is specified' do
- it 'does not write a PID' do
- expect(Gitlab::ProcessManagement).not_to receive(:write_pid)
-
- cli.write_pid
- end
- end
- end
+ context 'supervising the cluster' do
+ let(:sidekiq_exporter_enabled) { true }
+ let(:sidekiq_health_checks_port) { '3907' }
+ let(:metrics_server_pid) { 99 }
+ let(:sidekiq_worker_pids) { [2, 42] }
- describe '#wait_for_termination' do
- it 'waits for termination of all sub-processes and succeeds after 3 checks' do
- expect(Gitlab::ProcessManagement).to receive(:any_alive?)
- .with(an_instance_of(Array)).and_return(true, true, true, false)
-
- expect(Gitlab::ProcessManagement).to receive(:pids_alive)
- .with([]).and_return([])
-
- expect(Gitlab::ProcessManagement).to receive(:signal_processes)
- .with([], "-KILL")
-
- stub_const("Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
- allow(cli).to receive(:terminate_timeout_seconds) { 1 }
-
- cli.wait_for_termination
- end
-
- context 'with hanging workers' do
before do
- expect(cli).to receive(:write_pid)
- expect(cli).to receive(:trap_signals)
- expect(cli).to receive(:start_loop)
+ allow(Gitlab::SidekiqCluster).to receive(:start).and_return(sidekiq_worker_pids)
+ allow(Gitlab::ProcessManagement).to receive(:write_pid)
end
- it 'hard kills workers after timeout expires' do
- worker_pids = [101, 102, 103]
- expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([['foo']], default_options)
- .and_return(worker_pids)
-
- expect(Gitlab::ProcessManagement).to receive(:any_alive?)
- .with(worker_pids).and_return(true).at_least(10).times
-
- expect(Gitlab::ProcessManagement).to receive(:pids_alive)
- .with(worker_pids).and_return([102])
+ it 'stops the entire process cluster if one of the workers has been terminated' do
+ allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
+ allow(it).to receive(:supervise).and_yield([2])
+ end
- expect(Gitlab::ProcessManagement).to receive(:signal_processes)
- .with([102], "-KILL")
+ expect(MetricsServer).to receive(:fork).once.and_return(metrics_server_pid)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).with([42, 99], :TERM)
cli.run(%w(foo))
-
- stub_const("Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
- allow(cli).to receive(:terminate_timeout_seconds) { 1 }
-
- cli.wait_for_termination
end
- end
- end
- describe '#trap_signals' do
- it 'traps termination and sidekiq specific signals' do
- expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i[INT TERM])
- expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i[TTIN USR1 USR2 HUP])
+ context 'when the supervisor is alive' do
+ it 'restarts the metrics server when it is down' do
+ allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
+ allow(it).to receive(:alive).and_return(true)
+ allow(it).to receive(:supervise).and_yield([metrics_server_pid])
+ end
- cli.trap_signals
- end
- end
+ expect(MetricsServer).to receive(:fork).twice.and_return(metrics_server_pid)
- describe '#start_loop' do
- it 'runs until one of the processes has been terminated' do
- allow(cli).to receive(:sleep).with(a_kind_of(Numeric))
+ cli.run(%w(foo))
+ end
+ end
- expect(Gitlab::ProcessManagement).to receive(:all_alive?)
- .with(an_instance_of(Array)).and_return(false)
+ context 'when the supervisor is shutting down' do
+ it 'does not restart the metrics server' do
+ allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
+ allow(it).to receive(:alive).and_return(false)
+ allow(it).to receive(:supervise).and_yield([metrics_server_pid])
+ end
- expect(Gitlab::ProcessManagement).to receive(:signal_processes)
- .with(an_instance_of(Array), :TERM)
+ expect(MetricsServer).to receive(:fork).once.and_return(metrics_server_pid)
- cli.start_loop
+ cli.run(%w(foo))
+ end
+ end
end
end
end
diff --git a/spec/controllers/projects/forks_controller_spec.rb b/spec/controllers/projects/forks_controller_spec.rb
index 0f8f3b49e02..962ef93dc72 100644
--- a/spec/controllers/projects/forks_controller_spec.rb
+++ b/spec/controllers/projects/forks_controller_spec.rb
@@ -199,15 +199,6 @@ RSpec.describe Projects::ForksController do
expect(json_response['namespaces'][1]['id']).to eq(group.id)
end
- it 'responds with group only when fork_project_form feature flag is disabled' do
- stub_feature_flags(fork_project_form: false)
- do_request
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['namespaces'].length).to eq(1)
- expect(json_response['namespaces'][0]['id']).to eq(group.id)
- end
-
context 'N+1 queries' do
before do
create(:fork_network, root_project: project)
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index f9a6b67e469..fb27f0961b6 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -164,199 +164,4 @@ RSpec.describe 'Project fork' do
end
end
end
-
- context 'with fork_project_form feature flag disabled' do
- before do
- stub_feature_flags(fork_project_form: false)
- sign_in(user)
- end
-
- it_behaves_like 'fork button on project page'
-
- context 'user has exceeded personal project limit' do
- before do
- user.update!(projects_limit: 0)
- end
-
- context 'with a group to fork to' do
- let!(:group) { create(:group).tap { |group| group.add_owner(user) } }
-
- it 'allows user to fork only to the group on fork page', :js do
- visit new_project_fork_path(project)
-
- to_personal_namespace = find('[data-qa-selector=fork_namespace_button].disabled') # rubocop:disable QA/SelectorUsage
- to_group = find(".fork-groups button[data-qa-name=#{group.name}]") # rubocop:disable QA/SelectorUsage
-
- expect(to_personal_namespace).not_to be_nil
- expect(to_group).not_to be_disabled
- end
- end
- end
-
- it_behaves_like 'create fork page', ' Select a namespace to fork the project '
-
- it 'forks the project', :sidekiq_might_not_need_inline do
- visit project_path(project)
-
- click_link 'Fork'
-
- page.within '.fork-thumbnail-container' do
- click_link 'Select'
- end
-
- expect(page).to have_content 'Forked from'
-
- visit project_path(project)
-
- expect(page).to have_content(/new merge request/i)
-
- page.within '.nav-sidebar' do
- first(:link, 'Merge requests').click
- end
-
- expect(page).to have_content(/new merge request/i)
-
- page.within '#content-body' do
- click_link('New merge request')
- end
-
- expect(current_path).to have_content(/#{user.namespace.path}/i)
- end
-
- it 'shows avatars when Gravatar is disabled' do
- stub_application_setting(gravatar_enabled: false)
-
- visit project_path(project)
-
- click_link 'Fork'
-
- page.within('.fork-thumbnail-container') do
- expect(page).to have_css('span.identicon')
- end
- end
-
- it 'shows the forked project on the list' do
- visit project_path(project)
-
- click_link 'Fork'
-
- page.within '.fork-thumbnail-container' do
- click_link 'Select'
- end
-
- visit project_forks_path(project)
-
- forked_project = user.fork_of(project.reload)
-
- page.within('.js-projects-list-holder') do
- expect(page).to have_content("#{forked_project.namespace.human_name} / #{forked_project.name}")
- end
-
- forked_project.update!(path: 'test-crappy-path')
-
- visit project_forks_path(project)
-
- page.within('.js-projects-list-holder') do
- expect(page).to have_content("#{forked_project.namespace.human_name} / #{forked_project.name}")
- end
- end
-
- context 'when the project is private' do
- let(:project) { create(:project, :repository) }
- let(:another_user) { create(:user, name: 'Mike') }
-
- before do
- project.add_reporter(user)
- project.add_reporter(another_user)
- end
-
- it 'renders private forks of the project' do
- visit project_path(project)
-
- another_project_fork = Projects::ForkService.new(project, another_user).execute
-
- click_link 'Fork'
-
- page.within '.fork-thumbnail-container' do
- click_link 'Select'
- end
-
- visit project_forks_path(project)
-
- page.within('.js-projects-list-holder') do
- user_project_fork = user.fork_of(project.reload)
- expect(page).to have_content("#{user_project_fork.namespace.human_name} / #{user_project_fork.name}")
- end
-
- expect(page).not_to have_content("#{another_project_fork.namespace.human_name} / #{another_project_fork.name}")
- end
- end
-
- context 'when the user already forked the project' do
- before do
- create(:project, :repository, name: project.name, namespace: user.namespace)
- end
-
- it 'renders error' do
- visit project_path(project)
-
- click_link 'Fork'
-
- page.within '.fork-thumbnail-container' do
- click_link 'Select'
- end
-
- expect(page).to have_content "Name has already been taken"
- end
- end
-
- context 'maintainer in group' do
- let(:group) { create(:group) }
-
- before do
- group.add_maintainer(user)
- end
-
- it 'allows user to fork project to group or to user namespace', :js do
- visit project_path(project)
- wait_for_requests
-
- expect(page).not_to have_css('a.disabled', text: 'Fork')
-
- click_link 'Fork'
-
- expect(page).to have_css('.fork-thumbnail')
- expect(page).to have_css('.group-row')
- expect(page).not_to have_css('.fork-thumbnail.disabled')
- end
-
- it 'allows user to fork project to group and not user when exceeded project limit', :js do
- user.projects_limit = 0
- user.save!
-
- visit project_path(project)
- wait_for_requests
-
- expect(page).not_to have_css('a.disabled', text: 'Fork')
-
- click_link 'Fork'
-
- expect(page).to have_css('.fork-thumbnail.disabled')
- expect(page).to have_css('.group-row')
- end
-
- it 'links to the fork if the project was already forked within that namespace', :sidekiq_might_not_need_inline, :js do
- forked_project = fork_project(project, user, namespace: group, repository: true)
-
- visit new_project_fork_path(project)
- wait_for_requests
-
- expect(page).to have_css('.group-row a.btn', text: 'Go to fork')
-
- click_link 'Go to fork'
-
- expect(current_path).to eq(project_path(forked_project))
- end
- end
- end
end
diff --git a/spec/frontend/pages/projects/forks/new/components/app_spec.js b/spec/frontend/pages/projects/forks/new/components/app_spec.js
index a7b4b9c42bd..0342b94a44d 100644
--- a/spec/frontend/pages/projects/forks/new/components/app_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/app_spec.js
@@ -1,19 +1,12 @@
import { shallowMount } from '@vue/test-utils';
import App from '~/pages/projects/forks/new/components/app.vue';
+import ForkForm from '~/pages/projects/forks/new/components/fork_form.vue';
describe('App component', () => {
let wrapper;
const DEFAULT_PROPS = {
forkIllustration: 'illustrations/project-create-new-sm.svg',
- endpoint: '/some/project-full-path/-/forks/new.json',
- projectFullPath: '/some/project-full-path',
- projectId: '10',
- projectName: 'Project Name',
- projectPath: 'project-name',
- projectDescription: 'some project description',
- projectVisibility: 'private',
- restrictedVisibilityLevels: [],
};
const createComponent = (props = {}) => {
@@ -37,7 +30,7 @@ describe('App component', () => {
expect(wrapper.find('img').attributes('src')).toBe('illustrations/project-create-new-sm.svg');
});
- it('renders ForkForm component with prop', () => {
- expect(wrapper.props()).toEqual(expect.objectContaining(DEFAULT_PROPS));
+ it('renders ForkForm component', () => {
+ expect(wrapper.findComponent(ForkForm).exists()).toBe(true);
});
});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index dc5f1cb9e61..efbfd83a071 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -40,7 +40,9 @@ describe('ForkForm component', () => {
},
];
- const DEFAULT_PROPS = {
+ const DEFAULT_PROVIDE = {
+ newGroupPath: 'some/groups/path',
+ visibilityHelpPath: 'some/visibility/help/path',
endpoint: '/some/project-full-path/-/forks/new.json',
projectFullPath: '/some/project-full-path',
projectId: '10',
@@ -52,18 +54,14 @@ describe('ForkForm component', () => {
};
const mockGetRequest = (data = {}, statusCode = httpStatus.OK) => {
- axiosMock.onGet(DEFAULT_PROPS.endpoint).replyOnce(statusCode, data);
+ axiosMock.onGet(DEFAULT_PROVIDE.endpoint).replyOnce(statusCode, data);
};
- const createComponentFactory = (mountFn) => (props = {}, data = {}) => {
+ const createComponentFactory = (mountFn) => (provide = {}, data = {}) => {
wrapper = mountFn(ForkForm, {
provide: {
- newGroupPath: 'some/groups/path',
- visibilityHelpPath: 'some/visibility/help/path',
- },
- propsData: {
- ...DEFAULT_PROPS,
- ...props,
+ ...DEFAULT_PROVIDE,
+ ...provide,
},
data() {
return {
@@ -111,7 +109,7 @@ describe('ForkForm component', () => {
mockGetRequest();
createComponent();
- const { projectFullPath } = DEFAULT_PROPS;
+ const { projectFullPath } = DEFAULT_PROVIDE;
const cancelButton = wrapper.find('[data-testid="cancel-button"]');
expect(cancelButton.attributes('href')).toBe(projectFullPath);
@@ -130,10 +128,10 @@ describe('ForkForm component', () => {
mockGetRequest();
createComponent();
- expect(findForkNameInput().attributes('value')).toBe(DEFAULT_PROPS.projectName);
- expect(findForkSlugInput().attributes('value')).toBe(DEFAULT_PROPS.projectPath);
+ expect(findForkNameInput().attributes('value')).toBe(DEFAULT_PROVIDE.projectName);
+ expect(findForkSlugInput().attributes('value')).toBe(DEFAULT_PROVIDE.projectPath);
expect(findForkDescriptionTextarea().attributes('value')).toBe(
- DEFAULT_PROPS.projectDescription,
+ DEFAULT_PROVIDE.projectDescription,
);
});
@@ -164,7 +162,7 @@ describe('ForkForm component', () => {
it('make GET request from endpoint', async () => {
await axios.waitForAll();
- expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROPS.endpoint);
+ expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROVIDE.endpoint);
});
it('generate default option', async () => {
@@ -469,7 +467,7 @@ describe('ForkForm component', () => {
projectName,
projectPath,
projectVisibility,
- } = DEFAULT_PROPS;
+ } = DEFAULT_PROVIDE;
const url = `/api/${GON_API_VERSION}/projects/${projectId}/fork`;
const project = {
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
deleted file mode 100644
index 490dafed4ae..00000000000
--- a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
+++ /dev/null
@@ -1,73 +0,0 @@
-import { GlBadge, GlButton, GlLink } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import ForkGroupsListItem from '~/pages/projects/forks/new/components/fork_groups_list_item.vue';
-
-describe('Fork groups list item component', () => {
- let wrapper;
-
- const DEFAULT_GROUP_DATA = {
- id: 22,
- name: 'Gitlab Org',
- description: 'Ad et ipsam earum id aut nobis.',
- visibility: 'public',
- full_name: 'Gitlab Org',
- created_at: '2020-06-22T03:32:05.664Z',
- updated_at: '2020-06-22T03:32:05.664Z',
- avatar_url: null,
- fork_path: '/twitter/typeahead-js/-/forks?namespace_key=22',
- forked_project_path: null,
- permission: 'Owner',
- relative_path: '/gitlab-org',
- markdown_description:
- '<p data-sourcepos="1:1-1:31" dir="auto">Ad et ipsam earum id aut nobis.</p>',
- can_create_project: true,
- marked_for_deletion: false,
- };
-
- const DUMMY_PATH = '/dummy/path';
-
- const createWrapper = (propsData) => {
- wrapper = shallowMount(ForkGroupsListItem, {
- propsData: {
- ...propsData,
- },
- });
- };
-
- it('renders pending deletion badge if applicable', () => {
- createWrapper({ group: { ...DEFAULT_GROUP_DATA, marked_for_deletion: true } });
-
- expect(wrapper.find(GlBadge).text()).toBe('pending deletion');
- });
-
- it('renders go to fork button if has forked project', () => {
- createWrapper({ group: { ...DEFAULT_GROUP_DATA, forked_project_path: DUMMY_PATH } });
-
- expect(wrapper.find(GlButton).text()).toBe('Go to fork');
- expect(wrapper.find(GlButton).attributes().href).toBe(DUMMY_PATH);
- });
-
- it('renders select button if has no forked project', () => {
- createWrapper({
- group: { ...DEFAULT_GROUP_DATA, forked_project_path: null, fork_path: DUMMY_PATH },
- });
-
- expect(wrapper.find(GlButton).text()).toBe('Select');
- expect(wrapper.find('form').attributes().action).toBe(DUMMY_PATH);
- });
-
- it('renders link to current group', () => {
- const DUMMY_FULL_NAME = 'dummy';
- createWrapper({
- group: { ...DEFAULT_GROUP_DATA, relative_path: DUMMY_PATH, full_name: DUMMY_FULL_NAME },
- });
-
- expect(
- wrapper
- .findAll(GlLink)
- .filter((w) => w.text() === DUMMY_FULL_NAME)
- .at(0)
- .attributes().href,
- ).toBe(DUMMY_PATH);
- });
-});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
deleted file mode 100644
index 9f8dbf3d542..00000000000
--- a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
+++ /dev/null
@@ -1,123 +0,0 @@
-import { GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
-import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
-import axios from '~/lib/utils/axios_utils';
-import ForkGroupsList from '~/pages/projects/forks/new/components/fork_groups_list.vue';
-import ForkGroupsListItem from '~/pages/projects/forks/new/components/fork_groups_list_item.vue';
-
-jest.mock('~/flash');
-
-describe('Fork groups list component', () => {
- let wrapper;
- let axiosMock;
-
- const DEFAULT_PROPS = {
- endpoint: '/dummy',
- };
-
- const replyWith = (...args) => axiosMock.onGet(DEFAULT_PROPS.endpoint).reply(...args);
-
- const createWrapper = (propsData) => {
- wrapper = shallowMount(ForkGroupsList, {
- propsData: {
- ...DEFAULT_PROPS,
- ...propsData,
- },
- stubs: {
- GlTabs: {
- template: '<div><slot></slot><slot name="tabs-end"></slot></div>',
- },
- },
- });
- };
-
- beforeEach(() => {
- axiosMock = new AxiosMockAdapter(axios);
- });
-
- afterEach(() => {
- axiosMock.reset();
-
- if (wrapper) {
- wrapper.destroy();
- wrapper = null;
- }
- });
-
- it('fires load groups request on mount', async () => {
- replyWith(200, { namespaces: [] });
- createWrapper();
-
- await waitForPromises();
-
- expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROPS.endpoint);
- });
-
- it('displays flash if loading groups fails', async () => {
- replyWith(500);
- createWrapper();
-
- await waitForPromises();
-
- expect(createFlash).toHaveBeenCalled();
- });
-
- it('displays loading indicator while loading groups', () => {
- replyWith(() => new Promise(() => {}));
- createWrapper();
-
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- });
-
- it('displays empty text if no groups are available', async () => {
- const EMPTY_TEXT = 'No available groups to fork the project.';
- replyWith(200, { namespaces: [] });
- createWrapper();
-
- await waitForPromises();
-
- expect(wrapper.text()).toContain(EMPTY_TEXT);
- });
-
- it('displays filter field when groups are available', async () => {
- replyWith(200, { namespaces: [{ name: 'dummy1' }, { name: 'dummy2' }] });
- createWrapper();
-
- await waitForPromises();
-
- expect(wrapper.find(GlSearchBoxByType).exists()).toBe(true);
- });
-
- it('renders list items for each available group', async () => {
- const namespaces = [{ name: 'dummy1' }, { name: 'dummy2' }, { name: 'otherdummy' }];
-
- replyWith(200, { namespaces });
- createWrapper();
-
- await waitForPromises();
-
- expect(wrapper.findAll(ForkGroupsListItem)).toHaveLength(namespaces.length);
-
- namespaces.forEach((namespace, idx) => {
- expect(wrapper.findAll(ForkGroupsListItem).at(idx).props()).toStrictEqual({
- group: namespace,
- });
- });
- });
-
- it('filters repositories on the fly', async () => {
- replyWith(200, {
- namespaces: [{ name: 'dummy1' }, { name: 'dummy2' }, { name: 'otherdummy' }],
- });
- createWrapper();
- await waitForPromises();
- wrapper.find(GlSearchBoxByType).vm.$emit('input', 'other');
- await nextTick();
-
- expect(wrapper.findAll(ForkGroupsListItem)).toHaveLength(1);
- expect(wrapper.findAll(ForkGroupsListItem).at(0).props().group.name).toBe('otherdummy');
- });
-});
diff --git a/spec/lib/atlassian/jira_connect_spec.rb b/spec/lib/atlassian/jira_connect_spec.rb
new file mode 100644
index 00000000000..d9c34e938b4
--- /dev/null
+++ b/spec/lib/atlassian/jira_connect_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Atlassian::JiraConnect do
+ describe '.app_name' do
+ subject { described_class.app_name }
+
+ it { is_expected.to eq('GitLab for Jira (localhost)') }
+ end
+
+ describe '.app_key' do
+ subject(:app_key) { described_class.app_key }
+
+ it { is_expected.to eq('gitlab-jira-connect-localhost') }
+
+ context 'host name is too long' do
+ before do
+ hostname = 'x' * 100
+
+ stub_config(gitlab: { host: hostname })
+ end
+
+ it 'truncates the key to be no longer than 64 characters', :aggregate_failures do
+ expect(app_key).to eq('gitlab-jira-connect-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')
+ end
+ end
+ end
+end
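
For illustration only, a minimal sketch of the truncation behaviour the new spec above asserts: prefix the GitLab host with 'gitlab-jira-connect-' and cap the result at 64 characters. The module and helper name below are hypothetical; the method under test is Atlassian::JiraConnect.app_key and its real implementation may differ.

# Hypothetical sketch only -- not the code under test.
module JiraConnectAppKeySketch
  APP_KEY_PREFIX = 'gitlab-jira-connect-' # 20 characters

  # Build the app key from the host and keep it no longer than 64 characters,
  # which is the behaviour the spec above expects.
  def self.app_key_for(host)
    "#{APP_KEY_PREFIX}#{host}"[0, 64]
  end
end

# JiraConnectAppKeySketch.app_key_for('localhost')
# # => "gitlab-jira-connect-localhost"
# JiraConnectAppKeySketch.app_key_for('x' * 100)
# # => "gitlab-jira-connect-" followed by 44 'x' characters (64 in total)
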
diff --git a/spec/lib/gitlab/process_supervisor_spec.rb b/spec/lib/gitlab/process_supervisor_spec.rb
new file mode 100644
index 00000000000..d264c77d5fb
--- /dev/null
+++ b/spec/lib/gitlab/process_supervisor_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require_relative '../../../lib/gitlab/process_supervisor'
+
+RSpec.describe Gitlab::ProcessSupervisor do
+ let(:health_check_interval_seconds) { 0.1 }
+ let(:check_terminate_interval_seconds) { 1 }
+ let(:forwarded_signals) { [] }
+ let(:process_id) do
+ Process.spawn('while true; do sleep 1; done').tap do |pid|
+ Process.detach(pid)
+ end
+ end
+
+ subject(:supervisor) do
+ described_class.new(
+ health_check_interval_seconds: health_check_interval_seconds,
+ check_terminate_interval_seconds: check_terminate_interval_seconds,
+ terminate_timeout_seconds: 1 + check_terminate_interval_seconds,
+ forwarded_signals: forwarded_signals
+ )
+ end
+
+ after do
+ if Gitlab::ProcessManagement.process_alive?(process_id)
+ Process.kill('KILL', process_id)
+ end
+ end
+
+ describe '#supervise' do
+ context 'while supervised process is alive' do
+ it 'does not invoke callback' do
+ expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
+ pids_killed = []
+
+ thread = Thread.new do
+ supervisor.supervise(process_id) do |dead_pids|
+ pids_killed = dead_pids
+ []
+ end
+ end
+
+ # Wait several times the poll frequency of the supervisor.
+ sleep health_check_interval_seconds * 10
+ thread.terminate
+
+ expect(pids_killed).to be_empty
+ expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
+ end
+ end
+
+ context 'when supervised process dies' do
+ it 'triggers callback with the dead PIDs' do
+ expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
+ pids_killed = []
+
+ thread = Thread.new do
+ supervisor.supervise(process_id) do |dead_pids|
+ pids_killed = dead_pids
+ []
+ end
+ end
+
+ # Terminate the supervised process.
+ Process.kill('TERM', process_id)
+
+ await_condition(sleep_sec: health_check_interval_seconds) do
+ pids_killed == [process_id]
+ end
+ thread.terminate
+
+ expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(false)
+ end
+ end
+
+ context 'signal handling' do
+ before do
+ allow(supervisor).to receive(:sleep)
+ allow(Gitlab::ProcessManagement).to receive(:trap_signals)
+ allow(Gitlab::ProcessManagement).to receive(:all_alive?).and_return(false)
+ allow(Gitlab::ProcessManagement).to receive(:signal_processes).with([process_id], anything)
+ end
+
+ context 'termination signals' do
+ context 'when TERM results in timely shutdown of processes' do
+ it 'forwards them to observed processes without waiting for grace period to expire' do
+ allow(Gitlab::ProcessManagement).to receive(:any_alive?).and_return(false)
+
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).ordered.with(%i(INT TERM)).and_yield(:TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :TERM)
+ expect(supervisor).not_to receive(:sleep).with(check_terminate_interval_seconds)
+
+ supervisor.supervise(process_id) { [] }
+ end
+ end
+
+ context 'when TERM does not result in timely shutdown of processes' do
+ it 'issues a KILL signal after the grace period expires' do
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(INT TERM)).and_yield(:TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :TERM)
+ expect(supervisor).to receive(:sleep).ordered.with(check_terminate_interval_seconds).at_least(:once)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], '-KILL')
+
+ supervisor.supervise(process_id) { [] }
+ end
+ end
+ end
+
+ context 'forwarded signals' do
+ let(:forwarded_signals) { %i(USR1) }
+
+ it 'forwards given signals to the observed processes' do
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(USR1)).and_yield(:USR1)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :USR1)
+
+ supervisor.supervise(process_id) { [] }
+ end
+ end
+ end
+ end
+
+ def await_condition(timeout_sec: 5, sleep_sec: 0.1)
+ Timeout.timeout(timeout_sec) do
+ sleep sleep_sec until yield
+ end
+ end
+end
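
A minimal usage sketch of the supervisor interface exercised by the new spec above, assuming only what the spec itself shows: the keyword constructor and the #supervise block that receives the PIDs found dead. The require path is an assumption, and treating the block's return value as replacement PIDs to keep supervising is inferred from the metrics-server restart examples in cli_spec.rb earlier in this diff, not a documented contract.

# Illustrative sketch, not part of this commit.
require_relative 'lib/gitlab/process_supervisor' # path relative to the Rails root; an assumption

# Spawn a long-running child to watch over (same trick the spec uses).
worker_pid = Process.spawn('while true; do sleep 1; done')
Process.detach(worker_pid)

supervisor = Gitlab::ProcessSupervisor.new(
  health_check_interval_seconds: 0.1,  # how often liveness is polled
  check_terminate_interval_seconds: 1, # poll interval while shutting down
  terminate_timeout_seconds: 2,        # grace period before '-KILL' is sent
  forwarded_signals: %i(USR1)          # signals relayed to supervised PIDs
)

supervisor.supervise(worker_pid) do |dead_pids|
  # Called whenever supervised processes are found dead.
  puts "processes died: #{dead_pids.inspect}"
  [] # nothing respawned here; cli_spec.rb suggests returned PIDs would be supervised in turn
end
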
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 5e74ea3293c..f07a9a494c0 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -50,7 +50,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'importer',
'network_policies',
'geo',
- 'growth'
+ 'growth',
+ 'work_items'
)
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..abd5d29d7e6
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_gitlab_redis_shared_state do
+ let(:user) { build(:user, id: 1) }
+
+ shared_examples 'counter that does not track the event' do
+ it 'does not track the event' do
+ expect { 3.times { track_event } }.to not_change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: event_name,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }
+ end
+ end
+
+ describe '.track_work_item_title_changed_action' do
+ subject(:track_event) { described_class.track_work_item_title_changed_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_TITLE_CHANGED }
+
+ context 'when track_work_items_activity FF is enabled' do
+ it 'tracks a unique event only once' do
+ expect { 3.times { track_event } }.to change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: described_class::WORK_ITEM_TITLE_CHANGED,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }.by(1)
+ end
+
+ context 'when author is nil' do
+ let(:user) { nil }
+
+ it_behaves_like 'counter that does not track the event'
+ end
+ end
+
+ context 'when track_work_items_activity FF is disabled' do
+ before do
+ stub_feature_flags(track_work_items_activity: false)
+ end
+
+ it_behaves_like 'counter that does not track the event'
+ end
+ end
+end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index f2f2023a992..d5cbe1b16e6 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -4249,6 +4249,29 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#eager_fetch_ref!' do
+ let(:project) { create(:project, :repository) }
+
+ # We use build instead of create to test that an IID is allocated
+ subject { build(:merge_request, source_project: project) }
+
+ it 'fetches the ref correctly' do
+ expect(subject.iid).to be_nil
+
+ expect { subject.eager_fetch_ref! }.to change { subject.iid.to_i }.by(1)
+
+ expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
+ end
+
+ it 'only fetches the ref once after saved' do
+ expect(subject.target_project.repository).to receive(:fetch_source_branch!).once.and_call_original
+
+ subject.save!
+
+ expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
+ end
+ end
+
describe 'removing a merge request' do
it 'refreshes the number of open merge requests of the target project' do
project = subject.target_project
diff --git a/spec/serializers/fork_namespace_entity_spec.rb b/spec/serializers/fork_namespace_entity_spec.rb
index 32223b0d41a..91c59c4bda8 100644
--- a/spec/serializers/fork_namespace_entity_spec.rb
+++ b/spec/serializers/fork_namespace_entity_spec.rb
@@ -59,26 +59,4 @@ RSpec.describe ForkNamespaceEntity do
it 'exposes human readable permission level' do
expect(json[:permission]).to eql 'Developer'
end
-
- it 'exposes can_create_project' do
- expect(json[:can_create_project]).to be true
- end
-
- context 'when fork_project_form feature flag is disabled' do
- before do
- stub_feature_flags(fork_project_form: false)
- end
-
- it 'sets can_create_project to true when user can create projects in namespace' do
- allow(user).to receive(:can?).with(:create_projects, namespace).and_return(true)
-
- expect(json[:can_create_project]).to be true
- end
-
- it 'sets can_create_project to false when user is not allowed create projects in namespace' do
- allow(user).to receive(:can?).with(:create_projects, namespace).and_return(false)
-
- expect(json[:can_create_project]).to be false
- end
- end
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index a196c944eda..ecdc92a1b6f 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -454,7 +454,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
- context 'when source and target projects are different' do
+ shared_examples 'when source and target projects are different' do |eager_fetch_ref_enabled|
let(:target_project) { fork_project(project, nil, repository: true) }
let(:opts) do
@@ -497,9 +497,18 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'creates the merge request', :sidekiq_might_not_need_inline do
+ expect_next_instance_of(MergeRequest) do |instance|
+ if eager_fetch_ref_enabled
+ expect(instance).to receive(:eager_fetch_ref!).and_call_original
+ else
+ expect(instance).not_to receive(:eager_fetch_ref!)
+ end
+ end
+
merge_request = described_class.new(project: project, current_user: user, params: opts).execute
expect(merge_request).to be_persisted
+ expect(merge_request.iid).to be > 0
end
it 'does not create the merge request when the target project is archived' do
@@ -511,6 +520,18 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
+ context 'when merge_request_eager_fetch_ref is enabled' do
+ it_behaves_like 'when source and target projects are different', true
+ end
+
+ context 'when merge_request_eager_fetch_ref is disabled' do
+ before do
+ stub_feature_flags(merge_request_eager_fetch_ref: false)
+ end
+
+ it_behaves_like 'when source and target projects are different', false
+ end
+
context 'when user sets source project id' do
let(:another_project) { create(:project) }
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index f71f1060e40..b2d3f428899 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -23,6 +23,9 @@ RSpec.describe WorkItems::UpdateService do
it 'triggers issuable_title_updated graphql subscription' do
expect(GraphqlTriggers).to receive(:issuable_title_updated).with(work_item).and_call_original
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).to receive(:track_work_item_title_changed_action).with(author: current_user)
+ # During the work item transition we also want to track work items as issues
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_title_changed_action)
update_work_item
end
@@ -33,6 +36,7 @@ RSpec.describe WorkItems::UpdateService do
it 'does not trigger issuable_title_updated graphql subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_title_updated)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_title_changed_action)
update_work_item
end