gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2021-02-02 21:09:42 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-02-02 21:09:42 +0300
commit     2d66c59d593354aa98785899cc8d5e640f62a012 (patch)
tree       b0f82a38ffba401a1ad448983785660038c4cf4b /spec
parent     dab865db1e85e2fc3dd29dae8dc6b8e11b1ba3f7 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--  spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb                          |  44
-rw-r--r--  spec/features/projects/pipelines/pipelines_spec.rb                                         |  10
-rw-r--r--  spec/features/projects/settings/repository_settings_spec.rb                                |   6
-rw-r--r--  spec/features/search/user_searches_for_issues_spec.rb                                      |   2
-rw-r--r--  spec/features/search/user_searches_for_merge_requests_spec.rb                              |  34
-rw-r--r--  spec/frontend/deploy_keys/components/key_spec.js                                            |   2
-rw-r--r--  spec/frontend/lib/utils/datetime_utility_spec.js                                            | 363
-rw-r--r--  spec/frontend/pipelines/pipelines_spec.js                                                   |   2
-rw-r--r--  spec/frontend/pipelines/pipelines_table_row_spec.js                                         |   4
-rw-r--r--  spec/frontend/search/mock_data.js                                                           |  16
-rw-r--r--  spec/frontend/search/sort/components/app_spec.js                                            | 168
-rw-r--r--  spec/helpers/search_helper_spec.rb                                                          |  21
-rw-r--r--  spec/helpers/sorting_helper_spec.rb                                                         |  18
-rw-r--r--  spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb                                  |   2
-rw-r--r--  spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb                         |   9
-rw-r--r--  spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb                                   |  17
-rw-r--r--  spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb                                  |   7
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb                               |  11
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb                              |   7
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb                   |   8
-rw-r--r--  spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb             |  24
-rw-r--r--  spec/lib/bulk_imports/importers/group_importer_spec.rb                                      |  12
-rw-r--r--  spec/lib/bulk_imports/pipeline/context_spec.rb                                              |  38
-rw-r--r--  spec/lib/bulk_imports/pipeline/runner_spec.rb                                               |  46
-rw-r--r--  spec/requests/api/api_spec.rb                                                               |   6
-rw-r--r--  spec/requests/api/events_spec.rb                                                            |   6
-rw-r--r--  spec/requests/api/version_spec.rb                                                           |  12
-rw-r--r--  spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb |  28
-rw-r--r--  spec/support/shared_examples/requests/api/read_user_shared_examples.rb                     |  32
-rw-r--r--  spec/workers/projects/git_garbage_collect_worker_spec.rb                                    |   2
30 files changed, 639 insertions(+), 318 deletions(-)
diff --git a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
index 04d8c52df61..be6205598d8 100644
--- a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
+++ b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js do
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: 'master', status: 'running', sha: project.commit.id) }
let(:build) { create(:ci_build, pipeline: pipeline, stage: 'test') }
+ dropdown_toggle_selector = '[data-testid="mini-pipeline-graph-dropdown-toggle"]'
+
before do
build.run
build.trace.set('hello')
@@ -51,40 +53,25 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js do
describe 'build list toggle' do
let(:toggle) do
- find('.mini-pipeline-graph-dropdown-toggle')
- first('.mini-pipeline-graph-dropdown-toggle')
+ find(dropdown_toggle_selector)
+ first(dropdown_toggle_selector)
end
# Status icon button styles should update as described in
# https://gitlab.com/gitlab-org/gitlab-foss/issues/42769
it 'has unique styles for default, :hover, :active, and :focus states' do
- find('.mini-pipeline-graph-dropdown-toggle')
- default_background_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('background-color');")
- default_foreground_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible svg').css('fill');")
- default_box_shadow = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('box-shadow');")
+ default_background_color, default_foreground_color, default_box_shadow = get_toggle_colors(dropdown_toggle_selector)
toggle.hover
-
- find('.mini-pipeline-graph-dropdown-toggle')
- hover_background_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('background-color');")
- hover_foreground_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible svg').css('fill');")
- hover_box_shadow = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('box-shadow');")
+ hover_background_color, hover_foreground_color, hover_box_shadow = get_toggle_colors(dropdown_toggle_selector)
page.driver.browser.action.click_and_hold(toggle.native).perform
-
- find('.mini-pipeline-graph-dropdown-toggle')
- active_background_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('background-color');")
- active_foreground_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible svg').css('fill');")
- active_box_shadow = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('box-shadow');")
+ active_background_color, active_foreground_color, active_box_shadow = get_toggle_colors(dropdown_toggle_selector)
page.driver.browser.action.release(toggle.native)
.move_by(100, 100)
.perform
-
- find('.mini-pipeline-graph-dropdown-toggle')
- focus_background_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('background-color');")
- focus_foreground_color = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible svg').css('fill');")
- focus_box_shadow = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').css('box-shadow');")
+ focus_background_color, focus_foreground_color, focus_box_shadow = get_toggle_colors(dropdown_toggle_selector)
expect(default_background_color).not_to eq(hover_background_color)
expect(hover_background_color).not_to eq(active_background_color)
@@ -112,8 +99,8 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js do
describe 'builds list menu' do
let(:toggle) do
- find('.mini-pipeline-graph-dropdown-toggle')
- first('.mini-pipeline-graph-dropdown-toggle')
+ find(dropdown_toggle_selector)
+ first(dropdown_toggle_selector)
end
before do
@@ -157,4 +144,15 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js do
end
end
end
+
+ private
+
+ def get_toggle_colors(selector)
+ find(selector)
+ [
+ evaluate_script("$('#{selector}:visible').css('background-color');"),
+ evaluate_script("$('#{selector}:visible svg').css('fill');"),
+ evaluate_script("$('#{selector}:visible').css('box-shadow');")
+ ]
+ end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 5c4552510cb..5986a453dd5 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -525,24 +525,26 @@ RSpec.describe 'Pipelines', :js do
name: 'build')
end
+ dropdown_toggle_selector = '[data-testid="mini-pipeline-graph-dropdown-toggle"]'
+
before do
visit_project_pipelines
end
it 'renders a mini pipeline graph' do
expect(page).to have_selector('[data-testid="widget-mini-pipeline-graph"]')
- expect(page).to have_selector('.js-builds-dropdown-button')
+ expect(page).to have_selector(dropdown_toggle_selector)
end
context 'when clicking a stage badge' do
it 'opens a dropdown' do
- find('.js-builds-dropdown-button').click
+ find(dropdown_toggle_selector).click
expect(page).to have_link build.name
end
it 'is possible to cancel pending build' do
- find('.js-builds-dropdown-button').click
+ find(dropdown_toggle_selector).click
find('.js-ci-action').click
wait_for_requests
@@ -558,7 +560,7 @@ RSpec.describe 'Pipelines', :js do
end
it 'displays the failure reason' do
- find('.js-builds-dropdown-button').click
+ find(dropdown_toggle_selector).click
within('.js-builds-dropdown-list') do
build_element = page.find('.mini-pipeline-graph-dropdown-item')
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 3e520142117..2f257d299d8 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'Projects > Settings > Repository settings' do
click_button 'Add key'
expect(page).to have_content('new_deploy_key')
- expect(page).to have_content('Write access allowed')
+ expect(page).to have_content('Grant write permissions to this key')
end
it 'edit an existing deploy key' do
@@ -77,7 +77,7 @@ RSpec.describe 'Projects > Settings > Repository settings' do
click_button 'Save changes'
expect(page).to have_content('updated_deploy_key')
- expect(page).to have_content('Write access allowed')
+ expect(page).to have_content('Grant write permissions to this key')
end
it 'edit an existing public deploy key to be writable' do
@@ -90,7 +90,7 @@ RSpec.describe 'Projects > Settings > Repository settings' do
click_button 'Save changes'
expect(page).to have_content('public_deploy_key')
- expect(page).to have_content('Write access allowed')
+ expect(page).to have_content('Grant write permissions to this key')
end
it 'edit a deploy key from projects user has access to' do
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index e253b9f2f7a..828e478d701 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe 'User searches for issues', :js do
expect(page.all('.search-result-row').last).to have_link(issue1.title)
end
- find('.reverse-sort-btn').click
+ find('[data-testid="sort-highest-icon"]').click
page.within('.results') do
expect(page.all('.search-result-row').first).to have_link(issue1.title)
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index 21e8075739f..7271716644b 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -5,8 +5,14 @@ require 'spec_helper'
RSpec.describe 'User searches for merge requests', :js do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
- let!(:merge_request1) { create(:merge_request, title: 'Foo', source_project: project, target_project: project) }
- let!(:merge_request2) { create(:merge_request, :simple, title: 'Bar', source_project: project, target_project: project) }
+ let!(:merge_request1) { create(:merge_request, title: 'Merge Request Foo', source_project: project, target_project: project, created_at: 1.hour.ago) }
+ let!(:merge_request2) { create(:merge_request, :simple, title: 'Merge Request Bar', source_project: project, target_project: project) }
+
+ def search_for_mr(search)
+ fill_in('dashboard_search', with: search)
+ find('.btn-search').click
+ select_search_scope('Merge requests')
+ end
before do
project.add_maintainer(user)
@@ -18,9 +24,7 @@ RSpec.describe 'User searches for merge requests', :js do
include_examples 'top right search form'
it 'finds a merge request' do
- fill_in('dashboard_search', with: merge_request1.title)
- find('.btn-search').click
- select_search_scope('Merge requests')
+ search_for_mr(merge_request1.title)
page.within('.results') do
expect(page).to have_link(merge_request1.title)
@@ -28,6 +32,22 @@ RSpec.describe 'User searches for merge requests', :js do
end
end
+ it 'sorts by created date' do
+ search_for_mr('Merge Request')
+
+ page.within('.results') do
+ expect(page.all('.search-result-row').first).to have_link(merge_request2.title)
+ expect(page.all('.search-result-row').last).to have_link(merge_request1.title)
+ end
+
+ find('[data-testid="sort-highest-icon"]').click
+
+ page.within('.results') do
+ expect(page.all('.search-result-row').first).to have_link(merge_request1.title)
+ expect(page.all('.search-result-row').last).to have_link(merge_request2.title)
+ end
+ end
+
context 'when on a project page' do
it 'finds a merge request' do
find('[data-testid="project-filter"]').click
@@ -38,9 +58,7 @@ RSpec.describe 'User searches for merge requests', :js do
click_on(project.full_name)
end
- fill_in('dashboard_search', with: merge_request1.title)
- find('.btn-search').click
- select_search_scope('Merge requests')
+ search_for_mr(merge_request1.title)
page.within('.results') do
expect(page).to have_link(merge_request1.title)
diff --git a/spec/frontend/deploy_keys/components/key_spec.js b/spec/frontend/deploy_keys/components/key_spec.js
index fcb4e31dec8..f783fdd6476 100644
--- a/spec/frontend/deploy_keys/components/key_spec.js
+++ b/spec/frontend/deploy_keys/components/key_spec.js
@@ -76,7 +76,7 @@ describe('Deploy keys key', () => {
createComponent({ deployKey: { ...deployKey, deploy_keys_projects: deployKeysProjects } });
expect(wrapper.find('.deploy-project-label').attributes('title')).toBe(
- 'Write access allowed',
+ 'Grant write permissions to this key',
);
});
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index e673d7dfd6b..f2f9362ae99 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -584,22 +584,6 @@ describe('secondsToMilliseconds', () => {
});
});
-describe('dayAfter', () => {
- const date = new Date('2019-07-16T00:00:00.000Z');
-
- it('returns the following date', () => {
- const nextDay = datetimeUtility.dayAfter(date);
- const expectedNextDate = new Date('2019-07-17T00:00:00.000Z');
-
- expect(nextDay).toStrictEqual(expectedNextDate);
- });
-
- it('does not modifiy the original date', () => {
- datetimeUtility.dayAfter(date);
- expect(date).toStrictEqual(new Date('2019-07-16T00:00:00.000Z'));
- });
-});
-
describe('secondsToDays', () => {
it('converts seconds to days correctly', () => {
expect(datetimeUtility.secondsToDays(0)).toBe(0);
@@ -608,132 +592,214 @@ describe('secondsToDays', () => {
});
});
-describe('nDaysAfter', () => {
- const date = new Date('2019-07-16T00:00:00.000Z');
-
- it.each`
- numberOfDays | expectedResult
- ${1} | ${new Date('2019-07-17T00:00:00.000Z').valueOf()}
- ${90} | ${new Date('2019-10-14T00:00:00.000Z').valueOf()}
- ${-1} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
- ${0} | ${date.valueOf()}
- ${0.9} | ${date.valueOf()}
- `(
- 'returns the date $numberOfDays day(s) after the provided date',
- ({ numberOfDays, expectedResult }) => {
- expect(datetimeUtility.nDaysAfter(date, numberOfDays)).toBe(expectedResult);
- },
- );
-});
-
-describe('nDaysBefore', () => {
- const date = new Date('2019-07-16T00:00:00.000Z');
-
- it.each`
- numberOfDays | expectedResult
- ${1} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
- ${90} | ${new Date('2019-04-17T00:00:00.000Z').valueOf()}
- ${-1} | ${new Date('2019-07-17T00:00:00.000Z').valueOf()}
- ${0} | ${date.valueOf()}
- ${0.9} | ${new Date('2019-07-15T00:00:00.000Z').valueOf()}
- `(
- 'returns the date $numberOfDays day(s) before the provided date',
- ({ numberOfDays, expectedResult }) => {
- expect(datetimeUtility.nDaysBefore(date, numberOfDays)).toBe(expectedResult);
- },
- );
-});
+describe('date addition/subtraction methods', () => {
+ beforeEach(() => {
+ timezoneMock.register('US/Eastern');
+ });
-describe('nWeeksAfter', () => {
- const date = new Date('2021-07-16T00:00:00.000Z');
+ afterEach(() => {
+ timezoneMock.unregister();
+ });
- it.each`
- numberOfWeeks | expectedResult
- ${1} | ${new Date('2021-07-23T00:00:00.000Z').valueOf()}
- ${3} | ${new Date('2021-08-06T00:00:00.000Z').valueOf()}
- ${-1} | ${new Date('2021-07-09T00:00:00.000Z').valueOf()}
- ${0} | ${date.valueOf()}
- ${0.6} | ${new Date('2021-07-20T00:00:00.000Z').valueOf()}
- `(
- 'returns the date $numberOfWeeks week(s) after the provided date',
- ({ numberOfWeeks, expectedResult }) => {
- expect(datetimeUtility.nWeeksAfter(date, numberOfWeeks)).toBe(expectedResult);
- },
- );
-});
+ describe('dayAfter', () => {
+ const input = '2019-03-10T00:00:00.000Z';
+ const expectedLocalResult = '2019-03-10T23:00:00.000Z';
+ const expectedUTCResult = '2019-03-11T00:00:00.000Z';
+
+ it.each`
+ inputAsString | options | expectedAsString
+ ${input} | ${undefined} | ${expectedLocalResult}
+ ${input} | ${{}} | ${expectedLocalResult}
+ ${input} | ${{ utc: false }} | ${expectedLocalResult}
+ ${input} | ${{ utc: true }} | ${expectedUTCResult}
+ `(
+ 'when the provided date is $inputAsString and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.dayAfter(inputDate, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+
+ it('does not modifiy the original date', () => {
+ const inputDate = new Date(input);
+ datetimeUtility.dayAfter(inputDate);
+ expect(inputDate.toISOString()).toBe(input);
+ });
+ });
-describe('nWeeksBefore', () => {
- const date = new Date('2021-07-16T00:00:00.000Z');
+ describe('nDaysAfter', () => {
+ const input = '2019-07-16T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfDays | options | expectedAsString
+ ${input} | ${1} | ${undefined} | ${'2019-07-17T00:00:00.000Z'}
+ ${input} | ${-1} | ${undefined} | ${'2019-07-15T00:00:00.000Z'}
+ ${input} | ${0} | ${undefined} | ${'2019-07-16T00:00:00.000Z'}
+ ${input} | ${0.9} | ${undefined} | ${'2019-07-16T00:00:00.000Z'}
+ ${input} | ${120} | ${undefined} | ${'2019-11-13T01:00:00.000Z'}
+ ${input} | ${120} | ${{}} | ${'2019-11-13T01:00:00.000Z'}
+ ${input} | ${120} | ${{ utc: false }} | ${'2019-11-13T01:00:00.000Z'}
+ ${input} | ${120} | ${{ utc: true }} | ${'2019-11-13T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfDays is $numberOfDays, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfDays, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nDaysAfter(inputDate, numberOfDays, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
- it.each`
- numberOfWeeks | expectedResult
- ${1} | ${new Date('2021-07-09T00:00:00.000Z').valueOf()}
- ${3} | ${new Date('2021-06-25T00:00:00.000Z').valueOf()}
- ${-1} | ${new Date('2021-07-23T00:00:00.000Z').valueOf()}
- ${0} | ${date.valueOf()}
- ${0.6} | ${new Date('2021-07-11T00:00:00.000Z').valueOf()}
- `(
- 'returns the date $numberOfWeeks week(s) before the provided date',
- ({ numberOfWeeks, expectedResult }) => {
- expect(datetimeUtility.nWeeksBefore(date, numberOfWeeks)).toBe(expectedResult);
- },
- );
-});
+ describe('nDaysBefore', () => {
+ const input = '2019-07-16T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfDays | options | expectedAsString
+ ${input} | ${1} | ${undefined} | ${'2019-07-15T00:00:00.000Z'}
+ ${input} | ${-1} | ${undefined} | ${'2019-07-17T00:00:00.000Z'}
+ ${input} | ${0} | ${undefined} | ${'2019-07-16T00:00:00.000Z'}
+ ${input} | ${0.9} | ${undefined} | ${'2019-07-15T00:00:00.000Z'}
+ ${input} | ${180} | ${undefined} | ${'2019-01-17T01:00:00.000Z'}
+ ${input} | ${180} | ${{}} | ${'2019-01-17T01:00:00.000Z'}
+ ${input} | ${180} | ${{ utc: false }} | ${'2019-01-17T01:00:00.000Z'}
+ ${input} | ${180} | ${{ utc: true }} | ${'2019-01-17T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfDays is $numberOfDays, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfDays, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nDaysBefore(inputDate, numberOfDays, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
-describe('nMonthsAfter', () => {
- // February has 28 days
- const feb2019 = new Date('2019-02-15T00:00:00.000Z');
- // Except in 2020, it had 29 days
- const feb2020 = new Date('2020-02-15T00:00:00.000Z');
- // April has 30 days
- const apr2020 = new Date('2020-04-15T00:00:00.000Z');
- // May has 31 days
- const may2020 = new Date('2020-05-15T00:00:00.000Z');
+ describe('nWeeksAfter', () => {
+ const input = '2021-07-16T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfWeeks | options | expectedAsString
+ ${input} | ${1} | ${undefined} | ${'2021-07-23T00:00:00.000Z'}
+ ${input} | ${3} | ${undefined} | ${'2021-08-06T00:00:00.000Z'}
+ ${input} | ${-1} | ${undefined} | ${'2021-07-09T00:00:00.000Z'}
+ ${input} | ${0} | ${undefined} | ${'2021-07-16T00:00:00.000Z'}
+ ${input} | ${0.6} | ${undefined} | ${'2021-07-20T00:00:00.000Z'}
+ ${input} | ${18} | ${undefined} | ${'2021-11-19T01:00:00.000Z'}
+ ${input} | ${18} | ${{}} | ${'2021-11-19T01:00:00.000Z'}
+ ${input} | ${18} | ${{ utc: false }} | ${'2021-11-19T01:00:00.000Z'}
+ ${input} | ${18} | ${{ utc: true }} | ${'2021-11-19T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfWeeks is $numberOfWeeks, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfWeeks, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nWeeksAfter(inputDate, numberOfWeeks, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
- it.each`
- date | numberOfMonths | expectedResult
- ${feb2019} | ${1} | ${new Date('2019-03-15T00:00:00.000Z').valueOf()}
- ${feb2020} | ${1} | ${new Date('2020-03-15T00:00:00.000Z').valueOf()}
- ${apr2020} | ${1} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${1} | ${new Date('2020-06-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${12} | ${new Date('2021-05-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${-1} | ${new Date('2020-04-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${0} | ${may2020.valueOf()}
- ${may2020} | ${0.9} | ${may2020.valueOf()}
- `(
- 'returns the date $numberOfMonths month(s) after the provided date',
- ({ date, numberOfMonths, expectedResult }) => {
- expect(datetimeUtility.nMonthsAfter(date, numberOfMonths)).toBe(expectedResult);
- },
- );
-});
+ describe('nWeeksBefore', () => {
+ const input = '2021-07-16T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfWeeks | options | expectedAsString
+ ${input} | ${1} | ${undefined} | ${'2021-07-09T00:00:00.000Z'}
+ ${input} | ${3} | ${undefined} | ${'2021-06-25T00:00:00.000Z'}
+ ${input} | ${-1} | ${undefined} | ${'2021-07-23T00:00:00.000Z'}
+ ${input} | ${0} | ${undefined} | ${'2021-07-16T00:00:00.000Z'}
+ ${input} | ${0.6} | ${undefined} | ${'2021-07-11T00:00:00.000Z'}
+ ${input} | ${20} | ${undefined} | ${'2021-02-26T01:00:00.000Z'}
+ ${input} | ${20} | ${{}} | ${'2021-02-26T01:00:00.000Z'}
+ ${input} | ${20} | ${{ utc: false }} | ${'2021-02-26T01:00:00.000Z'}
+ ${input} | ${20} | ${{ utc: true }} | ${'2021-02-26T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfWeeks is $numberOfWeeks, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfWeeks, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nWeeksBefore(inputDate, numberOfWeeks, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
-describe('nMonthsBefore', () => {
- // The previous month (February) has 28 days
- const march2019 = new Date('2019-03-15T00:00:00.000Z');
- // Except in 2020, it had 29 days
- const march2020 = new Date('2020-03-15T00:00:00.000Z');
- // The previous month (April) has 30 days
- const may2020 = new Date('2020-05-15T00:00:00.000Z');
- // The previous month (May) has 31 days
- const june2020 = new Date('2020-06-15T00:00:00.000Z');
+ describe('nMonthsAfter', () => {
+ // February has 28 days
+ const feb2019 = '2019-02-15T00:00:00.000Z';
+ // Except in 2020, it had 29 days
+ const feb2020 = '2020-02-15T00:00:00.000Z';
+ // April has 30 days
+ const apr2020 = '2020-04-15T00:00:00.000Z';
+ // May has 31 days
+ const may2020 = '2020-05-15T00:00:00.000Z';
+ // November 1, 2020 was the day Daylight Saving Time ended in 2020 (in the US)
+ const oct2020 = '2020-10-15T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfMonths | options | expectedAsString
+ ${feb2019} | ${1} | ${undefined} | ${'2019-03-14T23:00:00.000Z'}
+ ${feb2020} | ${1} | ${undefined} | ${'2020-03-14T23:00:00.000Z'}
+ ${apr2020} | ${1} | ${undefined} | ${'2020-05-15T00:00:00.000Z'}
+ ${may2020} | ${1} | ${undefined} | ${'2020-06-15T00:00:00.000Z'}
+ ${may2020} | ${12} | ${undefined} | ${'2021-05-15T00:00:00.000Z'}
+ ${may2020} | ${-1} | ${undefined} | ${'2020-04-15T00:00:00.000Z'}
+ ${may2020} | ${0} | ${undefined} | ${may2020}
+ ${may2020} | ${0.9} | ${undefined} | ${may2020}
+ ${oct2020} | ${1} | ${undefined} | ${'2020-11-15T01:00:00.000Z'}
+ ${oct2020} | ${1} | ${{}} | ${'2020-11-15T01:00:00.000Z'}
+ ${oct2020} | ${1} | ${{ utc: false }} | ${'2020-11-15T01:00:00.000Z'}
+ ${oct2020} | ${1} | ${{ utc: true }} | ${'2020-11-15T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfMonths is $numberOfMonths, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfMonths, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nMonthsAfter(inputDate, numberOfMonths, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
- it.each`
- date | numberOfMonths | expectedResult
- ${march2019} | ${1} | ${new Date('2019-02-15T00:00:00.000Z').valueOf()}
- ${march2020} | ${1} | ${new Date('2020-02-15T00:00:00.000Z').valueOf()}
- ${may2020} | ${1} | ${new Date('2020-04-15T00:00:00.000Z').valueOf()}
- ${june2020} | ${1} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
- ${june2020} | ${12} | ${new Date('2019-06-15T00:00:00.000Z').valueOf()}
- ${june2020} | ${-1} | ${new Date('2020-07-15T00:00:00.000Z').valueOf()}
- ${june2020} | ${0} | ${june2020.valueOf()}
- ${june2020} | ${0.9} | ${new Date('2020-05-15T00:00:00.000Z').valueOf()}
- `(
- 'returns the date $numberOfMonths month(s) before the provided date',
- ({ date, numberOfMonths, expectedResult }) => {
- expect(datetimeUtility.nMonthsBefore(date, numberOfMonths)).toBe(expectedResult);
- },
- );
+ describe('nMonthsBefore', () => {
+ // The previous month (February) has 28 days
+ const march2019 = '2019-03-15T00:00:00.000Z';
+ // Except in 2020, it had 29 days
+ const march2020 = '2020-03-15T00:00:00.000Z';
+ // The previous month (April) has 30 days
+ const may2020 = '2020-05-15T00:00:00.000Z';
+ // The previous month (May) has 31 days
+ const june2020 = '2020-06-15T00:00:00.000Z';
+ // November 1, 2020 was the day Daylight Saving Time ended in 2020 (in the US)
+ const nov2020 = '2020-11-15T00:00:00.000Z';
+
+ it.each`
+ inputAsString | numberOfMonths | options | expectedAsString
+ ${march2019} | ${1} | ${undefined} | ${'2019-02-15T01:00:00.000Z'}
+ ${march2020} | ${1} | ${undefined} | ${'2020-02-15T01:00:00.000Z'}
+ ${may2020} | ${1} | ${undefined} | ${'2020-04-15T00:00:00.000Z'}
+ ${june2020} | ${1} | ${undefined} | ${'2020-05-15T00:00:00.000Z'}
+ ${june2020} | ${12} | ${undefined} | ${'2019-06-15T00:00:00.000Z'}
+ ${june2020} | ${-1} | ${undefined} | ${'2020-07-15T00:00:00.000Z'}
+ ${june2020} | ${0} | ${undefined} | ${june2020}
+ ${june2020} | ${0.9} | ${undefined} | ${'2020-05-15T00:00:00.000Z'}
+ ${nov2020} | ${1} | ${undefined} | ${'2020-10-14T23:00:00.000Z'}
+ ${nov2020} | ${1} | ${{}} | ${'2020-10-14T23:00:00.000Z'}
+ ${nov2020} | ${1} | ${{ utc: false }} | ${'2020-10-14T23:00:00.000Z'}
+ ${nov2020} | ${1} | ${{ utc: true }} | ${'2020-10-15T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString, numberOfMonths is $numberOfMonths, and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, numberOfMonths, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.nMonthsBefore(inputDate, numberOfMonths, options);
+
+ expect(actual.toISOString()).toBe(expectedAsString);
+ },
+ );
+ });
});
describe('approximateDuration', () => {
@@ -951,3 +1017,32 @@ describe('isToday', () => {
expect(datetimeUtility.isToday(date)).toBe(expected);
});
});
+
+describe('getStartOfDay', () => {
+ beforeEach(() => {
+ timezoneMock.register('US/Eastern');
+ });
+
+ afterEach(() => {
+ timezoneMock.unregister();
+ });
+
+ it.each`
+ inputAsString | options | expectedAsString
+ ${'2021-01-29T18:08:23.014Z'} | ${undefined} | ${'2021-01-29T05:00:00.000Z'}
+ ${'2021-01-29T13:08:23.014-05:00'} | ${undefined} | ${'2021-01-29T05:00:00.000Z'}
+ ${'2021-01-30T03:08:23.014+09:00'} | ${undefined} | ${'2021-01-29T05:00:00.000Z'}
+ ${'2021-01-28T18:08:23.014-10:00'} | ${undefined} | ${'2021-01-28T05:00:00.000Z'}
+ ${'2021-01-28T18:08:23.014-10:00'} | ${{}} | ${'2021-01-28T05:00:00.000Z'}
+ ${'2021-01-28T18:08:23.014-10:00'} | ${{ utc: false }} | ${'2021-01-28T05:00:00.000Z'}
+ ${'2021-01-28T18:08:23.014-10:00'} | ${{ utc: true }} | ${'2021-01-29T00:00:00.000Z'}
+ `(
+ 'when the provided date is $inputAsString and the options parameter is $options, returns $expectedAsString',
+ ({ inputAsString, options, expectedAsString }) => {
+ const inputDate = new Date(inputAsString);
+ const actual = datetimeUtility.getStartOfDay(inputDate, options);
+
+ expect(actual.toISOString()).toEqual(expectedAsString);
+ },
+ );
+});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index e6a8b9e37e4..cbad5b4c1e6 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -66,10 +66,10 @@ describe('Pipelines', () => {
const findRunPipelineButton = () => findByTestId('run-pipeline-button');
const findCiLintButton = () => findByTestId('ci-lint-button');
const findCleanCacheButton = () => findByTestId('clear-cache-button');
+ const findStagesDropdown = () => findByTestId('mini-pipeline-graph-dropdown-toggle');
const findEmptyState = () => wrapper.find(EmptyState);
const findBlankState = () => wrapper.find(BlankState);
- const findStagesDropdown = () => wrapper.find('.js-builds-dropdown-button');
const findTablePagination = () => wrapper.find(TablePagination);
diff --git a/spec/frontend/pipelines/pipelines_table_row_spec.js b/spec/frontend/pipelines/pipelines_table_row_spec.js
index 9cdd24b2ab5..660651547fc 100644
--- a/spec/frontend/pipelines/pipelines_table_row_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_row_spec.js
@@ -155,7 +155,9 @@ describe('Pipelines Table Row', () => {
it('should render an icon for each stage', () => {
expect(
- wrapper.findAll('.table-section:nth-child(4) .js-builds-dropdown-button').length,
+ wrapper.findAll(
+ '.table-section:nth-child(4) [data-testid="mini-pipeline-graph-dropdown-toggle"]',
+ ).length,
).toEqual(pipeline.details.stages.length);
});
});
diff --git a/spec/frontend/search/mock_data.js b/spec/frontend/search/mock_data.js
index 01d5a99c037..d076997b04a 100644
--- a/spec/frontend/search/mock_data.js
+++ b/spec/frontend/search/mock_data.js
@@ -45,3 +45,19 @@ export const MOCK_PROJECTS = [
id: 'test_2',
},
];
+
+export const MOCK_SORT_OPTIONS = [
+ {
+ title: 'Most relevant',
+ sortable: false,
+ sortParam: 'relevant',
+ },
+ {
+ title: 'Created date',
+ sortable: true,
+ sortParam: {
+ asc: 'created_asc',
+ desc: 'created_desc',
+ },
+ },
+];
diff --git a/spec/frontend/search/sort/components/app_spec.js b/spec/frontend/search/sort/components/app_spec.js
new file mode 100644
index 00000000000..8e3f08f7e28
--- /dev/null
+++ b/spec/frontend/search/sort/components/app_spec.js
@@ -0,0 +1,168 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlButtonGroup, GlButton, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { MOCK_QUERY, MOCK_SORT_OPTIONS } from 'jest/search/mock_data';
+import GlobalSearchSort from '~/search/sort/components/app.vue';
+import { SORT_DIRECTION_UI } from '~/search/sort/constants';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('GlobalSearchSort', () => {
+ let wrapper;
+
+ const actionSpies = {
+ setQuery: jest.fn(),
+ applyQuery: jest.fn(),
+ };
+
+ const defaultProps = {
+ searchSortOptions: MOCK_SORT_OPTIONS,
+ };
+
+ const createComponent = (initialState, props) => {
+ const store = new Vuex.Store({
+ state: {
+ query: MOCK_QUERY,
+ ...initialState,
+ },
+ actions: actionSpies,
+ });
+
+ wrapper = shallowMount(GlobalSearchSort, {
+ localVue,
+ store,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findSortButtonGroup = () => wrapper.find(GlButtonGroup);
+ const findSortDropdown = () => wrapper.find(GlDropdown);
+ const findSortDirectionButton = () => wrapper.find(GlButton);
+ const findDropdownItems = () => findSortDropdown().findAll(GlDropdownItem);
+ const findDropdownItemsText = () => findDropdownItems().wrappers.map((w) => w.text());
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders Sort Button Group', () => {
+ expect(findSortButtonGroup().exists()).toBe(true);
+ });
+
+ it('renders Sort Dropdown', () => {
+ expect(findSortDropdown().exists()).toBe(true);
+ });
+
+ it('renders Sort Direction Button', () => {
+ expect(findSortDirectionButton().exists()).toBe(true);
+ });
+ });
+
+ describe('Sort Dropdown Items', () => {
+ describe('renders', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('an instance for each namespace', () => {
+ expect(findDropdownItemsText()).toStrictEqual(
+ MOCK_SORT_OPTIONS.map((option) => option.title),
+ );
+ });
+ });
+
+ describe.each`
+ sortQuery | value
+ ${null} | ${MOCK_SORT_OPTIONS[0].title}
+ ${'asdf'} | ${MOCK_SORT_OPTIONS[0].title}
+ ${MOCK_SORT_OPTIONS[0].sortParam} | ${MOCK_SORT_OPTIONS[0].title}
+ ${MOCK_SORT_OPTIONS[1].sortParam.desc} | ${MOCK_SORT_OPTIONS[1].title}
+ ${MOCK_SORT_OPTIONS[1].sortParam.asc} | ${MOCK_SORT_OPTIONS[1].title}
+ `('selected', ({ sortQuery, value }) => {
+ describe(`when sort option is ${sortQuery}`, () => {
+ beforeEach(() => {
+ createComponent({ query: { sort: sortQuery } });
+ });
+
+ it('is set correctly', () => {
+ expect(findSortDropdown().attributes('text')).toBe(value);
+ });
+ });
+ });
+ });
+
+ describe.each`
+ description | sortQuery | sortUi | disabled
+ ${'non-sortable'} | ${MOCK_SORT_OPTIONS[0].sortParam} | ${SORT_DIRECTION_UI.disabled} | ${'true'}
+ ${'descending sortable'} | ${MOCK_SORT_OPTIONS[1].sortParam.desc} | ${SORT_DIRECTION_UI.desc} | ${undefined}
+ ${'ascending sortable'} | ${MOCK_SORT_OPTIONS[1].sortParam.asc} | ${SORT_DIRECTION_UI.asc} | ${undefined}
+ `('Sort Direction Button', ({ description, sortQuery, sortUi, disabled }) => {
+ describe(`when sort option is ${description}`, () => {
+ beforeEach(() => {
+ createComponent({ query: { sort: sortQuery } });
+ });
+
+ it('sets the UI correctly', () => {
+ expect(findSortDirectionButton().attributes('disabled')).toBe(disabled);
+ expect(findSortDirectionButton().attributes('title')).toBe(sortUi.tooltip);
+ expect(findSortDirectionButton().attributes('icon')).toBe(sortUi.icon);
+ });
+ });
+ });
+
+ describe('actions', () => {
+ describe.each`
+ description | index | value
+ ${'non-sortable'} | ${0} | ${MOCK_SORT_OPTIONS[0].sortParam}
+ ${'sortable'} | ${1} | ${MOCK_SORT_OPTIONS[1].sortParam.desc}
+ `('handleSortChange', ({ description, index, value }) => {
+ describe(`when clicking a ${description} option`, () => {
+ beforeEach(() => {
+ createComponent();
+ findDropdownItems().at(index).vm.$emit('click');
+ });
+
+ it('calls setQuery and applyQuery correctly', () => {
+ expect(actionSpies.setQuery).toHaveBeenCalledTimes(1);
+ expect(actionSpies.applyQuery).toHaveBeenCalledTimes(1);
+ expect(actionSpies.setQuery).toHaveBeenCalledWith(expect.any(Object), {
+ key: 'sort',
+ value,
+ });
+ });
+ });
+ });
+
+ describe.each`
+ description | sortQuery | value
+ ${'descending'} | ${MOCK_SORT_OPTIONS[1].sortParam.desc} | ${MOCK_SORT_OPTIONS[1].sortParam.asc}
+ ${'ascending'} | ${MOCK_SORT_OPTIONS[1].sortParam.asc} | ${MOCK_SORT_OPTIONS[1].sortParam.desc}
+ `('handleSortDirectionChange', ({ description, sortQuery, value }) => {
+ describe(`when toggling a ${description} option`, () => {
+ beforeEach(() => {
+ createComponent({ query: { sort: sortQuery } });
+ findSortDirectionButton().vm.$emit('click');
+ });
+
+ it('calls setQuery and applyQuery correctly', () => {
+ expect(actionSpies.setQuery).toHaveBeenCalledTimes(1);
+ expect(actionSpies.applyQuery).toHaveBeenCalledTimes(1);
+ expect(actionSpies.setQuery).toHaveBeenCalledWith(expect.any(Object), {
+ key: 'sort',
+ value,
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 2cb9d66ac63..80401635b95 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -610,4 +610,25 @@ RSpec.describe SearchHelper do
end
end
end
+
+ describe '#search_sort_options' do
+ let(:user) { create(:user) }
+
+ mock_created_sort = {
+ title: _('Created date'),
+ sortable: true,
+ sortParam: {
+ asc: 'created_asc',
+ desc: 'created_desc'
+ }
+ }
+
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ end
+
+ it 'returns the correct data' do
+ expect(search_sort_options).to eq([mock_created_sort])
+ end
+ end
end
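
The new assertion above implies a SearchHelper#search_sort_options helper that returns an array of sort-option hashes. A minimal sketch that would satisfy that expectation, inferred from the spec rather than taken from this commit; the real helper may add more options or user-dependent logic:

    # Illustrative sketch only; mirrors the structure asserted in search_helper_spec.rb above.
    def search_sort_options
      [
        {
          title: _('Created date'),
          sortable: true,
          sortParam: {
            asc: 'created_asc',
            desc: 'created_desc'
          }
        }
      ]
    end
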
diff --git a/spec/helpers/sorting_helper_spec.rb b/spec/helpers/sorting_helper_spec.rb
index 2d581dfba37..f976fb098a8 100644
--- a/spec/helpers/sorting_helper_spec.rb
+++ b/spec/helpers/sorting_helper_spec.rb
@@ -50,24 +50,6 @@ RSpec.describe SortingHelper do
end
end
- describe '#search_sort_direction_button' do
- before do
- set_sorting_url 'test_label'
- end
-
- it 'keeps label filter param' do
- expect(search_sort_direction_button('created_asc')).to include('label_name=test_label')
- end
-
- it 'returns icon with sort-lowest when sort is asc' do
- expect(search_sort_direction_button('created_asc')).to include('sort-lowest')
- end
-
- it 'returns icon with sort-highest when sort is desc' do
- expect(search_sort_direction_button('created_desc')).to include('sort-highest')
- end
- end
-
def stub_controller_path(value)
allow(helper.controller).to receive(:controller_path).and_return(value)
end
diff --git a/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb b/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb
index 4de7d95172f..57ffdfa9aee 100644
--- a/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb
+++ b/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe BulkImports::Common::Loaders::EntityLoader do
it "creates entities for the given data" do
group = create(:group, path: "imported-group")
parent_entity = create(:bulk_import_entity, group: group, bulk_import: create(:bulk_import))
- context = instance_double(BulkImports::Pipeline::Context, entity: parent_entity)
+ context = BulkImports::Pipeline::Context.new(parent_entity)
data = {
source_type: :group_entity,
diff --git a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
index 9dd844a0d65..627247c04ab 100644
--- a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
+++ b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
@@ -5,16 +5,11 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Extractors::SubgroupsExtractor do
describe '#extract' do
it 'returns ExtractedData response' do
- user = create(:user)
bulk_import = create(:bulk_import)
+ create(:bulk_import_configuration, bulk_import: bulk_import)
entity = create(:bulk_import_entity, bulk_import: bulk_import)
- configuration = create(:bulk_import_configuration, bulk_import: bulk_import)
response = [{ 'test' => 'group' }]
- context = BulkImports::Pipeline::Context.new(
- current_user: user,
- entity: entity,
- configuration: configuration
- )
+ context = BulkImports::Pipeline::Context.new(entity)
allow_next_instance_of(BulkImports::Clients::Http) do |client|
allow(client).to receive(:each_page).and_return(response)
diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
index b14dfc615a9..183292722d2 100644
--- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
+++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
@@ -7,21 +7,20 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
let(:user) { create(:user) }
let(:data) { { foo: :bar } }
let(:service_double) { instance_double(::Groups::CreateService) }
- let(:entity) { create(:bulk_import_entity) }
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- entity: entity,
- current_user: user
- )
- end
+ let(:bulk_import) { create(:bulk_import, user: user) }
+ let(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
subject { described_class.new }
context 'when user can create group' do
shared_examples 'calls Group Create Service to create a new group' do
it 'calls Group Create Service to create a new group' do
- expect(::Groups::CreateService).to receive(:new).with(context.current_user, data).and_return(service_double)
+ expect(::Groups::CreateService)
+ .to receive(:new)
+ .with(context.current_user, data)
+ .and_return(service_double)
+
expect(service_double).to receive(:execute)
expect(entity).to receive(:update!)
diff --git a/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb
index 2dc360fc6b9..ac2f9c8cb1d 100644
--- a/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb
+++ b/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb
@@ -7,12 +7,7 @@ RSpec.describe BulkImports::Groups::Loaders::LabelsLoader do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:entity) { create(:bulk_import_entity, group: group) }
- let(:context) do
- BulkImports::Pipeline::Context.new(
- entity: entity,
- current_user: user
- )
- end
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
let(:data) do
{
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
index 445d25abd99..1a6a955544a 100644
--- a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
@@ -6,21 +6,18 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
describe '#run' do
let(:user) { create(:user) }
let(:parent) { create(:group) }
+ let(:bulk_import) { create(:bulk_import, user: user) }
let(:entity) do
create(
:bulk_import_entity,
+ bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: parent.full_path
)
end
- let(:context) do
- BulkImports::Pipeline::Context.new(
- current_user: user,
- entity: entity
- )
- end
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
let(:group_data) do
{
@@ -36,8 +33,6 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
}
end
- subject { described_class.new }
-
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return([group_data])
diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
index 1cdeff168ae..7c96967971f 100644
--- a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
@@ -16,12 +16,7 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
)
end
- let(:context) do
- BulkImports::Pipeline::Context.new(
- current_user: user,
- entity: entity
- )
- end
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
def extractor_data(title:, has_next_page:, cursor: nil)
data = [
diff --git a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
index e5a8ed7f47d..71d1ffdeba3 100644
--- a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
@@ -14,13 +14,7 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
)
end
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- current_user: user,
- entity: parent_entity
- )
- end
+ let(:context) { BulkImports::Pipeline::Context.new(parent_entity) }
let(:subgroup_data) do
[
diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
index 28a7859915d..5a7a51675d6 100644
--- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
@@ -7,22 +7,18 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
let(:user) { create(:user) }
let(:parent) { create(:group) }
let(:group) { create(:group, name: 'My Source Group', parent: parent) }
+ let(:bulk_import) { create(:bulk_import, user: user) }
let(:entity) do
- instance_double(
- BulkImports::Entity,
+ create(
+ :bulk_import_entity,
+ bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: group.name,
destination_namespace: parent.full_path
)
end
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- current_user: user,
- entity: entity
- )
- end
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
let(:data) do
{
@@ -85,16 +81,16 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
end
context 'when destination namespace is user namespace' do
- let(:entity) do
- instance_double(
- BulkImports::Entity,
+ it 'does not set parent id' do
+ entity = create(
+ :bulk_import_entity,
+ bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: group.name,
destination_namespace: user.namespace.full_path
)
- end
+ context = BulkImports::Pipeline::Context.new(entity)
- it 'does not set parent id' do
transformed_data = subject.transform(context, data)
expect(transformed_data).not_to have_key('parent_id')
diff --git a/spec/lib/bulk_imports/importers/group_importer_spec.rb b/spec/lib/bulk_imports/importers/group_importer_spec.rb
index 083b1507b01..0884a51ce7d 100644
--- a/spec/lib/bulk_imports/importers/group_importer_spec.rb
+++ b/spec/lib/bulk_imports/importers/group_importer_spec.rb
@@ -7,20 +7,14 @@ RSpec.describe BulkImports::Importers::GroupImporter do
let(:bulk_import) { create(:bulk_import) }
let(:bulk_import_entity) { create(:bulk_import_entity, :started, bulk_import: bulk_import) }
let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
- let(:context) do
- BulkImports::Pipeline::Context.new(
- current_user: user,
- entity: bulk_import_entity,
- configuration: bulk_import_configuration
- )
- end
-
- subject { described_class.new(bulk_import_entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(bulk_import_entity) }
before do
allow(BulkImports::Pipeline::Context).to receive(:new).and_return(context)
end
+ subject { described_class.new(bulk_import_entity) }
+
describe '#execute' do
it 'starts the entity and run its pipelines' do
expect_to_run_pipeline BulkImports::Groups::Pipelines::GroupPipeline, context: context
diff --git a/spec/lib/bulk_imports/pipeline/context_spec.rb b/spec/lib/bulk_imports/pipeline/context_spec.rb
index e9af6313ca4..c8c3fe3a861 100644
--- a/spec/lib/bulk_imports/pipeline/context_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/context_spec.rb
@@ -3,25 +3,29 @@
require 'spec_helper'
RSpec.describe BulkImports::Pipeline::Context do
- describe '#initialize' do
- it 'initializes with permitted attributes' do
- args = {
- current_user: create(:user),
- entity: create(:bulk_import_entity),
- configuration: create(:bulk_import_configuration)
- }
+ let(:group) { instance_double(Group) }
+ let(:user) { instance_double(User) }
+ let(:bulk_import) { instance_double(BulkImport, user: user, configuration: :config) }
- context = described_class.new(args)
+ let(:entity) do
+ instance_double(
+ BulkImports::Entity,
+ bulk_import: bulk_import,
+ group: group
+ )
+ end
+
+ subject { described_class.new(entity) }
- args.each do |k, v|
- expect(context.public_send(k)).to eq(v)
- end
- end
+ describe '#group' do
+ it { expect(subject.group).to eq(group) }
+ end
+
+ describe '#current_user' do
+ it { expect(subject.current_user).to eq(user) }
+ end
- context 'when invalid argument is passed' do
- it 'raises NoMethodError' do
- expect { described_class.new(test: 'test').test }.to raise_exception(NoMethodError)
- end
- end
+  describe '#configuration' do
+ it { expect(subject.configuration).to eq(bulk_import.configuration) }
end
end
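
The BulkImports spec changes throughout this commit replace hand-built instance_double contexts with BulkImports::Pipeline::Context.new(entity). Based solely on the assertions in the spec above (group, current_user, and configuration are all derived from the entity), here is a minimal sketch of what the reworked Context appears to delegate; the actual class presumably lives in lib/bulk_imports/pipeline/context.rb and may carry additional state:

    # Minimal sketch inferred from the expectations in context_spec.rb; not the verbatim implementation.
    module BulkImports
      module Pipeline
        class Context
          attr_reader :entity

          def initialize(entity)
            @entity = entity
          end

          # Group, current user, and configuration are all reachable from the entity.
          def group
            entity.group
          end

          def bulk_import
            entity.bulk_import
          end

          def current_user
            bulk_import.user
          end

          def configuration
            bulk_import.configuration
          end
        end
      end
    end
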
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index de3489708d8..7373588db53 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -45,12 +45,8 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
context 'when entity is not marked as failed' do
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- entity: instance_double(BulkImports::Entity, id: 1, source_type: 'group', failed?: false)
- )
- end
+ let(:entity) { create(:bulk_import_entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
it 'runs pipeline extractor, transformer, loader' do
extracted_data = BulkImports::Pipeline::ExtractedData.new(data: { foo: :bar })
@@ -80,15 +76,27 @@ RSpec.describe BulkImports::Pipeline::Runner do
.with(
message: 'Pipeline started',
pipeline_class: 'BulkImports::MyPipeline',
- bulk_import_entity_id: 1,
- bulk_import_entity_type: 'group'
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity'
)
expect(logger).to receive(:info)
- .with(bulk_import_entity_id: 1, bulk_import_entity_type: 'group', extractor: 'BulkImports::Extractor')
+ .with(
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity',
+ extractor: 'BulkImports::Extractor'
+ )
expect(logger).to receive(:info)
- .with(bulk_import_entity_id: 1, bulk_import_entity_type: 'group', transformer: 'BulkImports::Transformer')
+ .with(
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity',
+ transformer: 'BulkImports::Transformer'
+ )
expect(logger).to receive(:info)
- .with(bulk_import_entity_id: 1, bulk_import_entity_type: 'group', loader: 'BulkImports::Loader')
+ .with(
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity',
+ loader: 'BulkImports::Loader'
+ )
end
BulkImports::MyPipeline.new.run(context)
@@ -96,7 +104,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
context 'when exception is raised' do
let(:entity) { create(:bulk_import_entity, :created) }
- let(:context) { BulkImports::Pipeline::Context.new(entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
before do
allow_next_instance_of(BulkImports::Extractor) do |extractor|
@@ -153,21 +161,19 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
context 'when entity is marked as failed' do
- let(:context) do
- instance_double(
- BulkImports::Pipeline::Context,
- entity: instance_double(BulkImports::Entity, id: 1, source_type: 'group', failed?: true)
- )
- end
+ let(:entity) { create(:bulk_import_entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
it 'logs and returns without execution' do
+ allow(entity).to receive(:failed?).and_return(true)
+
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:info)
.with(
message: 'Skipping due to failed pipeline status',
pipeline_class: 'BulkImports::MyPipeline',
- bulk_import_entity_id: 1,
- bulk_import_entity_type: 'group'
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: 'group_entity'
)
end
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index 209ac596ac7..8bd6049e6fa 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -36,6 +36,12 @@ RSpec.describe API::API do
expect(response).to have_gitlab_http_status(:ok)
end
+ it 'does authorize user for head request' do
+ head api('/groups', personal_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
it 'does not authorize user for revoked token' do
revoked = create(:personal_access_token, :revoked, user: user, scopes: [:read_api])
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index 6a8d5f91abd..110d6e2f99e 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -55,6 +55,12 @@ RSpec.describe API::Events do
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
end
+
+ it 'returns "200" response on head request' do
+ head api('/events?action=closed&target_type=issue&after=2016-12-1&before=2016-12-31', personal_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token does not have "read_user" or "api" scope' do
diff --git a/spec/requests/api/version_spec.rb b/spec/requests/api/version_spec.rb
index a0a0f66c8d1..7abbaf4f9ec 100644
--- a/spec/requests/api/version_spec.rb
+++ b/spec/requests/api/version_spec.rb
@@ -33,6 +33,12 @@ RSpec.describe API::Version do
expect_version
end
+
+ it 'returns "200" response on head requests' do
+ head api('/version', personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'with read_user scope' do
@@ -43,6 +49,12 @@ RSpec.describe API::Version do
expect_version
end
+
+ it 'returns "200" response on head requests' do
+ head api('/version', personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'with neither api nor read_user scope' do
diff --git a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
index f990c857439..7a09a437ab3 100644
--- a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'handles repository moves' do |check_worker: true|
+RSpec.shared_examples 'handles repository moves' do
describe 'associations' do
it { is_expected.to belong_to(:container) }
end
@@ -61,25 +61,23 @@ RSpec.shared_examples 'handles repository moves' do |check_worker: true|
context 'when in the default state' do
subject(:storage_move) { create(repository_storage_factory_key, container: container, destination_storage_name: 'test_second_storage') }
- if check_worker
- context 'and transits to scheduled' do
- it 'triggers the corresponding repository storage worker' do
- expect(repository_storage_worker).to receive(:perform_async).with(container.id, 'test_second_storage', storage_move.id)
+ context 'and transits to scheduled' do
+ it 'triggers the corresponding repository storage worker' do
+ expect(repository_storage_worker).to receive(:perform_async).with(container.id, 'test_second_storage', storage_move.id)
- storage_move.schedule!
+ storage_move.schedule!
- expect(container).to be_repository_read_only
- end
+ expect(container).to be_repository_read_only
+ end
- context 'when the transition fails' do
- it 'does not trigger the corresponding repository storage worker and adds an error' do
- allow(storage_move.container).to receive(:set_repository_read_only!).and_raise(StandardError, 'foobar')
- expect(repository_storage_worker).not_to receive(:perform_async)
+ context 'when the transition fails' do
+ it 'does not trigger the corresponding repository storage worker and adds an error' do
+ allow(storage_move.container).to receive(:set_repository_read_only!).and_raise(StandardError, 'foobar')
+ expect(repository_storage_worker).not_to receive(:perform_async)
- storage_move.schedule!
+ storage_move.schedule!
- expect(storage_move.errors[error_key]).to include('foobar')
- end
+ expect(storage_move.errors[error_key]).to include('foobar')
end
end
end
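
These shared examples rely on the including spec to define the lets referenced above (container, repository_storage_factory_key, error_key, repository_storage_worker). A hypothetical usage sketch follows; the describe target, factory name, and worker class are placeholders, not taken from this diff:

    # Hypothetical including spec; all concrete names are illustrative placeholders.
    RSpec.describe 'a repository storage move model' do
      it_behaves_like 'handles repository moves' do
        let(:container) { create(:project) }                                      # owner of the repository being moved
        let(:repository_storage_factory_key) { :project_repository_storage_move } # factory used to build the move record
        let(:error_key) { :project }                                              # errors key checked on failed transitions
        let(:repository_storage_worker) { SomeUpdateRepositoryStorageWorker }     # worker expected to be scheduled
      end
    end
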
diff --git a/spec/support/shared_examples/requests/api/read_user_shared_examples.rb b/spec/support/shared_examples/requests/api/read_user_shared_examples.rb
index 59cd0ab67b4..b9fd997bd2c 100644
--- a/spec/support/shared_examples/requests/api/read_user_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/read_user_shared_examples.rb
@@ -7,21 +7,33 @@ RSpec.shared_examples 'allows the "read_user" scope' do |api_version|
context 'when the requesting token has the "api" scope' do
let(:token) { create(:personal_access_token, scopes: ['api'], user: user) }
- it 'returns a "200" response' do
+ it 'returns a "200" response on get request' do
get api_call.call(path, user, personal_access_token: token, version: version)
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'returns a "200" response on head request' do
+ head api_call.call(path, user, personal_access_token: token, version: version)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token has the "read_user" scope' do
let(:token) { create(:personal_access_token, scopes: ['read_user'], user: user) }
- it 'returns a "200" response' do
+ it 'returns a "200" response on get request' do
get api_call.call(path, user, personal_access_token: token, version: version)
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'returns a "200" response on head request' do
+ head api_call.call(path, user, personal_access_token: token, version: version)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token does not have any required scope' do
@@ -45,21 +57,33 @@ RSpec.shared_examples 'allows the "read_user" scope' do |api_version|
context 'when the requesting token has the "api" scope' do
let!(:token) { Doorkeeper::AccessToken.create! application_id: application.id, resource_owner_id: user.id, scopes: "api" }
- it 'returns a "200" response' do
+ it 'returns a "200" response on get request' do
get api_call.call(path, user, oauth_access_token: token)
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'returns a "200" response on head request' do
+ head api_call.call(path, user, oauth_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token has the "read_user" scope' do
let!(:token) { Doorkeeper::AccessToken.create! application_id: application.id, resource_owner_id: user.id, scopes: "read_user" }
- it 'returns a "200" response' do
+ it 'returns a "200" response on get request' do
get api_call.call(path, user, oauth_access_token: token)
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it 'returns a "200" response on head request' do
+ head api_call.call(path, user, oauth_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
context 'when the requesting token does not have any required scope' do
diff --git a/spec/workers/projects/git_garbage_collect_worker_spec.rb b/spec/workers/projects/git_garbage_collect_worker_spec.rb
index 73a70ba436b..8c44643ae51 100644
--- a/spec/workers/projects/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/projects/git_garbage_collect_worker_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Projects::GitGarbageCollectWorker do
let(:resource) { project }
let(:statistics_service_klass) { Projects::UpdateStatisticsService }
let(:statistics_keys) { [:repository_size, :lfs_objects_size] }
- let(:expected_default_lease) { "#{resource.id}" }
+ let(:expected_default_lease) { "projects:#{resource.id}" }
end
context 'when is able to get the lease' do