gitlab.com/gitlab-org/gitlab-foss.git
path: root/spec
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-02-07 15:09:13 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-02-07 15:09:13 +0300
commit    211a8c3361ccf4eb92f36edbdcf15c98fcdcc8b7 (patch)
tree      0ad37172721a39b0d57240bb1b4e70f200a0d93e /spec
parent    456a7247f9e88fc2518b69a1a00e905c6db6d775 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/admin/services_controller_spec.rb | 12
-rw-r--r--  spec/controllers/concerns/lfs_request_spec.rb | 18
-rw-r--r--  spec/controllers/dashboard/projects_controller_spec.rb | 33
-rw-r--r--  spec/controllers/projects/services_controller_spec.rb | 6
-rw-r--r--  spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb | 6
-rw-r--r--  spec/fixtures/trace/sample_trace | 2
-rw-r--r--  spec/frontend/monitoring/components/charts/time_series_spec.js | 59
-rw-r--r--  spec/frontend/monitoring/components/dashboard_spec.js | 40
-rw-r--r--  spec/frontend/monitoring/components/dashboard_url_time_spec.js | 59
-rw-r--r--  spec/frontend/monitoring/embed/embed_spec.js | 21
-rw-r--r--  spec/frontend/monitoring/init_utils.js | 1
-rw-r--r--  spec/frontend/monitoring/panel_type_spec.js | 146
-rw-r--r--  spec/frontend/monitoring/store/mutations_spec.js | 10
-rw-r--r--  spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb | 75
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/import_export/project_tree_restorer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 2
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 2
-rw-r--r--  spec/migrations/fix_projects_without_project_feature_spec.rb | 42
-rw-r--r--  spec/migrations/migrate_propagate_service_template_sidekiq_queue_spec.rb | 29
-rw-r--r--  spec/models/concerns/sortable_spec.rb | 15
-rw-r--r--  spec/models/project_spec.rb | 71
-rw-r--r--  spec/models/service_spec.rb | 52
-rw-r--r--  spec/presenters/project_presenter_spec.rb | 4
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 2
-rw-r--r--  spec/requests/lfs_http_spec.rb | 4
-rw-r--r--  spec/services/merge_requests/create_service_spec.rb | 8
-rw-r--r--  spec/services/merge_requests/link_lfs_objects_service_spec.rb | 103
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 8
-rw-r--r--  spec/services/projects/create_service_spec.rb | 14
-rw-r--r--  spec/services/projects/fork_service_spec.rb | 8
-rw-r--r--  spec/services/projects/lfs_pointers/lfs_download_service_spec.rb | 3
-rw-r--r--  spec/services/projects/propagate_instance_level_service_spec.rb (renamed from spec/services/projects/propagate_service_template_spec.rb) | 36
-rw-r--r--  spec/workers/propagate_instance_level_service_worker_spec.rb | 31
-rw-r--r--  spec/workers/propagate_service_template_worker_spec.rb | 31
-rw-r--r--  spec/workers/repository_fork_worker_spec.rb | 24
39 files changed, 863 insertions(+), 245 deletions(-)
diff --git a/spec/controllers/admin/services_controller_spec.rb b/spec/controllers/admin/services_controller_spec.rb
index 44233776865..6f59a5ac016 100644
--- a/spec/controllers/admin/services_controller_spec.rb
+++ b/spec/controllers/admin/services_controller_spec.rb
@@ -15,11 +15,11 @@ describe Admin::ServicesController do
Service.available_services_names.each do |service_name|
context "#{service_name}" do
let!(:service) do
- service_template = "#{service_name}_service".camelize.constantize
- service_template.where(template: true).first_or_create
+ service_instance = "#{service_name}_service".camelize.constantize
+ service_instance.where(instance: true).first_or_create
end
- it 'successfully displays the template' do
+ it 'successfully displays the service' do
get :edit, params: { id: service.id }
expect(response).to have_gitlab_http_status(:ok)
@@ -34,7 +34,7 @@ describe Admin::ServicesController do
RedmineService.create(
project: project,
active: false,
- template: true,
+ instance: true,
properties: {
project_url: 'http://abc',
issues_url: 'http://abc',
@@ -44,7 +44,7 @@ describe Admin::ServicesController do
end
it 'calls the propagation worker when service is active' do
- expect(PropagateServiceTemplateWorker).to receive(:perform_async).with(service.id)
+ expect(PropagateInstanceLevelServiceWorker).to receive(:perform_async).with(service.id)
put :update, params: { id: service.id, service: { active: true } }
@@ -52,7 +52,7 @@ describe Admin::ServicesController do
end
it 'does not call the propagation worker when service is not active' do
- expect(PropagateServiceTemplateWorker).not_to receive(:perform_async)
+ expect(PropagateInstanceLevelServiceWorker).not_to receive(:perform_async)
put :update, params: { id: service.id, service: { properties: {} } }
diff --git a/spec/controllers/concerns/lfs_request_spec.rb b/spec/controllers/concerns/lfs_request_spec.rb
index 79257e9a7f6..67c81156ca6 100644
--- a/spec/controllers/concerns/lfs_request_spec.rb
+++ b/spec/controllers/concerns/lfs_request_spec.rb
@@ -10,8 +10,6 @@ describe LfsRequest do
include LfsRequest
def show
- storage_project
-
head :ok
end
@@ -38,22 +36,6 @@ describe LfsRequest do
stub_lfs_setting(enabled: true)
end
- describe '#storage_project' do
- it 'assigns the project as storage project' do
- get :show, params: { id: project.id }
-
- expect(assigns(:storage_project)).to eq(project)
- end
-
- it 'assigns the source of a forked project' do
- forked_project = fork_project(project)
-
- get :show, params: { id: forked_project.id }
-
- expect(assigns(:storage_project)).to eq(project)
- end
- end
-
context 'user is authenticated without access to lfs' do
before do
allow(controller).to receive(:authenticate_user)
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index d013093c376..a13b56deb23 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -11,7 +11,14 @@ describe Dashboard::ProjectsController do
end
context 'user logged in' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+
+ before_all do
+ project.add_developer(user)
+ project2.add_developer(user)
+ end
before do
sign_in(user)
@@ -28,12 +35,7 @@ describe Dashboard::ProjectsController do
end
it 'orders the projects by last activity by default' do
- project = create(:project)
- project.add_developer(user)
project.update!(last_repository_updated_at: 3.days.ago, last_activity_at: 3.days.ago)
-
- project2 = create(:project)
- project2.add_developer(user)
project2.update!(last_repository_updated_at: 10.days.ago, last_activity_at: 10.days.ago)
get :index
@@ -42,12 +44,27 @@ describe Dashboard::ProjectsController do
end
context 'project sorting' do
- let(:project) { create(:project) }
-
it_behaves_like 'set sort order from user preference' do
let(:sorting_param) { 'created_asc' }
end
end
+
+ context 'with search and sort parameters' do
+ render_views
+
+ shared_examples 'search and sort parameters' do |sort|
+ it 'returns a single project with no ambiguous column errors' do
+ get :index, params: { name: project2.name, sort: sort }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:projects)).to eq([project2])
+ end
+ end
+
+ %w[latest_activity_desc latest_activity_asc stars_desc stars_asc created_desc].each do |sort|
+ it_behaves_like 'search and sort parameters', sort
+ end
+ end
end
end
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index 0c074714bf3..b76d350ebbc 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -154,12 +154,12 @@ describe Projects::ServicesController do
end
end
- context 'when activating Jira service from a template' do
+ context 'when activating Jira service from instance level service' do
let(:service) do
- create(:jira_service, project: project, template: true)
+ create(:jira_service, project: project, instance: true)
end
- it 'activate Jira service from template' do
+ it 'activate Jira service from instance level service' do
expect(flash[:notice]).to eq 'Jira activated.'
end
end
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 41c3c6b5770..8e20facda15 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -33,7 +33,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
expect(page).not_to have_link('Enable Auto DevOps')
expect(page).not_to have_link('Auto DevOps enabled')
expect(page).not_to have_link('Add Kubernetes cluster')
- expect(page).not_to have_link('Kubernetes configured')
+ expect(page).not_to have_link('Kubernetes')
end
end
end
@@ -100,7 +100,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
it 'no Kubernetes cluster button if can not manage clusters' do
page.within('.project-buttons') do
expect(page).not_to have_link('Add Kubernetes cluster')
- expect(page).not_to have_link('Kubernetes configured')
+ expect(page).not_to have_link('Kubernetes')
end
end
end
@@ -308,7 +308,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
visit project_path(project)
page.within('.project-buttons') do
- expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster))
+ expect(page).to have_link('Kubernetes', href: project_cluster_path(project, cluster))
end
end
end
diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace
index d774d154496..1aba1e76d0b 100644
--- a/spec/fixtures/trace/sample_trace
+++ b/spec/fixtures/trace/sample_trace
@@ -2736,7 +2736,7 @@ Service
when repository is empty
test runs execute
Template
- .build_from_template
+ .build_from_instance
when template is invalid
sets service template to inactive when template is invalid
for pushover service
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index d4f197a708f..0a7e3dca183 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -18,6 +18,15 @@ import * as iconUtils from '~/lib/utils/icon_utils';
const mockWidgets = 'mockWidgets';
const mockSvgPathContent = 'mockSvgPathContent';
+
+jest.mock('lodash/throttle', () =>
+ // this throttle mock executes immediately
+ jest.fn(func => {
+ // eslint-disable-next-line no-param-reassign
+ func.cancel = jest.fn();
+ return func;
+ }),
+);
jest.mock('~/lib/utils/icon_utils', () => ({
getSvgIconPathContent: jest.fn().mockImplementation(() => Promise.resolve(mockSvgPathContent)),
}));
@@ -94,6 +103,56 @@ describe('Time series component', () => {
});
});
+ describe('events', () => {
+ describe('datazoom', () => {
+ let eChartMock;
+ let startValue;
+ let endValue;
+
+ const findChart = () => timeSeriesChart.find({ ref: 'chart' });
+
+ beforeEach(done => {
+ eChartMock = {
+ handlers: {},
+ getOption: () => ({
+ dataZoom: [
+ {
+ startValue,
+ endValue,
+ },
+ ],
+ }),
+ off: jest.fn(eChartEvent => {
+ delete eChartMock.handlers[eChartEvent];
+ }),
+ on: jest.fn((eChartEvent, fn) => {
+ eChartMock.handlers[eChartEvent] = fn;
+ }),
+ };
+
+ timeSeriesChart = makeTimeSeriesChart(mockGraphData);
+ timeSeriesChart.vm.$nextTick(() => {
+ findChart().vm.$emit('created', eChartMock);
+ done();
+ });
+ });
+
+ it('handles datazoom event from chart', () => {
+ startValue = 1577836800000; // 2020-01-01T00:00:00.000Z
+ endValue = 1577840400000; // 2020-01-01T01:00:00.000Z
+ eChartMock.handlers.datazoom();
+
+ expect(timeSeriesChart.emitted('datazoom')).toHaveLength(1);
+ expect(timeSeriesChart.emitted('datazoom')[0]).toEqual([
+ {
+ start: new Date(startValue).toISOString(),
+ end: new Date(endValue).toISOString(),
+ },
+ ]);
+ });
+ });
+ });
+
describe('methods', () => {
describe('formatTooltipText', () => {
let mockDate;
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 31266b4f6d4..70b2c9cf527 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -73,12 +73,20 @@ describe('Dashboard', () => {
describe('no metrics are available yet', () => {
beforeEach(() => {
+ jest.spyOn(store, 'dispatch');
createShallowWrapper();
});
it('shows the environment selector', () => {
expect(findEnvironmentsDropdown().exists()).toBe(true);
});
+
+ it('sets endpoints: logs path', () => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setEndpoints',
+ expect.objectContaining({ logsPath: propsData.logsPath }),
+ );
+ });
});
describe('no data found', () => {
@@ -94,6 +102,21 @@ describe('Dashboard', () => {
});
describe('request information to the server', () => {
+ it('calls to set time range and fetch data', () => {
+ jest.spyOn(store, 'dispatch');
+
+ createShallowWrapper({ hasMetrics: true }, { methods: {} });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expect.any(Object),
+ );
+
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
+ });
+ });
+
it('shows up a loading state', done => {
createShallowWrapper({ hasMetrics: true }, { methods: {} });
@@ -126,7 +149,7 @@ describe('Dashboard', () => {
.catch(done.fail);
});
- it('fetches the metrics data with proper time window', done => {
+ it('fetches the metrics data with proper time window', () => {
jest.spyOn(store, 'dispatch');
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
@@ -136,14 +159,9 @@ describe('Dashboard', () => {
environmentData,
);
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(store.dispatch).toHaveBeenCalled();
-
- done();
- })
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalled();
+ });
});
});
@@ -263,10 +281,6 @@ describe('Dashboard', () => {
return wrapper.vm.$nextTick();
});
- afterEach(() => {
- wrapper.destroy();
- });
-
it('renders a search input', () => {
expect(wrapper.find({ ref: 'monitorEnvironmentsDropdownSearch' }).exists()).toBe(true);
});
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index 33fbfac486f..161c64dd74b 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -7,6 +7,7 @@ import { mockProjectDir } from '../mock_data';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
+import { defaultTimeRange } from '~/monitoring/constants';
import { propsData } from '../init_utils';
jest.mock('~/flash');
@@ -17,16 +18,11 @@ describe('dashboard invalid url parameters', () => {
let wrapper;
let mock;
- const fetchDataMock = jest.fn();
-
const createMountedWrapper = (props = { hasMetrics: true }, options = {}) => {
wrapper = mount(Dashboard, {
propsData: { ...propsData, ...props },
store,
stubs: ['graph-group', 'panel-type'],
- methods: {
- fetchData: fetchDataMock,
- },
...options,
});
};
@@ -35,6 +31,8 @@ describe('dashboard invalid url parameters', () => {
beforeEach(() => {
store = createStore();
+ jest.spyOn(store, 'dispatch');
+
mock = new MockAdapter(axios);
});
@@ -43,7 +41,6 @@ describe('dashboard invalid url parameters', () => {
wrapper.destroy();
}
mock.restore();
- fetchDataMock.mockReset();
queryToObject.mockReset();
});
@@ -53,15 +50,13 @@ describe('dashboard invalid url parameters', () => {
createMountedWrapper();
return wrapper.vm.$nextTick().then(() => {
- expect(findDateTimePicker().props('value')).toMatchObject({
- duration: { seconds: 28800 },
- });
+ expect(findDateTimePicker().props('value')).toEqual(defaultTimeRange);
- expect(fetchDataMock).toHaveBeenCalledTimes(1);
- expect(fetchDataMock).toHaveBeenCalledWith({
- start: expect.any(String),
- end: expect.any(String),
- });
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expect.any(Object),
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
});
});
@@ -78,8 +73,8 @@ describe('dashboard invalid url parameters', () => {
return wrapper.vm.$nextTick().then(() => {
expect(findDateTimePicker().props('value')).toEqual(params);
- expect(fetchDataMock).toHaveBeenCalledTimes(1);
- expect(fetchDataMock).toHaveBeenCalledWith(params);
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setTimeRange', params);
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
});
});
@@ -91,15 +86,17 @@ describe('dashboard invalid url parameters', () => {
createMountedWrapper();
return wrapper.vm.$nextTick().then(() => {
- expect(findDateTimePicker().props('value')).toMatchObject({
+ const expectedTimeRange = {
duration: { seconds: 60 * 2 },
- });
+ };
- expect(fetchDataMock).toHaveBeenCalledTimes(1);
- expect(fetchDataMock).toHaveBeenCalledWith({
- start: expect.any(String),
- end: expect.any(String),
- });
+ expect(findDateTimePicker().props('value')).toMatchObject(expectedTimeRange);
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expectedTimeRange,
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
});
});
@@ -114,15 +111,13 @@ describe('dashboard invalid url parameters', () => {
return wrapper.vm.$nextTick().then(() => {
expect(createFlash).toHaveBeenCalled();
- expect(findDateTimePicker().props('value')).toMatchObject({
- duration: { seconds: 28800 },
- });
+ expect(findDateTimePicker().props('value')).toEqual(defaultTimeRange);
- expect(fetchDataMock).toHaveBeenCalledTimes(1);
- expect(fetchDataMock).toHaveBeenCalledWith({
- start: expect.any(String),
- end: expect.any(String),
- });
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ defaultTimeRange,
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
});
});
@@ -137,7 +132,7 @@ describe('dashboard invalid url parameters', () => {
duration: { seconds: 120 },
});
- // redirect to plus + new parameters
+ // redirect to with new parameters
expect(mergeUrlParams).toHaveBeenCalledWith({ duration_seconds: '120' }, toUrl);
expect(redirectTo).toHaveBeenCalledTimes(1);
});
diff --git a/spec/frontend/monitoring/embed/embed_spec.js b/spec/frontend/monitoring/embed/embed_spec.js
index 831ab1ed157..3bb70a02bd9 100644
--- a/spec/frontend/monitoring/embed/embed_spec.js
+++ b/spec/frontend/monitoring/embed/embed_spec.js
@@ -26,10 +26,11 @@ describe('Embed', () => {
beforeEach(() => {
actions = {
- setFeatureFlags: () => {},
- setShowErrorBanner: () => {},
- setEndpoints: () => {},
- fetchMetricsData: () => {},
+ setFeatureFlags: jest.fn(),
+ setShowErrorBanner: jest.fn(),
+ setEndpoints: jest.fn(),
+ setTimeRange: jest.fn(),
+ fetchDashboard: jest.fn(),
};
metricsWithDataGetter = jest.fn();
@@ -76,6 +77,18 @@ describe('Embed', () => {
mountComponent();
});
+ it('calls actions to fetch data', () => {
+ const expectedTimeRangePayload = expect.objectContaining({
+ start: expect.any(String),
+ end: expect.any(String),
+ });
+
+ expect(actions.setTimeRange).toHaveBeenCalledTimes(1);
+ expect(actions.setTimeRange.mock.calls[0][1]).toEqual(expectedTimeRangePayload);
+
+ expect(actions.fetchDashboard).toHaveBeenCalled();
+ });
+
it('shows a chart when metrics are present', () => {
expect(wrapper.find('.metrics-embed').exists()).toBe(true);
expect(wrapper.find(PanelType).exists()).toBe(true);
diff --git a/spec/frontend/monitoring/init_utils.js b/spec/frontend/monitoring/init_utils.js
index 30c64a8d885..36c654ba7b3 100644
--- a/spec/frontend/monitoring/init_utils.js
+++ b/spec/frontend/monitoring/init_utils.js
@@ -15,6 +15,7 @@ export const propsData = {
clustersPath: '/path/to/clusters',
tagsPath: '/path/to/tags',
projectPath: '/path/to/project',
+ logsPath: '/path/to/logs',
defaultBranch: 'master',
metricsEndpoint: mockApiEndpoint,
deploymentsEndpoint: null,
diff --git a/spec/frontend/monitoring/panel_type_spec.js b/spec/frontend/monitoring/panel_type_spec.js
index e51b69ef14d..730d67f79d8 100644
--- a/spec/frontend/monitoring/panel_type_spec.js
+++ b/spec/frontend/monitoring/panel_type_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { setTestTimeout } from 'helpers/timeout';
+import invalidUrl from '~/lib/utils/invalid_url';
import axios from '~/lib/utils/axios_utils';
import PanelType from '~/monitoring/components/panel_type.vue';
import EmptyChart from '~/monitoring/components/charts/empty_chart.vue';
@@ -16,20 +17,25 @@ global.URL.createObjectURL = jest.fn();
describe('Panel Type component', () => {
let axiosMock;
let store;
- let panelType;
- const dashboardWidth = 100;
+ let state;
+ let wrapper;
const exampleText = 'example_text';
- const createWrapper = props =>
- shallowMount(PanelType, {
+ const createWrapper = props => {
+ wrapper = shallowMount(PanelType, {
propsData: {
...props,
},
store,
});
+ };
beforeEach(() => {
setTestTimeout(1000);
+
+ store = createStore();
+ state = store.state.monitoringDashboard;
+
axiosMock = new AxiosMockAdapter(axios);
});
@@ -44,19 +50,18 @@ describe('Panel Type component', () => {
graphDataNoResult.metrics[0].result = [];
beforeEach(() => {
- panelType = createWrapper({
- dashboardWidth,
+ createWrapper({
graphData: graphDataNoResult,
});
});
afterEach(() => {
- panelType.destroy();
+ wrapper.destroy();
});
describe('Empty Chart component', () => {
beforeEach(() => {
- glEmptyChart = panelType.find(EmptyChart);
+ glEmptyChart = wrapper.find(EmptyChart);
});
it('is a Vue instance', () => {
@@ -66,51 +71,126 @@ describe('Panel Type component', () => {
it('it receives a graph title', () => {
const props = glEmptyChart.props();
- expect(props.graphTitle).toBe(panelType.vm.graphData.title);
+ expect(props.graphTitle).toBe(wrapper.vm.graphData.title);
});
});
});
describe('when graph data is available', () => {
beforeEach(() => {
- store = createStore();
- panelType = createWrapper({
- dashboardWidth,
+ createWrapper({
graphData: graphDataPrometheusQueryRange,
});
});
afterEach(() => {
- panelType.destroy();
+ wrapper.destroy();
});
it('sets no clipboard copy link on dropdown by default', () => {
- const link = () => panelType.find('.js-chart-link');
+ const link = () => wrapper.find('.js-chart-link');
expect(link().exists()).toBe(false);
});
describe('Time Series Chart panel type', () => {
it('is rendered', () => {
- expect(panelType.find(TimeSeriesChart).isVueInstance()).toBe(true);
- expect(panelType.find(TimeSeriesChart).exists()).toBe(true);
+ expect(wrapper.find(TimeSeriesChart).isVueInstance()).toBe(true);
+ expect(wrapper.find(TimeSeriesChart).exists()).toBe(true);
});
it('includes a default group id', () => {
- expect(panelType.vm.groupId).toBe('panel-type-chart');
+ expect(wrapper.vm.groupId).toBe('panel-type-chart');
});
});
describe('Anomaly Chart panel type', () => {
- beforeEach(done => {
- panelType.setProps({
+ beforeEach(() => {
+ wrapper.setProps({
graphData: anomalyMockGraphData,
});
- panelType.vm.$nextTick(done);
+ return wrapper.vm.$nextTick();
});
it('is rendered with an anomaly chart', () => {
- expect(panelType.find(AnomalyChart).isVueInstance()).toBe(true);
- expect(panelType.find(AnomalyChart).exists()).toBe(true);
+ expect(wrapper.find(AnomalyChart).isVueInstance()).toBe(true);
+ expect(wrapper.find(AnomalyChart).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('View Logs dropdown item', () => {
+ const mockLogsPath = '/path/to/logs';
+ const mockTimeRange = { duration: { seconds: 120 } };
+
+ const findTimeChart = () => wrapper.find({ ref: 'timeChart' });
+ const findViewLogsLink = () => wrapper.find({ ref: 'viewLogsLink' });
+
+ beforeEach(() => {
+ createWrapper({
+ graphData: graphDataPrometheusQueryRange,
+ });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('is not present by default', () =>
+ wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ }));
+
+ it('is not present if a time range is not set', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = null;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is not present if the logs path is default', () => {
+ state.logsPath = invalidUrl;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is not present if the logs path is not set', () => {
+ state.logsPath = null;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is present when logs path and time a range is present', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ const href = `${mockLogsPath}?duration_seconds=${mockTimeRange.duration.seconds}`;
+ expect(findViewLogsLink().attributes('href')).toMatch(href);
+ });
+ });
+
+ it('it is overriden when a datazoom event is received', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = mockTimeRange;
+
+ const zoomedTimeRange = {
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-01T01:00:00.000Z',
+ };
+
+ findTimeChart().vm.$emit('datazoom', zoomedTimeRange);
+
+ return wrapper.vm.$nextTick(() => {
+ const start = encodeURIComponent(zoomedTimeRange.start);
+ const end = encodeURIComponent(zoomedTimeRange.end);
+ expect(findViewLogsLink().attributes('href')).toMatch(
+ `${mockLogsPath}?start=${start}&end=${end}`,
+ );
});
});
});
@@ -119,20 +199,18 @@ describe('Panel Type component', () => {
const clipboardText = 'A value to copy.';
beforeEach(() => {
- store = createStore();
- panelType = createWrapper({
+ createWrapper({
clipboardText,
- dashboardWidth,
graphData: graphDataPrometheusQueryRange,
});
});
afterEach(() => {
- panelType.destroy();
+ wrapper.destroy();
});
it('sets clipboard text on the dropdown', () => {
- const link = () => panelType.find('.js-chart-link');
+ const link = () => wrapper.find('.js-chart-link');
expect(link().exists()).toBe(true);
expect(link().element.dataset.clipboardText).toBe(clipboardText);
@@ -140,22 +218,20 @@ describe('Panel Type component', () => {
});
describe('when downloading metrics data as CSV', () => {
- beforeEach(done => {
+ beforeEach(() => {
graphDataPrometheusQueryRange.y_label = 'metric';
- store = createStore();
- panelType = shallowMount(PanelType, {
+ wrapper = shallowMount(PanelType, {
propsData: {
clipboardText: exampleText,
- dashboardWidth,
graphData: graphDataPrometheusQueryRange,
},
store,
});
- panelType.vm.$nextTick(done);
+ return wrapper.vm.$nextTick();
});
afterEach(() => {
- panelType.destroy();
+ wrapper.destroy();
});
describe('csvText', () => {
@@ -165,7 +241,7 @@ describe('Panel Type component', () => {
const firstRow = `${data[0][0]},${data[0][1]}`;
const secondRow = `${data[1][0]},${data[1][1]}`;
- expect(panelType.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
+ expect(wrapper.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
});
});
@@ -174,7 +250,7 @@ describe('Panel Type component', () => {
expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(expect.any(Blob));
expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(
expect.objectContaining({
- size: panelType.vm.csvText.length,
+ size: wrapper.vm.csvText.length,
type: 'text/plain',
}),
);
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 7f8ced8cf43..b0ac42e0e5f 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -90,6 +90,16 @@ describe('Monitoring mutations', () => {
expect(stateCopy.dashboardEndpoint).toEqual('dashboard.json');
expect(stateCopy.projectPath).toEqual('/gitlab-org/gitlab-foss');
});
+
+ it('should not remove default value of logsPath', () => {
+ const defaultLogsPath = stateCopy.logsPath;
+
+ mutations[types.SET_ENDPOINTS](stateCopy, {
+ dashboardEndpoint: 'dashboard.json',
+ });
+
+ expect(stateCopy.logsPath).toBe(defaultLogsPath);
+ });
});
describe('Individual panel/metric results', () => {
const metricId = '12_system_metrics_kubernetes_container_memory_total';
diff --git a/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
new file mode 100644
index 00000000000..0dca542cb9f
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::FixProjectsWithoutProjectFeature, :migration, schema: 2020_01_27_111840 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_features) { table(:project_features) }
+
+ let(:namespace) { namespaces.create(name: 'foo', path: 'foo') }
+
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:private_project_without_feature) { projects.create!(namespace_id: namespace.id, visibility_level: 0) }
+ let(:public_project_without_feature) { projects.create!(namespace_id: namespace.id, visibility_level: 20) }
+ let!(:projects_without_feature) { [private_project_without_feature, public_project_without_feature] }
+
+ before do
+ project_features.create({ project_id: project.id, pages_access_level: 20 })
+ end
+
+ subject { described_class.new.perform(Project.minimum(:id), Project.maximum(:id)) }
+
+ def project_feature_records
+ project_features.order(:project_id).pluck(:project_id)
+ end
+
+ def features(project)
+ project_features.find_by(project_id: project.id)&.attributes
+ end
+
+ it 'creates a ProjectFeature for projects without it' do
+ expect { subject }.to change { project_feature_records }.from([project.id]).to([project.id, *projects_without_feature.map(&:id)])
+ end
+
+ it 'creates ProjectFeature records with default values for a public project' do
+ subject
+
+ expect(features(public_project_without_feature)).to include(
+ {
+ "merge_requests_access_level" => 20,
+ "issues_access_level" => 20,
+ "wiki_access_level" => 20,
+ "snippets_access_level" => 20,
+ "builds_access_level" => 20,
+ "repository_access_level" => 20,
+ "pages_access_level" => 20,
+ "forking_access_level" => 20
+ }
+ )
+ end
+
+ it 'creates ProjectFeature records with default values for a private project' do
+ subject
+
+ expect(features(private_project_without_feature)).to include("pages_access_level" => 10)
+ end
+
+ context 'when access control to pages is forced' do
+ before do
+ allow(::Gitlab::Pages).to receive(:access_control_is_forced?).and_return(true)
+ end
+
+ it 'creates ProjectFeature records with default values for a public project' do
+ subject
+
+ expect(features(public_project_without_feature)).to include("pages_access_level" => 10)
+ end
+ end
+
+ it 'sets created_at/updated_at timestamps' do
+ subject
+
+ expect(project_features.where('created_at IS NULL OR updated_at IS NULL')).to be_empty
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
index 5e215be4dfb..26fc5344871 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
@@ -10,7 +10,7 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
context 'when there is only one ordering field' do
let(:arel_table) { Issue.arel_table }
- let(:order_list) { ['id'] }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'id')] }
let(:values) { [500] }
let(:operators) { ['>'] }
@@ -25,7 +25,7 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
context 'when ordering by a column attribute' do
let(:arel_table) { Issue.arel_table }
- let(:order_list) { %w(relative_position id) }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
let(:values) { [1500, 500] }
shared_examples ':after condition' do
@@ -71,5 +71,45 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
it_behaves_like ':after condition'
end
end
+
+ context 'when ordering by LOWER' do
+ let(:arel_table) { Project.arel_table }
+ let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
+ let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
+ let(:values) { ['Test', 500] }
+
+ context 'when :after' do
+ it 'generates :after sql' do
+ expected_sql = <<~SQL
+ (LOWER("projects"."name") > 'test')
+ OR (
+ LOWER("projects"."name") = 'test'
+ AND
+ "projects"."id" > 500
+ )
+ OR (LOWER("projects"."name") IS NULL)
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ (LOWER("projects"."name") > 'test')
+ OR (
+ LOWER("projects"."name") = 'test'
+ AND
+ "projects"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
index 1049890a079..be0a21b2438 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do
context 'when ordering by a column attribute' do
let(:arel_table) { Issue.arel_table }
- let(:order_list) { %w(relative_position id) }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
shared_examples ':after condition' do
it 'generates sql' do
@@ -54,5 +54,42 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do
it_behaves_like ':after condition'
end
end
+
+ context 'when ordering by LOWER' do
+ let(:arel_table) { Project.arel_table }
+ let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
+ let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
+
+ context 'when :after' do
+ it 'generates sql' do
+ expected_sql = <<~SQL
+ (
+ LOWER("projects"."name") IS NULL
+ AND
+ "projects"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ (
+ LOWER("projects"."name") IS NULL
+ AND
+ "projects"."id" > 500
+ )
+ OR (LOWER("projects"."name") IS NOT NULL)
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
index 17ddcaefeeb..eb823fc0122 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
@@ -37,6 +37,20 @@ describe Gitlab::Graphql::Connections::Keyset::OrderInfo do
expect(order_list.count).to eq 1
end
end
+
+ context 'when order contains LOWER' do
+ let(:relation) { Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(:id) }
+
+ it 'does not ignore the SQL order' do
+ expect(order_list.count).to eq 2
+ expect(order_list.first.attribute_name).to eq 'name'
+ expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::NamedFunction)
+ expect(order_list.first.named_function.to_sql).to eq 'LOWER("projects"."name")'
+ expect(order_list.first.operator_for(:after)).to eq '>'
+ expect(order_list.last.attribute_name).to eq 'id'
+ expect(order_list.last.operator_for(:after)).to eq '>'
+ end
+ end
end
describe '#validate_ordering' do
diff --git a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
index 7ebf5da264d..b46ce4bf023 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
@@ -101,5 +101,35 @@ describe Gitlab::Graphql::Connections::Keyset::QueryBuilder do
end
end
end
+
+ context 'when sorting using LOWER' do
+ let(:relation) { Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(:id) }
+ let(:arel_table) { Project.arel_table }
+ let(:decoded_cursor) { { 'name' => 'Test', 'id' => 100 } }
+
+ context 'when no values are nil' do
+ context 'when :after' do
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '(LOWER("projects"."name") > \'test\')'
+ expect(conditions).to include '"projects"."id" > 100'
+ expect(conditions).to include 'OR (LOWER("projects"."name") IS NULL)'
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '(LOWER("projects"."name") < \'test\')'
+ expect(conditions).to include '"projects"."id" < 100'
+ expect(conditions).to include 'LOWER("projects"."name") = \'test\''
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index c899217d164..f4d3c9e613e 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -652,10 +652,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
setup_import_export_config('light')
end
- it 'does not import any templated services' do
+ it 'does not import any instance-level services' do
expect(restored_project_json).to eq(true)
- expect(project.services.where(template: true).count).to eq(0)
+ expect(project.services.where(instance: true).count).to eq(0)
end
it 'imports labels' do
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index ceed750253d..86bf5710635 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -452,7 +452,7 @@ Service:
- updated_at
- active
- properties
-- template
+- instance
- push_events
- issues_events
- commit_events
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 9a49d334f52..8e9a816ba6a 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -18,7 +18,7 @@ describe Gitlab::UsageData do
create(:service, project: projects[1], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'MattermostService', active: false)
- create(:service, project: projects[2], type: 'MattermostService', active: true, template: true)
+ create(:service, project: projects[2], type: 'MattermostService', active: true, instance: true)
create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
diff --git a/spec/migrations/fix_projects_without_project_feature_spec.rb b/spec/migrations/fix_projects_without_project_feature_spec.rb
new file mode 100644
index 00000000000..6e0345da078
--- /dev/null
+++ b/spec/migrations/fix_projects_without_project_feature_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200127111840_fix_projects_without_project_feature.rb')
+
+describe FixProjectsWithoutProjectFeature, :migration do
+ let(:namespace) { table(:namespaces).create(name: 'gitlab', path: 'gitlab-org') }
+
+ let!(:projects) do
+ [
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 1'),
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 2'),
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 3')
+ ]
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ around do |example|
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ example.call
+ end
+ end
+ end
+
+ it 'schedules jobs for ranges of projects' do
+ migrate!
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, projects[0].id, projects[1].id)
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, projects[2].id, projects[2].id)
+ end
+
+ it 'schedules jobs according to the configured batch size' do
+ expect { migrate! }.to change { BackgroundMigrationWorker.jobs.size }.by(2)
+ end
+end
diff --git a/spec/migrations/migrate_propagate_service_template_sidekiq_queue_spec.rb b/spec/migrations/migrate_propagate_service_template_sidekiq_queue_spec.rb
new file mode 100644
index 00000000000..2fffe638117
--- /dev/null
+++ b/spec/migrations/migrate_propagate_service_template_sidekiq_queue_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200206111847_migrate_propagate_service_template_sidekiq_queue.rb')
+
+describe MigratePropagateServiceTemplateSidekiqQueue, :sidekiq, :redis do
+ include Gitlab::Database::MigrationHelpers
+ include StubWorker
+
+ context 'when there are jobs in the queue' do
+ it 'correctly migrates queue when migrating up' do
+ Sidekiq::Testing.disable! do
+ stub_worker(queue: 'propagate_service_template').perform_async('Something', [1])
+ stub_worker(queue: 'propagate_instance_level_service').perform_async('Something', [1])
+
+ described_class.new.up
+
+ expect(sidekiq_queue_length('propagate_service_template')).to eq 0
+ expect(sidekiq_queue_length('propagate_instance_level_service')).to eq 2
+ end
+ end
+ end
+
+ context 'when there are no jobs in the queues' do
+ it 'does not raise error when migrating up' do
+ expect { described_class.new.up }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/models/concerns/sortable_spec.rb b/spec/models/concerns/sortable_spec.rb
index 184f7986a6f..18ac4d19938 100644
--- a/spec/models/concerns/sortable_spec.rb
+++ b/spec/models/concerns/sortable_spec.rb
@@ -4,17 +4,18 @@ require 'spec_helper'
describe Sortable do
describe '.order_by' do
+ let(:arel_table) { Group.arel_table }
let(:relation) { Group.all }
describe 'ordering by id' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(id: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['id'].asc)
relation.order_by('id_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(id: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['id'].desc)
relation.order_by('id_desc')
end
@@ -22,19 +23,19 @@ describe Sortable do
describe 'ordering by created day' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(created_at: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].asc)
relation.order_by('created_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(created_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].desc)
relation.order_by('created_desc')
end
it 'order by "date"' do
- expect(relation).to receive(:reorder).with(created_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].desc)
relation.order_by('created_date')
end
@@ -66,13 +67,13 @@ describe Sortable do
describe 'ordering by Updated Time' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(updated_at: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['updated_at'].asc)
relation.order_by('updated_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(updated_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['updated_at'].desc)
relation.order_by('updated_desc')
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index ae4db1c2158..924cc7169ea 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -279,6 +279,12 @@ describe Project do
end
end
+ it 'validates presence of project_feature' do
+ project = build(:project, project_feature: nil)
+
+ expect(project).not_to be_valid
+ end
+
describe 'import_url' do
it 'does not allow an invalid URI as import_url' do
project = build(:project, import_url: 'invalid://')
@@ -2694,16 +2700,44 @@ describe Project do
describe '#all_lfs_objects' do
let(:lfs_object) { create(:lfs_object) }
- before do
- project.lfs_objects << lfs_object
+ context 'when LFS object is only associated to the source' do
+ before do
+ project.lfs_objects << lfs_object
+ end
+
+ it 'returns the lfs object for a project' do
+ expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
+
+ it 'returns the lfs object for a fork' do
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
- it 'returns the lfs object for a project' do
- expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ context 'when LFS object is only associated to the fork' do
+ before do
+ forked_project.lfs_objects << lfs_object
+ end
+
+ it 'returns nothing' do
+ expect(project.all_lfs_objects).to be_empty
+ end
+
+ it 'returns the lfs object for a fork' do
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
- it 'returns the lfs object for a fork' do
- expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ context 'when LFS object is associated to both source and fork' do
+ before do
+ project.lfs_objects << lfs_object
+ forked_project.lfs_objects << lfs_object
+ end
+
+ it 'returns the lfs object for the source and fork' do
+ expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
end
end
@@ -5519,6 +5553,31 @@ describe Project do
end
end
+ describe '#lfs_objects_oids' do
+ let(:project) { create(:project) }
+ let(:lfs_object) { create(:lfs_object) }
+ let(:another_lfs_object) { create(:lfs_object) }
+
+ subject { project.lfs_objects_oids }
+
+ context 'when project has associated LFS objects' do
+ before do
+ create(:lfs_objects_project, lfs_object: lfs_object, project: project)
+ create(:lfs_objects_project, lfs_object: another_lfs_object, project: project)
+ end
+
+ it 'returns OIDs of LFS objects' do
+ expect(subject).to match_array([lfs_object.oid, another_lfs_object.oid])
+ end
+ end
+
+ context 'when project has no associated LFS objects' do
+ it 'returns empty array' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
def rugged_config
rugged_repo(project.repository).config
end
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index f58bcbebd67..df2ed4911ec 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -97,23 +97,23 @@ describe Service do
end
end
- describe "Template" do
+ describe "Instance" do
let(:project) { create(:project) }
- describe '.build_from_template' do
- context 'when template is invalid' do
- it 'sets service template to inactive when template is invalid' do
- template = build(:prometheus_service, template: true, active: true, properties: {})
- template.save(validate: false)
+ describe '.build_from_instance' do
+ context 'when instance level integration is invalid' do
+ it 'sets instance level integration to inactive when instance is invalid' do
+ instance = build(:prometheus_service, instance: true, active: true, properties: {})
+ instance.save(validate: false)
- service = described_class.build_from_template(project.id, template)
+ service = described_class.build_from_instance(project.id, instance)
expect(service).to be_valid
expect(service.active).to be false
end
end
- describe 'build issue tracker from a template' do
+ describe 'build issue tracker from a instance level integration' do
let(:title) { 'custom title' }
let(:description) { 'custom description' }
let(:url) { 'http://jira.example.com' }
@@ -127,9 +127,9 @@ describe Service do
}
end
- shared_examples 'service creation from a template' do
+ shared_examples 'integration creation from instance level' do
it 'creates a correct service' do
- service = described_class.build_from_template(project.id, template)
+ service = described_class.build_from_instance(project.id, instance_level_integration)
expect(service).to be_active
expect(service.title).to eq(title)
@@ -144,38 +144,38 @@ describe Service do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
context 'when data are stored in properties' do
let(:properties) { data_params.merge(title: title, description: description) }
- let!(:template) do
- create(:jira_service, :without_properties_callback, template: true, properties: properties.merge(additional: 'something'))
+ let!(:instance_level_integration) do
+ create(:jira_service, :without_properties_callback, instance: true, properties: properties.merge(additional: 'something'))
end
- it_behaves_like 'service creation from a template'
+ it_behaves_like 'integration creation from instance level'
end
context 'when data are stored in separated fields' do
- let(:template) do
- create(:jira_service, data_params.merge(properties: {}, title: title, description: description, template: true))
+ let(:instance_level_integration) do
+ create(:jira_service, data_params.merge(properties: {}, title: title, description: description, instance: true))
end
- it_behaves_like 'service creation from a template'
+ it_behaves_like 'integration creation from instance level'
end
context 'when data are stored in both properties and separated fields' do
let(:properties) { data_params.merge(title: title, description: description) }
- let(:template) do
- create(:jira_service, :without_properties_callback, active: true, template: true, properties: properties).tap do |service|
+ let(:instance_level_integration) do
+ create(:jira_service, :without_properties_callback, active: true, instance: true, properties: properties).tap do |service|
create(:jira_tracker_data, data_params.merge(service: service))
end
end
- it_behaves_like 'service creation from a template'
+ it_behaves_like 'integration creation from instance level'
end
end
end
describe "for pushover service" do
- let!(:service_template) do
+ let!(:instance_level_integration) do
PushoverService.create(
- template: true,
+ instance: true,
properties: {
device: 'MyDevice',
sound: 'mic',
@@ -188,7 +188,7 @@ describe Service do
it "has all fields prefilled" do
service = project.find_or_initialize_service('pushover')
- expect(service.template).to eq(false)
+ expect(service.instance).to eq(false)
expect(service.device).to eq('MyDevice')
expect(service.sound).to eq('mic')
expect(service.priority).to eq(4)
@@ -391,14 +391,6 @@ describe Service do
end
end
- describe '.find_by_template' do
- let!(:service) { create(:service, template: true) }
-
- it 'returns service template' do
- expect(described_class.find_by_template).to eq(service)
- end
- end
-
describe '#api_field_names' do
let(:fake_service) do
Class.new(Service) do
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 26fa3803651..af191172d33 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -467,7 +467,7 @@ describe ProjectPresenter do
expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
is_link: false,
- label: a_string_including('Kubernetes configured'),
+ label: a_string_including('Kubernetes'),
link: presenter.project_cluster_path(project, cluster)
)
end
@@ -480,7 +480,7 @@ describe ProjectPresenter do
expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
is_link: false,
- label: a_string_including('Kubernetes configured'),
+ label: a_string_including('Kubernetes'),
link: presenter.project_clusters_path(project)
)
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 245a8aa4905..427a361295c 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1492,7 +1492,7 @@ describe API::MergeRequests do
end
end
- context 'forked projects' do
+ context 'forked projects', :sidekiq_might_not_need_inline do
let!(:user2) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let!(:forked_project) { fork_project(project, user2, repository: true) }
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 0e02c0f001b..4e21c08ad5c 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -1193,8 +1193,8 @@ describe 'Git LFS API and storage' do
it_behaves_like 'LFS http 200 response'
- it 'LFS object is linked to the source project' do
- expect(lfs_object.projects.pluck(:id)).to include(upstream_project.id)
+ it 'LFS object is linked to the forked project' do
+ expect(lfs_object.projects.pluck(:id)).to include(project.id)
end
end
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 3db1471bf3c..8490127058c 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -483,6 +483,14 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect(merge_request).to be_persisted
end
+ it 'calls MergeRequests::LinkLfsObjectsService#execute', :sidekiq_might_not_need_inline do
+ expect_next_instance_of(MergeRequests::LinkLfsObjectsService) do |service|
+ expect(service).to receive(:execute).with(instance_of(MergeRequest))
+ end
+
+ described_class.new(project, user, opts).execute
+ end
+
it 'does not create the merge request when the target project is archived' do
target_project.update!(archived: true)
diff --git a/spec/services/merge_requests/link_lfs_objects_service_spec.rb b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
new file mode 100644
index 00000000000..f07cf13e4f2
--- /dev/null
+++ b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::LinkLfsObjectsService, :sidekiq_inline do
+ include ProjectForksHelper
+ include RepoHelpers
+
+ let(:target_project) { create(:project, :public, :repository) }
+
+ let(:merge_request) do
+ create(
+ :merge_request,
+ target_project: target_project,
+ target_branch: 'lfs',
+ source_project: source_project,
+ source_branch: 'link-lfs-objects'
+ )
+ end
+
+ subject { described_class.new(target_project) }
+
+ shared_examples_for 'linking LFS objects' do
+ context 'when source project is the same as target project' do
+ let(:source_project) { target_project }
+
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+
+ context 'when source project is different from target project' do
+ let(:user) { create(:user) }
+ let(:source_project) { fork_project(target_project, user, namespace: user.namespace, repository: true) }
+
+ before do
+ create_branch(source_project, 'link-lfs-objects', 'lfs')
+ end
+
+ context 'and there are changes' do
+ before do
+ allow(source_project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ context 'and there are LFS objects added' do
+ before do
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'one.lfs', 'One')
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'two.lfs', 'Two')
+ end
+
+ it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of LFS objects in merge request' do
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).with(%w[
+ 8b12507783d5becacbf2ebe5b01a60024d8728a8f86dcc818bce699e8b3320bc
+ 94a72c074cfe574742c9e99e863322f73feff82981d065ff65a0308f44f19f62
+ ])
+ end
+
+ execute
+ end
+ end
+
+ context 'but there are no LFS objects added' do
+ before do
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'one.txt', 'One')
+ end
+
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+ end
+
+ context 'and there are no changes' do
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+ end
+ end
+
+ context 'when no oldrev and newrev passed' do
+ let(:execute) { subject.execute(merge_request) }
+
+ it_behaves_like 'linking LFS objects'
+ end
+
+ context 'when oldrev and newrev are passed' do
+ let(:execute) { subject.execute(merge_request, oldrev: merge_request.diff_base_sha, newrev: merge_request.diff_head_sha) }
+
+ it_behaves_like 'linking LFS objects'
+ end
+
+ def create_branch(project, new_name, branch_name)
+ ::Branches::CreateService.new(project, user).execute(new_name, branch_name)
+ end
+end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 1ba216e8ff1..b67779a912d 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -384,6 +384,14 @@ describe MergeRequests::RefreshService do
end
context 'open fork merge request' do
+ it 'calls MergeRequests::LinkLfsObjectsService#execute' do
+ expect_next_instance_of(MergeRequests::LinkLfsObjectsService) do |svc|
+ expect(svc).to receive(:execute).with(@fork_merge_request, oldrev: @oldrev, newrev: @newrev)
+ end
+
+ refresh
+ end
+
it 'executes hooks with update action' do
refresh
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index bce3f72a287..24781ac86be 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -15,7 +15,7 @@ describe Projects::CreateService, '#execute' do
}
end
- it 'creates labels on Project creation if there are templates' do
+ it 'creates labels on Project creation if there are instance level services' do
Label.create(title: "bug", template: true)
project = create_project(user, opts)
@@ -90,7 +90,7 @@ describe Projects::CreateService, '#execute' do
end
it 'sets invalid service as inactive' do
- create(:service, type: 'JiraService', project: nil, template: true, active: true)
+ create(:service, type: 'JiraService', project: nil, instance: true, active: true)
project = create_project(user, opts)
service = project.services.first
@@ -336,22 +336,22 @@ describe Projects::CreateService, '#execute' do
end
end
- context 'when there is an active service template' do
+ context 'when there is an active instance level service' do
before do
- create(:service, project: nil, template: true, active: true)
+ create(:service, project: nil, instance: true, active: true)
end
- it 'creates a service from this template' do
+ it 'creates a service from an instance level service' do
project = create_project(user, opts)
expect(project.services.count).to eq 1
end
end
- context 'when a bad service template is created' do
+ context 'when a bad instance level service is created' do
it 'sets service to be inactive' do
opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-foss'
- create(:service, type: 'DroneCiService', project: nil, template: true, active: true)
+ create(:service, type: 'DroneCiService', project: nil, instance: true, active: true)
project = create_project(user, opts)
service = project.services.first
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index e7b904fcd60..e14f1abf018 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -375,14 +375,6 @@ describe Projects::ForkService do
expect(fork_from_project.forks_count).to eq(1)
end
- it 'leaves no LFS objects dangling' do
- create(:lfs_objects_project, project: fork_to_project)
-
- expect { subject.execute(fork_to_project) }
- .to change { fork_to_project.lfs_objects_projects.count }
- .to(0)
- end
-
context 'if the fork is not allowed' do
let(:fork_from_project) { create(:project, :private) }
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 970e82e7107..21a139cdf3c 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -48,10 +48,11 @@ describe Projects::LfsPointers::LfsDownloadService do
end
shared_examples 'lfs object is created' do
- it do
+ it 'creates and associates the LFS object with the project' do
expect(subject).to receive(:download_and_save_file!).and_call_original
expect { subject.execute }.to change { LfsObject.count }.by(1)
+ expect(LfsObject.first.projects).to include(project)
end
it 'returns success result' do
diff --git a/spec/services/projects/propagate_service_template_spec.rb b/spec/services/projects/propagate_instance_level_service_spec.rb
index 2c3effec617..a842842a010 100644
--- a/spec/services/projects/propagate_service_template_spec.rb
+++ b/spec/services/projects/propagate_instance_level_service_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-describe Projects::PropagateServiceTemplate do
+describe Projects::PropagateInstanceLevelService do
describe '.propagate' do
- let!(:service_template) do
+ let!(:instance_level_integration) do
PushoverService.create(
- template: true,
+ instance: true,
active: true,
properties: {
device: 'MyDevice',
@@ -22,14 +22,14 @@ describe Projects::PropagateServiceTemplate do
it 'creates services for projects' do
expect(project.pushover_service).to be_nil
- described_class.propagate(service_template)
+ described_class.propagate(instance_level_integration)
expect(project.reload.pushover_service).to be_present
end
it 'creates services for a project that has another service' do
BambooService.create(
- template: true,
+ instance: true,
active: true,
project: project,
properties: {
@@ -42,14 +42,14 @@ describe Projects::PropagateServiceTemplate do
expect(project.pushover_service).to be_nil
- described_class.propagate(service_template)
+ described_class.propagate(instance_level_integration)
expect(project.reload.pushover_service).to be_present
end
it 'does not create the service if it exists already' do
other_service = BambooService.create(
- template: true,
+ instance: true,
active: true,
properties: {
bamboo_url: 'http://gitlab.com',
@@ -59,17 +59,17 @@ describe Projects::PropagateServiceTemplate do
}
)
- Service.build_from_template(project.id, service_template).save!
- Service.build_from_template(project.id, other_service).save!
+ Service.build_from_instance(project.id, instance_level_integration).save!
+ Service.build_from_instance(project.id, other_service).save!
- expect { described_class.propagate(service_template) }
+ expect { described_class.propagate(instance_level_integration) }
.not_to change { Service.count }
end
- it 'creates the service containing the template attributes' do
- described_class.propagate(service_template)
+ it 'creates the service containing the instance attributes' do
+ described_class.propagate(instance_level_integration)
- expect(project.pushover_service.properties).to eq(service_template.properties)
+ expect(project.pushover_service.properties).to eq(instance_level_integration.properties)
end
describe 'bulk update', :use_sql_query_cache do
@@ -80,7 +80,7 @@ describe Projects::PropagateServiceTemplate do
project_total.times { create(:project) }
- described_class.propagate(service_template)
+ described_class.propagate(instance_level_integration)
end
it 'creates services for all projects' do
@@ -90,18 +90,18 @@ describe Projects::PropagateServiceTemplate do
describe 'external tracker' do
it 'updates the project external tracker' do
- service_template.update!(category: 'issue_tracker', default: false)
+ instance_level_integration.update!(category: 'issue_tracker', default: false)
- expect { described_class.propagate(service_template) }
+ expect { described_class.propagate(instance_level_integration) }
.to change { project.reload.has_external_issue_tracker }.to(true)
end
end
describe 'external wiki' do
it 'updates the project external tracker' do
- service_template.update!(type: 'ExternalWikiService')
+ instance_level_integration.update!(type: 'ExternalWikiService')
- expect { described_class.propagate(service_template) }
+ expect { described_class.propagate(instance_level_integration) }
.to change { project.reload.has_external_wiki }.to(true)
end
end
diff --git a/spec/workers/propagate_instance_level_service_worker_spec.rb b/spec/workers/propagate_instance_level_service_worker_spec.rb
new file mode 100644
index 00000000000..6552b198181
--- /dev/null
+++ b/spec/workers/propagate_instance_level_service_worker_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PropagateInstanceLevelServiceWorker do
+ include ExclusiveLeaseHelpers
+
+ describe '#perform' do
+ it 'calls the propagate service with the instance level service' do
+ instance_level_service = PushoverService.create(
+ instance: true,
+ active: true,
+ properties: {
+ device: 'MyDevice',
+ sound: 'mic',
+ priority: 4,
+ user_key: 'asdf',
+ api_key: '123456789'
+ })
+
+ stub_exclusive_lease("propagate_instance_level_service_worker:#{instance_level_service.id}",
+ timeout: PropagateInstanceLevelServiceWorker::LEASE_TIMEOUT)
+
+ expect(Projects::PropagateInstanceLevelService)
+ .to receive(:propagate)
+ .with(instance_level_service)
+
+ subject.perform(instance_level_service.id)
+ end
+ end
+end
diff --git a/spec/workers/propagate_service_template_worker_spec.rb b/spec/workers/propagate_service_template_worker_spec.rb
deleted file mode 100644
index fb4ced77832..00000000000
--- a/spec/workers/propagate_service_template_worker_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe PropagateServiceTemplateWorker do
- include ExclusiveLeaseHelpers
-
- describe '#perform' do
- it 'calls the propagate service with the template' do
- template = PushoverService.create(
- template: true,
- active: true,
- properties: {
- device: 'MyDevice',
- sound: 'mic',
- priority: 4,
- user_key: 'asdf',
- api_key: '123456789'
- })
-
- stub_exclusive_lease("propagate_service_template_worker:#{template.id}",
- timeout: PropagateServiceTemplateWorker::LEASE_TIMEOUT)
-
- expect(Projects::PropagateServiceTemplate)
- .to receive(:propagate)
- .with(template)
-
- subject.perform(template.id)
- end
- end
-end
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 26fd67adfaa..01104049404 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -72,13 +72,33 @@ describe RepositoryForkWorker do
perform!
end
- it "handles bad fork" do
- error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}"
+ it 'handles bad fork' do
+ error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Failed to create fork repository"
expect_fork_repository.and_return(false)
expect { perform! }.to raise_error(StandardError, error_message)
end
+
+ it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of source project LFS objects' do
+ expect_fork_repository.and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).with(project.lfs_objects_oids)
+ end
+
+ perform!
+ end
+
+ it "handles LFS objects link failure" do
+ error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Source project has too many LFS objects"
+
+ expect_fork_repository.and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).and_raise(Projects::LfsPointers::LfsLinkService::TooManyOidsError)
+ end
+
+ expect { perform! }.to raise_error(StandardError, error_message)
+ end
end
context 'only project ID passed' do