gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2021-08-04 21:09:57 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-08-04 21:09:57 +0300
commit    f5a72705e46f835812ffcc51658eecb08fbdf050
tree      9b322ce9c0454759d5b669be56e603a481791388
parent    23c4d0c3e1ea30be08b597a961fc91773f60309f
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  app/assets/javascripts/api/analytics_api.js | 37
-rw-r--r--  app/assets/javascripts/cycle_analytics/components/base.vue | 25
-rw-r--r--  app/assets/javascripts/cycle_analytics/components/stage_table.vue | 4
-rw-r--r--  app/assets/javascripts/cycle_analytics/index.js | 8
-rw-r--r--  app/assets/javascripts/cycle_analytics/store/actions.js | 73
-rw-r--r--  app/assets/javascripts/cycle_analytics/store/getters.js | 15
-rw-r--r--  app/assets/javascripts/cycle_analytics/store/mutations.js | 29
-rw-r--r--  app/assets/javascripts/cycle_analytics/store/state.js | 7
-rw-r--r--  app/assets/javascripts/cycle_analytics/utils.js | 4
-rw-r--r--  app/assets/javascripts/jira_connect/branches/components/new_branch_form.vue | 105
-rw-r--r--  app/assets/javascripts/jira_connect/branches/constants.js | 15
-rw-r--r--  app/assets/javascripts/jira_connect/branches/index.js | 14
-rw-r--r--  app/assets/javascripts/jira_connect/branches/pages/index.vue | 60
-rw-r--r--  app/assets/javascripts/repository/components/delete_blob_modal.vue | 65
-rw-r--r--  app/controllers/jira_connect/branches_controller.rb | 21
-rw-r--r--  app/models/ci/runner_namespace.rb | 1
-rw-r--r--  app/models/ci/runner_project.rb | 1
-rw-r--r--  app/services/import/github_service.rb | 2
-rw-r--r--  app/views/jira_connect/branches/new.html.haml | 2
-rw-r--r--  app/workers/concerns/gitlab/github_import/object_importer.rb | 11
-rw-r--r--  app/workers/concerns/gitlab/github_import/queue.rb | 2
-rw-r--r--  app/workers/concerns/gitlab/github_import/stage_methods.rb | 11
-rw-r--r--  app/workers/merge_request_mergeability_check_worker.rb | 4
-rw-r--r--  config/feature_flags/development/ci_runner_limits_override.yml | 8
-rw-r--r--  db/migrate/20210726202748_add_vulnerability_severities_into_approval_project_rules.rb | 11
-rw-r--r--  db/post_migrate/20210727113447_backfill_integrations_type_new.rb | 23
-rw-r--r--  db/schema_migrations/20210726202748 | 1
-rw-r--r--  db/schema_migrations/20210727113447 | 1
-rw-r--r--  db/structure.sql | 3
-rw-r--r--  doc/administration/packages/container_registry.md | 3
-rw-r--r--  doc/api/container_registry.md | 49
-rw-r--r--  doc/development/documentation/index.md | 2
-rw-r--r--  doc/development/github_importer.md | 1
-rw-r--r--  doc/subscriptions/self_managed/index.md | 11
-rw-r--r--  doc/user/packages/container_registry/index.md | 26
-rw-r--r--  doc/user/permissions.md | 6
-rw-r--r--  lib/gitlab/auth/result.rb | 16
-rw-r--r--  lib/gitlab/background_migration/backfill_integrations_type_new.rb | 62
-rw-r--r--  lib/gitlab/github_import/logger.rb | 11
-rw-r--r--  lib/gitlab/github_import/parallel_scheduling.rb | 11
-rw-r--r--  lib/gitlab/import/logger.rb | 4
-rw-r--r--  lib/gitlab/integrations/sti_type.rb | 2
-rw-r--r--  lib/gitlab/json_logger.rb | 8
-rw-r--r--  lib/gitlab/middleware/go.rb | 16
-rw-r--r--  locale/gitlab.pot | 3
-rwxr-xr-x  scripts/review_apps/review-apps.sh | 2
-rw-r--r--  spec/controllers/jira_connect/branches_controller_spec.rb | 9
-rw-r--r--  spec/features/cycle_analytics_spec.rb | 4
-rw-r--r--  spec/frontend/cycle_analytics/__snapshots__/base_spec.js.snap | 3
-rw-r--r--  spec/frontend/cycle_analytics/base_spec.js | 63
-rw-r--r--  spec/frontend/cycle_analytics/store/actions_spec.js | 112
-rw-r--r--  spec/frontend/cycle_analytics/store/mutations_spec.js | 92
-rw-r--r--  spec/frontend/jira_connect/branches/components/new_branch_form_spec.js | 26
-rw-r--r--  spec/frontend/jira_connect/branches/pages/index_spec.js | 65
-rw-r--r--  spec/frontend/repository/components/delete_blob_modal_spec.js | 72
-rw-r--r--  spec/lib/banzai/filter/references/project_reference_filter_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/auth/result_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/github_import/logger_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/github_import/parallel_scheduling_spec.rb | 78
-rw-r--r--  spec/lib/gitlab/import/logger_spec.rb | 39
-rw-r--r--  spec/migrations/backfill_integrations_type_new_spec.rb | 38
-rw-r--r--  spec/models/ci/runner_namespace_spec.rb | 6
-rw-r--r--  spec/models/ci/runner_project_spec.rb | 6
-rw-r--r--  spec/requests/api/ci/runner/runners_post_spec.rb | 64
-rw-r--r--  spec/requests/api/ci/runners_spec.rb | 28
-rw-r--r--  spec/support/matchers/background_migrations_matchers.rb | 30
-rw-r--r--  spec/workers/concerns/gitlab/github_import/object_importer_spec.rb | 111
-rw-r--r--  spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb | 68
-rw-r--r--  spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb | 27
-rw-r--r--  spec/workers/merge_request_mergeability_check_worker_spec.rb | 20
-rw-r--r--  workhorse/internal/api/api.go | 8
-rw-r--r--  workhorse/internal/api/api_test.go | 6
-rw-r--r--  workhorse/internal/upstream/upstream.go | 73
-rw-r--r--  workhorse/internal/upstream/upstream_test.go | 15
76 files changed, 1410 insertions(+), 629 deletions(-)
diff --git a/app/assets/javascripts/api/analytics_api.js b/app/assets/javascripts/api/analytics_api.js
index fd9b0160b0d..11786f6c365 100644
--- a/app/assets/javascripts/api/analytics_api.js
+++ b/app/assets/javascripts/api/analytics_api.js
@@ -1,33 +1,32 @@
import axios from '~/lib/utils/axios_utils';
import { buildApiUrl } from './api_utils';
-const GROUP_VSA_PATH_BASE =
- '/groups/:id/-/analytics/value_stream_analytics/value_streams/:value_stream_id/stages/:stage_id';
-const PROJECT_VSA_PATH_BASE = '/:project_path/-/analytics/value_stream_analytics/value_streams';
+const PROJECT_VSA_PATH_BASE = '/:request_path/-/analytics/value_stream_analytics/value_streams';
const PROJECT_VSA_STAGES_PATH = `${PROJECT_VSA_PATH_BASE}/:value_stream_id/stages`;
+const PROJECT_VSA_STAGE_DATA_PATH = `${PROJECT_VSA_STAGES_PATH}/:stage_id`;
-const buildProjectValueStreamPath = (projectPath, valueStreamId = null) => {
+const buildProjectValueStreamPath = (requestPath, valueStreamId = null) => {
if (valueStreamId) {
return buildApiUrl(PROJECT_VSA_STAGES_PATH)
- .replace(':project_path', projectPath)
+ .replace(':request_path', requestPath)
.replace(':value_stream_id', valueStreamId);
}
- return buildApiUrl(PROJECT_VSA_PATH_BASE).replace(':project_path', projectPath);
+ return buildApiUrl(PROJECT_VSA_PATH_BASE).replace(':request_path', requestPath);
};
-const buildGroupValueStreamPath = ({ groupId, valueStreamId = null, stageId = null }) =>
- buildApiUrl(GROUP_VSA_PATH_BASE)
- .replace(':id', groupId)
+const buildValueStreamStageDataPath = ({ requestPath, valueStreamId = null, stageId = null }) =>
+ buildApiUrl(PROJECT_VSA_STAGE_DATA_PATH)
+ .replace(':request_path', requestPath)
.replace(':value_stream_id', valueStreamId)
.replace(':stage_id', stageId);
-export const getProjectValueStreams = (projectPath) => {
- const url = buildProjectValueStreamPath(projectPath);
+export const getProjectValueStreams = (requestPath) => {
+ const url = buildProjectValueStreamPath(requestPath);
return axios.get(url);
};
-export const getProjectValueStreamStages = (projectPath, valueStreamId) => {
- const url = buildProjectValueStreamPath(projectPath, valueStreamId);
+export const getProjectValueStreamStages = (requestPath, valueStreamId) => {
+ const url = buildProjectValueStreamPath(requestPath, valueStreamId);
return axios.get(url);
};
@@ -45,7 +44,15 @@ export const getProjectValueStreamMetrics = (requestPath, params) =>
* When used for project level VSA, requests should include the `project_id` in the params object
*/
-export const getValueStreamStageMedian = ({ groupId, valueStreamId, stageId }, params = {}) => {
- const stageBase = buildGroupValueStreamPath({ groupId, valueStreamId, stageId });
+export const getValueStreamStageMedian = ({ requestPath, valueStreamId, stageId }, params = {}) => {
+ const stageBase = buildValueStreamStageDataPath({ requestPath, valueStreamId, stageId });
return axios.get(`${stageBase}/median`, { params });
};
+
+export const getValueStreamStageRecords = (
+ { requestPath, valueStreamId, stageId },
+ params = {},
+) => {
+ const stageBase = buildValueStreamStageDataPath({ requestPath, valueStreamId, stageId });
+ return axios.get(`${stageBase}/records`, { params });
+};
diff --git a/app/assets/javascripts/cycle_analytics/components/base.vue b/app/assets/javascripts/cycle_analytics/components/base.vue
index e637bd0d819..0dc221abb61 100644
--- a/app/assets/javascripts/cycle_analytics/components/base.vue
+++ b/app/assets/javascripts/cycle_analytics/components/base.vue
@@ -42,7 +42,7 @@ export default {
'selectedStageError',
'stages',
'summary',
- 'startDate',
+ 'daysInPast',
'permissions',
]),
...mapGetters(['pathNavigationData']),
@@ -51,13 +51,15 @@ export default {
return selectedStageEvents.length && !isLoadingStage && !isEmptyStage;
},
displayNotEnoughData() {
- return this.selectedStageReady && this.isEmptyStage;
+ return !this.isLoadingStage && this.isEmptyStage;
},
displayNoAccess() {
- return this.selectedStageReady && !this.isUserAllowed(this.selectedStage.id);
+ return (
+ !this.isLoadingStage && this.selectedStage?.id && !this.isUserAllowed(this.selectedStage.id)
+ );
},
- selectedStageReady() {
- return !this.isLoadingStage && this.selectedStage;
+ displayPathNavigation() {
+ return this.isLoading || (this.selectedStage && this.pathNavigationData.length);
},
emptyStageTitle() {
if (this.displayNoAccess) {
@@ -83,8 +85,8 @@ export default {
'setSelectedStage',
'setDateRange',
]),
- handleDateSelect(startDate) {
- this.setDateRange({ startDate });
+ handleDateSelect(daysInPast) {
+ this.setDateRange(daysInPast);
},
onSelectStage(stage) {
this.setSelectedStage(stage);
@@ -101,15 +103,18 @@ export default {
dayRangeOptions: [7, 30, 90],
i18n: {
dropdownText: __('Last %{days} days'),
+ pageTitle: __('Value Stream Analytics'),
+ recentActivity: __('Recent Project Activity'),
},
};
</script>
<template>
<div class="cycle-analytics">
+ <h3>{{ $options.i18n.pageTitle }}</h3>
<path-navigation
- v-if="selectedStageReady"
+ v-if="displayPathNavigation"
class="js-path-navigation gl-w-full gl-pb-2"
- :loading="isLoading"
+ :loading="isLoading || isLoadingStage"
:stages="pathNavigationData"
:selected-stage="selectedStage"
:with-stage-counts="false"
@@ -135,7 +140,7 @@ export default {
<button class="dropdown-menu-toggle" data-toggle="dropdown" type="button">
<span class="dropdown-label">
<gl-sprintf :message="$options.i18n.dropdownText">
- <template #days>{{ startDate }}</template>
+ <template #days>{{ daysInPast }}</template>
</gl-sprintf>
<gl-icon name="chevron-down" class="dropdown-menu-toggle-icon gl-top-3" />
</span>
diff --git a/app/assets/javascripts/cycle_analytics/components/stage_table.vue b/app/assets/javascripts/cycle_analytics/components/stage_table.vue
index 2e225d90f9c..7b31e8d902d 100644
--- a/app/assets/javascripts/cycle_analytics/components/stage_table.vue
+++ b/app/assets/javascripts/cycle_analytics/components/stage_table.vue
@@ -52,7 +52,7 @@ export default {
selectedStage: {
type: Object,
required: false,
- default: () => ({ custom: false }),
+ default: () => ({}),
},
isLoading: {
type: Boolean,
@@ -102,7 +102,7 @@ export default {
},
computed: {
isEmptyStage() {
- return !this.stageEvents.length;
+ return !this.selectedStage || !this.stageEvents.length;
},
emptyStateTitleText() {
return this.emptyStateTitle || NOT_ENOUGH_DATA_ERROR;
diff --git a/app/assets/javascripts/cycle_analytics/index.js b/app/assets/javascripts/cycle_analytics/index.js
index 615f96c3860..cce2edb2447 100644
--- a/app/assets/javascripts/cycle_analytics/index.js
+++ b/app/assets/javascripts/cycle_analytics/index.js
@@ -20,11 +20,9 @@ export default () => {
store.dispatch('initializeVsa', {
projectId: parseInt(projectId, 10),
groupPath,
- requestPath,
- fullPath,
- features: {
- cycleAnalyticsForGroups:
- (groupPath && gon?.licensed_features?.cycleAnalyticsForGroups) || false,
+ endpoints: {
+ requestPath,
+ fullPath,
},
});
diff --git a/app/assets/javascripts/cycle_analytics/store/actions.js b/app/assets/javascripts/cycle_analytics/store/actions.js
index 5a7dbbd28bb..fd606109151 100644
--- a/app/assets/javascripts/cycle_analytics/store/actions.js
+++ b/app/assets/javascripts/cycle_analytics/store/actions.js
@@ -1,29 +1,28 @@
import {
getProjectValueStreamStages,
getProjectValueStreams,
- getProjectValueStreamStageData,
getProjectValueStreamMetrics,
getValueStreamStageMedian,
+ getValueStreamStageRecords,
} from '~/api/analytics_api';
import createFlash from '~/flash';
import { __ } from '~/locale';
-import {
- DEFAULT_DAYS_TO_DISPLAY,
- DEFAULT_VALUE_STREAM,
- I18N_VSA_ERROR_STAGE_MEDIAN,
-} from '../constants';
+import { DEFAULT_VALUE_STREAM, I18N_VSA_ERROR_STAGE_MEDIAN } from '../constants';
import * as types from './mutation_types';
export const setSelectedValueStream = ({ commit, dispatch }, valueStream) => {
commit(types.SET_SELECTED_VALUE_STREAM, valueStream);
- return dispatch('fetchValueStreamStages');
+ return Promise.all([dispatch('fetchValueStreamStages'), dispatch('fetchCycleAnalyticsData')]);
};
export const fetchValueStreamStages = ({ commit, state }) => {
- const { fullPath, selectedValueStream } = state;
+ const {
+ endpoints: { fullPath },
+ selectedValueStream: { id },
+ } = state;
commit(types.REQUEST_VALUE_STREAM_STAGES);
- return getProjectValueStreamStages(fullPath, selectedValueStream.id)
+ return getProjectValueStreamStages(fullPath, id)
.then(({ data }) => commit(types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS, data))
.catch(({ response: { status } }) => {
commit(types.RECEIVE_VALUE_STREAM_STAGES_ERROR, status);
@@ -41,16 +40,11 @@ export const receiveValueStreamsSuccess = ({ commit, dispatch }, data = []) => {
export const fetchValueStreams = ({ commit, dispatch, state }) => {
const {
- fullPath,
- features: { cycleAnalyticsForGroups },
+ endpoints: { fullPath },
} = state;
commit(types.REQUEST_VALUE_STREAMS);
- const stageRequests = ['setSelectedStage'];
- if (cycleAnalyticsForGroups) {
- stageRequests.push('fetchStageMedians');
- }
-
+ const stageRequests = ['setSelectedStage', 'fetchStageMedians'];
return getProjectValueStreams(fullPath)
.then(({ data }) => dispatch('receiveValueStreamsSuccess', data))
.then(() => Promise.all(stageRequests.map((r) => dispatch(r))))
@@ -58,9 +52,10 @@ export const fetchValueStreams = ({ commit, dispatch, state }) => {
commit(types.RECEIVE_VALUE_STREAMS_ERROR, status);
});
};
-
export const fetchCycleAnalyticsData = ({
- state: { requestPath },
+ state: {
+ endpoints: { requestPath },
+ },
getters: { legacyFilterParams },
commit,
}) => {
@@ -76,18 +71,10 @@ export const fetchCycleAnalyticsData = ({
});
};
-export const fetchStageData = ({
- state: { requestPath, selectedStage },
- getters: { legacyFilterParams },
- commit,
-}) => {
+export const fetchStageData = ({ getters: { requestParams, filterParams }, commit }) => {
commit(types.REQUEST_STAGE_DATA);
- return getProjectValueStreamStageData({
- requestPath,
- stageId: selectedStage.id,
- params: legacyFilterParams,
- })
+ return getValueStreamStageRecords(requestParams, filterParams)
.then(({ data }) => {
// when there's a query timeout, the request succeeds but the error is encoded in the response data
if (data?.error) {
@@ -134,22 +121,32 @@ export const setSelectedStage = ({ dispatch, commit, state: { stages } }, select
return dispatch('fetchStageData');
};
-const refetchData = (dispatch, commit) => {
- commit(types.SET_LOADING, true);
+export const setLoading = ({ commit }, value) => commit(types.SET_LOADING, value);
+
+const refetchStageData = (dispatch) => {
return Promise.resolve()
- .then(() => dispatch('fetchValueStreams'))
- .then(() => dispatch('fetchCycleAnalyticsData'))
- .finally(() => commit(types.SET_LOADING, false));
+ .then(() => dispatch('setLoading', true))
+ .then(() =>
+ Promise.all([
+ dispatch('fetchCycleAnalyticsData'),
+ dispatch('fetchStageData'),
+ dispatch('fetchStageMedians'),
+ ]),
+ )
+ .finally(() => dispatch('setLoading', false));
};
-export const setFilters = ({ dispatch, commit }) => refetchData(dispatch, commit);
+export const setFilters = ({ dispatch }) => refetchStageData(dispatch);
-export const setDateRange = ({ dispatch, commit }, { startDate = DEFAULT_DAYS_TO_DISPLAY }) => {
- commit(types.SET_DATE_RANGE, { startDate });
- return refetchData(dispatch, commit);
+export const setDateRange = ({ dispatch, commit }, daysInPast) => {
+ commit(types.SET_DATE_RANGE, daysInPast);
+ return refetchStageData(dispatch);
};
export const initializeVsa = ({ commit, dispatch }, initialData = {}) => {
commit(types.INITIALIZE_VSA, initialData);
- return refetchData(dispatch, commit);
+
+ return dispatch('setLoading', true)
+ .then(() => dispatch('fetchValueStreams'))
+ .finally(() => dispatch('setLoading', false));
};
diff --git a/app/assets/javascripts/cycle_analytics/store/getters.js b/app/assets/javascripts/cycle_analytics/store/getters.js
index 66971ea8a2e..9faccabcaad 100644
--- a/app/assets/javascripts/cycle_analytics/store/getters.js
+++ b/app/assets/javascripts/cycle_analytics/store/getters.js
@@ -13,11 +13,11 @@ export const pathNavigationData = ({ stages, medians, stageCounts, selectedStage
export const requestParams = (state) => {
const {
- selectedStage: { id: stageId = null },
- groupPath: groupId,
+ endpoints: { fullPath },
selectedValueStream: { id: valueStreamId },
+ selectedStage: { id: stageId = null },
} = state;
- return { valueStreamId, groupId, stageId };
+ return { requestPath: fullPath, valueStreamId, stageId };
};
const dateRangeParams = ({ createdAfter, createdBefore }) => ({
@@ -25,15 +25,14 @@ const dateRangeParams = ({ createdAfter, createdBefore }) => ({
created_before: createdBefore ? dateFormat(createdBefore, dateFormats.isoDate) : null,
});
-export const legacyFilterParams = ({ startDate }) => {
+export const legacyFilterParams = ({ daysInPast }) => {
return {
- 'cycle_analytics[start_date]': startDate,
+ 'cycle_analytics[start_date]': daysInPast,
};
};
-export const filterParams = ({ id, ...rest }) => {
+export const filterParams = (state) => {
return {
- project_ids: [id],
- ...dateRangeParams(rest),
+ ...dateRangeParams(state),
};
};
diff --git a/app/assets/javascripts/cycle_analytics/store/mutations.js b/app/assets/javascripts/cycle_analytics/store/mutations.js
index 50157cc3618..65035c0ebb8 100644
--- a/app/assets/javascripts/cycle_analytics/store/mutations.js
+++ b/app/assets/javascripts/cycle_analytics/store/mutations.js
@@ -4,15 +4,11 @@ import { decorateData, formatMedianValues, calculateFormattedDayInPast } from '.
import * as types from './mutation_types';
export default {
- [types.INITIALIZE_VSA](state, { requestPath, fullPath, groupPath, projectId, features }) {
- state.requestPath = requestPath;
- state.fullPath = fullPath;
- state.groupPath = groupPath;
- state.id = projectId;
+ [types.INITIALIZE_VSA](state, { endpoints }) {
+ state.endpoints = endpoints;
const { now, past } = calculateFormattedDayInPast(DEFAULT_DAYS_TO_DISPLAY);
state.createdBefore = now;
state.createdAfter = past;
- state.features = features;
},
[types.SET_LOADING](state, loadingState) {
state.isLoading = loadingState;
@@ -23,9 +19,9 @@ export default {
[types.SET_SELECTED_STAGE](state, stage) {
state.selectedStage = stage;
},
- [types.SET_DATE_RANGE](state, { startDate }) {
- state.startDate = startDate;
- const { now, past } = calculateFormattedDayInPast(startDate);
+ [types.SET_DATE_RANGE](state, daysInPast) {
+ state.daysInPast = daysInPast;
+ const { now, past } = calculateFormattedDayInPast(daysInPast);
state.createdBefore = now;
state.createdAfter = past;
},
@@ -50,25 +46,16 @@ export default {
[types.REQUEST_CYCLE_ANALYTICS_DATA](state) {
state.isLoading = true;
state.hasError = false;
- if (!state.features.cycleAnalyticsForGroups) {
- state.medians = {};
- }
},
[types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS](state, data) {
- const { summary, medians } = decorateData(data);
- if (!state.features.cycleAnalyticsForGroups) {
- state.medians = formatMedianValues(medians);
- }
- state.permissions = data.permissions;
+ const { summary } = decorateData(data);
+ state.permissions = data?.permissions || {};
state.summary = summary;
state.hasError = false;
},
[types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR](state) {
state.isLoading = false;
state.hasError = true;
- if (!state.features.cycleAnalyticsForGroups) {
- state.medians = {};
- }
},
[types.REQUEST_STAGE_DATA](state) {
state.isLoadingStage = true;
@@ -76,7 +63,7 @@ export default {
state.selectedStageEvents = [];
state.hasError = false;
},
- [types.RECEIVE_STAGE_DATA_SUCCESS](state, { events = [] }) {
+ [types.RECEIVE_STAGE_DATA_SUCCESS](state, events = []) {
state.isLoadingStage = false;
state.isEmptyStage = !events.length;
state.selectedStageEvents = events.map((ev) =>
diff --git a/app/assets/javascripts/cycle_analytics/store/state.js b/app/assets/javascripts/cycle_analytics/store/state.js
index 4d61077fb99..562b5d0a743 100644
--- a/app/assets/javascripts/cycle_analytics/store/state.js
+++ b/app/assets/javascripts/cycle_analytics/store/state.js
@@ -1,11 +1,9 @@
import { DEFAULT_DAYS_TO_DISPLAY } from '../constants';
export default () => ({
- features: {},
id: null,
- requestPath: '',
- fullPath: '',
- startDate: DEFAULT_DAYS_TO_DISPLAY,
+ endpoints: {},
+ daysInPast: DEFAULT_DAYS_TO_DISPLAY,
createdAfter: null,
createdBefore: null,
stages: [],
@@ -23,5 +21,4 @@ export default () => ({
isLoadingStage: false,
isEmptyStage: false,
permissions: {},
- parentPath: null,
});
diff --git a/app/assets/javascripts/cycle_analytics/utils.js b/app/assets/javascripts/cycle_analytics/utils.js
index 1f72291e97b..c941799a2ed 100644
--- a/app/assets/javascripts/cycle_analytics/utils.js
+++ b/app/assets/javascripts/cycle_analytics/utils.js
@@ -8,13 +8,11 @@ import { parseSeconds } from '~/lib/utils/datetime_utility';
import { s__, sprintf } from '../locale';
const mapToSummary = ({ value, ...rest }) => ({ ...rest, value: value || '-' });
-const mapToMedians = ({ name: id, value }) => ({ id, value });
export const decorateData = (data = {}) => {
- const { stats: stages, summary } = data;
+ const { summary } = data;
return {
summary: summary?.map((item) => mapToSummary(item)) || [],
- medians: stages?.map((item) => mapToMedians(item)) || [],
};
};
diff --git a/app/assets/javascripts/jira_connect/branches/components/new_branch_form.vue b/app/assets/javascripts/jira_connect/branches/components/new_branch_form.vue
index b2cc3a315cc..66fcb8e10eb 100644
--- a/app/assets/javascripts/jira_connect/branches/components/new_branch_form.vue
+++ b/app/assets/javascripts/jira_connect/branches/components/new_branch_form.vue
@@ -3,8 +3,6 @@ import { GlFormGroup, GlButton, GlFormInput, GlForm, GlAlert } from '@gitlab/ui'
import {
CREATE_BRANCH_ERROR_GENERIC,
CREATE_BRANCH_ERROR_WITH_CONTEXT,
- CREATE_BRANCH_SUCCESS_ALERT,
- I18N_NEW_BRANCH_PAGE_TITLE,
I18N_NEW_BRANCH_LABEL_DROPDOWN,
I18N_NEW_BRANCH_LABEL_BRANCH,
I18N_NEW_BRANCH_LABEL_SOURCE,
@@ -19,8 +17,6 @@ const DEFAULT_ALERT_PARAMS = {
title: '',
message: '',
variant: DEFAULT_ALERT_VARIANT,
- primaryButtonLink: '',
- primaryButtonText: '',
};
export default {
@@ -34,13 +30,7 @@ export default {
ProjectDropdown,
SourceBranchDropdown,
},
- props: {
- initialBranchName: {
- type: String,
- required: false,
- default: '',
- },
- },
+ inject: ['initialBranchName'],
data() {
return {
selectedProject: null,
@@ -111,10 +101,7 @@ export default {
message: errors[0],
});
} else {
- this.displayAlert({
- ...CREATE_BRANCH_SUCCESS_ALERT,
- variant: 'success',
- });
+ this.$emit('success');
}
} catch (e) {
this.onError({
@@ -126,7 +113,6 @@ export default {
},
},
i18n: {
- I18N_NEW_BRANCH_PAGE_TITLE,
I18N_NEW_BRANCH_LABEL_DROPDOWN,
I18N_NEW_BRANCH_LABEL_BRANCH,
I18N_NEW_BRANCH_LABEL_SOURCE,
@@ -134,15 +120,8 @@ export default {
},
};
</script>
-
<template>
- <div>
- <div class="gl-border-1 gl-border-b-solid gl-border-gray-100 gl-mb-5 gl-mt-7">
- <h1 class="page-title">
- {{ $options.i18n.I18N_NEW_BRANCH_PAGE_TITLE }}
- </h1>
- </div>
-
+ <gl-form @submit.prevent="onSubmit">
<gl-alert
v-if="showAlert"
class="gl-mb-5"
@@ -152,50 +131,44 @@ export default {
>
{{ alertParams.message }}
</gl-alert>
+ <gl-form-group :label="$options.i18n.I18N_NEW_BRANCH_LABEL_DROPDOWN" label-for="project-select">
+ <project-dropdown
+ id="project-select"
+ :selected-project="selectedProject"
+ @change="onProjectSelect"
+ @error="onError"
+ />
+ </gl-form-group>
- <gl-form @submit.prevent="onSubmit">
- <gl-form-group
- :label="$options.i18n.I18N_NEW_BRANCH_LABEL_DROPDOWN"
- label-for="project-select"
- >
- <project-dropdown
- id="project-select"
- :selected-project="selectedProject"
- @change="onProjectSelect"
- @error="onError"
- />
- </gl-form-group>
+ <gl-form-group
+ :label="$options.i18n.I18N_NEW_BRANCH_LABEL_BRANCH"
+ label-for="branch-name-input"
+ >
+ <gl-form-input id="branch-name-input" v-model="branchName" type="text" required />
+ </gl-form-group>
- <gl-form-group
- :label="$options.i18n.I18N_NEW_BRANCH_LABEL_BRANCH"
- label-for="branch-name-input"
- >
- <gl-form-input id="branch-name-input" v-model="branchName" type="text" required />
- </gl-form-group>
+ <gl-form-group
+ :label="$options.i18n.I18N_NEW_BRANCH_LABEL_SOURCE"
+ label-for="source-branch-select"
+ >
+ <source-branch-dropdown
+ id="source-branch-select"
+ :selected-project="selectedProject"
+ :selected-branch-name="selectedSourceBranchName"
+ @change="onSourceBranchSelect"
+ @error="onError"
+ />
+ </gl-form-group>
- <gl-form-group
- :label="$options.i18n.I18N_NEW_BRANCH_LABEL_SOURCE"
- label-for="source-branch-select"
+ <div class="form-actions">
+ <gl-button
+ :loading="createBranchLoading"
+ type="submit"
+ variant="confirm"
+ :disabled="disableSubmitButton"
>
- <source-branch-dropdown
- id="source-branch-select"
- :selected-project="selectedProject"
- :selected-branch-name="selectedSourceBranchName"
- @change="onSourceBranchSelect"
- @error="onError"
- />
- </gl-form-group>
-
- <div class="form-actions">
- <gl-button
- :loading="createBranchLoading"
- type="submit"
- variant="confirm"
- :disabled="disableSubmitButton"
- >
- {{ $options.i18n.I18N_NEW_BRANCH_SUBMIT_BUTTON_TEXT }}
- </gl-button>
- </div>
- </gl-form>
- </div>
+ {{ $options.i18n.I18N_NEW_BRANCH_SUBMIT_BUTTON_TEXT }}
+ </gl-button>
+ </div>
+ </gl-form>
</template>
diff --git a/app/assets/javascripts/jira_connect/branches/constants.js b/app/assets/javascripts/jira_connect/branches/constants.js
index 7095f123a9e..ab9d3b2c110 100644
--- a/app/assets/javascripts/jira_connect/branches/constants.js
+++ b/app/assets/javascripts/jira_connect/branches/constants.js
@@ -3,7 +3,6 @@ import { __, s__ } from '~/locale';
export const BRANCHES_PER_PAGE = 20;
export const PROJECTS_PER_PAGE = 20;
-export const I18N_NEW_BRANCH_PAGE_TITLE = __('New branch');
export const I18N_NEW_BRANCH_LABEL_DROPDOWN = __('Project');
export const I18N_NEW_BRANCH_LABEL_BRANCH = __('Branch name');
export const I18N_NEW_BRANCH_LABEL_SOURCE = __('Source branch');
@@ -14,7 +13,13 @@ export const CREATE_BRANCH_ERROR_GENERIC = s__(
);
export const CREATE_BRANCH_ERROR_WITH_CONTEXT = s__('JiraConnect|Failed to create branch.');
-export const CREATE_BRANCH_SUCCESS_ALERT = {
- title: s__('JiraConnect|New branch was successfully created.'),
- message: s__('JiraConnect|You can now close this window and return to Jira.'),
-};
+export const I18N_PAGE_TITLE_WITH_BRANCH_NAME = s__(
+ 'JiraConnect|Create branch for Jira issue %{jiraIssue}',
+);
+export const I18N_PAGE_TITLE_DEFAULT = __('New branch');
+export const I18N_NEW_BRANCH_SUCCESS_TITLE = s__(
+ 'JiraConnect|New branch was successfully created.',
+);
+export const I18N_NEW_BRANCH_SUCCESS_MESSAGE = s__(
+ 'JiraConnect|You can now close this window and return to Jira.',
+);
diff --git a/app/assets/javascripts/jira_connect/branches/index.js b/app/assets/javascripts/jira_connect/branches/index.js
index b8fe255e310..95bd4f5c675 100644
--- a/app/assets/javascripts/jira_connect/branches/index.js
+++ b/app/assets/javascripts/jira_connect/branches/index.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import JiraConnectNewBranchForm from '~/jira_connect/branches/components/new_branch_form.vue';
+import JiraConnectNewBranchPage from '~/jira_connect/branches/pages/index.vue';
import createDefaultClient from '~/lib/graphql';
Vue.use(VueApollo);
@@ -11,7 +11,7 @@ export default async function initJiraConnectBranches() {
return null;
}
- const { initialBranchName } = el.dataset;
+ const { initialBranchName, successStateSvgPath } = el.dataset;
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(
@@ -25,12 +25,12 @@ export default async function initJiraConnectBranches() {
return new Vue({
el,
apolloProvider,
+ provide: {
+ initialBranchName,
+ successStateSvgPath,
+ },
render(createElement) {
- return createElement(JiraConnectNewBranchForm, {
- props: {
- initialBranchName,
- },
- });
+ return createElement(JiraConnectNewBranchPage);
},
});
}
diff --git a/app/assets/javascripts/jira_connect/branches/pages/index.vue b/app/assets/javascripts/jira_connect/branches/pages/index.vue
new file mode 100644
index 00000000000..d72dec6cdee
--- /dev/null
+++ b/app/assets/javascripts/jira_connect/branches/pages/index.vue
@@ -0,0 +1,60 @@
+<script>
+import { GlEmptyState } from '@gitlab/ui';
+import { sprintf } from '~/locale';
+import NewBranchForm from '../components/new_branch_form.vue';
+import {
+ I18N_PAGE_TITLE_WITH_BRANCH_NAME,
+ I18N_PAGE_TITLE_DEFAULT,
+ I18N_NEW_BRANCH_SUCCESS_TITLE,
+ I18N_NEW_BRANCH_SUCCESS_MESSAGE,
+} from '../constants';
+
+export default {
+ components: {
+ GlEmptyState,
+ NewBranchForm,
+ },
+ inject: ['initialBranchName', 'successStateSvgPath'],
+ data() {
+ return {
+ showForm: true,
+ };
+ },
+ computed: {
+ pageTitle() {
+ return this.initialBranchName
+ ? sprintf(this.$options.i18n.I18N_PAGE_TITLE_WITH_BRANCH_NAME, {
+ jiraIssue: this.initialBranchName,
+ })
+ : this.$options.i18n.I18N_PAGE_TITLE_DEFAULT;
+ },
+ },
+ methods: {
+ onNewBranchFormSuccess() {
+ // light-weight toggle to hide the form and show the success state
+ this.showForm = false;
+ },
+ },
+ i18n: {
+ I18N_PAGE_TITLE_WITH_BRANCH_NAME,
+ I18N_PAGE_TITLE_DEFAULT,
+ I18N_NEW_BRANCH_SUCCESS_TITLE,
+ I18N_NEW_BRANCH_SUCCESS_MESSAGE,
+ },
+};
+</script>
+<template>
+ <div>
+ <div class="gl-border-1 gl-border-b-solid gl-border-gray-100 gl-mb-5 gl-mt-7">
+ <h1 data-testid="page-title" class="page-title">{{ pageTitle }}</h1>
+ </div>
+
+ <new-branch-form v-if="showForm" @success="onNewBranchFormSuccess" />
+ <gl-empty-state
+ v-else
+ :title="$options.i18n.I18N_NEW_BRANCH_SUCCESS_TITLE"
+ :description="$options.i18n.I18N_NEW_BRANCH_SUCCESS_MESSAGE"
+ :svg-path="successStateSvgPath"
+ />
+ </div>
+</template>
diff --git a/app/assets/javascripts/repository/components/delete_blob_modal.vue b/app/assets/javascripts/repository/components/delete_blob_modal.vue
index c9b5a9ba4cb..394f1e7995a 100644
--- a/app/assets/javascripts/repository/components/delete_blob_modal.vue
+++ b/app/assets/javascripts/repository/components/delete_blob_modal.vue
@@ -2,6 +2,7 @@
import { GlModal, GlFormGroup, GlFormInput, GlFormTextarea, GlToggle, GlForm } from '@gitlab/ui';
import csrf from '~/lib/utils/csrf';
import { __ } from '~/locale';
+import validation from '~/vue_shared/directives/validation';
import {
SECONDARY_OPTIONS_TEXT,
COMMIT_LABEL,
@@ -9,6 +10,13 @@ import {
TOGGLE_CREATE_MR_LABEL,
} from '../constants';
+const initFormField = ({ value, required = true, skipValidation = false }) => ({
+ value,
+ required,
+ state: skipValidation ? true : null,
+ feedback: null,
+});
+
export default {
csrf,
components: {
@@ -26,6 +34,9 @@ export default {
TARGET_BRANCH_LABEL,
TOGGLE_CREATE_MR_LABEL,
},
+ directives: {
+ validation: validation(),
+ },
props: {
modalId: {
type: String,
@@ -61,12 +72,20 @@ export default {
},
},
data() {
+ const form = {
+ state: false,
+ showValidation: false,
+ fields: {
+ // fields key must match case of form name for validation directive to work
+ commit_message: initFormField({ value: this.commitMessage }),
+ branch_name: initFormField({ value: this.targetBranch }),
+ },
+ };
return {
loading: false,
- commit: this.commitMessage,
- target: this.targetBranch,
createNewMr: true,
error: '',
+ form,
};
},
computed: {
@@ -77,7 +96,7 @@ export default {
{
variant: 'danger',
loading: this.loading,
- disabled: !this.formCompleted || this.loading,
+ disabled: this.loading || !this.form.state,
},
],
};
@@ -92,17 +111,26 @@ export default {
],
};
},
+ /* eslint-disable dot-notation */
showCreateNewMrToggle() {
- return this.canPushCode && this.target !== this.originalBranch;
+ return this.canPushCode && this.form.fields['branch_name'].value !== this.originalBranch;
},
formCompleted() {
- return this.commit && this.target;
+ return this.form.fields['commit_message'].value && this.form.fields['branch_name'].value;
},
+ /* eslint-enable dot-notation */
},
methods: {
submitForm(e) {
e.preventDefault(); // Prevent modal from closing
+ this.form.showValidation = true;
+
+ if (!this.form.state) {
+ return;
+ }
+
this.loading = true;
+ this.form.showValidation = false;
this.$refs.form.$el.submit();
},
},
@@ -119,7 +147,7 @@ export default {
:action-cancel="cancelOptions"
@primary="submitForm"
>
- <gl-form ref="form" :action="deletePath" method="post">
+ <gl-form ref="form" novalidate :action="deletePath" method="post">
<input type="hidden" name="_method" value="delete" />
<input :value="$options.csrf.token" type="hidden" name="authenticity_token" />
<template v-if="emptyRepo">
@@ -132,15 +160,34 @@ export default {
<!-- Once "push to branch" permission is made available, will need to add to conditional
Follow-up issue: https://gitlab.com/gitlab-org/gitlab/-/issues/335462 -->
<input v-if="createNewMr" type="hidden" name="create_merge_request" value="1" />
- <gl-form-group :label="$options.i18n.COMMIT_LABEL" label-for="commit_message">
- <gl-form-textarea v-model="commit" name="commit_message" :disabled="loading" />
+ <gl-form-group
+ :label="$options.i18n.COMMIT_LABEL"
+ label-for="commit_message"
+ :invalid-feedback="form.fields['commit_message'].feedback"
+ >
+ <gl-form-textarea
+ v-model="form.fields['commit_message'].value"
+ v-validation:[form.showValidation]
+ name="commit_message"
+ :state="form.fields['commit_message'].state"
+ :disabled="loading"
+ required
+ />
</gl-form-group>
<gl-form-group
v-if="canPushCode"
:label="$options.i18n.TARGET_BRANCH_LABEL"
label-for="branch_name"
+ :invalid-feedback="form.fields['branch_name'].feedback"
>
- <gl-form-input v-model="target" :disabled="loading" name="branch_name" />
+ <gl-form-input
+ v-model="form.fields['branch_name'].value"
+ v-validation:[form.showValidation]
+ :state="form.fields['branch_name'].state"
+ :disabled="loading"
+ name="branch_name"
+ required
+ />
</gl-form-group>
<gl-toggle
v-if="showCreateNewMrToggle"
diff --git a/app/controllers/jira_connect/branches_controller.rb b/app/controllers/jira_connect/branches_controller.rb
index 7d7faae62a5..97d0b75639e 100644
--- a/app/controllers/jira_connect/branches_controller.rb
+++ b/app/controllers/jira_connect/branches_controller.rb
@@ -8,20 +8,31 @@ class JiraConnect::BranchesController < ApplicationController
feature_category :integrations
def new
+ @new_branch_data = new_branch_data
+ end
+
+ def self.feature_enabled?(user)
+ Feature.enabled?(:jira_connect_create_branch, user, default_enabled: :yaml)
+ end
+
+ private
+
+ def initial_branch_name
return unless params[:issue_key].present?
- @branch_name = Issue.to_branch_name(
+ Issue.to_branch_name(
params[:issue_key],
params[:issue_summary]
)
end
- def self.feature_enabled?(user)
- Feature.enabled?(:jira_connect_create_branch, user, default_enabled: :yaml)
+ def new_branch_data
+ {
+ initial_branch_name: initial_branch_name,
+ success_state_svg_path: ActionController::Base.helpers.image_path('illustrations/merge_requests.svg')
+ }
end
- private
-
def feature_enabled!
render_404 unless self.class.feature_enabled?(current_user)
end
diff --git a/app/models/ci/runner_namespace.rb b/app/models/ci/runner_namespace.rb
index fbb021b867b..d1353b97ed9 100644
--- a/app/models/ci/runner_namespace.rb
+++ b/app/models/ci/runner_namespace.rb
@@ -8,6 +8,7 @@ module Ci
self.limit_scope = :group
self.limit_relation = :recent_runners
self.limit_feature_flag = :ci_runner_limits
+ self.limit_feature_flag_for_override = :ci_runner_limits_override
belongs_to :runner, inverse_of: :runner_namespaces
belongs_to :namespace, inverse_of: :runner_namespaces, class_name: '::Namespace'
diff --git a/app/models/ci/runner_project.rb b/app/models/ci/runner_project.rb
index a72c124db87..e1c435e9b1f 100644
--- a/app/models/ci/runner_project.rb
+++ b/app/models/ci/runner_project.rb
@@ -8,6 +8,7 @@ module Ci
self.limit_scope = :project
self.limit_relation = :recent_runners
self.limit_feature_flag = :ci_runner_limits
+ self.limit_feature_flag_for_override = :ci_runner_limits_override
belongs_to :runner, inverse_of: :runner_projects
belongs_to :project, inverse_of: :runner_projects
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index 2f808d45ffd..2aaab88e778 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -111,7 +111,7 @@ module Import
private
def log_error(exception)
- Gitlab::Import::Logger.error(
+ Gitlab::GithubImport::Logger.error(
message: 'Import failed due to a GitHub error',
status: exception.response_status,
error: exception.response_body
diff --git a/app/views/jira_connect/branches/new.html.haml b/app/views/jira_connect/branches/new.html.haml
index ec2b7be47ca..f0e34c30018 100644
--- a/app/views/jira_connect/branches/new.html.haml
+++ b/app/views/jira_connect/branches/new.html.haml
@@ -2,4 +2,4 @@
- @hide_top_links = true
- page_title _('New branch')
-.js-jira-connect-create-branch{ data: { initial_branch_name: @branch_name } }
+.js-jira-connect-create-branch{ data: @new_branch_data }
diff --git a/app/workers/concerns/gitlab/github_import/object_importer.rb b/app/workers/concerns/gitlab/github_import/object_importer.rb
index 1eff53cea01..e5a224c1d6b 100644
--- a/app/workers/concerns/gitlab/github_import/object_importer.rb
+++ b/app/workers/concerns/gitlab/github_import/object_importer.rb
@@ -17,10 +17,6 @@ module Gitlab
feature_category :importers
worker_has_external_dependencies!
-
- def logger
- @logger ||= Gitlab::Import::Logger.build
- end
end
# project - An instance of `Project` to import the data into.
@@ -63,11 +59,11 @@ module Gitlab
attr_accessor :github_id
def info(project_id, extra = {})
- logger.info(log_attributes(project_id, extra))
+ Logger.info(log_attributes(project_id, extra))
end
def error(project_id, exception, data = {})
- logger.error(
+ Logger.error(
log_attributes(
project_id,
message: 'importer failed',
@@ -78,13 +74,12 @@ module Gitlab
Gitlab::ErrorTracking.track_and_raise_exception(
exception,
- log_attributes(project_id)
+ log_attributes(project_id, import_source: :github)
)
end
def log_attributes(project_id, extra = {})
extra.merge(
- import_source: :github,
project_id: project_id,
importer: importer_class.name,
github_id: github_id
diff --git a/app/workers/concerns/gitlab/github_import/queue.rb b/app/workers/concerns/gitlab/github_import/queue.rb
index 05eb7fbc2cb..1ec62509528 100644
--- a/app/workers/concerns/gitlab/github_import/queue.rb
+++ b/app/workers/concerns/gitlab/github_import/queue.rb
@@ -17,7 +17,7 @@ module Gitlab
sidekiq_options dead: false, retry: 5
sidekiq_retries_exhausted do |msg, e|
- Gitlab::Import::Logger.error(
+ Logger.error(
event: :github_importer_exhausted,
message: msg['error_message'],
class: msg['class'],
diff --git a/app/workers/concerns/gitlab/github_import/stage_methods.rb b/app/workers/concerns/gitlab/github_import/stage_methods.rb
index 916b273a28f..0671dcf4e72 100644
--- a/app/workers/concerns/gitlab/github_import/stage_methods.rb
+++ b/app/workers/concerns/gitlab/github_import/stage_methods.rb
@@ -37,11 +37,11 @@ module Gitlab
private
def info(project_id, extra = {})
- logger.info(log_attributes(project_id, extra))
+ Logger.info(log_attributes(project_id, extra))
end
def error(project_id, exception)
- logger.error(
+ Logger.error(
log_attributes(
project_id,
message: 'stage failed',
@@ -51,21 +51,16 @@ module Gitlab
Gitlab::ErrorTracking.track_and_raise_exception(
exception,
- log_attributes(project_id)
+ log_attributes(project_id, import_source: :github)
)
end
def log_attributes(project_id, extra = {})
extra.merge(
- import_source: :github,
project_id: project_id,
import_stage: self.class.name
)
end
-
- def logger
- @logger ||= Gitlab::Import::Logger.build
- end
end
end
end
diff --git a/app/workers/merge_request_mergeability_check_worker.rb b/app/workers/merge_request_mergeability_check_worker.rb
index db1bd0aba2a..d477101a768 100644
--- a/app/workers/merge_request_mergeability_check_worker.rb
+++ b/app/workers/merge_request_mergeability_check_worker.rb
@@ -14,7 +14,7 @@ class MergeRequestMergeabilityCheckWorker
merge_request = MergeRequest.find_by_id(merge_request_id)
unless merge_request
- logger.error("Failed to find merge request with ID: #{merge_request_id}")
+ Sidekiq.logger.error(worker: self.class.name, message: "Failed to find merge request", merge_request_id: merge_request_id)
return
end
@@ -23,6 +23,6 @@ class MergeRequestMergeabilityCheckWorker
.new(merge_request)
.execute(recheck: false, retry_lease: false)
- logger.error("Failed to check mergeability of merge request (#{merge_request_id}): #{result.message}") if result.error?
+ Sidekiq.logger.error(worker: self.class.name, message: "Failed to check mergeability of merge request: #{result.message}", merge_request_id: merge_request_id) if result.error?
end
end
diff --git a/config/feature_flags/development/ci_runner_limits_override.yml b/config/feature_flags/development/ci_runner_limits_override.yml
new file mode 100644
index 00000000000..a5dd74d35af
--- /dev/null
+++ b/config/feature_flags/development/ci_runner_limits_override.yml
@@ -0,0 +1,8 @@
+---
+name: ci_runner_limits_override
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67152
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/337224
+milestone: '14.2'
+type: development
+group: group::runner
+default_enabled: false
diff --git a/db/migrate/20210726202748_add_vulnerability_severities_into_approval_project_rules.rb b/db/migrate/20210726202748_add_vulnerability_severities_into_approval_project_rules.rb
new file mode 100644
index 00000000000..bd17a9df5df
--- /dev/null
+++ b/db/migrate/20210726202748_add_vulnerability_severities_into_approval_project_rules.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class AddVulnerabilitySeveritiesIntoApprovalProjectRules < ActiveRecord::Migration[6.1]
+ def up
+ add_column :approval_project_rules, :severity_levels, :text, array: true, null: false, default: []
+ end
+
+ def down
+ remove_column :approval_project_rules, :severity_levels
+ end
+end
diff --git a/db/post_migrate/20210727113447_backfill_integrations_type_new.rb b/db/post_migrate/20210727113447_backfill_integrations_type_new.rb
new file mode 100644
index 00000000000..8544c236fd7
--- /dev/null
+++ b/db/post_migrate/20210727113447_backfill_integrations_type_new.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+class BackfillIntegrationsTypeNew < ActiveRecord::Migration[6.1]
+ include Gitlab::Database::MigrationHelpers
+
+ MIGRATION = 'BackfillIntegrationsTypeNew'
+ INTERVAL = 2.minutes
+
+ def up
+ queue_batched_background_migration(
+ MIGRATION,
+ :integrations,
+ :id,
+ job_interval: INTERVAL
+ )
+ end
+
+ def down
+ Gitlab::Database::BackgroundMigration::BatchedMigration
+ .for_configuration(MIGRATION, :integrations, :id, [])
+ .delete_all
+ end
+end
diff --git a/db/schema_migrations/20210726202748 b/db/schema_migrations/20210726202748
new file mode 100644
index 00000000000..390847c8a07
--- /dev/null
+++ b/db/schema_migrations/20210726202748
@@ -0,0 +1 @@
+378e12c3c7c49e294ab4ab792151af8e3829cc6f38295d5faa0995ad16f3f934 \ No newline at end of file
diff --git a/db/schema_migrations/20210727113447 b/db/schema_migrations/20210727113447
new file mode 100644
index 00000000000..236022f5af3
--- /dev/null
+++ b/db/schema_migrations/20210727113447
@@ -0,0 +1 @@
+19e23131949e6056ea9837231fac6a2307fb52a8287eb34cc6e89eed11d52849 \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 7cff35b9b42..c8130d5488f 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -9729,7 +9729,8 @@ CREATE TABLE approval_project_rules (
name character varying NOT NULL,
rule_type smallint DEFAULT 0 NOT NULL,
scanners text[],
- vulnerabilities_allowed smallint
+ vulnerabilities_allowed smallint,
+ severity_levels text[] DEFAULT '{}'::text[] NOT NULL
);
CREATE TABLE approval_project_rules_groups (
diff --git a/doc/administration/packages/container_registry.md b/doc/administration/packages/container_registry.md
index 5e228b0493b..9c34d07ed07 100644
--- a/doc/administration/packages/container_registry.md
+++ b/doc/administration/packages/container_registry.md
@@ -617,8 +617,7 @@ In the examples below we set the Registry's port to `5001`.
## Disable Container Registry per project
If Registry is enabled in your GitLab instance, but you don't need it for your
-project, you can disable it from your project's settings. Read the user guide
-on how to achieve that.
+project, you can [disable it from your project's settings](../../user/project/settings/index.md#sharing-and-permissions).
## Use an external container registry with GitLab as an auth endpoint
diff --git a/doc/api/container_registry.md b/doc/api/container_registry.md
index cf5a7f89c8b..12bdeebca1d 100644
--- a/doc/api/container_registry.md
+++ b/doc/api/container_registry.md
@@ -30,6 +30,55 @@ To disable it:
Feature.disable(:ci_job_token_scope)
```
+## Change the visibility of the Container Registry
+
+This controls who can view the Container Registry.
+
+```plaintext
+PUT /projects/:id/
+```
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding) accessible by the authenticated user. |
+| `container_registry_access_level` | string | no | The desired visibility of the Container Registry. One of `enabled` (default), `private`, or `disabled`. |
+
+Descriptions of the possible values for `container_registry_access_level`:
+
+- **enabled** (Default): The Container Registry is visible to everyone with access to the project.
+If the project is public, the Container Registry is also public. If the project is internal or
+private, the Container Registry is also internal or private.
+
+- **private**: The Container Registry is visible only to project members with Reporter role or
+higher. This is similar to the behavior of a private project with Container Registry visibility set
+to **enabled**.
+
+- **disabled**: The Container Registry is disabled.
+
+See the [Container Registry visibility permissions](../user/packages/container_registry/index.md#container-registry-visibility-permissions)
+for more details about the permissions that this setting grants to users.
+
+```shell
+curl --request PUT "https://gitlab.example.com/api/v4/projects/5/" \
+ --header 'PRIVATE-TOKEN: <your_access_token>' \
+ --header 'Accept: application/json' \
+ --header 'Content-Type: application/json' \
+ --data-raw '{
+ "container_registry_access_level": "private"
+ }'
+```
+
+Example response:
+
+```json
+{
+ "id": 5,
+ "name": "Project 5",
+ "container_registry_access_level": "private",
+ ...
+}
+```
+
## List registry repositories
### Within a project
diff --git a/doc/development/documentation/index.md b/doc/development/documentation/index.md
index c549024ab86..ebc07b72ec1 100644
--- a/doc/development/documentation/index.md
+++ b/doc/development/documentation/index.md
@@ -41,7 +41,7 @@ run only the jobs that match the type of contribution. If your contribution cont
**only** documentation changes, then only documentation-related jobs run, and
the pipeline completes much faster than a code contribution.
-If you are submitting documentation-only changes to Runner, Omnibus, or Charts,
+If you are submitting documentation-only changes to Omnibus or Charts,
the fast pipeline is not determined automatically. Instead, create branches for
docs-only merge requests using the following guide:
diff --git a/doc/development/github_importer.md b/doc/development/github_importer.md
index 14fd8f6bf72..c4f4c2d87d8 100644
--- a/doc/development/github_importer.md
+++ b/doc/development/github_importer.md
@@ -237,6 +237,7 @@ The code for this resides in:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/48512/diffs) in GitLab 13.7.
> - Number of imported objects [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/64256) in GitLab 14.1.
+> - `Gitlab::GithubImport::Logger` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65968) in GitLab 14.2.
The import progress can be checked in the `logs/importer.log` file. Each relevant import is logged
with `"import_source": "github"` and the `"project_id"`.
diff --git a/doc/subscriptions/self_managed/index.md b/doc/subscriptions/self_managed/index.md
index 681d5a85d22..027866ceb5e 100644
--- a/doc/subscriptions/self_managed/index.md
+++ b/doc/subscriptions/self_managed/index.md
@@ -54,12 +54,21 @@ billable user, with the following exceptions:
[blocked users](../../user/admin_area/moderate_users.md#block-a-user) don't count as billable users in the current subscription. When they are either deactivated or blocked they release a _billable user_ seat. However, they may
count toward overages in the subscribed seat count.
- Users who are [pending approval](../../user/admin_area/moderate_users.md#users-pending-approval).
-- Members with Guest permissions on an Ultimate subscription.
+- Members with the Guest role on an Ultimate subscription.
+- Users without project or group memberships on an Ultimate subscription.
- GitLab-created service accounts: `Ghost User` and bots
([`Support Bot`](../../user/project/service_desk.md#support-bot-user),
[`Project bot users`](../../user/project/settings/project_access_tokens.md#project-bot-users), and
so on.)
+**Billable users** as reported in the `/admin` section is updated once per day.
+
+### Maximum users
+
+GitLab shows the highest number of billable users for the current license period.
+
+To view this list, on the top bar, select **Menu >** **{admin}** **Admin**. On the left menu, select **Subscription**. In the lower left, the list of **Maximum users** is displayed.
+
### Tips for managing users and subscription seats
Managing the number of users against the number of subscription seats can be a challenge:
diff --git a/doc/user/packages/container_registry/index.md b/doc/user/packages/container_registry/index.md
index 4bf86e0a6d4..18b86c4a357 100644
--- a/doc/user/packages/container_registry/index.md
+++ b/doc/user/packages/container_registry/index.md
@@ -745,10 +745,13 @@ You can, however, remove the Container Registry for a project:
The **Packages & Registries > Container Registry** entry is removed from the project's sidebar.
-## Set visibility of the Container Registry
+## Change visibility of the Container Registry
By default, the Container Registry is visible to everyone with access to the project.
-You can, however, change the visibility of the Container Registry for a project:
+You can, however, change the visibility of the Container Registry for a project.
+
+See the [Container Registry visibility permissions](#container-registry-visibility-permissions)
+for more details about the permissions that this setting grants to users.
1. Go to your project's **Settings > General** page.
1. Expand the section **Visibility, project features, permissions**.
@@ -764,6 +767,25 @@ You can, however, change the visibility of the Container Registry for a project:
1. Select **Save changes**.
+## Container Registry visibility permissions
+
+The ability to view the Container Registry and pull images is controlled by the Container Registry's
+visibility permissions. You can change this through the [visibility setting on the UI](#change-visibility-of-the-container-registry)
+or the [API](../../../api/container_registry.md#change-the-visibility-of-the-container-registry).
+[Other permissions](../../permissions.md)
+such as updating the Container Registry, pushing or deleting images, and so on are not affected by
+this setting. However, disabling the Container Registry disables all Container Registry operations.
+
+| | | Anonymous<br/>(Everyone on internet) | Guest | Reporter, Developer, Maintainer, Owner |
+| -------------------- | --------------------- | --------- | ----- | ------------------------------------------ |
+| Public project with Container Registry visibility <br/> set to **Everyone With Access** (UI) or `enabled` (API) | View Container Registry <br/> and pull images | Yes | Yes | Yes |
+| Public project with Container Registry visibility <br/> set to **Only Project Members** (UI) or `private` (API) | View Container Registry <br/> and pull images | No | No | Yes |
+| Internal project with Container Registry visibility <br/> set to **Everyone With Access** (UI) or `enabled` (API) | View Container Registry <br/> and pull images | No | Yes | Yes |
+| Internal project with Container Registry visibility <br/> set to **Only Project Members** (UI) or `private` (API) | View Container Registry <br/> and pull images | No | No | Yes |
+| Private project with Container Registry visibility <br/> set to **Everyone With Access** (UI) or `enabled` (API) | View Container Registry <br/> and pull images | No | No | Yes |
+| Private project with Container Registry visibility <br/> set to **Only Project Members** (UI) or `private` (API) | View Container Registry <br/> and pull images | No | No | Yes |
+| Any project with Container Registry `disabled` | All operations on Container Registry | No | No | No |
+
## Manifest lists and garbage collection
Manifest lists are commonly used for creating multi-architecture images. If you rely on manifest
diff --git a/doc/user/permissions.md b/doc/user/permissions.md
index e73aa93d5ec..e47c70ba2f2 100644
--- a/doc/user/permissions.md
+++ b/doc/user/permissions.md
@@ -94,7 +94,6 @@ The following table lists project permissions available for each role:
| Pull [packages](packages/index.md) | ✓ (*1*) | ✓ | ✓ | ✓ | ✓ |
| Reopen [test case](../ci/test_cases/index.md) | | ✓ | ✓ | ✓ | ✓ |
| See a commit status | | ✓ | ✓ | ✓ | ✓ |
-| See a container registry | | ✓ | ✓ | ✓ | ✓ |
| See a list of merge requests | | ✓ | ✓ | ✓ | ✓ |
| See environments | | ✓ | ✓ | ✓ | ✓ |
| [Set issue estimate and record time spent](project/time_tracking.md) | | ✓ | ✓ | ✓ | ✓ |
@@ -260,6 +259,11 @@ Read through the documentation on [permissions for File Locking](project/file_lo
as well as by guest users that create a confidential issue. To learn more,
read through the documentation on [permissions and access to confidential issues](project/issues/confidential_issues.md#permissions-and-access-to-confidential-issues).
+### Container Registry visibility permissions
+
+Find the visibility permissions for the Container Registry, as described in the
+[related documentation](packages/container_registry/index.md#container-registry-visibility-permissions).
+
## Group members permissions
NOTE:
diff --git a/lib/gitlab/auth/result.rb b/lib/gitlab/auth/result.rb
index 443f7c08e18..1b1505c46bd 100644
--- a/lib/gitlab/auth/result.rb
+++ b/lib/gitlab/auth/result.rb
@@ -3,6 +3,8 @@
module Gitlab
module Auth
Result = Struct.new(:actor, :project, :type, :authentication_abilities) do
+ self::EMPTY = self.new(nil, nil, nil, nil).freeze
+
def ci?(for_project)
type == :ci &&
project &&
@@ -29,6 +31,20 @@ module Gitlab
def deploy_token
actor.is_a?(DeployToken) ? actor : nil
end
+
+ def can?(action)
+ actor&.can?(action)
+ end
+
+ def can_perform_action_on_project?(action, given_project)
+ Ability.allowed?(actor, action, given_project)
+ end
+
+ def authentication_abilities_include?(ability)
+ return false if authentication_abilities.blank?
+
+ authentication_abilities.include?(ability)
+ end
end
end
end
diff --git a/lib/gitlab/background_migration/backfill_integrations_type_new.rb b/lib/gitlab/background_migration/backfill_integrations_type_new.rb
new file mode 100644
index 00000000000..6a2d82aaeee
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_integrations_type_new.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Backfills the new `integrations.type_new` column, which contains
+ # the real class name, rather than the legacy class name in `type`
+ # which is mapped via `Gitlab::Integrations::StiType`.
+ class BackfillIntegrationsTypeNew
+ def perform(start_id, stop_id, *args)
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ WITH mapping(old_type, new_type) AS (VALUES
+ ('AsanaService', 'Integrations::Asana'),
+ ('AssemblaService', 'Integrations::Assembla'),
+ ('BambooService', 'Integrations::Bamboo'),
+ ('BugzillaService', 'Integrations::Bugzilla'),
+ ('BuildkiteService', 'Integrations::Buildkite'),
+ ('CampfireService', 'Integrations::Campfire'),
+ ('ConfluenceService', 'Integrations::Confluence'),
+ ('CustomIssueTrackerService', 'Integrations::CustomIssueTracker'),
+ ('DatadogService', 'Integrations::Datadog'),
+ ('DiscordService', 'Integrations::Discord'),
+ ('DroneCiService', 'Integrations::DroneCi'),
+ ('EmailsOnPushService', 'Integrations::EmailsOnPush'),
+ ('EwmService', 'Integrations::Ewm'),
+ ('ExternalWikiService', 'Integrations::ExternalWiki'),
+ ('FlowdockService', 'Integrations::Flowdock'),
+ ('HangoutsChatService', 'Integrations::HangoutsChat'),
+ ('IrkerService', 'Integrations::Irker'),
+ ('JenkinsService', 'Integrations::Jenkins'),
+ ('JiraService', 'Integrations::Jira'),
+ ('MattermostService', 'Integrations::Mattermost'),
+ ('MattermostSlashCommandsService', 'Integrations::MattermostSlashCommands'),
+ ('MicrosoftTeamsService', 'Integrations::MicrosoftTeams'),
+ ('MockCiService', 'Integrations::MockCi'),
+ ('MockMonitoringService', 'Integrations::MockMonitoring'),
+ ('PackagistService', 'Integrations::Packagist'),
+ ('PipelinesEmailService', 'Integrations::PipelinesEmail'),
+ ('PivotaltrackerService', 'Integrations::Pivotaltracker'),
+ ('PrometheusService', 'Integrations::Prometheus'),
+ ('PushoverService', 'Integrations::Pushover'),
+ ('RedmineService', 'Integrations::Redmine'),
+ ('SlackService', 'Integrations::Slack'),
+ ('SlackSlashCommandsService', 'Integrations::SlackSlashCommands'),
+ ('TeamcityService', 'Integrations::Teamcity'),
+ ('UnifyCircuitService', 'Integrations::UnifyCircuit'),
+ ('WebexTeamsService', 'Integrations::WebexTeams'),
+ ('YoutrackService', 'Integrations::Youtrack'),
+
+ -- EE-only integrations
+ ('GithubService', 'Integrations::Github'),
+ ('GitlabSlackApplicationService', 'Integrations::GitlabSlackApplication')
+ )
+
+ UPDATE integrations SET type_new = mapping.new_type
+ FROM mapping
+ WHERE integrations.id BETWEEN #{start_id} AND #{stop_id}
+ AND integrations.type = mapping.old_type
+ SQL
+ end
+ end
+ end
+end
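
As a rough usage sketch (this job is normally invoked by the batched background migration framework rather than called directly; the id range below is arbitrary):

Gitlab::BackgroundMigration::BackfillIntegrationsTypeNew.new.perform(1, 100)
# rewrites integrations.type_new from the legacy type value for rows with id 1..100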
diff --git a/lib/gitlab/github_import/logger.rb b/lib/gitlab/github_import/logger.rb
new file mode 100644
index 00000000000..f234ab290c5
--- /dev/null
+++ b/lib/gitlab/github_import/logger.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ class Logger < ::Gitlab::Import::Logger
+ def default_attributes
+ super.merge(import_source: :github)
+ end
+ end
+ end
+end
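
A sketch of the resulting log line, based on the logger specs later in this commit (field order and placeholder values are illustrative):

Gitlab::GithubImport::Logger.info(message: 'starting importer', project_id: 1)
# => {"severity":"INFO","time":"...","correlation_id":"...","message":"starting importer",
#     "project_id":1,"feature_category":"importers","import_source":"github"}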
diff --git a/lib/gitlab/github_import/parallel_scheduling.rb b/lib/gitlab/github_import/parallel_scheduling.rb
index 4598429d568..c002dc750e7 100644
--- a/lib/gitlab/github_import/parallel_scheduling.rb
+++ b/lib/gitlab/github_import/parallel_scheduling.rb
@@ -174,11 +174,11 @@ module Gitlab
private
def info(project_id, extra = {})
- logger.info(log_attributes(project_id, extra))
+ Logger.info(log_attributes(project_id, extra))
end
def error(project_id, exception)
- logger.error(
+ Logger.error(
log_attributes(
project_id,
message: 'importer failed',
@@ -188,22 +188,17 @@ module Gitlab
Gitlab::ErrorTracking.track_exception(
exception,
- log_attributes(project_id)
+ log_attributes(project_id, import_source: :github)
)
end
def log_attributes(project_id, extra = {})
extra.merge(
- import_source: :github,
project_id: project_id,
importer: importer_class.name,
parallel: parallel?
)
end
-
- def logger
- @logger ||= Gitlab::Import::Logger.build
- end
end
end
end
diff --git a/lib/gitlab/import/logger.rb b/lib/gitlab/import/logger.rb
index ab3e822a4e9..bd34aff734a 100644
--- a/lib/gitlab/import/logger.rb
+++ b/lib/gitlab/import/logger.rb
@@ -6,6 +6,10 @@ module Gitlab
def self.file_name_noext
'importer'
end
+
+ def default_attributes
+ super.merge(feature_category: :importers)
+ end
end
end
end
diff --git a/lib/gitlab/integrations/sti_type.rb b/lib/gitlab/integrations/sti_type.rb
index f7a0d115aee..0fa9f435b5c 100644
--- a/lib/gitlab/integrations/sti_type.rb
+++ b/lib/gitlab/integrations/sti_type.rb
@@ -7,7 +7,7 @@ module Gitlab
Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Irker Jenkins Jira Mattermost
MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
- Prometheus Pushover Redmine Slack SlackSlashCommands Teamcity UnifyCircuit Youtrack WebexTeams
+ Prometheus Pushover Redmine Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack
)).freeze
def self.namespaced_integrations
diff --git a/lib/gitlab/json_logger.rb b/lib/gitlab/json_logger.rb
index 3a74df8dc8f..d0dcd232ecc 100644
--- a/lib/gitlab/json_logger.rb
+++ b/lib/gitlab/json_logger.rb
@@ -7,7 +7,7 @@ module Gitlab
end
def format_message(severity, timestamp, progname, message)
- data = {}
+ data = default_attributes
data[:severity] = severity
data[:time] = timestamp.utc.iso8601(3)
data[Labkit::Correlation::CorrelationId::LOG_KEY] = Labkit::Correlation::CorrelationId.current_id
@@ -21,5 +21,11 @@ module Gitlab
Gitlab::Json.dump(data) + "\n"
end
+
+ protected
+
+ def default_attributes
+ {}
+ end
end
end
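
The new `default_attributes` hook is what the importer loggers above build on. A minimal sketch of a subclass injecting its own default fields (the class and category names here are hypothetical):

class MyAreaLogger < Gitlab::JsonLogger
  def default_attributes
    super.merge(feature_category: :my_area)
  end
end
# every line logged through MyAreaLogger now carries feature_category automatically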
diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb
index 4b65bbcc791..a1a0356ff58 100644
--- a/lib/gitlab/middleware/go.rb
+++ b/lib/gitlab/middleware/go.rb
@@ -127,23 +127,25 @@ module Gitlab
def project_for_paths(paths, request)
project = Project.where_full_path_in(paths).first
- return unless Ability.allowed?(current_user(request, project), :read_project, project)
+
+ return unless authentication_result(request, project).can_perform_action_on_project?(:read_project, project)
project
end
- def current_user(request, project)
- return unless has_basic_credentials?(request)
+ def authentication_result(request, project)
+ empty_result = Gitlab::Auth::Result::EMPTY
+ return empty_result unless has_basic_credentials?(request)
login, password = user_name_and_password(request)
auth_result = Gitlab::Auth.find_for_git_client(login, password, project: project, ip: request.ip)
- return unless auth_result.success?
+ return empty_result unless auth_result.success?
- return unless auth_result.actor&.can?(:access_git)
+ return empty_result unless auth_result.can?(:access_git)
- return unless auth_result.authentication_abilities.include?(:read_project)
+ return empty_result unless auth_result.authentication_abilities_include?(:read_project)
- auth_result.actor
+ auth_result
end
end
end
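
A sketch of why returning `Gitlab::Auth::Result::EMPTY` instead of `nil` keeps the caller simple: the ability check still runs, just with a nil actor, so it degrades to an anonymous access check (`project` is any hypothetical Project record):

Gitlab::Auth::Result::EMPTY.can_perform_action_on_project?(:read_project, project)
# => Ability.allowed?(nil, :read_project, project), true for public projects, false otherwise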
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index ff192dd0a1d..17be61bd816 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -18710,6 +18710,9 @@ msgstr ""
msgid "Jira-GitLab user mapping template"
msgstr ""
+msgid "JiraConnect|Create branch for Jira issue %{jiraIssue}"
+msgstr ""
+
msgid "JiraConnect|Failed to create branch."
msgstr ""
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
index a799f8cd925..641bf6a5d10 100755
--- a/scripts/review_apps/review-apps.sh
+++ b/scripts/review_apps/review-apps.sh
@@ -247,7 +247,7 @@ function deploy() {
gitlab_migrations_image_repository="${IMAGE_REPOSITORY}/gitlab-rails-ee"
gitlab_sidekiq_image_repository="${IMAGE_REPOSITORY}/gitlab-sidekiq-ee"
gitlab_webservice_image_repository="${IMAGE_REPOSITORY}/gitlab-webservice-ee"
- gitlab_task_runner_image_repository="${IMAGE_REPOSITORY}/gitlab-task-runner-ee"
+ gitlab_task_runner_image_repository="${IMAGE_REPOSITORY}/gitlab-toolbox-ee"
gitlab_gitaly_image_repository="${IMAGE_REPOSITORY}/gitaly"
gitaly_image_tag=$(parse_gitaly_image_tag)
gitlab_shell_image_repository="${IMAGE_REPOSITORY}/gitlab-shell"
diff --git a/spec/controllers/jira_connect/branches_controller_spec.rb b/spec/controllers/jira_connect/branches_controller_spec.rb
index 4b198eb8820..31f68608918 100644
--- a/spec/controllers/jira_connect/branches_controller_spec.rb
+++ b/spec/controllers/jira_connect/branches_controller_spec.rb
@@ -15,21 +15,24 @@ RSpec.describe JiraConnect::BranchesController do
get :new, params: { issue_key: 'ACME-123', issue_summary: 'My Issue !@#$%' }
expect(response).to be_successful
- expect(assigns(:branch_name)).to eq('ACME-123-my-issue')
+ expect(assigns(:new_branch_data)).to include(
+ initial_branch_name: 'ACME-123-my-issue',
+ success_state_svg_path: start_with('/assets/illustrations/merge_requests-')
+ )
end
it 'ignores missing summary' do
get :new, params: { issue_key: 'ACME-123' }
expect(response).to be_successful
- expect(assigns(:branch_name)).to eq('ACME-123')
+ expect(assigns(:new_branch_data)).to include(initial_branch_name: 'ACME-123')
end
it 'does not set a branch name if key is not passed' do
get :new, params: { issue_summary: 'My issue' }
expect(response).to be_successful
- expect(assigns(:branch_name)).to be_nil
+ expect(assigns(:new_branch_data)).to include('initial_branch_name': nil)
end
context 'when feature flag is disabled' do
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 418247c88aa..704adfa568d 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -46,9 +46,9 @@ RSpec.describe 'Value Stream Analytics', :js do
@build = create_cycle(user, project, issue, mr, milestone, pipeline)
deploy_master(user, project)
- issue.metrics.update!(first_mentioned_in_commit_at: issue.metrics.first_associated_with_milestone_at + 1.day)
+ issue.metrics.update!(first_mentioned_in_commit_at: issue.metrics.first_associated_with_milestone_at + 1.hour)
merge_request = issue.merge_requests_closing_issues.first.merge_request
- merge_request.update!(created_at: issue.metrics.first_associated_with_milestone_at + 1.day)
+ merge_request.update!(created_at: issue.metrics.first_associated_with_milestone_at + 1.hour)
merge_request.metrics.update!(
latest_build_started_at: 4.hours.ago,
latest_build_finished_at: 3.hours.ago,
diff --git a/spec/frontend/cycle_analytics/__snapshots__/base_spec.js.snap b/spec/frontend/cycle_analytics/__snapshots__/base_spec.js.snap
deleted file mode 100644
index 771625a3e51..00000000000
--- a/spec/frontend/cycle_analytics/__snapshots__/base_spec.js.snap
+++ /dev/null
@@ -1,3 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Value stream analytics component isLoading = true renders the path navigation component with prop \`loading\` set to true 1`] = `"<path-navigation-stub loading=\\"true\\" stages=\\"\\" selectedstage=\\"[object Object]\\" class=\\"js-path-navigation gl-w-full gl-pb-2\\"></path-navigation-stub>"`;
diff --git a/spec/frontend/cycle_analytics/base_spec.js b/spec/frontend/cycle_analytics/base_spec.js
index 5b01741c9e4..c2c6b2a5d06 100644
--- a/spec/frontend/cycle_analytics/base_spec.js
+++ b/spec/frontend/cycle_analytics/base_spec.js
@@ -8,7 +8,15 @@ import PathNavigation from '~/cycle_analytics/components/path_navigation.vue';
import StageTable from '~/cycle_analytics/components/stage_table.vue';
import { NOT_ENOUGH_DATA_ERROR } from '~/cycle_analytics/constants';
import initState from '~/cycle_analytics/store/state';
-import { selectedStage, issueEvents } from './mock_data';
+import {
+ permissions,
+ transformedProjectStagePathData,
+ selectedStage,
+ issueEvents,
+ createdBefore,
+ createdAfter,
+ currentGroup,
+} from './mock_data';
const selectedStageEvents = issueEvents.events;
const noDataSvgPath = 'path/to/no/data';
@@ -18,25 +26,31 @@ Vue.use(Vuex);
let wrapper;
-function createStore({ initialState = {} }) {
+const defaultState = {
+ permissions,
+ currentGroup,
+ createdBefore,
+ createdAfter,
+};
+
+function createStore({ initialState = {}, initialGetters = {} }) {
return new Vuex.Store({
state: {
...initState(),
- permissions: {
- [selectedStage.id]: true,
- },
+ ...defaultState,
...initialState,
},
getters: {
- pathNavigationData: () => [],
+ pathNavigationData: () => transformedProjectStagePathData,
+ ...initialGetters,
},
});
}
-function createComponent({ initialState } = {}) {
+function createComponent({ initialState, initialGetters } = {}) {
return extendedWrapper(
shallowMount(BaseComponent, {
- store: createStore({ initialState }),
+ store: createStore({ initialState, initialGetters }),
propsData: {
noDataSvgPath,
noAccessSvgPath,
@@ -57,16 +71,7 @@ const findEmptyStageTitle = () => wrapper.findComponent(GlEmptyState).props('tit
describe('Value stream analytics component', () => {
beforeEach(() => {
- wrapper = createComponent({
- initialState: {
- isLoading: false,
- isLoadingStage: false,
- isEmptyStage: false,
- selectedStageEvents,
- selectedStage,
- selectedStageError: '',
- },
- });
+ wrapper = createComponent({ initialState: { selectedStage, selectedStageEvents } });
});
afterEach(() => {
@@ -102,7 +107,7 @@ describe('Value stream analytics component', () => {
});
it('renders the path navigation component with prop `loading` set to true', () => {
- expect(findPathNavigation().html()).toMatchSnapshot();
+ expect(findPathNavigation().props('loading')).toBe(true);
});
it('does not render the overview metrics', () => {
@@ -130,13 +135,19 @@ describe('Value stream analytics component', () => {
expect(tableWrapper.exists()).toBe(true);
expect(tableWrapper.find(GlLoadingIcon).exists()).toBe(true);
});
+
+ it('renders the path navigation loading state', () => {
+ expect(findPathNavigation().props('loading')).toBe(true);
+ });
});
describe('isEmptyStage = true', () => {
+ const emptyStageParams = {
+ isEmptyStage: true,
+ selectedStage: { ...selectedStage, emptyStageText: 'This stage is empty' },
+ };
beforeEach(() => {
- wrapper = createComponent({
- initialState: { selectedStage, isEmptyStage: true },
- });
+ wrapper = createComponent({ initialState: emptyStageParams });
});
it('renders the empty stage with `Not enough data` message', () => {
@@ -147,8 +158,7 @@ describe('Value stream analytics component', () => {
beforeEach(() => {
wrapper = createComponent({
initialState: {
- selectedStage,
- isEmptyStage: true,
+ ...emptyStageParams,
selectedStageError: 'There is too much data to calculate',
},
});
@@ -164,7 +174,9 @@ describe('Value stream analytics component', () => {
beforeEach(() => {
wrapper = createComponent({
initialState: {
+ selectedStage,
permissions: {
+ ...permissions,
[selectedStage.id]: false,
},
},
@@ -179,6 +191,7 @@ describe('Value stream analytics component', () => {
describe('without a selected stage', () => {
beforeEach(() => {
wrapper = createComponent({
+ initialGetters: { pathNavigationData: () => [] },
initialState: { selectedStage: null, isEmptyStage: true },
});
});
@@ -187,7 +200,7 @@ describe('Value stream analytics component', () => {
expect(findStageTable().exists()).toBe(true);
});
- it('does not render the path navigation component', () => {
+ it('does not render the path navigation', () => {
expect(findPathNavigation().exists()).toBe(false);
});
diff --git a/spec/frontend/cycle_analytics/store/actions_spec.js b/spec/frontend/cycle_analytics/store/actions_spec.js
index 8a8dd374f8e..28715aa87e8 100644
--- a/spec/frontend/cycle_analytics/store/actions_spec.js
+++ b/spec/frontend/cycle_analytics/store/actions_spec.js
@@ -2,39 +2,23 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/cycle_analytics/store/actions';
+import * as getters from '~/cycle_analytics/store/getters';
import httpStatusCodes from '~/lib/utils/http_status';
import { allowedStages, selectedStage, selectedValueStream } from '../mock_data';
const mockRequestPath = 'some/cool/path';
const mockFullPath = '/namespace/-/analytics/value_stream_analytics/value_streams';
const mockStartDate = 30;
-const mockRequestedDataActions = ['fetchValueStreams', 'fetchCycleAnalyticsData'];
-const mockInitializeActionCommit = {
- payload: { requestPath: mockRequestPath },
- type: 'INITIALIZE_VSA',
-};
+const mockEndpoints = { fullPath: mockFullPath, requestPath: mockRequestPath };
const mockSetDateActionCommit = { payload: { startDate: mockStartDate }, type: 'SET_DATE_RANGE' };
-const mockRequestedDataMutations = [
- {
- payload: true,
- type: 'SET_LOADING',
- },
- {
- payload: false,
- type: 'SET_LOADING',
- },
-];
-
-const features = {
- cycleAnalyticsForGroups: true,
-};
+
+const defaultState = { ...getters, selectedValueStream };
describe('Project Value Stream Analytics actions', () => {
let state;
let mock;
beforeEach(() => {
- state = {};
mock = new MockAdapter(axios);
});
@@ -45,28 +29,62 @@ describe('Project Value Stream Analytics actions', () => {
const mutationTypes = (arr) => arr.map(({ type }) => type);
+ const mockFetchStageDataActions = [
+ { type: 'setLoading', payload: true },
+ { type: 'fetchCycleAnalyticsData' },
+ { type: 'fetchStageData' },
+ { type: 'fetchStageMedians' },
+ { type: 'setLoading', payload: false },
+ ];
+
describe.each`
- action | payload | expectedActions | expectedMutations
- ${'initializeVsa'} | ${{ requestPath: mockRequestPath }} | ${mockRequestedDataActions} | ${[mockInitializeActionCommit, ...mockRequestedDataMutations]}
- ${'setDateRange'} | ${{ startDate: mockStartDate }} | ${mockRequestedDataActions} | ${[mockSetDateActionCommit, ...mockRequestedDataMutations]}
- ${'setSelectedStage'} | ${{ selectedStage }} | ${['fetchStageData']} | ${[{ type: 'SET_SELECTED_STAGE', payload: { selectedStage } }]}
- ${'setSelectedValueStream'} | ${{ selectedValueStream }} | ${['fetchValueStreamStages']} | ${[{ type: 'SET_SELECTED_VALUE_STREAM', payload: { selectedValueStream } }]}
+ action | payload | expectedActions | expectedMutations
+ ${'setLoading'} | ${true} | ${[]} | ${[{ type: 'SET_LOADING', payload: true }]}
+ ${'setDateRange'} | ${{ startDate: mockStartDate }} | ${mockFetchStageDataActions} | ${[mockSetDateActionCommit]}
+ ${'setFilters'} | ${[]} | ${mockFetchStageDataActions} | ${[]}
+ ${'setSelectedStage'} | ${{ selectedStage }} | ${[{ type: 'fetchStageData' }]} | ${[{ type: 'SET_SELECTED_STAGE', payload: { selectedStage } }]}
+ ${'setSelectedValueStream'} | ${{ selectedValueStream }} | ${[{ type: 'fetchValueStreamStages' }, { type: 'fetchCycleAnalyticsData' }]} | ${[{ type: 'SET_SELECTED_VALUE_STREAM', payload: { selectedValueStream } }]}
`('$action', ({ action, payload, expectedActions, expectedMutations }) => {
const types = mutationTypes(expectedMutations);
-
it(`will dispatch ${expectedActions} and commit ${types}`, () =>
testAction({
action: actions[action],
state,
payload,
expectedMutations,
- expectedActions: expectedActions.map((a) => ({ type: a })),
+ expectedActions,
}));
});
+ describe('initializeVsa', () => {
+ let mockDispatch;
+ let mockCommit;
+ const payload = { endpoints: mockEndpoints };
+
+ beforeEach(() => {
+ mockDispatch = jest.fn(() => Promise.resolve());
+ mockCommit = jest.fn();
+ });
+
+ it('will dispatch the setLoading and fetchValueStreams actions and commit INITIALIZE_VSA', async () => {
+ await actions.initializeVsa(
+ {
+ ...state,
+ dispatch: mockDispatch,
+ commit: mockCommit,
+ },
+ payload,
+ );
+ expect(mockCommit).toHaveBeenCalledWith('INITIALIZE_VSA', { endpoints: mockEndpoints });
+ expect(mockDispatch).toHaveBeenCalledWith('setLoading', true);
+ expect(mockDispatch).toHaveBeenCalledWith('fetchValueStreams');
+ expect(mockDispatch).toHaveBeenCalledWith('setLoading', false);
+ });
+ });
+
describe('fetchCycleAnalyticsData', () => {
beforeEach(() => {
- state = { requestPath: mockRequestPath };
+ state = { endpoints: mockEndpoints };
mock = new MockAdapter(axios);
mock.onGet(mockRequestPath).reply(httpStatusCodes.OK);
});
@@ -85,7 +103,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('with a failing request', () => {
beforeEach(() => {
- state = { requestPath: mockRequestPath };
+ state = { endpoints: mockEndpoints };
mock = new MockAdapter(axios);
mock.onGet(mockRequestPath).reply(httpStatusCodes.BAD_REQUEST);
});
@@ -105,11 +123,12 @@ describe('Project Value Stream Analytics actions', () => {
});
describe('fetchStageData', () => {
- const mockStagePath = `${mockRequestPath}/events/${selectedStage.name}`;
+ const mockStagePath = /value_streams\/\w+\/stages\/\w+\/records/;
beforeEach(() => {
state = {
- requestPath: mockRequestPath,
+ ...defaultState,
+ endpoints: mockEndpoints,
startDate: mockStartDate,
selectedStage,
};
@@ -131,7 +150,8 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
- requestPath: mockRequestPath,
+ ...defaultState,
+ endpoints: mockEndpoints,
startDate: mockStartDate,
selectedStage,
};
@@ -155,7 +175,8 @@ describe('Project Value Stream Analytics actions', () => {
describe('with a failing request', () => {
beforeEach(() => {
state = {
- requestPath: mockRequestPath,
+ ...defaultState,
+ endpoints: mockEndpoints,
startDate: mockStartDate,
selectedStage,
};
@@ -179,8 +200,7 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
- features,
- fullPath: mockFullPath,
+ endpoints: mockEndpoints,
};
mock = new MockAdapter(axios);
mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
@@ -199,26 +219,6 @@ describe('Project Value Stream Analytics actions', () => {
],
}));
- describe('with cycleAnalyticsForGroups=false', () => {
- beforeEach(() => {
- state = {
- features: { cycleAnalyticsForGroups: false },
- fullPath: mockFullPath,
- };
- mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
- });
-
- it("does not dispatch the 'fetchStageMedians' request", () =>
- testAction({
- action: actions.fetchValueStreams,
- state,
- payload: {},
- expectedMutations: [{ type: 'REQUEST_VALUE_STREAMS' }],
- expectedActions: [{ type: 'receiveValueStreamsSuccess' }, { type: 'setSelectedStage' }],
- }));
- });
-
describe('with a failing request', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -271,7 +271,7 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
- fullPath: mockFullPath,
+ endpoints: mockEndpoints,
selectedValueStream,
};
mock = new MockAdapter(axios);
diff --git a/spec/frontend/cycle_analytics/store/mutations_spec.js b/spec/frontend/cycle_analytics/store/mutations_spec.js
index c2bc124d5ba..dcbc2369983 100644
--- a/spec/frontend/cycle_analytics/store/mutations_spec.js
+++ b/spec/frontend/cycle_analytics/store/mutations_spec.js
@@ -21,15 +21,12 @@ const convertedEvents = issueEvents.events;
const mockRequestPath = 'fake/request/path';
const mockCreatedAfter = '2020-06-18';
const mockCreatedBefore = '2020-07-18';
-const features = {
- cycleAnalyticsForGroups: true,
-};
describe('Project Value Stream Analytics mutations', () => {
useFakeDate(2020, 6, 18);
beforeEach(() => {
- state = { features };
+ state = {};
});
afterEach(() => {
@@ -61,25 +58,45 @@ describe('Project Value Stream Analytics mutations', () => {
${types.REQUEST_STAGE_MEDIANS} | ${'medians'} | ${{}}
${types.RECEIVE_STAGE_MEDIANS_ERROR} | ${'medians'} | ${{}}
`('$mutation will set $stateKey to $value', ({ mutation, stateKey, value }) => {
- mutations[mutation](state, {});
+ mutations[mutation](state);
expect(state).toMatchObject({ [stateKey]: value });
});
+ const mockInitialPayload = {
+ endpoints: { requestPath: mockRequestPath },
+ currentGroup: { title: 'cool-group' },
+ id: 1337,
+ };
+ const mockInitializedObj = {
+ endpoints: { requestPath: mockRequestPath },
+ createdAfter: mockCreatedAfter,
+ createdBefore: mockCreatedBefore,
+ };
+
it.each`
- mutation | payload | stateKey | value
- ${types.INITIALIZE_VSA} | ${{ requestPath: mockRequestPath }} | ${'requestPath'} | ${mockRequestPath}
- ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'startDate'} | ${DEFAULT_DAYS_TO_DISPLAY}
- ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'createdAfter'} | ${mockCreatedAfter}
- ${types.SET_DATE_RANGE} | ${{ startDate: DEFAULT_DAYS_TO_DISPLAY }} | ${'createdBefore'} | ${mockCreatedBefore}
- ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
- ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
- ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'summary'} | ${convertedData.summary}
- ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
- ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
- ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
- ${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
+ mutation | stateKey | value
+ ${types.INITIALIZE_VSA} | ${'endpoints'} | ${{ requestPath: mockRequestPath }}
+ ${types.INITIALIZE_VSA} | ${'createdAfter'} | ${mockCreatedAfter}
+ ${types.INITIALIZE_VSA} | ${'createdBefore'} | ${mockCreatedBefore}
+ `('$mutation will set $stateKey', ({ mutation, stateKey, value }) => {
+ mutations[mutation](state, { ...mockInitialPayload });
+
+ expect(state).toMatchObject({ ...mockInitializedObj, [stateKey]: value });
+ });
+
+ it.each`
+ mutation | payload | stateKey | value
+ ${types.SET_DATE_RANGE} | ${DEFAULT_DAYS_TO_DISPLAY} | ${'daysInPast'} | ${DEFAULT_DAYS_TO_DISPLAY}
+ ${types.SET_DATE_RANGE} | ${DEFAULT_DAYS_TO_DISPLAY} | ${'createdAfter'} | ${mockCreatedAfter}
+ ${types.SET_DATE_RANGE} | ${DEFAULT_DAYS_TO_DISPLAY} | ${'createdBefore'} | ${mockCreatedBefore}
+ ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
+ ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
+ ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'summary'} | ${convertedData.summary}
+ ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
+ ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
+ ${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
`(
'$mutation with $payload will set $stateKey to $value',
({ mutation, payload, stateKey, value }) => {
@@ -97,41 +114,10 @@ describe('Project Value Stream Analytics mutations', () => {
});
it.each`
- mutation | payload | stateKey | value
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: [] }} | ${'isEmptyStage'} | ${true}
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: rawEvents }} | ${'selectedStageEvents'} | ${convertedEvents}
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: rawEvents }} | ${'isEmptyStage'} | ${false}
- `(
- '$mutation with $payload will set $stateKey to $value',
- ({ mutation, payload, stateKey, value }) => {
- mutations[mutation](state, payload);
-
- expect(state).toMatchObject({ [stateKey]: value });
- },
- );
- });
-
- describe('with cycleAnalyticsForGroups=false', () => {
- useFakeDate(2020, 6, 18);
-
- beforeEach(() => {
- state = { features: { cycleAnalyticsForGroups: false } };
- });
-
- const formattedMedians = {
- code: '2d',
- issue: '-',
- plan: '21h',
- review: '-',
- staging: '2d',
- test: '4h',
- };
-
- it.each`
- mutation | payload | stateKey | value
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'medians'} | ${formattedMedians}
- ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${{}} | ${'medians'} | ${{}}
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${{}} | ${'medians'} | ${{}}
+ mutation | payload | stateKey | value
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${[]} | ${'isEmptyStage'} | ${true}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${rawEvents} | ${'selectedStageEvents'} | ${convertedEvents}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${rawEvents} | ${'isEmptyStage'} | ${false}
`(
'$mutation with $payload will set $stateKey to $value',
({ mutation, payload, stateKey, value }) => {
diff --git a/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js b/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js
index 7ea47c2737a..7326b84ad54 100644
--- a/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js
+++ b/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js
@@ -9,7 +9,6 @@ import SourceBranchDropdown from '~/jira_connect/branches/components/source_bran
import {
CREATE_BRANCH_ERROR_GENERIC,
CREATE_BRANCH_ERROR_WITH_CONTEXT,
- CREATE_BRANCH_SUCCESS_ALERT,
} from '~/jira_connect/branches/constants';
import createBranchMutation from '~/jira_connect/branches/graphql/mutations/create_branch.mutation.graphql';
@@ -74,10 +73,14 @@ describe('NewBranchForm', () => {
return mockApollo;
}
- function createComponent({ mockApollo } = {}) {
+ function createComponent({ mockApollo, provide } = {}) {
wrapper = shallowMount(NewBranchForm, {
localVue,
apolloProvider: mockApollo || createMockApolloProvider(),
+ provide: {
+ initialBranchName: '',
+ ...provide,
+ },
});
}
@@ -139,14 +142,8 @@ describe('NewBranchForm', () => {
await waitForPromises();
});
- it('displays a success message', () => {
- const alert = findAlert();
- expect(alert.exists()).toBe(true);
- expect(alert.text()).toBe(CREATE_BRANCH_SUCCESS_ALERT.message);
- expect(alert.props()).toMatchObject({
- title: CREATE_BRANCH_SUCCESS_ALERT.title,
- variant: 'success',
- });
+ it('emits `success` event', () => {
+ expect(wrapper.emitted('success')).toBeTruthy();
});
it('called `createBranch` mutation correctly', () => {
@@ -195,6 +192,15 @@ describe('NewBranchForm', () => {
});
});
+ describe('when `initialBranchName` is specified', () => {
+ it('sets value of branch name input to `initialBranchName` by default', () => {
+ const mockInitialBranchName = 'ap1-test-branch-name';
+
+ createComponent({ provide: { initialBranchName: mockInitialBranchName } });
+ expect(findInput().attributes('value')).toBe(mockInitialBranchName);
+ });
+ });
+
describe('error handling', () => {
describe.each`
component | componentName
diff --git a/spec/frontend/jira_connect/branches/pages/index_spec.js b/spec/frontend/jira_connect/branches/pages/index_spec.js
new file mode 100644
index 00000000000..92976dd28da
--- /dev/null
+++ b/spec/frontend/jira_connect/branches/pages/index_spec.js
@@ -0,0 +1,65 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import NewBranchForm from '~/jira_connect/branches/components/new_branch_form.vue';
+import {
+ I18N_PAGE_TITLE_WITH_BRANCH_NAME,
+ I18N_PAGE_TITLE_DEFAULT,
+} from '~/jira_connect/branches/constants';
+import JiraConnectNewBranchPage from '~/jira_connect/branches/pages/index.vue';
+import { sprintf } from '~/locale';
+
+describe('JiraConnectNewBranchPage', () => {
+ let wrapper;
+
+ const findPageTitle = () => wrapper.find('h1');
+ const findNewBranchForm = () => wrapper.findComponent(NewBranchForm);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ function createComponent({ provide } = {}) {
+ wrapper = shallowMount(JiraConnectNewBranchPage, {
+ provide: {
+ initialBranchName: '',
+ successStateSvgPath: '',
+ ...provide,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('page title', () => {
+ it.each`
+ initialBranchName | pageTitle
+ ${undefined} | ${I18N_PAGE_TITLE_DEFAULT}
+ ${'ap1-test-button'} | ${sprintf(I18N_PAGE_TITLE_WITH_BRANCH_NAME, { jiraIssue: 'ap1-test-button' })}
+ `(
+ 'sets page title to "$pageTitle" when initial branch name is "$initialBranchName"',
+ ({ initialBranchName, pageTitle }) => {
+ createComponent({ provide: { initialBranchName } });
+
+ expect(findPageTitle().text()).toBe(pageTitle);
+ },
+ );
+ });
+
+ it('renders NewBranchForm by default', () => {
+ createComponent();
+
+ expect(findNewBranchForm().exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(false);
+ });
+
+  describe('when `success` event is emitted from NewBranchForm', () => {
+ it('renders the success state', async () => {
+ createComponent();
+
+ const newBranchForm = findNewBranchForm();
+ await newBranchForm.vm.$emit('success');
+
+ expect(findNewBranchForm().exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/repository/components/delete_blob_modal_spec.js b/spec/frontend/repository/components/delete_blob_modal_spec.js
index 78e288a8ba4..8927eb8bd24 100644
--- a/spec/frontend/repository/components/delete_blob_modal_spec.js
+++ b/spec/frontend/repository/components/delete_blob_modal_spec.js
@@ -37,6 +37,8 @@ describe('DeleteBlobModal', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findForm = () => findModal().findComponent(GlForm);
+ const findCommitTextarea = () => findForm().findComponent(GlFormTextarea);
+ const findTargetInput = () => findForm().findComponent(GlFormInput);
afterEach(() => {
wrapper.destroy();
@@ -65,18 +67,6 @@ describe('DeleteBlobModal', () => {
expect(findForm().attributes('action')).toBe(initialProps.deletePath);
});
- it('submits the form', async () => {
- createFullComponent();
- await nextTick();
-
- const submitSpy = jest.spyOn(findForm().element, 'submit');
- findModal().vm.$emit('primary', { preventDefault: () => {} });
- await nextTick();
-
- expect(submitSpy).toHaveBeenCalled();
- submitSpy.mockRestore();
- });
-
it.each`
component | defaultValue | canPushCode | targetBranch | originalBranch | exist
${GlFormTextarea} | ${initialProps.commitMessage} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true}
@@ -135,4 +125,62 @@ describe('DeleteBlobModal', () => {
},
);
});
+
+ describe('form submission', () => {
+ let submitSpy;
+
+ beforeEach(async () => {
+ createFullComponent();
+ await nextTick();
+ submitSpy = jest.spyOn(findForm().element, 'submit');
+ });
+
+ afterEach(() => {
+ submitSpy.mockRestore();
+ });
+
+ const fillForm = async (inputValue = {}) => {
+ const { targetText, commitText } = inputValue;
+
+ await findTargetInput().vm.$emit('input', targetText);
+ await findCommitTextarea().vm.$emit('input', commitText);
+ };
+
+ describe('invalid form', () => {
+ beforeEach(async () => {
+ await fillForm({ targetText: '', commitText: '' });
+ });
+
+ it('disables submit button', async () => {
+ expect(findModal().props('actionPrimary').attributes[0]).toEqual(
+ expect.objectContaining({ disabled: true }),
+ );
+ });
+
+ it('does not submit form', async () => {
+ findModal().vm.$emit('primary', { preventDefault: () => {} });
+ expect(submitSpy).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('valid form', () => {
+ beforeEach(async () => {
+ await fillForm({
+ targetText: 'some valid target branch',
+ commitText: 'some valid commit message',
+ });
+ });
+
+ it('enables submit button', async () => {
+ expect(findModal().props('actionPrimary').attributes[0]).toEqual(
+ expect.objectContaining({ disabled: false }),
+ );
+ });
+
+ it('submits form', async () => {
+ findModal().vm.$emit('primary', { preventDefault: () => {} });
+ expect(submitSpy).toHaveBeenCalled();
+ });
+ });
+ });
});
diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index 63a5a9184c1..d88e262883f 100644
--- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -97,4 +97,34 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter do
expect(filter.send(:projects)).to eq([project.full_path])
end
end
+
+ context 'checking N+1' do
+ let_it_be(:normal_project) { create(:project, :public) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_project) { create(:project, group: group) }
+ let_it_be(:nested_group) { create(:group, :nested) }
+ let_it_be(:nested_project) { create(:project, group: nested_group) }
+ let_it_be(:normal_project_reference) { get_reference(normal_project) }
+ let_it_be(:group_project_reference) { get_reference(group_project) }
+ let_it_be(:nested_project_reference) { get_reference(nested_project) }
+
+  it 'does not have N+1 queries for multiple project references', :use_sql_query_cache do
+ markdown = "#{normal_project_reference}"
+
+ # warm up first
+ reference_filter(markdown)
+
+ max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ reference_filter(markdown)
+ end.count
+
+ expect(max_count).to eq 1
+
+ markdown = "#{normal_project_reference} #{invalidate_reference(normal_project_reference)} #{group_project_reference} #{nested_project_reference}"
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(max_count)
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth/result_spec.rb b/spec/lib/gitlab/auth/result_spec.rb
index 2953538c15e..f8de4b80db2 100644
--- a/spec/lib/gitlab/auth/result_spec.rb
+++ b/spec/lib/gitlab/auth/result_spec.rb
@@ -3,10 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::Auth::Result do
+ let_it_be(:actor) { create(:user) }
+
subject { described_class.new(actor, nil, nil, []) }
context 'when actor is User' do
- let(:actor) { create(:user) }
+ let_it_be(:actor) { create(:user) }
it 'returns auth_user' do
expect(subject.auth_user).to eq(actor)
@@ -18,7 +20,7 @@ RSpec.describe Gitlab::Auth::Result do
end
context 'when actor is Deploy token' do
- let(:actor) { create(:deploy_token) }
+ let_it_be(:actor) { create(:deploy_token) }
it 'returns deploy token' do
expect(subject.deploy_token).to eq(actor)
@@ -28,4 +30,50 @@ RSpec.describe Gitlab::Auth::Result do
expect(subject.auth_user).to be_nil
end
end
+
+ describe '#authentication_abilities_include?' do
+ context 'when authentication abilities are empty' do
+ it 'returns false' do
+ expect(subject.authentication_abilities_include?(:read_code)).to be_falsey
+ end
+ end
+
+ context 'when authentication abilities are not empty' do
+ subject { described_class.new(actor, nil, nil, [:push_code]) }
+
+ it 'returns false when ability is not allowed' do
+ expect(subject.authentication_abilities_include?(:read_code)).to be_falsey
+ end
+
+ it 'returns true when ability is allowed' do
+ expect(subject.authentication_abilities_include?(:push_code)).to be_truthy
+ end
+ end
+ end
+
+ describe '#can_perform_action_on_project?' do
+ let(:project) { double }
+
+    it 'returns true if the actor can perform the given action on the given project' do
+ expect(Ability).to receive(:allowed?).with(actor, :push_code, project).and_return(true)
+ expect(subject.can_perform_action_on_project?(:push_code, project)).to be_truthy
+ end
+
+    it 'returns false if the actor cannot perform the given action on the given project' do
+ expect(Ability).to receive(:allowed?).with(actor, :push_code, project).and_return(false)
+ expect(subject.can_perform_action_on_project?(:push_code, project)).to be_falsey
+ end
+ end
+
+ describe '#can?' do
+    it 'returns true if the actor can perform the given action' do
+ expect(actor).to receive(:can?).with(:push_code).and_return(true)
+ expect(subject.can?(:push_code)).to be_truthy
+ end
+
+    it 'returns false if the actor cannot perform the given action' do
+ expect(actor).to receive(:can?).with(:push_code).and_return(false)
+ expect(subject.can?(:push_code)).to be_falsey
+ end
+ end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
new file mode 100644
index 00000000000..eaad5f8158b
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsTypeNew do
+ let(:integrations) { table(:integrations) }
+ let(:namespaced_integrations) { Gitlab::Integrations::StiType.namespaced_integrations }
+
+ before do
+ integrations.connection.execute 'ALTER TABLE integrations DISABLE TRIGGER "trigger_type_new_on_insert"'
+
+ namespaced_integrations.each_with_index do |type, i|
+ integrations.create!(id: i + 1, type: "#{type}Service")
+ end
+ ensure
+ integrations.connection.execute 'ALTER TABLE integrations ENABLE TRIGGER "trigger_type_new_on_insert"'
+ end
+
+ it 'backfills `type_new` for the selected records' do
+ described_class.new.perform(2, 10)
+
+ expect(integrations.where(id: 2..10).pluck(:type, :type_new)).to contain_exactly(
+ ['AssemblaService', 'Integrations::Assembla'],
+ ['BambooService', 'Integrations::Bamboo'],
+ ['BugzillaService', 'Integrations::Bugzilla'],
+ ['BuildkiteService', 'Integrations::Buildkite'],
+ ['CampfireService', 'Integrations::Campfire'],
+ ['ConfluenceService', 'Integrations::Confluence'],
+ ['CustomIssueTrackerService', 'Integrations::CustomIssueTracker'],
+ ['DatadogService', 'Integrations::Datadog'],
+ ['DiscordService', 'Integrations::Discord']
+ )
+
+ expect(integrations.where.not(id: 2..10)).to all(have_attributes(type_new: nil))
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index 8ee534734f0..eb2ee0d8779 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -61,18 +61,15 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
.and_raise(exception)
end
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:error)
- .with(
- message: 'importer failed',
- import_source: :github,
- project_id: project.id,
- parallel: false,
- importer: 'Gitlab::GithubImport::Importer::LfsObjectImporter',
- 'error.message': 'Invalid Project URL'
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:error)
+ .with(
+ message: 'importer failed',
+ project_id: project.id,
+ parallel: false,
+ importer: 'Gitlab::GithubImport::Importer::LfsObjectImporter',
+ 'error.message': 'Invalid Project URL'
+ )
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
diff --git a/spec/lib/gitlab/github_import/logger_spec.rb b/spec/lib/gitlab/github_import/logger_spec.rb
new file mode 100644
index 00000000000..e7e09505747
--- /dev/null
+++ b/spec/lib/gitlab/github_import/logger_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Logger do
+ subject(:logger) { described_class.new('/dev/null') }
+
+ let(:now) { Time.zone.now }
+
+ describe '#format_message' do
+ before do
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('new-correlation-id')
+ end
+
+ it 'formats strings' do
+ output = subject.format_message('INFO', now, 'test', 'Hello world')
+
+ expect(Gitlab::Json.parse(output)).to eq({
+ 'severity' => 'INFO',
+ 'time' => now.utc.iso8601(3),
+ 'message' => 'Hello world',
+ 'correlation_id' => 'new-correlation-id',
+ 'feature_category' => 'importers',
+ 'import_source' => 'github'
+ })
+ end
+
+ it 'formats hashes' do
+ output = subject.format_message('INFO', now, 'test', { hello: 1 })
+
+ expect(Gitlab::Json.parse(output)).to eq({
+ 'severity' => 'INFO',
+ 'time' => now.utc.iso8601(3),
+ 'hello' => 1,
+ 'correlation_id' => 'new-correlation-id',
+ 'feature_category' => 'importers',
+ 'import_source' => 'github'
+ })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index d56d4708385..0ea8f8697d9 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -79,26 +79,23 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
.to receive(:sequential_import)
.and_return([])
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- message: 'starting importer',
- import_source: :github,
- parallel: false,
- project_id: project.id,
- importer: 'Class'
- )
- expect(logger)
- .to receive(:info)
- .with(
- message: 'importer finished',
- import_source: :github,
- parallel: false,
- project_id: project.id,
- importer: 'Class'
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'starting importer',
+ parallel: false,
+ project_id: project.id,
+ importer: 'Class'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'importer finished',
+ parallel: false,
+ project_id: project.id,
+ importer: 'Class'
+ )
importer.execute
end
@@ -112,35 +109,32 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
.to receive(:sequential_import)
.and_raise(exception)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- message: 'starting importer',
- import_source: :github,
- parallel: false,
- project_id: project.id,
- importer: 'Class'
- )
- expect(logger)
- .to receive(:error)
- .with(
- message: 'importer failed',
- import_source: :github,
- project_id: project.id,
- parallel: false,
- importer: 'Class',
- 'error.message': 'some error'
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'starting importer',
+ parallel: false,
+ project_id: project.id,
+ importer: 'Class'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:error)
+ .with(
+ message: 'importer failed',
+ project_id: project.id,
+ parallel: false,
+ importer: 'Class',
+ 'error.message': 'some error'
+ )
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(
exception,
- import_source: :github,
parallel: false,
project_id: project.id,
+ import_source: :github,
importer: 'Class'
)
.and_call_original
diff --git a/spec/lib/gitlab/import/logger_spec.rb b/spec/lib/gitlab/import/logger_spec.rb
new file mode 100644
index 00000000000..60978aaa25c
--- /dev/null
+++ b/spec/lib/gitlab/import/logger_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Import::Logger do
+ subject { described_class.new('/dev/null') }
+
+ let(:now) { Time.zone.now }
+
+ describe '#format_message' do
+ before do
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('new-correlation-id')
+ end
+
+ it 'formats strings' do
+ output = subject.format_message('INFO', now, 'test', 'Hello world')
+
+ expect(Gitlab::Json.parse(output)).to eq({
+ 'severity' => 'INFO',
+ 'time' => now.utc.iso8601(3),
+ 'message' => 'Hello world',
+ 'correlation_id' => 'new-correlation-id',
+ 'feature_category' => 'importers'
+ })
+ end
+
+ it 'formats hashes' do
+ output = subject.format_message('INFO', now, 'test', { hello: 1 })
+
+ expect(Gitlab::Json.parse(output)).to eq({
+ 'severity' => 'INFO',
+ 'time' => now.utc.iso8601(3),
+ 'hello' => 1,
+ 'correlation_id' => 'new-correlation-id',
+ 'feature_category' => 'importers'
+ })
+ end
+ end
+end
diff --git a/spec/migrations/backfill_integrations_type_new_spec.rb b/spec/migrations/backfill_integrations_type_new_spec.rb
new file mode 100644
index 00000000000..5b8fbf6f555
--- /dev/null
+++ b/spec/migrations/backfill_integrations_type_new_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillIntegrationsTypeNew do
+ let_it_be(:migration) { described_class::MIGRATION }
+ let_it_be(:integrations) { table(:integrations) }
+
+ before do
+ integrations.create!(id: 1)
+ integrations.create!(id: 2)
+ integrations.create!(id: 3)
+ integrations.create!(id: 4)
+ integrations.create!(id: 5)
+ end
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of integrations' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :integrations,
+ column_name: :id,
+ interval: described_class::INTERVAL
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/models/ci/runner_namespace_spec.rb b/spec/models/ci/runner_namespace_spec.rb
index 41d805adb9f..4e7cf7a3cb3 100644
--- a/spec/models/ci/runner_namespace_spec.rb
+++ b/spec/models/ci/runner_namespace_spec.rb
@@ -4,6 +4,12 @@ require 'spec_helper'
RSpec.describe Ci::RunnerNamespace do
it_behaves_like 'includes Limitable concern' do
+ before do
+ skip_default_enabled_yaml_check
+
+ stub_feature_flags(ci_runner_limits_override: false)
+ end
+
subject { build(:ci_runner_namespace, group: create(:group, :nested), runner: create(:ci_runner, :group)) }
end
end
diff --git a/spec/models/ci/runner_project_spec.rb b/spec/models/ci/runner_project_spec.rb
index 13369dba2cf..fef1416a84a 100644
--- a/spec/models/ci/runner_project_spec.rb
+++ b/spec/models/ci/runner_project_spec.rb
@@ -4,6 +4,12 @@ require 'spec_helper'
RSpec.describe Ci::RunnerProject do
it_behaves_like 'includes Limitable concern' do
+ before do
+ skip_default_enabled_yaml_check
+
+ stub_feature_flags(ci_runner_limits_override: false)
+ end
+
subject { build(:ci_runner_project, project: create(:project), runner: create(:ci_runner, :project)) }
end
end
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index 6d222046998..17b988a60c5 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -98,14 +98,33 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
before do
create(:ci_runner, runner_type: :project_type, projects: [project], contacted_at: 1.second.ago)
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override)
end
- it 'does not create runner' do
- request
+ context 'with ci_runner_limits_override FF disabled' do
+ let(:ci_runner_limits_override) { false }
+
+ it 'does not create runner' do
+ request
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to include('runner_projects.base' => ['Maximum number of ci registered project runners (1) exceeded'])
- expect(project.runners.reload.size).to eq(1)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('runner_projects.base' => ['Maximum number of ci registered project runners (1) exceeded'])
+ expect(project.runners.reload.size).to eq(1)
+ end
+ end
+
+ context 'with ci_runner_limits_override FF enabled' do
+ let(:ci_runner_limits_override) { true }
+
+ it 'creates runner' do
+ request
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['message']).to be_nil
+ expect(project.runners.reload.size).to eq(2)
+ end
end
end
@@ -113,6 +132,9 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
before do
create(:ci_runner, runner_type: :project_type, projects: [project], created_at: 14.months.ago, contacted_at: 13.months.ago)
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: false)
end
it 'creates runner' do
@@ -182,14 +204,33 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
before do
create(:ci_runner, runner_type: :group_type, groups: [group], contacted_at: nil, created_at: 1.month.ago)
create(:plan_limits, :default_plan, ci_registered_group_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override)
end
- it 'does not create runner' do
- request
+ context 'with ci_runner_limits_override FF disabled' do
+ let(:ci_runner_limits_override) { false }
+
+ it 'does not create runner' do
+ request
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to include('runner_namespaces.base' => ['Maximum number of ci registered group runners (1) exceeded'])
- expect(group.runners.reload.size).to eq(1)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('runner_namespaces.base' => ['Maximum number of ci registered group runners (1) exceeded'])
+ expect(group.runners.reload.size).to eq(1)
+ end
+ end
+
+ context 'with ci_runner_limits_override FF enabled' do
+ let(:ci_runner_limits_override) { true }
+
+ it 'creates runner' do
+ request
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['message']).to be_nil
+ expect(group.runners.reload.size).to eq(2)
+ end
end
end
@@ -198,6 +239,9 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
create(:ci_runner, runner_type: :group_type, groups: [group], created_at: 4.months.ago, contacted_at: 3.months.ago)
create(:ci_runner, runner_type: :group_type, groups: [group], contacted_at: nil, created_at: 4.months.ago)
create(:plan_limits, :default_plan, ci_registered_group_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: false)
end
it 'creates runner' do
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index 82fb4440429..902938d7d02 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -1003,13 +1003,31 @@ RSpec.describe API::Ci::Runners do
context 'when it exceeds the application limits' do
before do
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
+
+ skip_default_enabled_yaml_check
+ stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override)
end
- it 'does not enable specific runner' do
- expect do
- post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
- end.not_to change { project.runners.count }
- expect(response).to have_gitlab_http_status(:bad_request)
+ context 'with ci_runner_limits_override FF disabled' do
+ let(:ci_runner_limits_override) { false }
+
+ it 'does not enable specific runner' do
+ expect do
+ post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
+ end.not_to change { project.runners.count }
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'with ci_runner_limits_override FF enabled' do
+ let(:ci_runner_limits_override) { true }
+
+ it 'enables specific runner' do
+ expect do
+ post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
+ end.to change { project.runners.count }
+ expect(response).to have_gitlab_http_status(:created)
+ end
end
end
end
diff --git a/spec/support/matchers/background_migrations_matchers.rb b/spec/support/matchers/background_migrations_matchers.rb
index 08bbbcc7438..d3833a1e8e8 100644
--- a/spec/support/matchers/background_migrations_matchers.rb
+++ b/spec/support/matchers/background_migrations_matchers.rb
@@ -64,3 +64,33 @@ RSpec::Matchers.define :be_scheduled_migration_with_multiple_args do |*expected|
arg.sort == expected.sort
end
end
+
+RSpec::Matchers.define :have_scheduled_batched_migration do |table_name: nil, column_name: nil, job_arguments: [], **attributes|
+ define_method :matches? do |migration|
+ # Default arguments passed by BatchedMigrationWrapper (values don't matter here)
+ expect(migration).to be_background_migration_with_arguments([
+ _start_id = 1,
+ _stop_id = 2,
+ table_name,
+ column_name,
+ _sub_batch_size = 10,
+ _pause_ms = 100,
+ *job_arguments
+ ])
+
+ batched_migrations =
+ Gitlab::Database::BackgroundMigration::BatchedMigration
+ .for_configuration(migration, table_name, column_name, job_arguments)
+
+ expect(batched_migrations.count).to be(1)
+ expect(batched_migrations).to all(have_attributes(attributes)) if attributes.present?
+ end
+
+ define_method :does_not_match? do |migration|
+ batched_migrations =
+ Gitlab::Database::BackgroundMigration::BatchedMigration
+ .where(job_class_name: migration)
+
+ expect(batched_migrations.count).to be(0)
+ end
+end
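
A sketch of the new matcher in use, mirroring the backfill_integrations_type_new migration spec earlier in this commit (`migration` is the background migration job class name under test; the interval value is illustrative):

expect(migration).to have_scheduled_batched_migration(
  table_name: :integrations,
  column_name: :id,
  interval: 2.minutes
)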
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index ce02100ebfc..ddcf922ad68 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -60,26 +60,23 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(importer_instance)
.to receive(:execute)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- github_id: 1,
- message: 'starting importer',
- import_source: :github,
- project_id: 1,
- importer: 'klass_name'
- )
- expect(logger)
- .to receive(:info)
- .with(
- github_id: 1,
- message: 'importer finished',
- import_source: :github,
- project_id: 1,
- importer: 'klass_name'
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ github_id: 1,
+ message: 'starting importer',
+ project_id: 1,
+ importer: 'klass_name'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ github_id: 1,
+ message: 'importer finished',
+ project_id: 1,
+ importer: 'klass_name'
+ )
worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
@@ -100,31 +97,28 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
.to receive(:execute)
.and_raise(exception)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- github_id: 1,
- message: 'starting importer',
- import_source: :github,
- project_id: project.id,
- importer: 'klass_name'
- )
- expect(logger)
- .to receive(:error)
- .with(
- github_id: 1,
- message: 'importer failed',
- import_source: :github,
- project_id: project.id,
- importer: 'klass_name',
- 'error.message': 'some error',
- 'github.data': {
- 'github_id' => 1,
- 'number' => 10
- }
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ github_id: 1,
+ message: 'starting importer',
+ project_id: project.id,
+ importer: 'klass_name'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:error)
+ .with(
+ github_id: 1,
+ message: 'importer failed',
+ project_id: project.id,
+ importer: 'klass_name',
+ 'error.message': 'some error',
+ 'github.data': {
+ 'github_id' => 1,
+ 'number' => 10
+ }
+ )
expect(Gitlab::ErrorTracking)
.to receive(:track_and_raise_exception)
@@ -143,21 +137,18 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
it 'logs error when representation does not have a github_id' do
expect(importer_class).not_to receive(:new)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:error)
- .with(
- github_id: nil,
- message: 'importer failed',
- import_source: :github,
- project_id: project.id,
- importer: 'klass_name',
- 'error.message': 'key not found: :github_id',
- 'github.data': {
- 'number' => 10
- }
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:error)
+ .with(
+ github_id: nil,
+ message: 'importer failed',
+ project_id: project.id,
+ importer: 'klass_name',
+ 'error.message': 'key not found: :github_id',
+ 'github.data': {
+ 'number' => 10
+ }
+ )
expect(Gitlab::ErrorTracking)
.to receive(:track_and_raise_exception)
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
index 651ea77a71c..c0e2df6f985 100644
--- a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -36,24 +36,21 @@ RSpec.describe Gitlab::GithubImport::StageMethods do
an_instance_of(Project)
)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- message: 'starting stage',
- import_source: :github,
- project_id: project.id,
- import_stage: 'DummyStage'
- )
- expect(logger)
- .to receive(:info)
- .with(
- message: 'stage finished',
- import_source: :github,
- project_id: project.id,
- import_stage: 'DummyStage'
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'starting stage',
+ project_id: project.id,
+ import_stage: 'DummyStage'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'stage finished',
+ project_id: project.id,
+ import_stage: 'DummyStage'
+ )
worker.perform(project.id)
end
@@ -70,25 +67,22 @@ RSpec.describe Gitlab::GithubImport::StageMethods do
.to receive(:try_import)
.and_raise(exception)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- message: 'starting stage',
- import_source: :github,
- project_id: project.id,
- import_stage: 'DummyStage'
- )
- expect(logger)
- .to receive(:error)
- .with(
- message: 'stage failed',
- import_source: :github,
- project_id: project.id,
- import_stage: 'DummyStage',
- 'error.message': 'some error'
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'starting stage',
+ project_id: project.id,
+ import_stage: 'DummyStage'
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:error)
+ .with(
+ message: 'stage failed',
+ project_id: project.id,
+ import_stage: 'DummyStage',
+ 'error.message': 'some error'
+ )
expect(Gitlab::ErrorTracking)
.to receive(:track_and_raise_exception)
diff --git a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
index 8dea24dc74f..132fe1dc618 100644
--- a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
@@ -26,21 +26,18 @@ RSpec.describe Gitlab::GithubImport::Stage::FinishImportWorker do
.to receive(:increment)
.and_call_original
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- message: 'GitHub project import finished',
- import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
- import_source: :github,
- object_counts: {
- 'fetched' => {},
- 'imported' => {}
- },
- project_id: project.id,
- duration_s: a_kind_of(Numeric)
- )
- end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ message: 'GitHub project import finished',
+ import_stage: 'Gitlab::GithubImport::Stage::FinishImportWorker',
+ object_counts: {
+ 'fetched' => {},
+ 'imported' => {}
+ },
+ project_id: project.id,
+ duration_s: a_kind_of(Numeric)
+ )
worker.report_import_time(project)
end
diff --git a/spec/workers/merge_request_mergeability_check_worker_spec.rb b/spec/workers/merge_request_mergeability_check_worker_spec.rb
index 0349de5cbb3..32debcf9651 100644
--- a/spec/workers/merge_request_mergeability_check_worker_spec.rb
+++ b/spec/workers/merge_request_mergeability_check_worker_spec.rb
@@ -10,6 +10,12 @@ RSpec.describe MergeRequestMergeabilityCheckWorker do
it 'does not execute MergeabilityCheckService' do
expect(MergeRequests::MergeabilityCheckService).not_to receive(:new)
+ expect(Sidekiq.logger).to receive(:error).once
+ .with(
+ merge_request_id: 1,
+ worker: "MergeRequestMergeabilityCheckWorker",
+ message: 'Failed to find merge request')
+
subject.perform(1)
end
end
@@ -24,6 +30,20 @@ RSpec.describe MergeRequestMergeabilityCheckWorker do
subject.perform(merge_request.id)
end
+
+ it 'logs a structured error when the mergeability check fails' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request) do |service|
+ expect(service).to receive(:execute).and_return(double(error?: true, message: "solar flares"))
+ end
+
+ expect(Sidekiq.logger).to receive(:error).once
+ .with(
+ merge_request_id: merge_request.id,
+ worker: "MergeRequestMergeabilityCheckWorker",
+ message: 'Failed to check mergeability of merge request: solar flares')
+
+ subject.perform(merge_request.id)
+ end
end
it_behaves_like 'an idempotent worker' do
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index d3c19af030d..417ee71dbdc 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -3,7 +3,6 @@ package api
import (
"bytes"
"encoding/json"
- "errors"
"fmt"
"io"
"net/http"
@@ -40,8 +39,6 @@ type API struct {
Version string
}
-var ErrNotGeoSecondary = errors.New("this is not a Geo secondary site")
-
var (
requestsCounter = promauto.NewCounterVec(
prometheus.CounterOpts{
@@ -399,7 +396,6 @@ func validResponseContentType(resp *http.Response) bool {
return helper.IsContentType(ResponseContentType, resp.Header.Get("Content-Type"))
}
-// TODO: Cache the result of the API requests https://gitlab.com/gitlab-org/gitlab/-/issues/329671
func (api *API) GetGeoProxyURL() (*url.URL, error) {
geoProxyApiUrl := *api.URL
geoProxyApiUrl.Path, geoProxyApiUrl.RawPath = joinURLPath(api.URL, geoProxyEndpointPath)
@@ -424,10 +420,6 @@ func (api *API) GetGeoProxyURL() (*url.URL, error) {
return nil, fmt.Errorf("GetGeoProxyURL: decode response: %v", err)
}
- if response.GeoProxyURL == "" {
- return nil, ErrNotGeoSecondary
- }
-
geoProxyURL, err := url.Parse(response.GeoProxyURL)
if err != nil {
return nil, fmt.Errorf("GetGeoProxyURL: Could not parse Geo proxy URL: %v, err: %v", response.GeoProxyURL, err)
diff --git a/workhorse/internal/api/api_test.go b/workhorse/internal/api/api_test.go
index 4267a184a19..43e3604cc9c 100644
--- a/workhorse/internal/api/api_test.go
+++ b/workhorse/internal/api/api_test.go
@@ -22,16 +22,14 @@ func TestGetGeoProxyURLWhenGeoSecondary(t *testing.T) {
geoProxyURL, err := getGeoProxyURLGivenResponse(t, `{"geo_proxy_url":"http://primary"}`)
require.NoError(t, err)
- require.NotNil(t, geoProxyURL)
require.Equal(t, "http://primary", geoProxyURL.String())
}
func TestGetGeoProxyURLWhenGeoPrimaryOrNonGeo(t *testing.T) {
geoProxyURL, err := getGeoProxyURLGivenResponse(t, "{}")
- require.Error(t, err)
- require.Equal(t, ErrNotGeoSecondary, err)
- require.Nil(t, geoProxyURL)
+ require.NoError(t, err)
+ require.Equal(t, "", geoProxyURL.String())
}
func getGeoProxyURLGivenResponse(t *testing.T, givenInternalApiResponse string) (*url.URL, error) {
diff --git a/workhorse/internal/upstream/upstream.go b/workhorse/internal/upstream/upstream.go
index 983b0516ee9..d6e5e7766b5 100644
--- a/workhorse/internal/upstream/upstream.go
+++ b/workhorse/internal/upstream/upstream.go
@@ -10,6 +10,7 @@ import (
"fmt"
"os"
"sync"
+ "time"
"net/http"
"net/url"
@@ -35,6 +36,7 @@ var (
requestHeaderBlacklist = []string{
upload.RewrittenFieldsHeader,
}
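+ // How often the background goroutine refreshes the Geo proxy URL (see pollGeoProxyAPI)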
+ geoProxyApiPollingInterval = 10 * time.Second
)
type upstream struct {
@@ -48,6 +50,7 @@ type upstream struct {
geoLocalRoutes []routeEntry
geoProxyCableRoute routeEntry
geoProxyRoute routeEntry
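+ // Test-only hook: receives a message after each callGeoProxyAPI() call; nil outside tests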
+ geoProxyTestChannel chan struct{}
accessLogger *logrus.Logger
enableGeoProxyFeature bool
mu sync.RWMutex
@@ -61,6 +64,9 @@ func newUpstream(cfg config.Config, accessLogger *logrus.Logger, routesCallback
up := upstream{
Config: cfg,
accessLogger: accessLogger,
+ // Kind of a feature flag. See https://gitlab.com/groups/gitlab-org/-/epics/5914#note_564974130
+ enableGeoProxyFeature: os.Getenv("GEO_SECONDARY_PROXY") == "1",
+ geoProxyBackend: &url.URL{},
}
if up.Backend == nil {
up.Backend = DefaultBackend
@@ -79,10 +85,13 @@ func newUpstream(cfg config.Config, accessLogger *logrus.Logger, routesCallback
up.Version,
up.RoundTripper,
)
- // Kind of a feature flag. See https://gitlab.com/groups/gitlab-org/-/epics/5914#note_564974130
- up.enableGeoProxyFeature = os.Getenv("GEO_SECONDARY_PROXY") == "1"
+
routesCallback(&up)
+ if up.enableGeoProxyFeature {
+ go up.pollGeoProxyAPI()
+ }
+
var correlationOpts []correlation.InboundHandlerOption
if cfg.PropagateCorrelationID {
correlationOpts = append(correlationOpts, correlation.WithPropagation())
@@ -168,19 +177,14 @@ func (u *upstream) findRoute(cleanedPath string, r *http.Request) *routeEntry {
}
func (u *upstream) findGeoProxyRoute(cleanedPath string, r *http.Request) *routeEntry {
- geoProxyURL, err := u.APIClient.GetGeoProxyURL()
+ u.mu.RLock()
+ defer u.mu.RUnlock()
- if err == nil {
- u.setGeoProxyRoutes(geoProxyURL)
- return u.matchGeoProxyRoute(cleanedPath, r)
- } else if err != apipkg.ErrNotGeoSecondary {
- log.WithRequest(r).WithError(err).Error("Geo Proxy: Unable to determine Geo Proxy URL. Falling back to normal routing")
+ if u.geoProxyBackend.String() == "" {
+ log.WithRequest(r).Debug("Geo Proxy: Not a Geo proxy")
+ return nil
}
- return nil
-}
-
-func (u *upstream) matchGeoProxyRoute(cleanedPath string, r *http.Request) *routeEntry {
// Some routes are safe to serve from this GitLab instance
for _, ro := range u.geoLocalRoutes {
if ro.isMatch(cleanedPath, r) {
@@ -191,8 +195,6 @@ func (u *upstream) matchGeoProxyRoute(cleanedPath string, r *http.Request) *rout
log.WithRequest(r).WithFields(log.Fields{"geoProxyBackend": u.geoProxyBackend}).Debug("Geo Proxy: Forward this request")
- u.mu.RLock()
- defer u.mu.RUnlock()
if cleanedPath == "/-/cable" {
return &u.geoProxyCableRoute
}
@@ -200,15 +202,40 @@ func (u *upstream) matchGeoProxyRoute(cleanedPath string, r *http.Request) *rout
return &u.geoProxyRoute
}
-func (u *upstream) setGeoProxyRoutes(geoProxyURL *url.URL) {
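+// pollGeoProxyAPI periodically refreshes the Geo proxy URL in a background goroutine.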
+func (u *upstream) pollGeoProxyAPI() {
+ for {
+ u.callGeoProxyAPI()
+
+ // Notify tests when callGeoProxyAPI() finishes
+ if u.geoProxyTestChannel != nil {
+ u.geoProxyTestChannel <- struct{}{}
+ }
+
+ time.Sleep(geoProxyApiPollingInterval)
+ }
+}
+
+// callGeoProxyAPI calls /api/v4/geo/proxy and updates the Geo proxy routes when the URL changes
+func (u *upstream) callGeoProxyAPI() {
+ geoProxyURL, err := u.APIClient.GetGeoProxyURL()
+ if err != nil {
+ log.WithError(err).WithFields(log.Fields{"geoProxyBackend": u.geoProxyBackend}).Error("Geo Proxy: Unable to determine Geo Proxy URL. Falling back to the cached value.")
+ return
+ }
+
+ if u.geoProxyBackend.String() != geoProxyURL.String() {
+ log.WithFields(log.Fields{"oldGeoProxyURL": u.geoProxyBackend, "newGeoProxyURL": geoProxyURL}).Info("Geo Proxy: URL changed")
+ u.updateGeoProxyFields(geoProxyURL)
+ }
+}
+
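+// updateGeoProxyFields swaps the cached Geo proxy backend and rebuilds the proxy routes under the write lock.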
+func (u *upstream) updateGeoProxyFields(geoProxyURL *url.URL) {
u.mu.Lock()
defer u.mu.Unlock()
- if u.geoProxyBackend == nil || u.geoProxyBackend.String() != geoProxyURL.String() {
- log.WithFields(log.Fields{"geoProxyURL": geoProxyURL}).Debug("Geo Proxy: Update GeoProxyRoute")
- u.geoProxyBackend = geoProxyURL
- geoProxyRoundTripper := roundtripper.NewBackendRoundTripper(u.geoProxyBackend, "", u.ProxyHeadersTimeout, u.DevelopmentMode)
- geoProxyUpstream := proxypkg.NewProxy(u.geoProxyBackend, u.Version, geoProxyRoundTripper)
- u.geoProxyCableRoute = u.wsRoute(`^/-/cable\z`, geoProxyUpstream)
- u.geoProxyRoute = u.route("", "", geoProxyUpstream)
- }
+
+ u.geoProxyBackend = geoProxyURL
+ geoProxyRoundTripper := roundtripper.NewBackendRoundTripper(u.geoProxyBackend, "", u.ProxyHeadersTimeout, u.DevelopmentMode)
+ geoProxyUpstream := proxypkg.NewProxy(u.geoProxyBackend, u.Version, geoProxyRoundTripper)
+ u.geoProxyCableRoute = u.wsRoute(`^/-/cable\z`, geoProxyUpstream)
+ u.geoProxyRoute = u.route("", "", geoProxyUpstream)
}
diff --git a/workhorse/internal/upstream/upstream_test.go b/workhorse/internal/upstream/upstream_test.go
index a3dcc380d64..c86c03920f0 100644
--- a/workhorse/internal/upstream/upstream_test.go
+++ b/workhorse/internal/upstream/upstream_test.go
@@ -141,7 +141,7 @@ func TestGeoProxyFeatureEnabledOnNonGeoSecondarySite(t *testing.T) {
runTestCases(t, ws, testCases)
}
-func TestGeoProxyWithAPIError(t *testing.T) {
+func TestGeoProxyFeatureEnabledButWithAPIError(t *testing.T) {
geoProxyEndpointResponseBody := "Invalid response"
railsServer, deferredClose := startRailsServer("Local Rails server", geoProxyEndpointResponseBody)
defer deferredClose()
@@ -214,10 +214,15 @@ func startRailsServer(railsServerName string, geoProxyEndpointResponseBody strin
}
func startWorkhorseServer(railsServerURL string, enableGeoProxyFeature bool) (*httptest.Server, func()) {
+ geoProxyTestChannel := make(chan struct{})
+
myConfigureRoutes := func(u *upstream) {
// Enable environment variable "feature flag"
u.enableGeoProxyFeature = enableGeoProxyFeature
+ // An empty message will be sent to this channel after every callGeoProxyAPI()
+ u.geoProxyTestChannel = geoProxyTestChannel
+
// call original
configureRoutes(u)
}
@@ -226,5 +231,13 @@ func startWorkhorseServer(railsServerURL string, enableGeoProxyFeature bool) (*h
ws := httptest.NewServer(upstreamHandler)
testhelper.ConfigureSecret()
+ if enableGeoProxyFeature {
+ // Wait for an empty message from callGeoProxyAPI(). This should be done in
+ // all tests where enableGeoProxyFeature is true, including those where we
+ // expect geoProxyURL to be nil or an error, to ensure the tests do not pass
+ // by coincidence.
+ <-geoProxyTestChannel
+ }
+
return ws, ws.Close
}