gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2023-04-28 21:26:46 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2023-04-28 21:26:46 +0300
commit    5509e479900ee537980a126287c20327c41a61d6 (patch)
tree      8272f06bd58b1518eca38975f95656ffc5497bd2
parent    e0529f76a36026dc4bd51fbec1e5c52e7f3866e1 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  app/assets/javascripts/content_editor/components/bubble_menus/media_bubble_menu.vue | 2
-rw-r--r--  app/assets/javascripts/content_editor/services/upload_helpers.js | 12
-rw-r--r--  app/assets/javascripts/issues/dashboard/components/issues_dashboard_app.vue | 2
-rw-r--r--  app/assets/javascripts/issues/list/components/issues_list_app.vue | 3
-rw-r--r--  app/assets/javascripts/issues/list/constants.js | 25
-rw-r--r--  app/assets/javascripts/issues/list/utils.js | 81
-rw-r--r--  app/assets/javascripts/merge_request_tabs.js | 20
-rw-r--r--  app/assets/javascripts/pipelines/components/jobs/failed_jobs_app.vue | 30
-rw-r--r--  app/assets/javascripts/pipelines/components/jobs/failed_jobs_table.vue | 9
-rw-r--r--  app/assets/javascripts/pipelines/components/jobs/utils.js | 33
-rw-r--r--  app/assets/javascripts/pipelines/components/pipeline_tabs.vue | 10
-rw-r--r--  app/assets/javascripts/pipelines/constants.js | 2
-rw-r--r--  app/assets/javascripts/pipelines/graphql/queries/get_failed_jobs.query.graphql | 4
-rw-r--r--  app/assets/javascripts/pipelines/pipeline_tabs.js | 2
-rw-r--r--  app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue | 7
-rw-r--r--  app/assets/javascripts/vue_shared/issuable/list/components/issuable_list_root.vue | 1
-rw-r--r--  app/assets/javascripts/work_items/components/work_item_description.vue | 16
-rw-r--r--  app/assets/javascripts/work_items/components/work_item_detail.vue | 4
-rw-r--r--  app/controllers/registrations_controller.rb | 1
-rw-r--r--  app/helpers/ci/builds_helper.rb | 22
-rw-r--r--  app/helpers/projects/pipeline_helper.rb | 1
-rw-r--r--  app/models/project_setting.rb | 21
-rw-r--r--  app/models/protected_branch/push_access_level.rb | 6
-rw-r--r--  app/models/protected_tag/create_access_level.rb | 6
-rw-r--r--  app/services/ci/runners/unregister_runner_manager_service.rb | 33
-rw-r--r--  app/services/ci/runners/unregister_runner_service.rb | 3
-rw-r--r--  app/views/projects/edit.html.haml | 2
-rw-r--r--  app/views/users/show.html.haml | 9
-rw-r--r--  config/settings.rb | 5
-rw-r--r--  db/migrate/20230414190012_add_product_analytics_to_project_settings.rb | 45
-rw-r--r--  db/schema_migrations/20230414190012 | 1
-rw-r--r--  db/structure.sql | 18
-rw-r--r--  doc/.vale/gitlab/spelling-exceptions.txt | 2
-rw-r--r--  doc/administration/reference_architectures/index.md | 1
-rw-r--r--  doc/ci/quick_start/index.md | 3
-rw-r--r--  doc/ci/quick_start/tutorial.md | 504
-rw-r--r--  doc/development/database/batched_background_migrations.md | 43
-rw-r--r--  doc/tutorials/build_application.md | 1
-rw-r--r--  doc/user/project/repository/code_suggestions.md | 54
-rw-r--r--  lib/api/ci/runner.rb | 19
-rw-r--r--  lib/api/entities/protected_ref_access.rb | 2
-rw-r--r--  lib/gitlab/database/partitioning/convert_table_to_first_list_partition.rb | 316
-rw-r--r--  lib/gitlab/database/partitioning/list/convert_table.rb | 313
-rw-r--r--  lib/gitlab/database/partitioning/list/locking_configuration.rb | 65
-rw-r--r--  lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb | 8
-rw-r--r--  lib/gitlab/repository_size_error_message.rb | 34
-rw-r--r--  lib/gitlab/sidekiq_config/worker_router.rb | 5
-rw-r--r--  lib/product_analytics/settings.rb | 33
-rw-r--r--  locale/gitlab.pot | 83
-rw-r--r--  sidekiq_cluster/cli.rb | 25
-rw-r--r--  spec/bin/sidekiq_cluster_spec.rb | 7
-rw-r--r--  spec/commands/sidekiq_cluster/cli_spec.rb | 70
-rw-r--r--  spec/config/settings_spec.rb | 4
-rw-r--r--  spec/features/issues/filtered_search/dropdown_hint_spec.rb | 4
-rw-r--r--  spec/features/issues/filtered_search/filter_issues_spec.rb | 12
-rw-r--r--  spec/features/issues/filtered_search/recent_searches_spec.rb | 2
-rw-r--r--  spec/features/issues/filtered_search/search_bar_spec.rb | 3
-rw-r--r--  spec/features/issues/filtered_search/visual_tokens_spec.rb | 42
-rw-r--r--  spec/features/users/signup_spec.rb | 1
-rw-r--r--  spec/frontend/__helpers__/init_vue_mr_page_helper.js | 10
-rw-r--r--  spec/frontend/content_editor/extensions/attachment_spec.js | 5
-rw-r--r--  spec/frontend/content_editor/test_constants.js | 6
-rw-r--r--  spec/frontend/issues/list/components/issues_list_app_spec.js | 22
-rw-r--r--  spec/frontend/issues/list/mock_data.js | 5
-rw-r--r--  spec/frontend/issues/list/utils_spec.js | 15
-rw-r--r--  spec/frontend/merge_request_tabs_spec.js | 36
-rw-r--r--  spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js | 7
-rw-r--r--  spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js | 34
-rw-r--r--  spec/frontend/pipelines/components/jobs/utils_spec.js | 14
-rw-r--r--  spec/frontend/pipelines/components/pipeline_tabs_spec.js | 1
-rw-r--r--  spec/frontend/pipelines/mock_data.js | 87
-rw-r--r--  spec/frontend/pipelines/pipeline_tabs_spec.js | 2
-rw-r--r--  spec/frontend/read_more_spec.js | 7
-rw-r--r--  spec/frontend/work_items/components/work_item_description_spec.js | 42
-rw-r--r--  spec/helpers/ci/builds_helper_spec.rb | 59
-rw-r--r--  spec/helpers/projects/pipeline_helper_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb (renamed from spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb) | 128
-rw-r--r--  spec/lib/gitlab/database/partitioning/list/locking_configuration_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/sidekiq_config/worker_router_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb | 8
-rw-r--r--  spec/lib/product_analytics/settings_spec.rb | 34
-rw-r--r--  spec/models/project_setting_spec.rb | 2
-rw-r--r--  spec/requests/api/ci/runner/runners_delete_spec.rb | 82
-rw-r--r--  spec/requests/api/project_attributes.yml | 14
-rw-r--r--  spec/requests/api/protected_branches_spec.rb | 15
-rw-r--r--  spec/requests/api/protected_tags_spec.rb | 15
-rw-r--r--  spec/services/ci/runners/unregister_runner_manager_service_spec.rb | 50
-rw-r--r--  spec/support/helpers/database/inject_failure_helpers.rb | 41
-rw-r--r--  spec/support/helpers/filtered_search_helpers.rb | 2
-rw-r--r--  spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb | 92
-rw-r--r--  spec/support/shared_examples/requests/api/status_shared_examples.rb | 4
93 files changed, 2022 insertions(+), 1007 deletions(-)
diff --git a/app/assets/javascripts/content_editor/components/bubble_menus/media_bubble_menu.vue b/app/assets/javascripts/content_editor/components/bubble_menus/media_bubble_menu.vue
index a14d49922fb..1bfa635c03b 100644
--- a/app/assets/javascripts/content_editor/components/bubble_menus/media_bubble_menu.vue
+++ b/app/assets/javascripts/content_editor/components/bubble_menus/media_bubble_menu.vue
@@ -212,7 +212,7 @@ export default {
@show="updateMediaInfoToState"
@hidden="resetMediaInfo"
>
- <editor-state-observer @transaction="updateMediaInfoToState">
+ <editor-state-observer :debounce="0" @transaction="updateMediaInfoToState">
<gl-button-group v-if="!isEditing" class="gl-display-flex gl-align-items-center">
<gl-loading-icon v-if="showProgressIndicator" class="gl-pl-4 gl-pr-3" />
<input
diff --git a/app/assets/javascripts/content_editor/services/upload_helpers.js b/app/assets/javascripts/content_editor/services/upload_helpers.js
index de1a187b246..548f5cdf19c 100644
--- a/app/assets/javascripts/content_editor/services/upload_helpers.js
+++ b/app/assets/javascripts/content_editor/services/upload_helpers.js
@@ -9,7 +9,17 @@ export const acceptedMimes = {
ext: 'drawio.svg',
},
image: {
- mimes: ['image/jpeg', 'image/png', 'image/gif', 'image/jpg'],
+ mimes: [
+ 'image/jpeg',
+ 'image/png',
+ 'image/gif',
+ 'image/svg+xml',
+ 'image/webp',
+ 'image/tiff',
+ 'image/bmp',
+ 'image/vnd.microsoft.icon',
+ 'image/x-icon',
+ ],
},
audio: {
mimes: [
diff --git a/app/assets/javascripts/issues/dashboard/components/issues_dashboard_app.vue b/app/assets/javascripts/issues/dashboard/components/issues_dashboard_app.vue
index b4a9b37d487..b9e4d0df3f2 100644
--- a/app/assets/javascripts/issues/dashboard/components/issues_dashboard_app.vue
+++ b/app/assets/javascripts/issues/dashboard/components/issues_dashboard_app.vue
@@ -187,7 +187,6 @@ export default {
return {
hideUsers: this.isPublicVisibilityRestricted && !this.isSignedIn,
isSignedIn: this.isSignedIn,
- search: this.searchQuery,
sort: this.sortKey,
state: this.state,
...this.pageParams,
@@ -332,7 +331,6 @@ export default {
},
urlParams() {
return {
- search: this.searchQuery,
sort: urlSortParams[this.sortKey],
state: this.state,
...this.urlFilterParams,
diff --git a/app/assets/javascripts/issues/list/components/issues_list_app.vue b/app/assets/javascripts/issues/list/components/issues_list_app.vue
index 7d077603530..5fb83dfd1ab 100644
--- a/app/assets/javascripts/issues/list/components/issues_list_app.vue
+++ b/app/assets/javascripts/issues/list/components/issues_list_app.vue
@@ -253,11 +253,11 @@ export default {
iid: isIidSearch ? this.searchQuery.slice(1) : undefined,
isProject: this.isProject,
isSignedIn: this.isSignedIn,
- search: isIidSearch ? undefined : this.searchQuery,
sort: this.sortKey,
state: this.state,
...this.pageParams,
...this.apiFilterParams,
+ search: isIidSearch ? undefined : this.searchQuery,
types: this.apiFilterParams.types || this.defaultWorkItemTypes,
};
},
@@ -484,7 +484,6 @@ export default {
},
urlParams() {
return {
- search: this.searchQuery,
sort: urlSortParams[this.sortKey],
state: this.state,
...this.urlFilterParams,
diff --git a/app/assets/javascripts/issues/list/constants.js b/app/assets/javascripts/issues/list/constants.js
index 990ba1c0621..cd0679e00bf 100644
--- a/app/assets/javascripts/issues/list/constants.js
+++ b/app/assets/javascripts/issues/list/constants.js
@@ -5,6 +5,7 @@ import {
FILTER_NONE,
FILTER_STARTED,
FILTER_UPCOMING,
+ FILTERED_SEARCH_TERM,
OPERATOR_IS,
OPERATOR_NOT,
OPERATOR_OR,
@@ -155,13 +156,13 @@ export const specialFilterValues = [
export const TYPE_TOKEN_OBJECTIVE_OPTION = {
icon: 'issue-type-objective',
- title: 'objective',
+ title: s__('WorkItem|Objective'),
value: 'objective',
};
export const TYPE_TOKEN_KEY_RESULT_OPTION = {
icon: 'issue-type-keyresult',
- title: 'key_result',
+ title: s__('WorkItem|Key Result'),
value: 'key_result',
};
@@ -175,13 +176,23 @@ export const defaultWorkItemTypes = [
];
export const defaultTypeTokenOptions = [
- { icon: 'issue-type-issue', title: 'issue', value: 'issue' },
- { icon: 'issue-type-incident', title: 'incident', value: 'incident' },
- { icon: 'issue-type-test-case', title: 'test_case', value: 'test_case' },
- { icon: 'issue-type-task', title: 'task', value: 'task' },
+ { icon: 'issue-type-issue', title: s__('WorkItem|Issue'), value: 'issue' },
+ { icon: 'issue-type-incident', title: s__('WorkItem|Incident'), value: 'incident' },
+ { icon: 'issue-type-test-case', title: s__('WorkItem|Test case'), value: 'test_case' },
+ { icon: 'issue-type-task', title: s__('WorkItem|Task'), value: 'task' },
];
-export const filters = {
+export const filtersMap = {
+ [FILTERED_SEARCH_TERM]: {
+ [API_PARAM]: {
+ [NORMAL_FILTER]: 'search',
+ },
+ [URL_PARAM]: {
+ [undefined]: {
+ [NORMAL_FILTER]: 'search',
+ },
+ },
+ },
[TOKEN_TYPE_AUTHOR]: {
[API_PARAM]: {
[NORMAL_FILTER]: 'authorUsername',
diff --git a/app/assets/javascripts/issues/list/utils.js b/app/assets/javascripts/issues/list/utils.js
index b086640cd12..d053400dd03 100644
--- a/app/assets/javascripts/issues/list/utils.js
+++ b/app/assets/javascripts/issues/list/utils.js
@@ -1,4 +1,3 @@
-import { createTerm } from '@gitlab/ui/src/components/base/filtered_search/filtered_search_utils';
import { isPositiveInteger } from '~/lib/utils/number_utils';
import { getParameterByName } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
@@ -28,7 +27,7 @@ import {
CREATED_DESC,
DUE_DATE_ASC,
DUE_DATE_DESC,
- filters,
+ filtersMap,
HEALTH_STATUS_ASC,
HEALTH_STATUS_DESC,
LABEL_PRIORITY_ASC,
@@ -196,10 +195,10 @@ export const getSortOptions = ({
return sortOptions;
};
-const tokenTypes = Object.keys(filters);
+const tokenTypes = Object.keys(filtersMap);
const getUrlParams = (tokenType) =>
- Object.values(filters[tokenType][URL_PARAM]).flatMap((filterObj) => Object.values(filterObj));
+ Object.values(filtersMap[tokenType][URL_PARAM]).flatMap((filterObj) => Object.values(filterObj));
const urlParamKeys = tokenTypes.flatMap(getUrlParams);
@@ -207,11 +206,11 @@ const getTokenTypeFromUrlParamKey = (urlParamKey) =>
tokenTypes.find((tokenType) => getUrlParams(tokenType).includes(urlParamKey));
const getOperatorFromUrlParamKey = (tokenType, urlParamKey) =>
- Object.entries(filters[tokenType][URL_PARAM]).find(([, filterObj]) =>
+ Object.entries(filtersMap[tokenType][URL_PARAM]).find(([, filterObj]) =>
Object.values(filterObj).includes(urlParamKey),
)[0];
-const convertToFilteredTokens = (locationSearch) =>
+export const getFilterTokens = (locationSearch) =>
Array.from(new URLSearchParams(locationSearch).entries())
.filter(([key]) => urlParamKeys.includes(key))
.map(([key, data]) => {
@@ -223,26 +222,8 @@ const convertToFilteredTokens = (locationSearch) =>
};
});
-const convertToFilteredSearchTerms = (locationSearch) =>
- new URLSearchParams(locationSearch)
- .get('search')
- ?.split(' ')
- .map((word) => ({
- type: FILTERED_SEARCH_TERM,
- value: {
- data: word,
- },
- })) || [];
-
-export const getFilterTokens = (locationSearch) => {
- if (!locationSearch) {
- return [createTerm()];
- }
- const filterTokens = convertToFilteredTokens(locationSearch);
- const searchTokens = convertToFilteredSearchTerms(locationSearch);
- const tokens = filterTokens.concat(searchTokens);
- return tokens.length ? tokens : [createTerm()];
-};
+const isNotEmptySearchToken = (token) =>
+ !(token.type === FILTERED_SEARCH_TERM && !token.value.data);
const isSpecialFilter = (type, data) => {
const isAssigneeIdParam =
@@ -293,22 +274,20 @@ export const convertToApiParams = (filterTokens) => {
const not = new Map();
const or = new Map();
- filterTokens
- .filter((token) => token.type !== FILTERED_SEARCH_TERM)
- .forEach((token) => {
- const filterType = getFilterType(token);
- const apiField = filters[token.type][API_PARAM][filterType];
- let obj;
- if (token.value.operator === OPERATOR_NOT) {
- obj = not;
- } else if (token.value.operator === OPERATOR_OR) {
- obj = or;
- } else {
- obj = params;
- }
- const data = formatData(token);
- obj.set(apiField, obj.has(apiField) ? [obj.get(apiField), data].flat() : data);
- });
+ filterTokens.filter(isNotEmptySearchToken).forEach((token) => {
+ const filterType = getFilterType(token);
+ const apiField = filtersMap[token.type][API_PARAM][filterType];
+ let obj;
+ if (token.value.operator === OPERATOR_NOT) {
+ obj = not;
+ } else if (token.value.operator === OPERATOR_OR) {
+ obj = or;
+ } else {
+ obj = params;
+ }
+ const data = formatData(token);
+ obj.set(apiField, obj.has(apiField) ? [obj.get(apiField), data].flat() : data);
+ });
if (not.size) {
params.set('not', Object.fromEntries(not));
@@ -322,16 +301,14 @@ export const convertToApiParams = (filterTokens) => {
};
export const convertToUrlParams = (filterTokens) => {
- const urlParamsMap = filterTokens
- .filter((token) => token.type !== FILTERED_SEARCH_TERM)
- .reduce((acc, token) => {
- const filterType = getFilterType(token);
- const urlParam = filters[token.type][URL_PARAM][token.value.operator]?.[filterType];
- return acc.set(
- urlParam,
- acc.has(urlParam) ? [acc.get(urlParam), token.value.data].flat() : token.value.data,
- );
- }, new Map());
+ const urlParamsMap = filterTokens.filter(isNotEmptySearchToken).reduce((acc, token) => {
+ const filterType = getFilterType(token);
+ const urlParam = filtersMap[token.type][URL_PARAM][token.value.operator]?.[filterType];
+ return acc.set(
+ urlParam,
+ acc.has(urlParam) ? [acc.get(urlParam), token.value.data].flat() : token.value.data,
+ );
+ }, new Map());
return Object.fromEntries(urlParamsMap);
};
diff --git a/app/assets/javascripts/merge_request_tabs.js b/app/assets/javascripts/merge_request_tabs.js
index 124b14a9845..201499f8509 100644
--- a/app/assets/javascripts/merge_request_tabs.js
+++ b/app/assets/javascripts/merge_request_tabs.js
@@ -183,6 +183,8 @@ const pageBundles = {
export default class MergeRequestTabs {
constructor({ action, setUrl, stubLocation } = {}) {
+ const containers = document.querySelectorAll('.content-wrapper .container-fluid');
+ this.contentWrapper = containers[containers.length - 1];
this.mergeRequestTabs = document.querySelector('.merge-request-tabs-container');
this.mergeRequestTabsAll =
this.mergeRequestTabs && this.mergeRequestTabs.querySelectorAll
@@ -208,7 +210,7 @@ export default class MergeRequestTabs {
this.diffsLoaded = false;
this.diffsClass = null;
this.commitsLoaded = false;
- this.fixedLayoutPref = null;
+ this.isFixedLayoutPreferred = this.contentWrapper.classList.contains('container-limited');
this.eventHub = createEventHub();
this.loadedPages = { [action]: true };
@@ -561,22 +563,12 @@ export default class MergeRequestTabs {
return action === 'diffs' || action === 'new/diffs';
}
- expandViewContainer(removeLimited = true) {
- const $wrapper = $('.content-wrapper .container-fluid').not('.breadcrumbs');
- if (this.fixedLayoutPref === null) {
- this.fixedLayoutPref = $wrapper.hasClass('container-limited');
- }
- if (this.diffViewType() === 'parallel' || removeLimited) {
- $wrapper.removeClass('container-limited');
- } else {
- $wrapper.toggleClass('container-limited', this.fixedLayoutPref);
- }
+ expandViewContainer() {
+ this.contentWrapper.classList.remove('container-limited');
}
resetViewContainer() {
- if (this.fixedLayoutPref !== null) {
- $('.content-wrapper .container-fluid').toggleClass('container-limited', this.fixedLayoutPref);
- }
+ this.contentWrapper.classList.toggle('container-limited', this.isFixedLayoutPreferred);
}
// Expand the issuable sidebar unless the user explicitly collapsed it
diff --git a/app/assets/javascripts/pipelines/components/jobs/failed_jobs_app.vue b/app/assets/javascripts/pipelines/components/jobs/failed_jobs_app.vue
index 21b585933b8..c24862f828b 100644
--- a/app/assets/javascripts/pipelines/components/jobs/failed_jobs_app.vue
+++ b/app/assets/javascripts/pipelines/components/jobs/failed_jobs_app.vue
@@ -2,9 +2,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { s__ } from '~/locale';
import { createAlert } from '~/alert';
-import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import GetFailedJobsQuery from '../../graphql/queries/get_failed_jobs.query.graphql';
-import { prepareFailedJobs } from './utils';
import FailedJobsTable from './failed_jobs_table.vue';
export default {
@@ -20,12 +18,6 @@ export default {
default: '',
},
},
- props: {
- failedJobsSummary: {
- type: Array,
- required: true,
- },
- },
apollo: {
failedJobs: {
query: GetFailedJobsQuery,
@@ -36,15 +28,16 @@ export default {
};
},
update({ project }) {
- if (project?.pipeline?.jobs?.nodes) {
- return project.pipeline.jobs.nodes.map((job) => {
- return { normalizedId: getIdFromGraphQLId(job.id), ...job };
- });
- }
- return [];
- },
- result() {
- this.preparedFailedJobs = prepareFailedJobs(this.failedJobs, this.failedJobsSummary);
+ const jobNodes = project?.pipeline?.jobs?.nodes || [];
+
+ return jobNodes.map((job) => {
+ return {
+ ...job,
+ // this field is needed for the slot row-details
+ // on the failed_jobs_table.vue component
+ _showDetails: true,
+ };
+ });
},
error() {
createAlert({ message: s__('Jobs|There was a problem fetching the failed jobs.') });
@@ -54,7 +47,6 @@ export default {
data() {
return {
failedJobs: [],
- preparedFailedJobs: [],
};
},
computed: {
@@ -68,6 +60,6 @@ export default {
<template>
<div>
<gl-loading-icon v-if="loading" size="lg" class="gl-mt-4" />
- <failed-jobs-table v-else :failed-jobs="preparedFailedJobs" />
+ <failed-jobs-table v-else :failed-jobs="failedJobs" />
</div>
</template>
diff --git a/app/assets/javascripts/pipelines/components/jobs/failed_jobs_table.vue b/app/assets/javascripts/pipelines/components/jobs/failed_jobs_table.vue
index 778f014bcd3..80c08d7c613 100644
--- a/app/assets/javascripts/pipelines/components/jobs/failed_jobs_table.vue
+++ b/app/assets/javascripts/pipelines/components/jobs/failed_jobs_table.vue
@@ -52,6 +52,9 @@ export default {
showErrorMessage() {
createAlert({ message: s__('Job|There was a problem retrying the failed job.') });
},
+ failureSummary(trace) {
+ return trace ? trace.htmlSummary : s__('Job|No job log');
+ },
},
};
</script>
@@ -90,8 +93,8 @@ export default {
</div>
</template>
- <template #cell(failure)="{ item }">
- <span>{{ item.failure }}</span>
+ <template #cell(failureMessage)="{ item }">
+ <span data-testid="job-failure-message">{{ item.failureMessage }}</span>
</template>
<template #cell(actions)="{ item }">
@@ -110,7 +113,7 @@ export default {
class="gl-w-full gl-text-left gl-border-none"
data-testid="job-log"
>
- <code v-safe-html="item.failureSummary" class="gl-reset-bg gl-p-0" >
+ <code v-safe-html="failureSummary(item.trace)" class="gl-reset-bg gl-p-0" data-testid="job-trace-summary">
</code>
</pre>
</template>
diff --git a/app/assets/javascripts/pipelines/components/jobs/utils.js b/app/assets/javascripts/pipelines/components/jobs/utils.js
deleted file mode 100644
index c8414d44d14..00000000000
--- a/app/assets/javascripts/pipelines/components/jobs/utils.js
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- We get the failure and failure summary from Rails which has
- a summary failure log. Here we combine that data with the data
- from GraphQL to display the log.
-
- failedJobs is from GraphQL
- failedJobsSummary is from Rails
- */
-
-export const prepareFailedJobs = (failedJobs = [], failedJobsSummary = []) => {
- const combinedJobs = [];
-
- if (failedJobs.length > 0 && failedJobsSummary.length > 0) {
- failedJobs.forEach((failedJob) => {
- const foundJob = failedJobsSummary.find(
- (failedJobSummary) => failedJob.normalizedId === failedJobSummary.id,
- );
-
- if (foundJob) {
- combinedJobs.push({
- ...failedJob,
- failure: foundJob?.failure,
- failureSummary: foundJob?.failure_summary,
- // this field is needed for the slot row-details
- // on the failed_jobs_table.vue component
- _showDetails: true,
- });
- }
- });
- }
-
- return combinedJobs;
-};
diff --git a/app/assets/javascripts/pipelines/components/pipeline_tabs.vue b/app/assets/javascripts/pipelines/components/pipeline_tabs.vue
index 3798863ae60..d2ec3c352fe 100644
--- a/app/assets/javascripts/pipelines/components/pipeline_tabs.vue
+++ b/app/assets/javascripts/pipelines/components/pipeline_tabs.vue
@@ -31,13 +31,7 @@ export default {
GlTab,
GlTabs,
},
- inject: [
- 'defaultTabValue',
- 'failedJobsCount',
- 'failedJobsSummary',
- 'totalJobCount',
- 'testsCount',
- ],
+ inject: ['defaultTabValue', 'failedJobsCount', 'totalJobCount', 'testsCount'],
data() {
return {
activeTab: this.defaultTabValue,
@@ -110,7 +104,7 @@ export default {
<span class="gl-mr-2">{{ $options.i18n.tabs.failedJobsTitle }}</span>
<gl-badge size="sm" data-testid="failed-builds-counter">{{ failedJobsCount }}</gl-badge>
</template>
- <router-view :failed-jobs-summary="failedJobsSummary" />
+ <router-view />
</gl-tab>
<gl-tab
:active="isActive($options.tabNames.tests)"
diff --git a/app/assets/javascripts/pipelines/constants.js b/app/assets/javascripts/pipelines/constants.js
index ca146ac1e87..abeeea1f888 100644
--- a/app/assets/javascripts/pipelines/constants.js
+++ b/app/assets/javascripts/pipelines/constants.js
@@ -93,7 +93,7 @@ export const DEFAULT_FIELDS = [
columnClass: 'gl-w-20p',
},
{
- key: 'failure',
+ key: 'failureMessage',
label: __('Failure'),
columnClass: 'gl-w-40p',
},
diff --git a/app/assets/javascripts/pipelines/graphql/queries/get_failed_jobs.query.graphql b/app/assets/javascripts/pipelines/graphql/queries/get_failed_jobs.query.graphql
index 14e9a838f4b..13c9f0ff8ee 100644
--- a/app/assets/javascripts/pipelines/graphql/queries/get_failed_jobs.query.graphql
+++ b/app/assets/javascripts/pipelines/graphql/queries/get_failed_jobs.query.graphql
@@ -34,6 +34,10 @@ query getFailedJobs($fullPath: ID!, $pipelineIid: ID!) {
readBuild
updateBuild
}
+ trace {
+ htmlSummary
+ }
+ failureMessage
}
}
}
diff --git a/app/assets/javascripts/pipelines/pipeline_tabs.js b/app/assets/javascripts/pipelines/pipeline_tabs.js
index f9b0c43303d..33bdedee764 100644
--- a/app/assets/javascripts/pipelines/pipeline_tabs.js
+++ b/app/assets/javascripts/pipelines/pipeline_tabs.js
@@ -28,7 +28,6 @@ export const createAppOptions = (selector, apolloProvider, router) => {
exposeSecurityDashboard,
exposeLicenseScanningData,
failedJobsCount,
- failedJobsSummary,
projectPath,
graphqlResourceEtag,
pipelineIid,
@@ -80,7 +79,6 @@ export const createAppOptions = (selector, apolloProvider, router) => {
exposeSecurityDashboard: parseBoolean(exposeSecurityDashboard),
exposeLicenseScanningData: parseBoolean(exposeLicenseScanningData),
failedJobsCount,
- failedJobsSummary: JSON.parse(failedJobsSummary),
graphqlResourceEtag,
pipelineIid,
pipelineProjectPath,
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue b/app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue
index fe4f2d407f7..88062bf245f 100644
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue
+++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue
@@ -99,6 +99,11 @@ export default {
required: false,
default: false,
},
+ termsAsTokens: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
},
data() {
return {
@@ -356,7 +361,9 @@ export default {
:close-button-title="__('Close')"
:clear-recent-searches-text="__('Clear recent searches')"
:no-recent-searches-text="__(`You don't have any recent searches`)"
+ :search-text-option-label="__('Search for this text')"
:show-friendly-text="showFriendlyText"
+ :terms-as-tokens="termsAsTokens"
class="flex-grow-1"
@history-item-selected="handleHistoryItemSelected"
@clear="onClear"
diff --git a/app/assets/javascripts/vue_shared/issuable/list/components/issuable_list_root.vue b/app/assets/javascripts/vue_shared/issuable/list/components/issuable_list_root.vue
index 3ac6aaf8b86..95108933a0b 100644
--- a/app/assets/javascripts/vue_shared/issuable/list/components/issuable_list_root.vue
+++ b/app/assets/javascripts/vue_shared/issuable/list/components/issuable_list_root.vue
@@ -317,6 +317,7 @@ export default {
:show-checkbox="showBulkEditSidebar"
:checkbox-checked="allIssuablesChecked"
:show-friendly-text="showFilteredSearchFriendlyText"
+ terms-as-tokens
class="gl-flex-grow-1 gl-border-t-none row-content-block"
data-qa-selector="issuable_search_container"
@checked-input="handleAllIssuablesCheckedInput"
diff --git a/app/assets/javascripts/work_items/components/work_item_description.vue b/app/assets/javascripts/work_items/components/work_item_description.vue
index 141dac9573c..942f5d4a9f0 100644
--- a/app/assets/javascripts/work_items/components/work_item_description.vue
+++ b/app/assets/javascripts/work_items/components/work_item_description.vue
@@ -10,9 +10,10 @@ import Tracking from '~/tracking';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
-import { getWorkItemQuery, autocompleteDataSources, markdownPreviewPath } from '../utils';
+import { autocompleteDataSources, markdownPreviewPath } from '../utils';
import workItemDescriptionSubscription from '../graphql/work_item_description.subscription.graphql';
import updateWorkItemMutation from '../graphql/update_work_item.mutation.graphql';
+import workItemByIidQuery from '../graphql/work_item_by_iid.query.graphql';
import { i18n, TRACKING_CATEGORY_SHOW, WIDGET_TYPE_DESCRIPTION } from '../constants';
import WorkItemDescriptionRendered from './work_item_description_rendered.vue';
@@ -36,11 +37,6 @@ export default {
type: String,
required: true,
},
- fetchByIid: {
- type: Boolean,
- required: false,
- default: false,
- },
queryVariables: {
type: Object,
required: true,
@@ -66,17 +62,15 @@ export default {
},
apollo: {
workItem: {
- query() {
- return getWorkItemQuery(this.fetchByIid);
- },
+ query: workItemByIidQuery,
variables() {
return this.queryVariables;
},
update(data) {
- return this.fetchByIid ? data.workspace.workItems.nodes[0] : data.workItem;
+ return data.workspace.workItems.nodes[0];
},
skip() {
- return !this.queryVariables.id && !this.queryVariables.iid;
+ return !this.queryVariables.iid;
},
result() {
if (this.isEditing) {
diff --git a/app/assets/javascripts/work_items/components/work_item_detail.vue b/app/assets/javascripts/work_items/components/work_item_detail.vue
index 766dabecb45..270730dba03 100644
--- a/app/assets/javascripts/work_items/components/work_item_detail.vue
+++ b/app/assets/javascripts/work_items/components/work_item_detail.vue
@@ -679,7 +679,6 @@ export default {
:progress="workItemProgress.progress"
:work-item-id="workItem.id"
:work-item-type="workItemType"
- :fetch-by-iid="fetchByIid"
:query-variables="queryVariables"
@error="updateError = $event"
/>
@@ -690,7 +689,6 @@ export default {
:can-update="canUpdate"
:work-item-id="workItem.id"
:work-item-type="workItemType"
- :fetch-by-iid="fetchByIid"
:query-variables="queryVariables"
:full-path="fullPath"
@error="updateError = $event"
@@ -702,7 +700,6 @@ export default {
:can-update="canUpdate"
:work-item-id="workItem.id"
:work-item-type="workItemType"
- :fetch-by-iid="fetchByIid"
:query-variables="queryVariables"
:full-path="fullPath"
@error="updateError = $event"
@@ -711,7 +708,6 @@ export default {
v-if="hasDescriptionWidget"
:work-item-id="workItem.id"
:full-path="fullPath"
- :fetch-by-iid="fetchByIid"
:query-variables="queryVariables"
class="gl-pt-5"
@error="updateError = $event"
diff --git a/app/controllers/registrations_controller.rb b/app/controllers/registrations_controller.rb
index 70698c0dcb2..5c67e056d66 100644
--- a/app/controllers/registrations_controller.rb
+++ b/app/controllers/registrations_controller.rb
@@ -190,6 +190,7 @@ class RegistrationsController < Devise::RegistrationsController
flash[:alert] = _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
flash.delete :recaptcha_error
add_gon_variables
+ set_minimum_password_length
render action: 'new'
end
diff --git a/app/helpers/ci/builds_helper.rb b/app/helpers/ci/builds_helper.rb
index afd0af18ba7..8a00c0f3eb0 100644
--- a/app/helpers/ci/builds_helper.rb
+++ b/app/helpers/ci/builds_helper.rb
@@ -2,18 +2,6 @@
module Ci
module BuildsHelper
- def build_summary(build, skip: false)
- if build.has_trace?
- if skip
- link_to _('View job log'), pipeline_job_url(build.pipeline, build)
- else
- build.trace.html(last_lines: 10).html_safe
- end
- else
- _('No job log')
- end
- end
-
def sidebar_build_class(build, current_build)
build_class = []
build_class << 'active' if build.id === current_build.id
@@ -36,15 +24,5 @@ module Ci
description: project_job_url(@project, @build)
}
end
-
- def prepare_failed_jobs_summary_data(failed_builds)
- failed_builds.map do |build|
- {
- id: build.id,
- failure: build.present.callout_failure_message,
- failure_summary: build_summary(build)
- }
- end.to_json
- end
end
end
diff --git a/app/helpers/projects/pipeline_helper.rb b/app/helpers/projects/pipeline_helper.rb
index c5cbe79caf7..0239253d8f0 100644
--- a/app/helpers/projects/pipeline_helper.rb
+++ b/app/helpers/projects/pipeline_helper.rb
@@ -7,7 +7,6 @@ module Projects
def js_pipeline_tabs_data(project, pipeline, _user)
{
failed_jobs_count: pipeline.failed_builds.count,
- failed_jobs_summary: prepare_failed_jobs_summary_data(pipeline.failed_builds),
project_path: project.full_path,
graphql_resource_etag: graphql_etag_pipeline_path(pipeline),
metrics_path: namespace_project_ci_prometheus_metrics_histograms_path(namespace_id: project.namespace, project_id: project, format: :json),
diff --git a/app/models/project_setting.rb b/app/models/project_setting.rb
index 6a60015cc26..1256ef0f2fc 100644
--- a/app/models/project_setting.rb
+++ b/app/models/project_setting.rb
@@ -10,6 +10,27 @@ class ProjectSetting < ApplicationRecord
scope :for_projects, ->(projects) { where(project_id: projects) }
+ attr_encrypted :cube_api_key,
+ mode: :per_attribute_iv,
+ key: Settings.attr_encrypted_db_key_base_32,
+ algorithm: 'aes-256-gcm',
+ encode: false,
+ encode_iv: false
+
+ attr_encrypted :jitsu_administrator_password,
+ mode: :per_attribute_iv,
+ key: Settings.attr_encrypted_db_key_base_32,
+ algorithm: 'aes-256-gcm',
+ encode: false,
+ encode_iv: false
+
+ attr_encrypted :product_analytics_clickhouse_connection_string,
+ mode: :per_attribute_iv,
+ key: Settings.attr_encrypted_db_key_base_32,
+ algorithm: 'aes-256-gcm',
+ encode: false,
+ encode_iv: false
+
enum squash_option: {
never: 0,
always: 1,
diff --git a/app/models/protected_branch/push_access_level.rb b/app/models/protected_branch/push_access_level.rb
index 66fe57be25f..c86ca5723fa 100644
--- a/app/models/protected_branch/push_access_level.rb
+++ b/app/models/protected_branch/push_access_level.rb
@@ -21,6 +21,12 @@ class ProtectedBranch::PushAccessLevel < ApplicationRecord
end
end
+ def humanize
+ return "Deploy key" if deploy_key.present?
+
+ super
+ end
+
def check_access(user)
if user && deploy_key.present?
return user.can?(:read_project, project) && enabled_deploy_key_for_user?(deploy_key, user)
diff --git a/app/models/protected_tag/create_access_level.rb b/app/models/protected_tag/create_access_level.rb
index abb233d3800..785e7559212 100644
--- a/app/models/protected_tag/create_access_level.rb
+++ b/app/models/protected_tag/create_access_level.rb
@@ -19,6 +19,12 @@ class ProtectedTag::CreateAccessLevel < ApplicationRecord
end
end
+ def humanize
+ return "Deploy key" if deploy_key.present?
+
+ super
+ end
+
def check_access(user)
return false if access_level == Gitlab::Access::NO_ACCESS
diff --git a/app/services/ci/runners/unregister_runner_manager_service.rb b/app/services/ci/runners/unregister_runner_manager_service.rb
new file mode 100644
index 00000000000..ecf6aba09c7
--- /dev/null
+++ b/app/services/ci/runners/unregister_runner_manager_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Ci
+ module Runners
+ class UnregisterRunnerManagerService
+ attr_reader :runner, :author, :system_id
+
+ # @param [Ci::Runner] runner the runner to unregister/destroy
+ # @param [User, authentication token String] author the user or the authentication token authorizing the removal
+ # @param [String] system_id ID of the system being unregistered
+ def initialize(runner, author, system_id:)
+ @runner = runner
+ @author = author
+ @system_id = system_id
+ end
+
+ def execute
+ return system_id_missing_error if system_id.blank?
+
+ runner_manager = runner.runner_managers.find_by_system_xid!(system_id)
+ runner_manager.destroy!
+
+ ServiceResponse.success
+ end
+
+ private
+
+ def system_id_missing_error
+ ServiceResponse.error(message: '`system_id` needs to be specified for runners created in the UI.')
+ end
+ end
+ end
+end
diff --git a/app/services/ci/runners/unregister_runner_service.rb b/app/services/ci/runners/unregister_runner_service.rb
index 742b21f77df..d186bd421d5 100644
--- a/app/services/ci/runners/unregister_runner_service.rb
+++ b/app/services/ci/runners/unregister_runner_service.rb
@@ -13,7 +13,8 @@ module Ci
end
def execute
- @runner&.destroy
+ runner.destroy!
+
ServiceResponse.success
end
end
diff --git a/app/views/projects/edit.html.haml b/app/views/projects/edit.html.haml
index b0eef923411..a81afa5f450 100644
--- a/app/views/projects/edit.html.haml
+++ b/app/views/projects/edit.html.haml
@@ -61,6 +61,8 @@
= render 'projects/service_desk_settings'
+= render_if_exists 'product_analytics/project_settings', expanded: expanded
+
%section.settings.advanced-settings.no-animate#js-project-advanced-settings{ class: ('expanded' if expanded), data: { qa_selector: 'advanced_settings_content' } }
.settings-header
%h4.settings-title.js-settings-toggle.js-settings-toggle-trigger-only= _('Advanced')
diff --git a/app/views/users/show.html.haml b/app/views/users/show.html.haml
index f49bb525776..1ebf02ffd39 100644
--- a/app/views/users/show.html.haml
+++ b/app/views/users/show.html.haml
@@ -122,9 +122,12 @@
- if display_public_email?(@user)
= render 'middle_dot_divider', stacking: true do
= link_to @user.public_email, "mailto:#{@user.public_email}", itemprop: 'email'
- - if @user.bio.present? && @user.confirmed? && !@user.blocked?
- %p.profile-user-bio.gl-mb-3
- = @user.bio
+
+ -# Ensure this stays indented one level less than the social links
+ -# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/118314
+ - if @user.bio.present? && @user.confirmed? && !@user.blocked?
+ %p.profile-user-bio.gl-mb-3
+ = @user.bio
- if !profile_tabs.empty? && !Feature.enabled?(:profile_tabs_vue, current_user)
.scrolling-tabs-container{ class: [('gl-display-none' if show_super_sidebar?)] }
diff --git a/config/settings.rb b/config/settings.rb
index e03d9877e1c..c25531a3311 100644
--- a/config/settings.rb
+++ b/config/settings.rb
@@ -171,11 +171,12 @@ Settings = GitlabSettings.load(file, section) do
cron_jobs['gitlab_service_ping_worker']['cron'] ||= cron_for_service_ping
end
- # Route jobs to queue based on worker name.
+ # Route all jobs to 'default' queue. This setting is meant for self-managed instances to keep things simple.
+ # See https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1491
def build_sidekiq_routing_rules(rules)
return rules unless rules.nil? || rules&.empty?
- [[Gitlab::SidekiqConfig::WorkerMatcher::WILDCARD_MATCH, nil]]
+ [[Gitlab::SidekiqConfig::WorkerMatcher::WILDCARD_MATCH, 'default']]
end
private
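The new default above only takes effect when no routing rules are configured. For self-managed instances that do define rules, GitLab's Sidekiq routing documentation describes a `sidekiq.routing_rules` list in `gitlab.yml`; the snippet below is only a sketch of that shape (the non-wildcard query and queue name are illustrative, not part of this change):

```yaml
# gitlab.yml (sketch): each routing rule is a [worker matching query, queue name] pair,
# evaluated top to bottom; the wildcard rule catches all remaining workers.
production:
  sidekiq:
    routing_rules:
      - ["resource_boundary=cpu", "cpu_boundary"]  # illustrative query and queue name
      - ["*", "default"]                           # mirrors the new built-in default
```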
diff --git a/db/migrate/20230414190012_add_product_analytics_to_project_settings.rb b/db/migrate/20230414190012_add_product_analytics_to_project_settings.rb
new file mode 100644
index 00000000000..c77168f05be
--- /dev/null
+++ b/db/migrate/20230414190012_add_product_analytics_to_project_settings.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+class AddProductAnalyticsToProjectSettings < Gitlab::Database::Migration[2.1]
+ disable_ddl_transaction!
+
+ def up
+ with_lock_retries do
+ add_column :project_settings, :jitsu_host, :text, if_not_exists: true
+ add_column :project_settings, :jitsu_project_xid, :text, if_not_exists: true
+ add_column :project_settings, :jitsu_administrator_email, :text, if_not_exists: true
+ add_column :project_settings, :encrypted_jitsu_administrator_password, :binary, if_not_exists: true
+ add_column :project_settings, :encrypted_jitsu_administrator_password_iv, :binary, if_not_exists: true
+ add_column :project_settings, :product_analytics_data_collector_host, :text, if_not_exists: true
+ add_column :project_settings, :encrypted_product_analytics_clickhouse_connection_string,
+ :binary, if_not_exists: true
+ add_column :project_settings, :encrypted_product_analytics_clickhouse_connection_string_iv,
+ :binary, if_not_exists: true
+ add_column :project_settings, :cube_api_base_url, :text, if_not_exists: true
+ add_column :project_settings, :encrypted_cube_api_key, :binary, if_not_exists: true
+ add_column :project_settings, :encrypted_cube_api_key_iv, :binary, if_not_exists: true
+ end
+
+ add_text_limit :project_settings, :jitsu_host, 255
+ add_text_limit :project_settings, :jitsu_project_xid, 255
+ add_text_limit :project_settings, :jitsu_administrator_email, 255
+ add_text_limit :project_settings, :product_analytics_data_collector_host, 255
+ add_text_limit :project_settings, :cube_api_base_url, 512
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :project_settings, :jitsu_host, if_exists: true
+ remove_column :project_settings, :jitsu_project_xid, if_exists: true
+ remove_column :project_settings, :jitsu_administrator_email, if_exists: true
+ remove_column :project_settings, :encrypted_jitsu_administrator_password, if_exists: true
+ remove_column :project_settings, :encrypted_jitsu_administrator_password_iv, if_exists: true
+ remove_column :project_settings, :product_analytics_data_collector_host, if_exists: true
+ remove_column :project_settings, :encrypted_product_analytics_clickhouse_connection_string, if_exists: true
+ remove_column :project_settings, :encrypted_product_analytics_clickhouse_connection_string_iv, if_exists: true
+ remove_column :project_settings, :cube_api_base_url, if_exists: true
+ remove_column :project_settings, :encrypted_cube_api_key, if_exists: true
+ remove_column :project_settings, :encrypted_cube_api_key_iv, if_exists: true
+ end
+ end
+end
diff --git a/db/schema_migrations/20230414190012 b/db/schema_migrations/20230414190012
new file mode 100644
index 00000000000..5973075344e
--- /dev/null
+++ b/db/schema_migrations/20230414190012
@@ -0,0 +1 @@
+0202a3aa13d7d9c47fdd33f9029900ed5a81b37efccceee532565d2b31499e61
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 7b98c9ceebc..557ab9e7eab 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -21153,6 +21153,17 @@ CREATE TABLE project_settings (
pages_unique_domain text,
runner_registration_enabled boolean DEFAULT true,
product_analytics_instrumentation_key text,
+ jitsu_host text,
+ jitsu_project_xid text,
+ jitsu_administrator_email text,
+ encrypted_jitsu_administrator_password bytea,
+ encrypted_jitsu_administrator_password_iv bytea,
+ product_analytics_data_collector_host text,
+ encrypted_product_analytics_clickhouse_connection_string bytea,
+ encrypted_product_analytics_clickhouse_connection_string_iv bytea,
+ cube_api_base_url text,
+ encrypted_cube_api_key bytea,
+ encrypted_cube_api_key_iv bytea,
CONSTRAINT check_1a30456322 CHECK ((char_length(pages_unique_domain) <= 63)),
CONSTRAINT check_2981f15877 CHECK ((char_length(jitsu_key) <= 100)),
CONSTRAINT check_3a03e7557a CHECK ((char_length(previous_default_branch) <= 4096)),
@@ -21161,7 +21172,12 @@ CREATE TABLE project_settings (
CONSTRAINT check_acb7fad2f9 CHECK ((char_length(product_analytics_instrumentation_key) <= 255)),
CONSTRAINT check_b09644994b CHECK ((char_length(squash_commit_template) <= 500)),
CONSTRAINT check_bde223416c CHECK ((show_default_award_emojis IS NOT NULL)),
- CONSTRAINT check_eaf7cfb6a7 CHECK ((char_length(merge_commit_template) <= 500))
+ CONSTRAINT check_eaf7cfb6a7 CHECK ((char_length(merge_commit_template) <= 500)),
+ CONSTRAINT check_4b142e71f3 CHECK ((char_length(product_analytics_data_collector_host) <= 255)),
+ CONSTRAINT check_ea15225016 CHECK ((char_length(jitsu_project_xid) <= 255)),
+ CONSTRAINT check_f4499c0fa4 CHECK ((char_length(jitsu_host) <= 255)),
+ CONSTRAINT check_f5495015f5 CHECK ((char_length(jitsu_administrator_email) <= 255)),
+ CONSTRAINT check_f9df7bcee2 CHECK ((char_length(cube_api_base_url) <= 512))
);
CREATE TABLE project_states (
diff --git a/doc/.vale/gitlab/spelling-exceptions.txt b/doc/.vale/gitlab/spelling-exceptions.txt
index b24608f1a1c..9345b4a7d79 100644
--- a/doc/.vale/gitlab/spelling-exceptions.txt
+++ b/doc/.vale/gitlab/spelling-exceptions.txt
@@ -292,6 +292,7 @@ Dockerfiles
Dockerize
Dockerized
Dockerizing
+Docusaurus
dogfood
dogfooding
dogfoods
@@ -436,6 +437,7 @@ hotfixed
hotfixes
hotfixing
hotspots
+HTMLHint
http
https
hyperparameter
diff --git a/doc/administration/reference_architectures/index.md b/doc/administration/reference_architectures/index.md
index f40c8fc3c67..a2b348fb642 100644
--- a/doc/administration/reference_architectures/index.md
+++ b/doc/administration/reference_architectures/index.md
@@ -332,6 +332,7 @@ Several database cloud provider services are known not to support the above or h
- [Amazon Aurora](https://aws.amazon.com/rds/aurora/) is incompatible and not supported. See [14.4.0](../../update/index.md#1440) for more details.
- [Azure Database for PostgreSQL Single Server](https://azure.microsoft.com/en-gb/products/postgresql/#overview) (Single / Flexible) is not supported for use due to notable performance / stability issues or missing functionality. See [Recommendation Notes for Azure](#recommendation-notes-for-azure) for more details.
+- Azure Database for PostgreSQL Flexible Server uses Microsoft Azure Active Directory (Azure AD) as its authentication mechanism, which is incompatible with the GitLab database integration.
- [Google AlloyDB](https://cloud.google.com/alloydb) and [Amazon RDS Multi-AZ DB cluster](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/multi-az-db-clusters-concepts.html) have not been tested and are not recommended. Both solutions are specifically not expected to work with GitLab Geo.
- [Amazon RDS Multi-AZ DB instance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZSingleStandby.html) is a separate product and is supported.
diff --git a/doc/ci/quick_start/index.md b/doc/ci/quick_start/index.md
index 3e1cbca8bfa..ec58491e604 100644
--- a/doc/ci/quick_start/index.md
+++ b/doc/ci/quick_start/index.md
@@ -9,6 +9,9 @@ type: reference
This tutorial shows you how to configure and run your first CI/CD pipeline in GitLab.
+If you are already familiar with basic CI/CD concepts, you can learn about
+common keywords in [Tutorial: Create a complex pipeline](tutorial.md).
+
## Prerequisites
Before you start, make sure you have:
diff --git a/doc/ci/quick_start/tutorial.md b/doc/ci/quick_start/tutorial.md
new file mode 100644
index 00000000000..3e12c319069
--- /dev/null
+++ b/doc/ci/quick_start/tutorial.md
@@ -0,0 +1,504 @@
+---
+stage: Verify
+group: Pipeline Authoring
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
+---
+
+# Tutorial: Create a complex pipeline
+
+This tutorial walks you through configuring a progressively more complex CI/CD pipeline
+through small, iterative steps. The pipeline is always fully functional,
+but it gains more functionality with each step.
+
+When you finish this tutorial, you will have a new project on GitLab.com and a working documentation site on
+[Docusaurus](https://docusaurus.io/).
+
+To complete this tutorial, you will:
+
+1. Create a project to hold the Docusaurus files
+1. Create the initial pipeline configuration file
+1. Add a job to build the site
+1. Add a job to deploy the site
+1. Add test jobs
+1. Start using merge request pipelines
+1. Reduce duplicated configuration
+
+## Prerequisites
+
+- You need an account on GitLab.com.
+- You should be familiar with Git.
+- Node.js must be installed on your local machine. For example, on macOS you can
+ [install node](https://formulae.brew.sh/formula/node) with `brew install node`.
+
+## Create a project to hold the Docusaurus files
+
+Before adding the pipeline configuration, you must first set up a Docusaurus project
+on GitLab.com:
+
+1. Create a new project under your username (not a group):
+ 1. On the top bar, select **Main menu > Projects > View all projects**.
+ 1. On the right of the page, select **New project**.
+ 1. Select **Create blank project**.
+ 1. Enter the project details:
+ - In the **Project name** field, enter the name of your project, for example `My Pipeline Tutorial Project`.
+ - Select **Initialize repository with a README**.
+ 1. Select **Create project**.
+1. On the right of the **Project Overview** page for your project, select **Clone**
+ to find the clone paths for your project. Copy the SSH or HTTP path and use the path
+ to clone the project locally.
+
+ For example, to clone with SSH into a `pipeline-tutorial` directory on your computer:
+
+ ```shell
+ git clone git@gitlab.com:my-username/my-pipeline-tutorial-project.git pipeline-tutorial
+ ```
+
+1. Change to the project's directory, then generate a new Docusaurus site:
+
+ ```shell
+ cd pipeline-tutorial
+ npm init docusaurus
+ ```
+
+ The Docusaurus initialization wizard prompts you with questions about the site.
+ Use all the default options.
+
+1. The initialization wizard sets up the site in `website/`, but the site should be in
+ the root of the project. Move the files up to the root and delete the old directory:
+
+ ```shell
+ mv website/* .
+ rm -r website
+ ```
+
+1. Update the Docusaurus configuration file with the details of your GitLab project.
+ In `docusaurus.config.js`:
+
+ - Set `url:` to a path with this format: `https://<my-username>.gitlab.io/`.
+ - Set `baseUrl:` to your project name, like `/my-pipeline-tutorial-project/`.
+
+1. Commit the changes, and push them to GitLab:
+
+ ```shell
+ git add .
+ git commit -m "Add simple generated Docusaurus site"
+ git push origin
+ ```
+
+## Create the initial CI/CD configuration file
+
+Start with the simplest possible pipeline configuration file to ensure CI/CD is enabled
+in the project and runners are available to run jobs.
+
+This step introduces:
+
+- [Jobs](../jobs/index.md): These are self-contained parts of a pipeline that run your commands.
+ Jobs run on [runners](../runners/index.md), separate from the GitLab instance.
+- [`script`](../yaml/index.md#script): This section of a job's configuration is
+ where you define the commands for jobs. If there are multiple commands (in an array),
+ they run in order. Each command executes as if it was run as a CLI command.
+ By default, if a command fails or returns an error, the job is flagged as failed
+ and no more commands run.
+
+In this step, create a `.gitlab-ci.yml` file in the root of the project with this configuration:
+
+```yaml
+test-job:
+ script:
+ - echo "This is my first job!"
+ - date
+```
+
+Commit and push this change to GitLab, then:
+
+1. Go to **Build > Pipelines** and make sure a pipeline runs in GitLab with this single job.
+1. Select the pipeline, then select the job to view the job's log and see the `This is my first job!` message
+ followed by the date.
+
+Now that you have a `.gitlab-ci.yml` file in your project, you can make all future changes
+to pipeline configuration with the [pipeline editor](../pipeline_editor/index.md).
+
+## Add a job to build the site
+
+A common task for a CI/CD pipeline is to build the code in the project then deploy it.
+Start by adding a job that builds the site.
+
+This step introduces:
+
+- [`image`](../yaml/index.md#image): Tell the runner which Docker
+ container to use to run the job in. The runner:
+ 1. Downloads the container image and starts it.
+ 1. Clones your GitLab project into the running container.
+ 1. Runs the `script` commands, one at a time.
+- [`artifacts`](../yaml/index.md#artifacts): Jobs are self-contained and do not share
+ resources with each other. If you want files generated in one job to be used in
+ another job, you must save them as artifacts first. Then later jobs can retrieve the
+ artifacts and use the generated files.
+
+In this step, replace `test-job` with `build-job`:
+
+- Use `image` to configure the job to run with the latest `node` image. Docusaurus
+ is a Node.js project and the `node` image has the needed `npm` commands built in.
+- Run `npm install` to install Docusaurus into the running `node` container, then run
+ `npm run build` to build the site.
+- Docusaurus saves the built site in `build/`, so save these files with `artifacts`.
+
+```yaml
+build-job:
+ image: node
+ script:
+ - npm install
+ - npm run build
+ artifacts:
+ paths:
+ - "build/"
+```
+
+Use the pipeline editor to commit this pipeline configuration to the default branch,
+and check the job log. You can:
+
+- See the `npm` commands run and build the site.
+- Verify that the artifacts are saved at the end.
+- Browse the contents of the artifacts file by selecting **Browse** to the right of the job log
+ after the job completes.
+
+## Add a job to deploy the site
+
+After verifying the Docusaurus site builds in `build-job`, you can add a job that deploys it.
+
+This step introduces:
+
+- [`stage`](../yaml/index.md#stage) and [`stages`](../yaml/index.md#stages): The most common
+ pipeline configurations group jobs into stages. Jobs in the same stage can run in parallel,
+ while jobs in later stages wait for jobs in earlier stages to complete. If a job fails,
+ the whole stage is considered failed and jobs in later stages do not start running.
+- [GitLab Pages](../../user/project/pages/index.md): To host your static site, you
+ will use GitLab Pages.
+
+In this step:
+
+- Add a job that fetches the built site and deploys it. When using GitLab Pages,
+ the job is always named `pages`. The artifacts from the `build-job` are fetched automatically
+ and extracted into the job. Pages looks for the site in the `public/` directory though,
+ so add a `script` command to move the site to that directory.
+- Add a `stages` section, and define the stages for each job. `build-job` runs first
+ in the `build` stage, and `pages` runs after in the `deploy` stage.
+
+```yaml
+stages: # List of stages for jobs and their order of execution
+ - build
+ - deploy
+
+build-job:
+ stage: build # Set this job to run in the `build` stage
+ image: node
+ script:
+ - npm install
+ - npm run build
+ artifacts:
+ paths:
+ - "build/"
+
+pages:
+ stage: deploy # Set this new job to run in the `deploy` stage
+ script:
+ - mv build/ public/
+ artifacts:
+ paths:
+ - "public/"
+```
+
+Use the pipeline editor to commit this pipeline configuration to the default branch,
+and view the pipeline details from the **Pipelines** list. Verify that:
+
+- The two jobs run in different stages, `build` and `deploy`.
+- After the `pages` job completes, a `pages-deploy` job appears, which is the GitLab process
+ that deploys the Pages site. When that job completes, you can visit your new Docusaurus
+ site. The Pages documentation explains [the URL formatting](../../user/project/pages/getting_started_part_one.md#gitlab-pages-default-domain-names),
+ which should be similar to `https://<my-username>.gitlab.io/<my-pipeline-tutorial-project>/`.
+
+## Add test jobs
+
+Now that the site builds and deploys as expected, you can add tests and linting.
+For example, a Ruby project might run RSpec test jobs. Docusaurus is a static site
+that uses Markdown and generated HTML, so this tutorial adds jobs to test the Markdown and HTML.
+
+This step introduces:
+
+- [`allow_failure`](../yaml/index.md#allow_failure): Jobs that fail intermittently,
+ or are expected to fail, can slow down productivity or be difficult to troubleshoot.
+ Use `allow_failure` to let jobs fail without halting pipeline execution.
+- [`dependencies`](../yaml/index.md#dependencies): Use `dependencies` to control
+ artifact downloads in individual jobs by listing which jobs to fetch artifacts from.
+
+In this step:
+
+- Add a new `test` stage that runs between `build` and `deploy`. These three stages
+ are the default stages when `stages` is undefined in the configuration.
+- Add a `lint-markdown` job to run [markdownlint](https://github.com/DavidAnson/markdownlint)
+ and check the Markdown in your project. markdownlint is a static analysis tool that
+ checks that your Markdown files follow formatting standards.
+ - The sample Markdown files Docusaurus generates are in `blog/` and `docs/`.
+ - This tool scans the original Markdown files only, and does not need the generated HTML
+ saved in the `build-job` artifacts. Speed up the job with `dependencies: []`
+ so that it fetches no artifacts.
+ - A few of the sample Markdown files violate default markdownlint rules, so add
+ `allow_failure: true` to let the pipeline continue despite the rule violations.
+- Add a `test-html` job to run [HTMLHint](https://htmlhint.com/) and check the generated HTML.
+ HTMLHint is a static analysis tool that scans generated HTML for known issues.
+- Both `test-html` and `pages` need the generated HTML found in the `build-job` artifacts.
+ Jobs fetch artifacts from all jobs in earlier stages by default, but add `dependencies:`
+ to make sure the jobs don't accidentally download other artifacts after future pipeline changes.
+
+```yaml
+stages:
+  - build
+  - test # Add a `test` stage for the test jobs
+  - deploy
+
+build-job:
+  stage: build
+  image: node
+  script:
+    - npm install
+    - npm run build
+  artifacts:
+    paths:
+      - "build/"
+
+lint-markdown:
+  stage: test
+  image: node
+  dependencies: [] # Don't fetch any artifacts
+  script:
+    - npm install markdownlint-cli2 --global # Install markdownlint into the container
+    - markdownlint-cli2 -v # Verify the version, useful for troubleshooting
+    - markdownlint-cli2 "blog/**/*.md" "docs/**/*.md" # Lint all markdown files in blog/ and docs/
+  allow_failure: true # This job fails right now, but don't let it stop the pipeline.
+
+test-html:
+  stage: test
+  image: node
+  dependencies:
+    - build-job # Only fetch artifacts from `build-job`
+  script:
+    - npm install --save-dev htmlhint # Install HTMLHint into the container
+    - npx htmlhint --version # Verify the version, useful for troubleshooting
+    - npx htmlhint build/ # Lint the generated HTML in the build/ directory
+
+pages:
+  stage: deploy
+  dependencies:
+    - build-job # Only fetch artifacts from `build-job`
+  script:
+    - mv build/ public/
+  artifacts:
+    paths:
+      - "public/"
+```
+
+Commit this pipeline configuration to the default branch, and view the pipeline details.
+
+- The `lint-markdown` job fails because the sample Markdown violates the default
+  markdownlint rules, but is allowed to fail. You can:
+  - Ignore the violations for now. They do not need to be fixed as part of the tutorial.
+  - Fix the Markdown file violations. Then you can change `allow_failure` to `false`,
+    or remove `allow_failure` completely, because `allow_failure: false` is the default behavior
+    when not defined.
+  - Add a markdownlint configuration file to limit which rule violations to alert on,
+    for example the sketch after this list.
+- You can also make changes to the Markdown file content and see the changes on the site
+  after the next deployment.
+
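+A minimal sketch of a markdownlint configuration file, saved as `.markdownlint.yaml` in the
+project root. The rule selection here is only an example, adjust it to your project's needs:
+
+```yaml
+# Enable all rules by default, then turn off the ones you do not want enforced
+default: true
+MD013: false # Don't enforce a maximum line length
+MD033: false # Allow inline HTML in Markdown files
+```
+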
+## Start using merge request pipelines
+
+With the pipeline configurations above, the site deploys every time a pipeline completes
+successfully, but this is not an ideal development workflow. It's better to work from
+feature branches and merge requests, and only deploy the site when changes merge
+to the default branch.
+
+This step introduces:
+
+- [`rules`](../yaml/index.md#rules): Add rules to each job to configure in which
+ pipelines they run. You can configure jobs to run in [merge request pipelines](../pipelines/merge_request_pipelines.md),
+ [scheduled pipelines](../pipelines/schedules.md), or other specific situations.
+ Rules are evaluated from top to bottom, and if a rule matches, the job is
+ added to the pipeline.
+- [CI/CD variables](../variables/index.md): use these environment variables
+ to configure job behavior in the configuration file and in script commands.
+ [Predefined CI/CD variables](../variables/predefined_variables.md) are variables
+ that you do not need to manually define. They are automatically injected into pipelines
+ so you can use them to configure your pipeline. Variables are usually formatted as `$VARIABLE_NAME`,
+ and predefined variables are usually prefixed with `$CI_`, as in the short example after this list.
+
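+For example, a minimal sketch of a job that uses predefined variables in a script command.
+This job is only an illustration and is not part of the tutorial's pipeline:
+
+```yaml
+print-context-job:
+  script:
+    - echo "Pipeline source is $CI_PIPELINE_SOURCE for project $CI_PROJECT_PATH"
+```
+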
+In this step:
+
+- Create a new feature branch and make the changes in the branch instead of the default branch.
+- Add `rules` to each job:
+  - The site should only deploy for changes to the default branch.
+  - The other jobs should run for all changes in merge requests or the default branch.
+- With this pipeline configuration, you can work from a feature branch without running any jobs,
+ which saves resources. When you are ready to validate your changes, create a merge request
+ and a pipeline runs with the jobs configured to run in merge requests.
+- When your merge request is accepted and the changes merge to the default branch,
+ a new pipeline runs which also contains the `pages` deployment job. The site deploys
+ if no jobs fail.
+
+```yaml
+stages:
+  - build
+  - test
+  - deploy
+
+build-job:
+  stage: build
+  image: node
+  script:
+    - npm install
+    - npm run build
+  artifacts:
+    paths:
+      - "build/"
+  rules:
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event' # Run for all changes to a merge request's source branch
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Run for all changes to the default branch
+
+lint-markdown:
+  stage: test
+  image: node
+  dependencies: []
+  script:
+    - npm install markdownlint-cli2 --global
+    - markdownlint-cli2 -v
+    - markdownlint-cli2 "blog/**/*.md" "docs/**/*.md"
+  allow_failure: true
+  rules:
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event' # Run for all changes to a merge request's source branch
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Run for all changes to the default branch
+
+test-html:
+  stage: test
+  image: node
+  dependencies:
+    - build-job
+  script:
+    - npm install --save-dev htmlhint
+    - npx htmlhint --version
+    - npx htmlhint build/
+  rules:
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event' # Run for all changes to a merge request's source branch
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Run for all changes to the default branch
+
+pages:
+  stage: deploy
+  dependencies:
+    - build-job
+  script:
+    - mv build/ public/
+  artifacts:
+    paths:
+      - "public/"
+  rules:
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Run for all changes to the default branch only
+```
+
+Merge the changes in your merge request. This action updates the default branch. Verify that
+the new pipeline contains the `pages` job that deploys the site.
+
+Be sure to use feature branches and merge requests for all future changes to pipeline configuration.
+Other project changes, like creating a Git tag or adding a pipeline schedule, do not
+trigger pipelines unless you add rules for those cases too.
+
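+For example, a minimal sketch of extra rules that would also run a job in tag pipelines and
+scheduled pipelines, using the predefined `$CI_COMMIT_TAG` and `$CI_PIPELINE_SOURCE` variables.
+This snippet is an illustration only and is not part of the tutorial's final configuration:
+
+```yaml
+lint-markdown:
+  rules:
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+    - if: $CI_COMMIT_TAG # Also run when a Git tag is created
+    - if: $CI_PIPELINE_SOURCE == 'schedule' # Also run in scheduled pipelines
+```
+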
+## Reduce duplicated configuration
+
+The pipeline now contains three jobs that all have identical `rules` and `image`
+configuration. Instead of repeating these rules, use `extends` and `default` to create
+single sources of truth.
+
+This step introduces:
+
+- [Hidden jobs](../jobs/index.md#hide-jobs): Jobs that start with `.` are never
+ added to a pipeline. Use them to hold configuration you want to reuse.
+- [`extends`](../yaml/index.md#extends): Use `extends` to reuse configuration in
+ multiple places, often from hidden jobs. If you update the hidden job's configuration,
+ all jobs that extend it use the updated configuration.
+- [`default`](../yaml/index.md#default): Set keyword defaults that apply to all jobs
+ when not defined.
+- YAML overriding: When reusing configuration with `extends` or `default`, you can explicitly
+ define a keyword in the job to override the `extends` or `default` configuration.
+
+In this step:
+
+- Add a `.standard-rules` hidden job to hold the rules that are repeated in `build-job`,
+ `lint-markdown`, and `test-html`.
+- Use `extends` to reuse the `.standard-rules` configuration in the three jobs.
+- Add a `default` section to define the `image` default as `node`.
+- The `pages` deployment job does not need the default `node` image, so explicitly use
+ [`busybox`](https://hub.docker.com/_/busybox), an extremely tiny and fast image.
+
+```yaml
+stages:
+  - build
+  - test
+  - deploy
+
+default: # Add a default section to define the `image` keyword's default value
+  image: node
+
+.standard-rules: # Make a hidden job to hold the common rules
+  rules:
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+
+build-job:
+  extends:
+    - .standard-rules # Reuse the configuration in `.standard-rules` here
+  stage: build
+  script:
+    - npm install
+    - npm run build
+  artifacts:
+    paths:
+      - "build/"
+
+lint-markdown:
+  stage: test
+  extends:
+    - .standard-rules # Reuse the configuration in `.standard-rules` here
+  dependencies: []
+  script:
+    - npm install markdownlint-cli2 --global
+    - markdownlint-cli2 -v
+    - markdownlint-cli2 "blog/**/*.md" "docs/**/*.md"
+  allow_failure: true
+
+test-html:
+  stage: test
+  extends:
+    - .standard-rules # Reuse the configuration in `.standard-rules` here
+  dependencies:
+    - build-job
+  script:
+    - npm install --save-dev htmlhint
+    - npx htmlhint --version
+    - npx htmlhint build/
+
+pages:
+  stage: deploy
+  image: busybox # Override the default `image` value with `busybox`
+  dependencies:
+    - build-job
+  script:
+    - mv build/ public/
+  artifacts:
+    paths:
+      - "public/"
+  rules:
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+```
+
+Use a merge request to commit this pipeline configuration to the default branch.
+The file is simpler, but the pipeline should behave the same as in the previous step.
+
+You've just created a full pipeline and streamlined it to be more efficient. Nice work!
+Now you can take this knowledge, learn about [the rest of the `.gitlab-ci.yml` keywords](../yaml/index.md),
+and build your own pipelines.
diff --git a/doc/development/database/batched_background_migrations.md b/doc/development/database/batched_background_migrations.md
index 326d2795558..6a6b43e52a0 100644
--- a/doc/development/database/batched_background_migrations.md
+++ b/doc/development/database/batched_background_migrations.md
@@ -148,6 +148,49 @@ Make sure the newly-created data is either migrated, or
saved in both the old and new version upon creation. Removals in
turn can be handled by defining foreign keys with cascading deletes.
+### Job retry mechanism
+
+The batched background migrations retry mechanism ensures that a job is executed again in case of failure.
+The following diagram shows the different stages of our retry mechanism:
+
+```plantuml
+@startuml
+hide empty description
+note as N1
+ can_split?:
+ the failure is due to a query timeout
+end note
+[*] --> Running
+Running --> Failed
+note on link
+ if number of retries <= MAX_ATTEMPTS
+end note
+Running --> Succeeded
+Failed --> Running
+note on link
+ if number of retries > MAX_ATTEMPTS
+ and can_split? == true
+ then two jobs with smaller
+ batch size will be created
+end note
+Failed --> [*]
+Succeeded --> [*]
+@enduml
+```
+
+- `MAX_ATTEMPTS` is defined in the [`Gitlab::Database::BackgroundMigration::BatchedJob`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/database/background_migration/batched_job.rb)
+  class.
+- `can_split?` is defined in the same [`Gitlab::Database::BackgroundMigration::BatchedJob`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/database/background_migration/batched_job.rb) class.
+
+### Failed batched background migrations
+
+The whole batched background migration is marked as `failed`
+(`/chatops run batched_background_migrations status MIGRATION_ID` will show
+the migration as `failed`) if any of the following are true:
+
+- There are no more jobs to consume, and there are failed jobs.
+- More than [half of the jobs failed since the background migration was started](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/database/background_migration/batched_migration.rb).
+
### Requeuing batched background migrations
If one of the batched background migrations contains a bug that is fixed in a patch
diff --git a/doc/tutorials/build_application.md b/doc/tutorials/build_application.md
index 685cf408e77..2e0130e46ca 100644
--- a/doc/tutorials/build_application.md
+++ b/doc/tutorials/build_application.md
@@ -13,6 +13,7 @@ Use CI/CD pipelines to automatically build, test, and deploy your code.
| Topic | Description | Good for beginners |
|-------|-------------|--------------------|
| [Create and run your first GitLab CI/CD pipeline](../ci/quick_start/index.md) | Create a `.gitlab-ci.yml` file and start a pipeline. | **{star}** |
+| [Create a complex pipeline](../ci/quick_start/tutorial.md) | Learn about the most commonly used GitLab CI/CD keywords by building an increasingly complex pipeline. | |
| <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Get started: Learn about CI/CD](https://www.youtube.com/watch?v=sIegJaLy2ug) (9m 02s) | Learn about the `.gitlab-ci.yml` file and how it's used. | **{star}** |
| [GitLab CI/CD](https://levelup.gitlab.com/courses/continuous-integration-and-delivery-ci-cd-with-gitlab) | Learn about GitLab CI/CD and build a pipeline in this self-paced course. | **{star}** |
| <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [CI deep dive](https://www.youtube.com/watch?v=ZVUbmVac-m8&list=PL05JrBw4t0KorkxIFgZGnzzxjZRCGROt_&index=27) (22m 51s) | Take a closer look at pipelines and continuous integration concepts. | |
diff --git a/doc/user/project/repository/code_suggestions.md b/doc/user/project/repository/code_suggestions.md
index 6d3ce5267cf..857c75f2140 100644
--- a/doc/user/project/repository/code_suggestions.md
+++ b/doc/user/project/repository/code_suggestions.md
@@ -22,9 +22,11 @@ as you type. Depending on the cursor position, the extension either:
To accept a suggestion, press <kbd>Tab</kbd>.
-Code Suggestions are supported in Visual Studio Code with the GitLab Workflow extension.
+Code Suggestions are available in Visual Studio Code when you have the GitLab Workflow extension installed.
-Code Suggestions may produce [low-quality or incomplete suggestions](#model-accuracy-and-quality). Beta users should read about the [known limitations](#known-limitations). The best results from Code Suggestions are expected for these six languages:
+## Supported languages
+
+Code Suggestions may produce [low-quality or incomplete suggestions](#model-accuracy-and-quality). The best results from Code Suggestions are expected for these six languages:
- C
- C++
@@ -39,10 +41,6 @@ GitLab is continuously improving the model and expects to support an additional
Usage of Code Suggestions is governed by the [GitLab Testing Agreement](https://about.gitlab.com/handbook/legal/testing-agreement/). Learn about [data usage when using Code Suggestions](#code-suggestions-data-usage).
-## Group level setting
-
-[Group owners](../../permissions.md#group-members-permissions) can enable Code Suggestions for all projects in a group by using the [group level Code Suggestions setting](../../group/manage.md#group-code-suggestions).
-
## Enable Code Suggestions in VS Code
Prerequisites:
@@ -73,41 +71,43 @@ Start typing and receive suggestions for your GitLab projects.
<iframe src="https://www.youtube-nocookie.com/embed/WnxBYxN2-p4" frameborder="0" allowfullscreen> </iframe>
</figure>
-## Code Suggestions Data Usage
+## Code Suggestions data usage
-### Overview
+Code Suggestions is a generative artificial intelligence (AI) model hosted on GitLab.com.
-Code Suggestions is a generative artificial intelligence (AI) model hosted on GitLab.com that can empower your developers to code more efficiently by suggesting code as they type.
+Your personal access token enables a secure API connection to GitLab.com. This API connection securely transmits a context window from VS Code to the Code Suggestions ML model for inference, and the generated suggestion is transmitted back to VS Code.
-The personal access token enables a secure API connection to GitLab.com. This API connection securely transmits a context window from VS Code to the Code Suggestions ML model for inference, and the generated suggestion is transmitted back to VS Code.
+### Data privacy
-#### Progressive enhancement
+Code Suggestions operate completely in the GitLab.com infrastructure, providing the same level of [security](https://about.gitlab.com/security/) as any other feature of GitLab.com, and processing any personal data in accordance with our [Privacy Statement](https://about.gitlab.com/privacy/).
-This feature is designed as a progressive enhancement to the existing VS Code GitLab Workflow plugin. Code Suggestions offer a completion if the machine learning engine can generate a recommendation. In the event of a connection issue or model inference failure, the feature gracefully degrades. Code Suggestions do not prevent you from writing code in VS Code.
+No new additional data is collected to enable this feature. The content of your GitLab hosted source code is not used as training data. Source code inference against the Code Suggestions model is not used to re-train the model. Your data also never leaves GitLab.com. All training and inference is done in GitLab.com infrastructure.
-#### Off by default
+[Read more about the security of GitLab.com](https://about.gitlab.com/security/faq/).
-Code Suggestions are off by default and require a group owner to enable the feature with a [group-level setting](#group-level-setting).
+### Training data
-After the group level setting is enabled, Developers using Visual Studio Code with the [GitLab Workflow extension](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow) can connect to GitLab.com via a GitLab [personal access token](../../profile/personal_access_tokens.md#create-a-personal-access-token) with the `read_api` and `read_user` scopes.
+Code Suggestions uses open source pre-trained base models from the [CodeGen family](https://openreview.net/forum?id=iaYcJKpY2B_) including CodeGen-MULTI and CodeGen-NL. We then re-train and fine-tune these base models with a customized open source dataset to enable multi-language support and additional use cases. This customized dataset contains non-preprocessed open source code in 13 programming languages from [The Pile](https://pile.eleuther.ai/) and the [Google BigQuery source code dataset](https://cloud.google.com/blog/topics/public-datasets/github-on-bigquery-analyze-all-the-open-source-code). We then process this raw dataset against heuristics that aim to increase the quality of the dataset.
-#### Generating suggestions
+The Code Suggestions model is not trained on GitLab customer data.
-Once configured by a developer in VS Code. The personal access token enables a secure API connection to GitLab.com. This API connection securely transmits a context window from VS Code to the Code Suggestions ML model for inference, and the generated suggestion is transmitted back to VS Code.
+### Off by default
-Code Suggestions only work when you have internet connectivity and can access GitLab.com. Code Suggestions are not available for self-managed customers, nor customers operating within an air-gapped environment.
+Code Suggestions are off by default and require a group owner to enable the feature with a group-level setting.
-### Stability and performance
+After the group-level setting is enabled, developers using Visual Studio Code with the [GitLab Workflow extension](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow) can connect to GitLab.com by using a GitLab [personal access token](../../profile/personal_access_tokens.md#create-a-personal-access-token) with the `read_api` and `read_user` scopes.
-This feature is currently in [Beta](/ee/policy/alpha-beta-support.md#beta). While the Code Suggestions inference API operates completely within GitLab.com's enterprise infrastructure, we expect a high demand for this Beta feature, which may cause degraded performance or unexpected downtime of the feature. We have built this feature to gracefully degrade and have controls in place to allow us to mitigate abuse or misuse. GitLab may disable this feature for any or all customers at any time at our discretion.
+## Progressive enhancement
-## Data privacy
+This feature is designed as a progressive enhancement to the existing VS Code GitLab Workflow plugin. Code Suggestions offer a completion if the machine learning engine can generate a recommendation. In the event of a connection issue or model inference failure, the feature gracefully degrades. Code Suggestions do not prevent you from writing code in VS Code.
-Code Suggestions operate completely in the GitLab.com infrastructure, providing the same level of [security](https://about.gitlab.com/security/) as any other feature of GitLab.com, and processing any personal data in accordance with our [Privacy Statement](https://about.gitlab.com/privacy/).
+### Internet connectivity
-No new additional data is collected to enable this feature. The content of your GitLab hosted source code is not used as training data. Source code inference against the Code Suggestions model is not used to re-train the model. Your data also never leaves GitLab.com. All training and inference is done in GitLab.com infrastructure.
+Code Suggestions only work when you have internet connectivity and can access GitLab.com. Code Suggestions are not available for self-managed customers, nor customers operating within an air-gapped environment.
-[Read more about the security of GitLab.com](https://about.gitlab.com/security/faq/).
+### Stability and performance
+
+This feature is currently in [Beta](/ee/policy/alpha-beta-support.md#beta). While the Code Suggestions inference API operates completely within the GitLab.com enterprise infrastructure, we expect a high demand for this Beta feature, which may cause degraded performance or unexpected downtime of the feature. We have built this feature to gracefully degrade and have controls in place to allow us to mitigate abuse or misuse. GitLab may disable this feature for any or all customers at any time at our discretion.
### Model accuracy and quality
@@ -117,12 +117,6 @@ GitLab uses a customized open source dataset to fine-tune the model to support m
GitLab is actively refining these models to improve the quality of recommendations, add support for more languages, and add protections to limit personal data, insecure code, and other unwanted behavior that the model may have learned from training data.
-### Training data
-
-Code Suggestions uses open source pre-trained base models from the [CodeGen family](https://openreview.net/forum?id=iaYcJKpY2B_) including CodeGen-MULTI and CodeGen-NL. We then re-train and fine-tune these base models with a customized open source dataset to enable multi-language support and additional use cases. This customized dataset contains non-preprocessed open source code in 13 programming languages from [The Pile](https://pile.eleuther.ai/) and [Google's BigQuery source code dataset](https://cloud.google.com/blog/topics/public-datasets/github-on-bigquery-analyze-all-the-open-source-code). We then process this raw dataset against heuristics that aim to increase the quality of the dataset.
-
-The Code Suggestions model is not trained on GitLab customer data.
-
## Known limitations
While in Beta, we are working on improving the accuracy of overall generated content. However, Code Suggestions may generate suggestions that are:
diff --git a/lib/api/ci/runner.rb b/lib/api/ci/runner.rb
index b06bfbd95fe..0bdc2844b6f 100644
--- a/lib/api/ci/runner.rb
+++ b/lib/api/ci/runner.rb
@@ -75,6 +75,25 @@ module API
destroy_conditionally!(current_runner) { ::Ci::Runners::UnregisterRunnerService.new(current_runner, params[:token]).execute }
end
+ desc 'Delete a registered runner manager' do
+ summary 'Internal endpoint that deletes a runner manager by authentication token and system ID.'
+ failure [[400, 'Bad Request'], [403, 'Forbidden']]
+ end
+ params do
+ requires :token, type: String, desc: %q(The runner's authentication token)
+ requires :system_id, type: String, desc: %q(The runner's system identifier.)
+ end
+ delete '/managers', urgency: :low, feature_category: :runner_fleet do
+ authenticate_runner!
+
+ destroy_conditionally!(current_runner) do
+ ::Ci::Runners::UnregisterRunnerManagerService.new(
+ current_runner,
+ params[:token],
+ system_id: params[:system_id]).execute
+ end
+ end
+
desc 'Validate authentication credentials' do
summary "Verify authentication for a registered runner"
success Entities::Ci::RunnerRegistrationDetails
diff --git a/lib/api/entities/protected_ref_access.rb b/lib/api/entities/protected_ref_access.rb
index ba28c724448..28e0ef540d5 100644
--- a/lib/api/entities/protected_ref_access.rb
+++ b/lib/api/entities/protected_ref_access.rb
@@ -9,6 +9,8 @@ module API
documentation: { type: 'string', example: 'Maintainers' } do |protected_ref_access|
protected_ref_access.humanize
end
+ expose :deploy_key_id, documentation: { type: 'integer', example: 1 },
+ if: ->(access) { access.has_attribute?(:deploy_key_id) && access.deploy_key_id }
end
end
end
diff --git a/lib/gitlab/database/partitioning/convert_table_to_first_list_partition.rb b/lib/gitlab/database/partitioning/convert_table_to_first_list_partition.rb
deleted file mode 100644
index afca2368126..00000000000
--- a/lib/gitlab/database/partitioning/convert_table_to_first_list_partition.rb
+++ /dev/null
@@ -1,316 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Database
- module Partitioning
- class ConvertTableToFirstListPartition
- UnableToPartition = Class.new(StandardError)
-
- SQL_STATEMENT_SEPARATOR = ";\n\n"
-
- PARTITIONING_CONSTRAINT_NAME = 'partitioning_constraint'
-
- attr_reader :partitioning_column, :table_name, :parent_table_name, :zero_partition_value
-
- def initialize(
- migration_context:, table_name:, parent_table_name:, partitioning_column:,
- zero_partition_value:, lock_tables: [])
-
- @migration_context = migration_context
- @connection = migration_context.connection
- @table_name = table_name
- @parent_table_name = parent_table_name
- @partitioning_column = partitioning_column
- @zero_partition_value = zero_partition_value
- @lock_tables = Array.wrap(lock_tables)
- end
-
- def prepare_for_partitioning(async: false)
- assert_existing_constraints_partitionable
-
- add_partitioning_check_constraint(async: async)
- end
-
- def revert_preparation_for_partitioning
- migration_context.remove_check_constraint(table_name, partitioning_constraint.name)
- end
-
- def partition
- assert_existing_constraints_partitionable
- assert_partitioning_constraint_present
-
- create_parent_table
- attach_foreign_keys_to_parent
-
- lock_args = {
- raise_on_exhaustion: true,
- timing_configuration: lock_timing_configuration
- }
-
- migration_context.with_lock_retries(**lock_args) do
- redefine_loose_foreign_key_triggers do
- migration_context.execute(sql_to_convert_table)
- end
- end
- end
-
- def revert_partitioning
- migration_context.with_lock_retries(raise_on_exhaustion: true) do
- migration_context.execute(<<~SQL)
- ALTER TABLE #{connection.quote_table_name(parent_table_name)}
- DETACH PARTITION #{connection.quote_table_name(table_name)};
- SQL
-
- alter_sequences_sql = alter_sequence_statements(old_table: parent_table_name, new_table: table_name)
- .join(SQL_STATEMENT_SEPARATOR)
-
- migration_context.execute(alter_sequences_sql)
-
- # This takes locks for all the foreign keys that the parent table had.
- # However, those same locks were taken while detaching the partition, and we can't avoid that.
- # If we dropped the foreign key before detaching the partition to avoid this locking,
- # the drop would cascade to the child partitions and drop their foreign keys as well
- migration_context.drop_table(parent_table_name)
- end
-
- add_partitioning_check_constraint
- end
-
- private
-
- attr_reader :connection, :migration_context
-
- delegate :quote_table_name, :quote_column_name, to: :connection
-
- def sql_to_convert_table
- # The critical statement here is the attach_table_to_parent statement.
- # The following statements could be run in a later transaction,
- # but they acquire the same locks so it's much faster to incude them
- # here.
- [
- lock_tables_statement,
- attach_table_to_parent_statement,
- alter_sequence_statements(old_table: table_name, new_table: parent_table_name),
- remove_constraint_statement
- ].flatten.join(SQL_STATEMENT_SEPARATOR)
- end
-
- def table_identifier
- "#{connection.current_schema}.#{table_name}"
- end
-
- def assert_existing_constraints_partitionable
- violating_constraints = Gitlab::Database::PostgresConstraint
- .by_table_identifier(table_identifier)
- .primary_or_unique_constraints
- .not_including_column(partitioning_column)
- .to_a
-
- return if violating_constraints.empty?
-
- violation_messages = violating_constraints.map { |c| "#{c.name} on (#{c.column_names.join(', ')})" }
-
- raise UnableToPartition, <<~MSG
- Constraints on #{table_name} are incompatible with partitioning on #{partitioning_column}
-
- All primary key and unique constraints must include the partitioning column.
- Violations:
- #{violation_messages.join("\n")}
- MSG
- end
-
- def partitioning_constraint
- constraints_on_column = Gitlab::Database::PostgresConstraint
- .by_table_identifier(table_identifier)
- .check_constraints
- .including_column(partitioning_column)
-
- check_body = "CHECK ((#{partitioning_column} = #{zero_partition_value}))"
-
- constraints_on_column.find do |constraint|
- constraint.definition.start_with?(check_body)
- end
- end
-
- def assert_partitioning_constraint_present
- return if partitioning_constraint&.constraint_valid?
-
- raise UnableToPartition, <<~MSG
- Table #{table_name} is not ready for partitioning.
- Before partitioning, a check constraint must enforce that (#{partitioning_column} = #{zero_partition_value})
- MSG
- end
-
- def add_partitioning_check_constraint(async: false)
- return validate_partitioning_constraint_synchronously if partitioning_constraint.present?
-
- check_body = "#{partitioning_column} = #{connection.quote(zero_partition_value)}"
- # Any constraint name would work. The constraint is found based on its definition before partitioning
- migration_context.add_check_constraint(
- table_name, check_body, PARTITIONING_CONSTRAINT_NAME,
- validate: !async
- )
-
- if async
- migration_context.prepare_async_check_constraint_validation(
- table_name, name: PARTITIONING_CONSTRAINT_NAME
- )
- end
-
- return if partitioning_constraint.present?
-
- raise UnableToPartition, <<~MSG
- Error adding partitioning constraint `#{PARTITIONING_CONSTRAINT_NAME}` for `#{table_name}`
- MSG
- end
-
- def validate_partitioning_constraint_synchronously
- if partitioning_constraint.constraint_valid?
- return Gitlab::AppLogger.info <<~MSG
- Nothing to do, the partitioning constraint exists and is valid for `#{table_name}`
- MSG
- end
-
- # Async validations are executed only on .com, we need to validate synchronously for self-managed
- migration_context.validate_check_constraint(table_name, partitioning_constraint.name)
- return if partitioning_constraint.constraint_valid?
-
- raise UnableToPartition, <<~MSG
- Error validating partitioning constraint `#{partitioning_constraint.name}` for `#{table_name}`
- MSG
- end
-
- def create_parent_table
- migration_context.execute(<<~SQL)
- CREATE TABLE IF NOT EXISTS #{quote_table_name(parent_table_name)} (
- LIKE #{quote_table_name(table_name)} INCLUDING ALL
- ) PARTITION BY LIST(#{quote_column_name(partitioning_column)})
- SQL
- end
-
- def attach_foreign_keys_to_parent
- migration_context.foreign_keys(table_name).each do |fk|
- # At this point no other connection knows about the parent table.
- # Thus the only contended lock in the following transaction is on fk.to_table.
- # So a deadlock is impossible.
-
- # If we're rerunning this migration after a failure to acquire a lock, the foreign key might already exist.
- # Don't try to recreate it in that case
- if migration_context.foreign_keys(parent_table_name)
- .any? { |p_fk| p_fk.options[:name] == fk.options[:name] }
- next
- end
-
- migration_context.with_lock_retries(raise_on_exhaustion: true) do
- migration_context.add_foreign_key(parent_table_name, fk.to_table, **fk.options)
- end
- end
- end
-
- def lock_tables_statement
- return if @lock_tables.empty?
-
- table_names = @lock_tables.map { |name| quote_table_name(name) }.join(', ')
-
- <<~SQL
- LOCK #{table_names} IN ACCESS EXCLUSIVE MODE
- SQL
- end
-
- def attach_table_to_parent_statement
- <<~SQL
- ALTER TABLE #{quote_table_name(parent_table_name)}
- ATTACH PARTITION #{table_name}
- FOR VALUES IN (#{zero_partition_value})
- SQL
- end
-
- def alter_sequence_statements(old_table:, new_table:)
- sequences_owned_by(old_table).map do |seq_info|
- seq_name, column_name = seq_info.values_at(:name, :column_name)
-
- statement_parts = []
-
- # If a different user owns the old table, the conversion process will fail to reassign the sequence
- # ownership to the new parent table (as it will be owned by the current user).
- # Force the old table to be owned by the current user in that case.
- unless current_user_owns_table?(old_table)
- statement_parts << set_current_user_owns_table_statement(old_table)
- end
-
- statement_parts << <<~SQL.chomp
- ALTER SEQUENCE #{quote_table_name(seq_name)} OWNED BY #{quote_table_name(new_table)}.#{quote_column_name(column_name)}
- SQL
-
- statement_parts.join(SQL_STATEMENT_SEPARATOR)
- end
- end
-
- def remove_constraint_statement
- <<~SQL
- ALTER TABLE #{quote_table_name(parent_table_name)}
- DROP CONSTRAINT #{quote_table_name(partitioning_constraint.name)}
- SQL
- end
-
- # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/373887
- def sequences_owned_by(table_name)
- sequence_data = connection.exec_query(<<~SQL, nil, [table_name])
- SELECT seq_pg_class.relname AS seq_name,
- dep_pg_class.relname AS table_name,
- pg_attribute.attname AS col_name
- FROM pg_class seq_pg_class
- INNER JOIN pg_depend ON seq_pg_class.oid = pg_depend.objid
- INNER JOIN pg_class dep_pg_class ON pg_depend.refobjid = dep_pg_class.oid
- INNER JOIN pg_attribute ON dep_pg_class.oid = pg_attribute.attrelid
- AND pg_depend.refobjsubid = pg_attribute.attnum
- WHERE seq_pg_class.relkind = 'S'
- AND dep_pg_class.relname = $1
- SQL
-
- sequence_data.map do |seq_info|
- name, column_name = seq_info.values_at('seq_name', 'col_name')
- { name: name, column_name: column_name }
- end
- end
-
- def table_owner(table_name)
- connection.select_value(<<~SQL, nil, [table_name])
- SELECT tableowner FROM pg_tables WHERE tablename = $1
- SQL
- end
-
- def current_user_owns_table?(table_name)
- current_user = connection.select_value('select current_user')
- table_owner(table_name) == current_user
- end
-
- def set_current_user_owns_table_statement(table_name)
- <<~SQL.chomp
- ALTER TABLE #{connection.quote_table_name(table_name)} OWNER TO CURRENT_USER
- SQL
- end
-
- def lock_timing_configuration
- iterations = Gitlab::Database::WithLockRetries::DEFAULT_TIMING_CONFIGURATION
- aggressive_iterations = Array.new(5) { [10.seconds, 1.minute] }
-
- iterations + aggressive_iterations
- end
-
- def redefine_loose_foreign_key_triggers
- if migration_context.has_loose_foreign_key?(table_name)
- migration_context.untrack_record_deletions(table_name)
-
- yield if block_given?
-
- migration_context.track_record_deletions(parent_table_name)
- migration_context.track_record_deletions(table_name)
- elsif block_given?
- yield
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/database/partitioning/list/convert_table.rb b/lib/gitlab/database/partitioning/list/convert_table.rb
new file mode 100644
index 00000000000..d40ddc7a4d8
--- /dev/null
+++ b/lib/gitlab/database/partitioning/list/convert_table.rb
@@ -0,0 +1,313 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Partitioning
+ module List
+ class ConvertTable
+ UnableToPartition = Class.new(StandardError)
+
+ SQL_STATEMENT_SEPARATOR = ";\n\n"
+
+ PARTITIONING_CONSTRAINT_NAME = 'partitioning_constraint'
+
+ attr_reader :partitioning_column, :table_name, :parent_table_name, :zero_partition_value,
+ :locking_configuration
+
+ def initialize(
+ migration_context:, table_name:, parent_table_name:, partitioning_column:,
+ zero_partition_value:, lock_tables: [])
+
+ @migration_context = migration_context
+ @connection = migration_context.connection
+ @table_name = table_name
+ @parent_table_name = parent_table_name
+ @partitioning_column = partitioning_column
+ @zero_partition_value = zero_partition_value
+ @locking_configuration = LockingConfiguration.new(migration_context, table_locking_order: lock_tables)
+ end
+
+ def prepare_for_partitioning(async: false)
+ assert_existing_constraints_partitionable
+
+ add_partitioning_check_constraint(async: async)
+ end
+
+ def revert_preparation_for_partitioning
+ migration_context.remove_check_constraint(table_name, partitioning_constraint.name)
+ end
+
+ def partition
+ assert_existing_constraints_partitionable
+ assert_partitioning_constraint_present
+
+ create_parent_table
+ attach_foreign_keys_to_parent
+
+ locking_configuration.with_lock_retries do
+ redefine_loose_foreign_key_triggers do
+ migration_context.execute(sql_to_convert_table)
+ end
+ end
+ end
+
+ def revert_partitioning
+ migration_context.with_lock_retries(raise_on_exhaustion: true) do
+ migration_context.execute(<<~SQL)
+ ALTER TABLE #{connection.quote_table_name(parent_table_name)}
+ DETACH PARTITION #{connection.quote_table_name(table_name)};
+ SQL
+
+ alter_sequences_sql = alter_sequence_statements(old_table: parent_table_name, new_table: table_name)
+ .join(SQL_STATEMENT_SEPARATOR)
+
+ migration_context.execute(alter_sequences_sql)
+
+ # This takes locks for all the foreign keys that the parent table had.
+ # However, those same locks were taken while detaching the partition, and we can't avoid that.
+ # If we dropped the foreign key before detaching the partition to avoid this locking,
+ # the drop would cascade to the child partitions and drop their foreign keys as well
+ migration_context.drop_table(parent_table_name)
+ end
+
+ add_partitioning_check_constraint
+ end
+
+ private
+
+ attr_reader :connection, :migration_context
+
+ delegate :quote_table_name, :quote_column_name, :current_schema, to: :connection
+
+ def sql_to_convert_table
+ # The critical statement here is the attach_table_to_parent statement.
+ # The following statements could be run in a later transaction,
+ # but they acquire the same locks so it's much faster to include them
+ # here.
+ [
+ locking_configuration.locking_statement_for(tables_that_will_lock_during_partitioning),
+ attach_table_to_parent_statement,
+ alter_sequence_statements(old_table: table_name, new_table: parent_table_name),
+ remove_constraint_statement
+ ].flatten.join(SQL_STATEMENT_SEPARATOR)
+ end
+
+ def table_identifier
+ "#{current_schema}.#{table_name}"
+ end
+
+ def assert_existing_constraints_partitionable
+ violating_constraints = Gitlab::Database::PostgresConstraint
+ .by_table_identifier(table_identifier)
+ .primary_or_unique_constraints
+ .not_including_column(partitioning_column)
+ .to_a
+
+ return if violating_constraints.empty?
+
+ violation_messages = violating_constraints.map { |c| "#{c.name} on (#{c.column_names.join(', ')})" }
+
+ raise UnableToPartition, <<~MSG
+ Constraints on #{table_name} are incompatible with partitioning on #{partitioning_column}
+
+ All primary key and unique constraints must include the partitioning column.
+ Violations:
+ #{violation_messages.join("\n")}
+ MSG
+ end
+
+ def partitioning_constraint
+ constraints_on_column = Gitlab::Database::PostgresConstraint
+ .by_table_identifier(table_identifier)
+ .check_constraints
+ .including_column(partitioning_column)
+
+ check_body = "CHECK ((#{partitioning_column} = #{zero_partition_value}))"
+
+ constraints_on_column.find do |constraint|
+ constraint.definition.start_with?(check_body)
+ end
+ end
+
+ def assert_partitioning_constraint_present
+ return if partitioning_constraint&.constraint_valid?
+
+ raise UnableToPartition, <<~MSG
+ Table #{table_name} is not ready for partitioning.
+ Before partitioning, a check constraint must enforce that (#{partitioning_column} = #{zero_partition_value})
+ MSG
+ end
+
+ def add_partitioning_check_constraint(async: false)
+ return validate_partitioning_constraint_synchronously if partitioning_constraint.present?
+
+ check_body = "#{partitioning_column} = #{connection.quote(zero_partition_value)}"
+ # Any constraint name would work. The constraint is found based on its definition before partitioning
+ migration_context.add_check_constraint(
+ table_name, check_body, PARTITIONING_CONSTRAINT_NAME,
+ validate: !async
+ )
+
+ if async
+ migration_context.prepare_async_check_constraint_validation(
+ table_name, name: PARTITIONING_CONSTRAINT_NAME
+ )
+ end
+
+ return if partitioning_constraint.present?
+
+ raise UnableToPartition, <<~MSG
+ Error adding partitioning constraint `#{PARTITIONING_CONSTRAINT_NAME}` for `#{table_name}`
+ MSG
+ end
+
+ def validate_partitioning_constraint_synchronously
+ if partitioning_constraint.constraint_valid?
+ return Gitlab::AppLogger.info <<~MSG
+ Nothing to do, the partitioning constraint exists and is valid for `#{table_name}`
+ MSG
+ end
+
+ # Async validations are executed only on .com, we need to validate synchronously for self-managed
+ migration_context.validate_check_constraint(table_name, partitioning_constraint.name)
+ return if partitioning_constraint.constraint_valid?
+
+ raise UnableToPartition, <<~MSG
+ Error validating partitioning constraint `#{partitioning_constraint.name}` for `#{table_name}`
+ MSG
+ end
+
+ def create_parent_table
+ migration_context.execute(<<~SQL)
+ CREATE TABLE IF NOT EXISTS #{quote_table_name(parent_table_name)} (
+ LIKE #{quote_table_name(table_name)} INCLUDING ALL
+ ) PARTITION BY LIST(#{quote_column_name(partitioning_column)})
+ SQL
+ end
+
+ def attach_foreign_keys_to_parent
+ migration_context.foreign_keys(table_name).each do |fk|
+ # At this point no other connection knows about the parent table.
+ # Thus the only contended lock in the following transaction is on fk.to_table.
+ # So a deadlock is impossible.
+
+ # If we're rerunning this migration after a failure to acquire a lock, the foreign key might already exist
+ # Don't try to recreate it in that case
+ if migration_context.foreign_keys(parent_table_name)
+ .any? { |p_fk| p_fk.options[:name] == fk.options[:name] }
+ next
+ end
+
+ migration_context.with_lock_retries(raise_on_exhaustion: true) do
+ migration_context.add_foreign_key(parent_table_name, fk.to_table, **fk.options)
+ end
+ end
+ end
+
+ def attach_table_to_parent_statement
+ <<~SQL
+ ALTER TABLE #{quote_table_name(parent_table_name)}
+ ATTACH PARTITION #{table_name}
+ FOR VALUES IN (#{zero_partition_value})
+ SQL
+ end
+
+ def alter_sequence_statements(old_table:, new_table:)
+ sequences_owned_by(old_table).map do |seq_info|
+ seq_name, column_name = seq_info.values_at(:name, :column_name)
+
+ statement_parts = []
+
+ # If a different user owns the old table, the conversion process will fail to reassign the sequence
+ # ownership to the new parent table (as it will be owned by the current user).
+ # Force the old table to be owned by the current user in that case.
+ unless current_user_owns_table?(old_table)
+ statement_parts << set_current_user_owns_table_statement(old_table)
+ end
+
+ statement_parts << <<~SQL.chomp
+ ALTER SEQUENCE #{quote_table_name(seq_name)} OWNED BY #{quote_table_name(new_table)}.#{quote_column_name(column_name)}
+ SQL
+
+ statement_parts.join(SQL_STATEMENT_SEPARATOR)
+ end
+ end
+
+ def remove_constraint_statement
+ <<~SQL
+ ALTER TABLE #{quote_table_name(parent_table_name)}
+ DROP CONSTRAINT #{quote_table_name(partitioning_constraint.name)}
+ SQL
+ end
+
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/373887
+ def sequences_owned_by(table_name)
+ sequence_data = connection.exec_query(<<~SQL, nil, [table_name])
+ SELECT seq_pg_class.relname AS seq_name,
+ dep_pg_class.relname AS table_name,
+ pg_attribute.attname AS col_name
+ FROM pg_class seq_pg_class
+ INNER JOIN pg_depend ON seq_pg_class.oid = pg_depend.objid
+ INNER JOIN pg_class dep_pg_class ON pg_depend.refobjid = dep_pg_class.oid
+ INNER JOIN pg_attribute ON dep_pg_class.oid = pg_attribute.attrelid
+ AND pg_depend.refobjsubid = pg_attribute.attnum
+ WHERE seq_pg_class.relkind = 'S'
+ AND dep_pg_class.relname = $1
+ SQL
+
+ sequence_data.map do |seq_info|
+ name, column_name = seq_info.values_at('seq_name', 'col_name')
+ { name: name, column_name: column_name }
+ end
+ end
+
+ def table_owner(table_name)
+ connection.select_value(<<~SQL, nil, [table_name])
+ SELECT tableowner FROM pg_tables WHERE tablename = $1
+ SQL
+ end
+
+ def current_user_owns_table?(table_name)
+ current_user = connection.select_value('select current_user')
+ table_owner(table_name) == current_user
+ end
+
+ def set_current_user_owns_table_statement(table_name)
+ <<~SQL.chomp
+ ALTER TABLE #{connection.quote_table_name(table_name)} OWNER TO CURRENT_USER
+ SQL
+ end
+
+ def table_name_for_identifier(table_identifier)
+ /^\w+\.(\w+)*$/.match(table_identifier)[1]
+ end
+
+ def redefine_loose_foreign_key_triggers
+ if migration_context.has_loose_foreign_key?(table_name)
+ migration_context.untrack_record_deletions(table_name)
+
+ yield if block_given?
+
+ migration_context.track_record_deletions(parent_table_name)
+ migration_context.track_record_deletions(table_name)
+ elsif block_given?
+ yield
+ end
+ end
+
+ def tables_that_will_lock_during_partitioning
+ # Locks are taken against the table + all tables that reference it by foreign key
+ # postgres_foreign_keys.referenced_table_name gives the table name that we need here directly, but that
+ # column did not exist yet during the migration 20221021145820_create_routing_table_for_builds_metadata_v2
+ # To ensure compatibility with that migration if it is run with this code, use referenced_table_identifier
+ # here.
+ referenced_tables = Gitlab::Database::PostgresForeignKey
+ .by_constrained_table_identifier(table_identifier)
+ .map { |fk| table_name_for_identifier(fk.referenced_table_identifier) }
+ referenced_tables + [table_name]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/partitioning/list/locking_configuration.rb b/lib/gitlab/database/partitioning/list/locking_configuration.rb
new file mode 100644
index 00000000000..02d20383de4
--- /dev/null
+++ b/lib/gitlab/database/partitioning/list/locking_configuration.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Partitioning
+ module List
+ class LockingConfiguration
+ attr_reader :migration_context
+
+ def initialize(migration_context, table_locking_order:)
+ @migration_context = migration_context
+ @table_locking_order = table_locking_order.map(&:to_s)
+ assert_table_names_unqualified!(@table_locking_order)
+ end
+
+ def locking_statement_for(tables)
+ tables_to_lock = locking_order_for(tables)
+
+ return if tables_to_lock.empty?
+
+ table_names = tables_to_lock.map { |name| migration_context.quote_table_name(name) }.join(', ')
+
+ <<~SQL
+ LOCK #{table_names} IN ACCESS EXCLUSIVE MODE
+ SQL
+ end
+
+ # Sorts and subsets `tables` to the tables that were explicitly requested for locking
+ # in the order that locking was requested.
+ def locking_order_for(tables)
+ tables = Array.wrap(tables)
+ assert_table_names_unqualified!(tables)
+
+ @table_locking_order.intersection(tables.map(&:to_s))
+ end
+
+ def lock_timing_configuration
+ iterations = Gitlab::Database::WithLockRetries::DEFAULT_TIMING_CONFIGURATION
+ aggressive_iterations = Array.new(5) { [10.seconds, 1.minute] }
+
+ iterations + aggressive_iterations
+ end
+
+ def with_lock_retries(&block)
+ lock_args = {
+ raise_on_exhaustion: true,
+ timing_configuration: lock_timing_configuration
+ }
+
+ migration_context.with_lock_retries(**lock_args, &block)
+ end
+
+ private
+
+ def assert_table_names_unqualified!(table_names)
+ tables = Array.wrap(table_names).select { |name| name.to_s.include?('.') }
+ return if tables.empty?
+
+ raise ArgumentError, "All table names must be unqualified, but #{tables.join(', ')} include schema"
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
index 8b49cb00bdf..61e95dbe1a4 100644
--- a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
+++ b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
@@ -258,7 +258,7 @@ module Gitlab
def prepare_constraint_for_list_partitioning(table_name:, partitioning_column:, parent_table_name:, initial_partitioning_value:, async: false)
validate_not_in_transaction!(:prepare_constraint_for_list_partitioning)
- Gitlab::Database::Partitioning::ConvertTableToFirstListPartition
+ Gitlab::Database::Partitioning::List::ConvertTable
.new(migration_context: self,
table_name: table_name,
parent_table_name: parent_table_name,
@@ -270,7 +270,7 @@ module Gitlab
def revert_preparing_constraint_for_list_partitioning(table_name:, partitioning_column:, parent_table_name:, initial_partitioning_value:)
validate_not_in_transaction!(:revert_preparing_constraint_for_list_partitioning)
- Gitlab::Database::Partitioning::ConvertTableToFirstListPartition
+ Gitlab::Database::Partitioning::List::ConvertTable
.new(migration_context: self,
table_name: table_name,
parent_table_name: parent_table_name,
@@ -282,7 +282,7 @@ module Gitlab
def convert_table_to_first_list_partition(table_name:, partitioning_column:, parent_table_name:, initial_partitioning_value:, lock_tables: [])
validate_not_in_transaction!(:convert_table_to_first_list_partition)
- Gitlab::Database::Partitioning::ConvertTableToFirstListPartition
+ Gitlab::Database::Partitioning::List::ConvertTable
.new(migration_context: self,
table_name: table_name,
parent_table_name: parent_table_name,
@@ -295,7 +295,7 @@ module Gitlab
def revert_converting_table_to_first_list_partition(table_name:, partitioning_column:, parent_table_name:, initial_partitioning_value:)
validate_not_in_transaction!(:revert_converting_table_to_first_list_partition)
- Gitlab::Database::Partitioning::ConvertTableToFirstListPartition
+ Gitlab::Database::Partitioning::List::ConvertTable
.new(migration_context: self,
table_name: table_name,
parent_table_name: parent_table_name,
diff --git a/lib/gitlab/repository_size_error_message.rb b/lib/gitlab/repository_size_error_message.rb
index f5d82e61187..e7d527dd4ce 100644
--- a/lib/gitlab/repository_size_error_message.rb
+++ b/lib/gitlab/repository_size_error_message.rb
@@ -7,7 +7,8 @@ module Gitlab
delegate :current_size, :limit, :exceeded_size, :additional_repo_storage_available?, to: :@checker
# @param checker [RepositorySizeChecker]
- def initialize(checker)
+ def initialize(checker, message_params = {})
+ @message_params = message_params
@checker = checker
end
@@ -19,13 +20,21 @@ module Gitlab
"This merge request cannot be merged, #{base_message}"
end
+ def push_warning
+ _("##### WARNING ##### You have used %{usage_percentage} of the storage quota for %{namespace_name} " \
+ "(%{current_size} of %{size_limit}). If %{namespace_name} exceeds the storage quota, " \
+ "all projects in the namespace will be locked and actions will be restricted. " \
+ "To manage storage, or purchase additional storage, see %{manage_storage_url}. " \
+ "To learn more about restricted actions, see %{restricted_actions_url}") % push_message_params
+ end
+
def push_error(change_size = 0)
"Your push has been rejected, #{base_message(change_size)}. #{more_info_message}"
end
def new_changes_error
if additional_repo_storage_available?
- "Your push to this repository has been rejected because it would exceed storage limits. Please contact your GitLab administrator for more information."
+ "Your push to this repository has been rejected because it would exceed storage limits. #{more_info_message}"
else
"Your push to this repository would cause it to exceed the size limit of #{formatted(limit)} so it has been rejected. #{more_info_message}"
end
@@ -41,6 +50,19 @@ module Gitlab
private
+ attr_reader :message_params
+
+ def push_message_params
+ {
+ namespace_name: message_params[:namespace_name],
+ manage_storage_url: help_page_url('user/usage_quotas', 'manage-your-storage-usage'),
+ restricted_actions_url: help_page_url('user/read_only_namespaces', 'restricted-actions'),
+ current_size: formatted(current_size),
+ size_limit: formatted(limit),
+ usage_percentage: usage_percentage
+ }
+ end
+
def base_message(change_size = 0)
"because this repository has exceeded its size limit of #{formatted(limit)} by #{formatted(exceeded_size(change_size))}"
end
@@ -48,5 +70,13 @@ module Gitlab
def formatted(number)
number_to_human_size(number, delimiter: ',', precision: 2)
end
+
+ def usage_percentage
+ number_to_percentage(@checker.usage_ratio * 100, precision: 0)
+ end
+
+ def help_page_url(path, anchor = nil)
+ ::Gitlab::Routing.url_helpers.help_page_url(path, anchor: anchor)
+ end
end
end
diff --git a/lib/gitlab/sidekiq_config/worker_router.rb b/lib/gitlab/sidekiq_config/worker_router.rb
index 0670e5521df..6d5ecb64065 100644
--- a/lib/gitlab/sidekiq_config/worker_router.rb
+++ b/lib/gitlab/sidekiq_config/worker_router.rb
@@ -77,6 +77,11 @@ module Gitlab
def parse_routing_rules(routing_rules)
raise InvalidRoutingRuleError, 'The set of routing rule must be an array' unless routing_rules.is_a?(Array)
+ unless routing_rules.last&.first == WorkerMatcher::WILDCARD_MATCH
+ Gitlab::AppLogger.warn "sidekiq.routing_rules config is missing a catch-all `*` entry as the last rule. " \
+ "Consider adding `[['*', 'default']]` at the end of routing_rules."
+ end
+
routing_rules.map do |rule_tuple|
raise InvalidRoutingRuleError, "Routing rule `#{rule_tuple.inspect}` is invalid" unless valid_routing_rule?(rule_tuple)
diff --git a/lib/product_analytics/settings.rb b/lib/product_analytics/settings.rb
index 9e38adf8a13..5d52965f5be 100644
--- a/lib/product_analytics/settings.rb
+++ b/lib/product_analytics/settings.rb
@@ -6,21 +6,32 @@ module ProductAnalytics
%w[product_analytics_data_collector_host product_analytics_clickhouse_connection_string] +
%w[cube_api_base_url cube_api_key]).freeze
- class << self
- def enabled?
- ::Gitlab::CurrentSettings.product_analytics_enabled? && configured?
+ def initialize(project:)
+ @project = project
+ end
+
+ def enabled?
+ ::Gitlab::CurrentSettings.product_analytics_enabled? && configured?
+ end
+
+ # rubocop:disable GitlabSecurity/PublicSend
+ def configured?
+ CONFIG_KEYS.all? do |key|
+ @project.project_setting.public_send(key).present? ||
+ ::Gitlab::CurrentSettings.public_send(key).present?
end
+ end
- def configured?
- CONFIG_KEYS.all? do |key|
- ::Gitlab::CurrentSettings.public_send(key)&.present? # rubocop:disable GitlabSecurity/PublicSend
- end
+ CONFIG_KEYS.each do |key|
+ define_method key.to_sym do
+ @project.project_setting.public_send(key).presence || ::Gitlab::CurrentSettings.public_send(key)
end
+ end
+ # rubocop:enable GitlabSecurity/PublicSend
- CONFIG_KEYS.each do |key|
- define_method key.to_sym do
- ::Gitlab::CurrentSettings.public_send(key) # rubocop:disable GitlabSecurity/PublicSend
- end
+ class << self
+ def for_project(project)
+ ProductAnalytics::Settings.new(project: project)
end
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 603f8e8a275..d6c8470ea43 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -75,6 +75,9 @@ msgstr ""
msgid "##### ERROR ##### You have used %{usage_percentage} of the storage quota for %{namespace_name} (%{current_size} of %{size_limit}). %{namespace_name} is now read-only. Projects under this namespace are locked and actions will be restricted. To manage storage, or purchase additional storage, see %{manage_storage_url}. To learn more about restricted actions, see %{restricted_actions_url}"
msgstr ""
+msgid "##### WARNING ##### You have used %{usage_percentage} of the storage quota for %{namespace_name} (%{current_size} of %{size_limit}). If %{namespace_name} exceeds the storage quota, all projects in the namespace will be locked and actions will be restricted. To manage storage, or purchase additional storage, see %{manage_storage_url}. To learn more about restricted actions, see %{restricted_actions_url}"
+msgstr ""
+
msgid "#%{issueIid} (closed)"
msgstr ""
@@ -22088,9 +22091,6 @@ msgstr ""
msgid "Identity|Provider ID"
msgstr ""
-msgid "If %{namespace_name} exceeds the storage quota, all projects in the namespace will be locked and actions will be restricted. To manage storage, or purchase additional storage, see %{manage_storage_url}. To learn more about restricted actions, see %{restricted_actions_url}"
-msgstr ""
-
msgid "If any indexed field exceeds this limit, it is truncated to this number of characters. The rest of the content is neither indexed nor searchable. This does not apply to repository and wiki indexing. For unlimited characters, set this to 0."
msgstr ""
@@ -25281,6 +25281,9 @@ msgstr ""
msgid "Job|Manual"
msgstr ""
+msgid "Job|No job log"
+msgstr ""
+
msgid "Job|No search results found"
msgstr ""
@@ -28786,27 +28789,21 @@ msgid_plural "NamespaceStorageSize|%{namespace_name} contains %{locked_project_c
msgstr[0] ""
msgstr[1] ""
-msgid "NamespaceStorageSize|%{namespace_name} is now read-only. Projects under this namespace are locked and actions are restricted."
+msgid "NamespaceStorageSize|%{namespace_name} is now read-only. Projects under this namespace are locked and actions are restricted. %{actions_restricted_link}"
msgstr ""
-msgid "NamespaceStorageSize|If %{namespace_name} exceeds the storage quota, all projects in the namespace will be locked and actions will be restricted."
+msgid "NamespaceStorageSize|If %{namespace_name} exceeds the storage quota, all projects in the namespace will be locked and actions will be restricted. %{actions_restricted_link}"
msgstr ""
msgid "NamespaceStorageSize|If you reach 100%% storage capacity, you will not be able to: %{repository_limits_description}"
msgstr ""
-msgid "NamespaceStorageSize|Manage your storage usage or, if you are a namespace Owner, purchase additional storage."
+msgid "NamespaceStorageSize|Manage your storage usage or, if you are a namespace Owner, purchase additional storage. %{learn_more_link}."
msgstr ""
msgid "NamespaceStorageSize|Please purchase additional storage to unlock your projects over the free %{free_size_limit} project limit. You can't %{repository_limits_description}"
msgstr ""
-msgid "NamespaceStorageSize|Which actions are restricted?"
-msgstr ""
-
-msgid "NamespaceStorageSize|Which actions become restricted?"
-msgstr ""
-
msgid "NamespaceStorageSize|You have consumed all of your additional storage, please purchase more to unlock your projects over the free %{free_size_limit} limit. You can't %{repository_limits_description}"
msgstr ""
@@ -28816,7 +28813,7 @@ msgstr ""
msgid "NamespaceStorageSize|You have used %{usage_in_percent} of the storage quota for %{namespace_name} (%{used_storage} of %{storage_limit})"
msgstr ""
-msgid "NamespaceStorageSize|push to your repository, create pipelines, create issues or add comments. To reduce storage capacity, delete unused repositories, artifacts, wikis, issues, and pipelines."
+msgid "NamespaceStorageSize|push to your repository, create pipelines, create issues or add comments. To reduce storage capacity, delete unused repositories, artifacts, wikis, issues, and pipelines. %{learn_more_link}."
msgstr ""
msgid "NamespaceStorage|%{name_with_link} is now read-only. Projects under this namespace are locked and actions are restricted."
@@ -29445,9 +29442,6 @@ msgstr ""
msgid "No iteration"
msgstr ""
-msgid "No job log"
-msgstr ""
-
msgid "No label"
msgstr ""
@@ -34811,9 +34805,18 @@ msgstr ""
msgid "ProjectSettings|Choose your merge method, options, checks, and squash options."
msgstr ""
+msgid "ProjectSettings|Clickhouse URL"
+msgstr ""
+
+msgid "ProjectSettings|Collector host"
+msgstr ""
+
msgid "ProjectSettings|Combine git tags with release notes, release evidence, and assets to create a release."
msgstr ""
+msgid "ProjectSettings|Configure product analytics to track events within your project applications."
+msgstr ""
+
msgid "ProjectSettings|Configure your infrastructure."
msgstr ""
@@ -34823,6 +34826,12 @@ msgstr ""
msgid "ProjectSettings|Container registry"
msgstr ""
+msgid "ProjectSettings|Cube API URL"
+msgstr ""
+
+msgid "ProjectSettings|Cube API key"
+msgstr ""
+
msgid "ProjectSettings|Customize this project's badges."
msgstr ""
@@ -34916,6 +34925,18 @@ msgstr ""
msgid "ProjectSettings|Issues"
msgstr ""
+msgid "ProjectSettings|Jitsu administrator email"
+msgstr ""
+
+msgid "ProjectSettings|Jitsu administrator password"
+msgstr ""
+
+msgid "ProjectSettings|Jitsu host"
+msgstr ""
+
+msgid "ProjectSettings|Jitsu project ID"
+msgstr ""
+
msgid "ProjectSettings|LFS objects from this repository are available to forks. %{linkStart}How do I remove them?%{linkEnd}"
msgstr ""
@@ -35087,6 +35108,12 @@ msgstr ""
msgid "ProjectSettings|Target project"
msgstr ""
+msgid "ProjectSettings|The ID of the project in Jitsu. The project contains all analytics instances."
+msgstr ""
+
+msgid "ProjectSettings|The URL of your Cube instance."
+msgstr ""
+
msgid "ProjectSettings|The commit message used when applying merge request suggestions."
msgstr ""
@@ -35102,6 +35129,12 @@ msgstr ""
msgid "ProjectSettings|The default template will be applied on save."
msgstr ""
+msgid "ProjectSettings|The host of your Jitsu instance."
+msgstr ""
+
+msgid "ProjectSettings|The host of your data collector instance."
+msgstr ""
+
msgid "ProjectSettings|These checks must pass before merge requests can be merged."
msgstr ""
@@ -35132,6 +35165,15 @@ msgstr ""
msgid "ProjectSettings|Used for every new merge request."
msgstr ""
+msgid "ProjectSettings|Used to connect Jitsu to the Clickhouse instance."
+msgstr ""
+
+msgid "ProjectSettings|Used to generate short-lived API access tokens."
+msgstr ""
+
+msgid "ProjectSettings|Used to retrieve dashboard data from the Cube instance."
+msgstr ""
+
msgid "ProjectSettings|Users can copy the repository to a new project."
msgstr ""
@@ -48762,9 +48804,6 @@ msgstr ""
msgid "View job currently using resource"
msgstr ""
-msgid "View job log"
-msgstr ""
-
msgid "View jobs"
msgstr ""
@@ -49889,6 +49928,12 @@ msgstr ""
msgid "Which API requests are affected?"
msgstr ""
+msgid "Which actions are restricted?"
+msgstr ""
+
+msgid "Which actions become restricted?"
+msgstr ""
+
msgid "While it's rare to have no vulnerabilities, it can happen. In any event, we ask that you please double check your settings to make sure you've set up your dashboard correctly."
msgstr ""
diff --git a/sidekiq_cluster/cli.rb b/sidekiq_cluster/cli.rb
index 23b05bf2d16..22cddead3e4 100644
--- a/sidekiq_cluster/cli.rb
+++ b/sidekiq_cluster/cli.rb
@@ -28,6 +28,11 @@ module Gitlab
# The signals that should simply be forwarded to the workers.
FORWARD_SIGNALS = %i(TTIN USR1 USR2 HUP).freeze
+ # The default queues that each Sidekiq process always listens to when routing rules are not customized:
+ # - the `default` queue comes from Settings.build_sidekiq_routing_rules in the config initializer
+ # - the `mailers` queue comes from Gitlab::Application.config.action_mailer.deliver_later_queue_name
+ DEFAULT_QUEUES = %w[default mailers].freeze
+
CommandError = Class.new(StandardError)
def initialize(log_output = $stderr)
@@ -93,6 +98,26 @@ module Gitlab
'No queues found, you must select at least one queue'
end
+ begin
+ routing_rules = ::Gitlab.config.sidekiq.routing_rules
+ rescue StandardError
+ routing_rules = []
+ end
+
+ # Routing rules default to [['*', 'default']] when not specified.
+ # This means all jobs go to the 'default' queue, and mailer jobs go to the 'mailers' queue.
+ # See config/initializers/1_settings.rb and Settings.build_sidekiq_routing_rules.
+ #
+ # Even when a queue selector is used, we ensure every Sidekiq process still picks up jobs
+ # from the default and mailers queues.
+ # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1491
+ if routing_rules.empty?
+ queue_groups.each do |queues|
+ queues.concat(DEFAULT_QUEUES)
+ queues.uniq!
+ end
+ end
+
if @list_queues
puts queue_groups.map(&:sort) # rubocop:disable Rails/Output
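
In effect, when no routing rules are configured, every queue group passed to the CLI is padded with the default and mailers queues. A rough sketch of that padding behaviour, mirroring the logic added above (the queue names here are illustrative):

    # Sketch only: replicates the empty-routing-rules padding shown in the hunk above.
    DEFAULT_QUEUES = %w[default mailers].freeze

    queue_groups  = [%w[foo], %w[bar baz]]
    routing_rules = [] # i.e. routing rules were not customized

    if routing_rules.empty?
      queue_groups.each do |queues|
        queues.concat(DEFAULT_QUEUES)
        queues.uniq!
      end
    end

    queue_groups
    # => [["foo", "default", "mailers"], ["bar", "baz", "default", "mailers"]]
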
diff --git a/spec/bin/sidekiq_cluster_spec.rb b/spec/bin/sidekiq_cluster_spec.rb
index eb014c511e3..b36fb82c295 100644
--- a/spec/bin/sidekiq_cluster_spec.rb
+++ b/spec/bin/sidekiq_cluster_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe 'bin/sidekiq-cluster', :aggregate_failures do
context 'when selecting some queues and excluding others' do
where(:args, :included, :excluded) do
%w[--negate cronjob] | '-qdefault,1' | '-qcronjob,1'
- %w[--queue-selector resource_boundary=cpu] | '-qupdate_merge_requests,1' | '-qdefault,1'
+ %w[--queue-selector resource_boundary=cpu] | %w[-qupdate_merge_requests,1 -qdefault,1 -qmailers,1] |
+ '-qauthorized_keys_worker,1'
end
with_them do
@@ -23,8 +24,8 @@ RSpec.describe 'bin/sidekiq-cluster', :aggregate_failures do
expect(status).to be(0)
expect(output).to include('bundle exec sidekiq')
- expect(Shellwords.split(output)).to include(included)
- expect(Shellwords.split(output)).not_to include(excluded)
+ expect(Shellwords.split(output)).to include(*included)
+ expect(Shellwords.split(output)).not_to include(*excluded)
end
end
end
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index 499432c2605..085be1ceac2 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -37,6 +37,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
allow(supervisor).to receive(:supervise)
allow(Prometheus::CleanupMultiprocDirService).to receive(:new).and_return(metrics_cleanup_service)
+
+ stub_config(sidekiq: { routing_rules: [] })
end
around do |example|
@@ -58,7 +60,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
context 'with arguments' do
it 'starts the Sidekiq workers' do
expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([['foo']], default_options)
+ .with([['foo'] + described_class::DEFAULT_QUEUES], default_options)
.and_return([])
cli.run(%w(foo))
@@ -92,7 +94,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['baz'])
expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([['baz']], default_options)
+ .with([['baz'] + described_class::DEFAULT_QUEUES], default_options)
.and_return([])
cli.run(%w(foo -n))
@@ -101,9 +103,10 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
context 'with --max-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a max concurrency' do
+ expected_queues = [%w(foo bar baz), %w(solo)].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([%w(foo bar baz), %w(solo)], default_options.merge(max_concurrency: 2))
+ .with(expected_queues, default_options.merge(max_concurrency: 2))
.and_return([])
cli.run(%w(foo,bar,baz solo -m 2))
@@ -112,9 +115,10 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
context 'with --min-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a min concurrency' do
+ expected_queues = [%w(foo bar baz), %w(solo)].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([%w(foo bar baz), %w(solo)], default_options.merge(min_concurrency: 2))
+ .with(expected_queues, default_options.merge(min_concurrency: 2))
.and_return([])
cli.run(%w(foo,bar,baz solo --min-concurrency 2))
@@ -124,7 +128,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
context 'with --timeout flag' do
it 'when given', 'starts Sidekiq workers with given timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([['foo']], default_options.merge(timeout: 10))
+ .with([['foo'] + described_class::DEFAULT_QUEUES], default_options.merge(timeout: 10))
.and_return([])
cli.run(%w(foo --timeout 10))
@@ -132,7 +136,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'when not given', 'starts Sidekiq workers with default timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([['foo']], default_options.merge(timeout: Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
+ .with([['foo'] + described_class::DEFAULT_QUEUES], default_options.merge(timeout:
+ Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
.and_return([])
cli.run(%w(foo))
@@ -146,8 +151,10 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'prints out a list of queues in alphabetical order' do
expected_queues = [
+ 'default',
'epics:epics_update_epics_dates',
'epics_new_epic_issue',
+ 'mailers',
'new_epic',
'todos_destroyer:todos_destroyer_confidential_epic'
]
@@ -164,7 +171,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar'])
expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([['cronjob', 'cronjob:foo', 'cronjob:bar']], default_options)
+ .with([['cronjob', 'cronjob:foo', 'cronjob:bar'] +
+ described_class::DEFAULT_QUEUES], default_options)
.and_return([])
cli.run(%w(cronjob))
@@ -202,7 +210,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
'CI and SCM queues' => {
query: 'feature_category=continuous_integration|feature_category=source_code_management',
included_queues: %w(pipeline_default:ci_drop_pipeline merge),
- excluded_queues: %w(mailers)
+ excluded_queues: %w()
}
}
end
@@ -213,6 +221,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(opts).to eq(default_options)
expect(queues.first).to include(*included_queues)
expect(queues.first).not_to include(*excluded_queues)
+ expect(queues.first).to include(*described_class::DEFAULT_QUEUES)
[]
end
@@ -225,6 +234,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(opts).to eq(default_options)
expect(queues.first).not_to include(*included_queues)
expect(queues.first).to include(*excluded_queues)
+ expect(queues.first).to include(*described_class::DEFAULT_QUEUES)
[]
end
@@ -237,13 +247,15 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expected_workers =
if Gitlab.ee?
[
- %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident status_page_publish],
- %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export]
+ %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident status_page_publish] + described_class::DEFAULT_QUEUES,
+ %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export] +
+ described_class::DEFAULT_QUEUES
]
else
[
- %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident],
- %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import]
+ %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident] + described_class::DEFAULT_QUEUES,
+ %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import] +
+ described_class::DEFAULT_QUEUES
]
end
@@ -281,6 +293,40 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
.to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError)
end
end
+
+ context "with routing rules specified" do
+ before do
+ stub_config(sidekiq: { routing_rules: [['resource_boundary=cpu', 'foo']] })
+ end
+
+ it "starts Sidekiq workers only for given queues without any additional DEFAULT_QUEUES" do
+ expect(Gitlab::SidekiqCluster).to receive(:start)
+ .with([['foo']], default_options)
+ .and_return([])
+
+ cli.run(%w(foo))
+ end
+ end
+
+ context "with sidekiq settings not specified" do
+ before do
+ stub_config(sidekiq: nil)
+ end
+
+ it "does not throw an error" do
+ allow(Gitlab::SidekiqCluster).to receive(:start).and_return([])
+
+ expect { cli.run(%w(foo)) }.not_to raise_error
+ end
+
+ it "starts Sidekiq workers with given queues, and additional default and mailers queues (DEFAULT_QUEUES)" do
+ expect(Gitlab::SidekiqCluster).to receive(:start)
+ .with([['foo'] + described_class::DEFAULT_QUEUES], default_options)
+ .and_return([])
+
+ cli.run(%w(foo))
+ end
+ end
end
context 'metrics server' do
diff --git a/spec/config/settings_spec.rb b/spec/config/settings_spec.rb
index d6cddc215f5..55e675d5107 100644
--- a/spec/config/settings_spec.rb
+++ b/spec/config/settings_spec.rb
@@ -203,8 +203,8 @@ RSpec.describe Settings, feature_category: :system_access do
using RSpec::Parameterized::TableSyntax
where(:input_rules, :result) do
- nil | [['*', nil]]
- [] | [['*', nil]]
+ nil | [['*', 'default']]
+ [] | [['*', 'default']]
[['name=foobar', 'foobar']] | [['name=foobar', 'foobar']]
end
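
The updated expectations imply that Settings.build_sidekiq_routing_rules now falls back to a wildcard-to-'default' rule when no rules are configured. A minimal sketch of that behaviour (a simplification, not the actual implementation in config/settings.rb):

    # Hypothetical simplification of Settings.build_sidekiq_routing_rules.
    def build_sidekiq_routing_rules(rules)
      return [['*', 'default']] if rules.blank?

      rules
    end

    build_sidekiq_routing_rules(nil)                         # => [["*", "default"]]
    build_sidekiq_routing_rules([])                          # => [["*", "default"]]
    build_sidekiq_routing_rules([['name=foobar', 'foobar']]) # => [["name=foobar", "foobar"]]
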
diff --git a/spec/features/issues/filtered_search/dropdown_hint_spec.rb b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
index 39034a40b1f..1cd8326c5fe 100644
--- a/spec/features/issues/filtered_search/dropdown_hint_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
@@ -52,8 +52,8 @@ RSpec.describe 'Dropdown hint', :js, feature_category: :team_planning do
click_filtered_search_bar
send_keys 'as'
- # Expect Assignee and Release
- expect_suggestion_count 2
+ # Expect Assignee, Release, Search for this text
+ expect_suggestion_count 3
end
end
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index f67d5c40efd..a65befc3115 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe 'Filter issues', :js, feature_category: :team_planning do
it 'filters by all available tokens' do
search_term = 'issue'
select_tokens 'Assignee', '=', user.username, 'Author', '=', user.username, 'Label', '=', caps_sensitive_label.title, 'Milestone', '=', milestone.title
- send_keys search_term, :enter
+ send_keys search_term, :enter, :enter
expect_assignee_token(user.name)
expect_author_token(user.name)
@@ -261,7 +261,7 @@ RSpec.describe 'Filter issues', :js, feature_category: :team_planning do
it 'filters issues by searched label, label2, author, assignee, milestone and text' do
search_term = 'bug'
select_tokens 'Label', '=', bug_label.title, 'Label', '=', caps_sensitive_label.title, 'Author', '=', user.username, 'Assignee', '=', user.username, 'Milestone', '=', milestone.title
- send_keys search_term, :enter
+ send_keys search_term, :enter, :enter
expect_label_token(bug_label.title)
expect_label_token(caps_sensitive_label.title)
@@ -275,7 +275,7 @@ RSpec.describe 'Filter issues', :js, feature_category: :team_planning do
it 'filters issues by searched label, label2, author, assignee, not included in a milestone' do
search_term = 'bug'
select_tokens 'Label', '=', bug_label.title, 'Label', '=', caps_sensitive_label.title, 'Author', '=', user.username, 'Assignee', '=', user.username, 'Milestone', '!=', milestone.title
- send_keys search_term, :enter
+ send_keys search_term, :enter, :enter
expect_label_token(bug_label.title)
expect_label_token(caps_sensitive_label.title)
@@ -488,13 +488,13 @@ RSpec.describe 'Filter issues', :js, feature_category: :team_planning do
context 'searched text with other filters' do
it 'filters issues by searched text, author, text, assignee, text, label1, text, label2, text, milestone and text' do
click_filtered_search_bar
- send_keys 'bug '
+ send_keys 'bug', :enter
select_tokens 'Author', '=', user.username
- send_keys 'report '
+ send_keys 'report', :enter
select_tokens 'Label', '=', bug_label.title
select_tokens 'Label', '=', caps_sensitive_label.title
select_tokens 'Milestone', '=', milestone.title
- send_keys 'foo', :enter
+ send_keys 'foo', :enter, :enter
expect_issues_list_count(1)
expect_search_term('bug report foo')
diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb
index 2d9c73f2756..0efa2f49e36 100644
--- a/spec/features/issues/filtered_search/recent_searches_spec.rb
+++ b/spec/features/issues/filtered_search/recent_searches_spec.rb
@@ -100,7 +100,7 @@ RSpec.describe 'Recent searches', :js, feature_category: :team_planning do
def submit_then_clear_search(search)
click_filtered_search_bar
- send_keys(search, :enter)
+ send_keys(search, :enter, :enter)
click_button 'Clear'
end
end
diff --git a/spec/features/issues/filtered_search/search_bar_spec.rb b/spec/features/issues/filtered_search/search_bar_spec.rb
index c975df2a531..35c099b29aa 100644
--- a/spec/features/issues/filtered_search/search_bar_spec.rb
+++ b/spec/features/issues/filtered_search/search_bar_spec.rb
@@ -70,7 +70,8 @@ RSpec.describe 'Search bar', :js, feature_category: :team_planning do
original_size = get_suggestion_count
send_keys 'autho'
- expect_suggestion_count 1
+ # Expect Author, Search for this text
+ expect_suggestion_count 2
click_button 'Clear'
click_filtered_search_bar
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index f25925ed33d..3031b20eb7c 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -79,10 +79,10 @@ RSpec.describe 'Visual tokens', :js, feature_category: :team_planning do
describe 'editing a search term while editing another filter token' do
before do
click_filtered_search_bar
- send_keys 'foo '
+ send_keys 'foo', :enter
select_tokens 'Assignee', '='
click_token_segment 'foo'
- send_keys ' '
+ send_keys :enter
end
it 'opens author dropdown' do
@@ -98,44 +98,6 @@ RSpec.describe 'Visual tokens', :js, feature_category: :team_planning do
end
end
- describe 'add new token after editing existing token' do
- before do
- select_tokens 'Assignee', '=', user.username, 'Label', '=', 'None'
- click_token_segment(user.name)
- send_keys ' '
- end
-
- describe 'opens dropdowns' do
- it 'opens hint dropdown' do
- expect_visible_suggestions_list
- end
-
- it 'opens token dropdown' do
- click_on 'Author'
-
- expect_visible_suggestions_list
- end
- end
-
- describe 'visual tokens' do
- it 'creates visual token' do
- click_on 'Author'
- click_on '= is'
- click_on 'The Rock'
-
- expect_author_token 'The Rock'
- end
- end
-
- it 'does not tokenize incomplete token' do
- click_on 'Author'
- find('.js-navbar').click
-
- expect_empty_search_term
- expect_token_segment 'Assignee'
- end
- end
-
describe 'search using incomplete visual tokens' do
before do
select_tokens 'Author', '=', user.username, 'Assignee', '=', 'None'
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 8820d29ced4..d65eea3671c 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -337,6 +337,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
expect { click_button 'Register' }.not_to change { User.count }
expect(page).to have_content(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
+ expect(page).to have_content("Minimum length is #{Gitlab::CurrentSettings.minimum_password_length} characters")
end
end
diff --git a/spec/frontend/__helpers__/init_vue_mr_page_helper.js b/spec/frontend/__helpers__/init_vue_mr_page_helper.js
index d01affdaeac..3dccbd9fbef 100644
--- a/spec/frontend/__helpers__/init_vue_mr_page_helper.js
+++ b/spec/frontend/__helpers__/init_vue_mr_page_helper.js
@@ -6,9 +6,17 @@ import { getDiffFileMock } from '../diffs/mock_data/diff_file';
import { userDataMock, notesDataMock, noteableDataMock } from '../notes/mock_data';
export default function initVueMRPage() {
+ const contentWrapperEl = document.createElement('div');
+ contentWrapperEl.className = 'content-wrapper';
+ document.body.appendChild(contentWrapperEl);
+
+ const containerEl = document.createElement('div');
+ containerEl.className = 'container-fluid';
+ contentWrapperEl.appendChild(containerEl);
+
const mrTestEl = document.createElement('div');
mrTestEl.className = 'js-merge-request-test';
- document.body.appendChild(mrTestEl);
+ containerEl.appendChild(mrTestEl);
const diffsAppEndpoint = '/diffs/app/endpoint';
const diffsAppProjectPath = 'testproject';
diff --git a/spec/frontend/content_editor/extensions/attachment_spec.js b/spec/frontend/content_editor/extensions/attachment_spec.js
index 24b75ba6805..3c699b05b0f 100644
--- a/spec/frontend/content_editor/extensions/attachment_spec.js
+++ b/spec/frontend/content_editor/extensions/attachment_spec.js
@@ -14,6 +14,7 @@ import eventHubFactory from '~/helpers/event_hub_factory';
import { createTestEditor, createDocBuilder } from '../test_utils';
import {
PROJECT_WIKI_ATTACHMENT_IMAGE_HTML,
+ PROJECT_WIKI_ATTACHMENT_IMAGE_SVG_HTML,
PROJECT_WIKI_ATTACHMENT_AUDIO_HTML,
PROJECT_WIKI_ATTACHMENT_VIDEO_HTML,
PROJECT_WIKI_ATTACHMENT_LINK_HTML,
@@ -36,6 +37,7 @@ describe('content_editor/extensions/attachment', () => {
const uploadsPath = '/uploads/';
const imageFile = new File(['foo'], 'test-file.png', { type: 'image/png' });
+ const imageFileSvg = new File(['foo'], 'test-file.svg', { type: 'image/svg+xml' });
const audioFile = new File(['foo'], 'test-file.mp3', { type: 'audio/mpeg' });
const videoFile = new File(['foo'], 'test-file.mp4', { type: 'video/mp4' });
const drawioDiagramFile = new File(['foo'], 'test-file.drawio.svg', { type: 'image/svg+xml' });
@@ -120,7 +122,8 @@ describe('content_editor/extensions/attachment', () => {
describe.each`
nodeType | mimeType | html | file | mediaType
- ${'image'} | ${'image/png'} | ${PROJECT_WIKI_ATTACHMENT_IMAGE_HTML} | ${imageFile} | ${(attrs) => image(attrs)}
+ ${'image (png)'} | ${'image/png'} | ${PROJECT_WIKI_ATTACHMENT_IMAGE_HTML} | ${imageFile} | ${(attrs) => image(attrs)}
+ ${'image (svg)'} | ${'image/svg+xml'} | ${PROJECT_WIKI_ATTACHMENT_IMAGE_SVG_HTML} | ${imageFileSvg} | ${(attrs) => image(attrs)}
${'audio'} | ${'audio/mpeg'} | ${PROJECT_WIKI_ATTACHMENT_AUDIO_HTML} | ${audioFile} | ${(attrs) => audio(attrs)}
${'video'} | ${'video/mp4'} | ${PROJECT_WIKI_ATTACHMENT_VIDEO_HTML} | ${videoFile} | ${(attrs) => video(attrs)}
${'drawioDiagram'} | ${'image/svg+xml'} | ${PROJECT_WIKI_ATTACHMENT_DRAWIO_DIAGRAM_HTML} | ${drawioDiagramFile} | ${(attrs) => drawioDiagram(attrs)}
diff --git a/spec/frontend/content_editor/test_constants.js b/spec/frontend/content_editor/test_constants.js
index bd462ecec22..749f1234de0 100644
--- a/spec/frontend/content_editor/test_constants.js
+++ b/spec/frontend/content_editor/test_constants.js
@@ -4,6 +4,12 @@ export const PROJECT_WIKI_ATTACHMENT_IMAGE_HTML = `<p data-sourcepos="1:1-1:27"
</a>
</p>`;
+export const PROJECT_WIKI_ATTACHMENT_IMAGE_SVG_HTML = `<p data-sourcepos="1:1-1:27" dir="auto">
+ <a class="no-attachment-icon" href="/group1/project1/-/wikis/test-file.svg" target="_blank" rel="noopener noreferrer" data-canonical-src="test-file.svg">
+ <img alt="test-file" class="lazy" data-src="/group1/project1/-/wikis/test-file.svg" data-canonical-src="test-file.png">
+ </a>
+</p>`;
+
export const PROJECT_WIKI_ATTACHMENT_VIDEO_HTML = `<p data-sourcepos="1:1-1:132" dir="auto">
<span class="media-container video-container">
<video src="/group1/project1/-/wikis/test-file.mp4" controls="true" data-setup="{}" data-title="test-file" width="400" preload="metadata" data-canonical-src="test-file.mp4">
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index d1b796c5aa6..076fdc4a991 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -661,25 +661,9 @@ describe('CE IssuesListApp component', () => {
});
describe.each`
- event | params
- ${'next-page'} | ${{
- page_after: 'endcursor',
- page_before: undefined,
- first_page_size: 20,
- last_page_size: undefined,
- search: undefined,
- sort: 'created_date',
- state: 'opened',
-}}
- ${'previous-page'} | ${{
- page_after: undefined,
- page_before: 'startcursor',
- first_page_size: undefined,
- last_page_size: 20,
- search: undefined,
- sort: 'created_date',
- state: 'opened',
-}}
+ event | params
+ ${'next-page'} | ${{ page_after: 'endcursor', page_before: undefined, first_page_size: 20, last_page_size: undefined }}
+ ${'previous-page'} | ${{ page_after: undefined, page_before: 'startcursor', first_page_size: undefined, last_page_size: 20 }}
`('when "$event" event is emitted by IssuableList', ({ event, params }) => {
beforeEach(async () => {
wrapper = mountComponent({
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index 0332f68ddb6..bd006a6b3ce 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -216,6 +216,7 @@ export const locationSearchWithSpecialValues = [
].join('&');
export const filteredTokens = [
+ { type: FILTERED_SEARCH_TERM, value: { data: 'find issues', operator: 'undefined' } },
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'homer', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'marge', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'burns', operator: OPERATOR_OR } },
@@ -260,8 +261,6 @@ export const filteredTokens = [
{ type: TOKEN_TYPE_ORGANIZATION, value: { data: '456', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_HEALTH, value: { data: 'atRisk', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_HEALTH, value: { data: 'onTrack', operator: OPERATOR_NOT } },
- { type: FILTERED_SEARCH_TERM, value: { data: 'find' } },
- { type: FILTERED_SEARCH_TERM, value: { data: 'issues' } },
];
export const filteredTokensWithSpecialValues = [
@@ -278,6 +277,7 @@ export const filteredTokensWithSpecialValues = [
];
export const apiParams = {
+ search: 'find issues',
authorUsername: 'homer',
assigneeUsernames: ['bart', 'lisa', '5'],
milestoneTitle: ['season 3', 'season 4'],
@@ -326,6 +326,7 @@ export const apiParamsWithSpecialValues = {
};
export const urlParams = {
+ search: 'find issues',
author_username: 'homer',
'not[author_username]': 'marge',
'or[author_username]': ['burns', 'smithers'],
diff --git a/spec/frontend/issues/list/utils_spec.js b/spec/frontend/issues/list/utils_spec.js
index e4ecdc6c29e..c14dcf96c98 100644
--- a/spec/frontend/issues/list/utils_spec.js
+++ b/spec/frontend/issues/list/utils_spec.js
@@ -21,7 +21,6 @@ import {
getSortOptions,
isSortKey,
} from '~/issues/list/utils';
-import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
import { DEFAULT_PAGE_SIZE } from '~/vue_shared/issuable/list/constants';
describe('getInitialPageParams', () => {
@@ -125,20 +124,6 @@ describe('getFilterTokens', () => {
filteredTokensWithSpecialValues,
);
});
-
- it.each`
- description | argument
- ${'an undefined value'} | ${undefined}
- ${'an irrelevant value'} | ${'?unrecognised=parameter'}
- `('returns an empty filtered search term given $description', ({ argument }) => {
- expect(getFilterTokens(argument)).toEqual([
- {
- id: expect.any(String),
- type: FILTERED_SEARCH_TERM,
- value: { data: '' },
- },
- ]);
- });
});
describe('convertToApiParams', () => {
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index 399c1446f37..8f92ab46714 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -41,6 +41,10 @@ describe('MergeRequestTabs', () => {
gl.mrWidget = {};
});
+ afterEach(() => {
+ document.body.innerHTML = '';
+ });
+
describe('clickTab', () => {
let params;
@@ -269,32 +273,32 @@ describe('MergeRequestTabs', () => {
describe('expandViewContainer', () => {
beforeEach(() => {
- $('body').append(
- '<div class="content-wrapper"><div class="container-fluid container-limited"></div></div>',
- );
- });
-
- afterEach(() => {
- $('.content-wrapper').remove();
+ $('.content-wrapper .container-fluid').addClass('container-limited');
});
- it('removes container-limited from containers', () => {
+ it('removes `container-limited` class from content container', () => {
+ expect($('.content-wrapper .container-limited')).toHaveLength(1);
testContext.class.expandViewContainer();
-
expect($('.content-wrapper .container-limited')).toHaveLength(0);
});
+ });
- it('does not add container-limited when fluid layout is prefered', () => {
- $('.content-wrapper .container-fluid').removeClass('container-limited');
-
- testContext.class.expandViewContainer(false);
+ describe('resetViewContainer', () => {
+ it('does not add `container-limited` CSS class when fluid layout is preferred', () => {
+ testContext.class.resetViewContainer();
expect($('.content-wrapper .container-limited')).toHaveLength(0);
});
- it('does remove container-limited from breadcrumbs', () => {
- $('.container-limited').addClass('breadcrumbs');
- testContext.class.expandViewContainer();
+ it('adds `container-limited` CSS class back when fixed layout is preferred', () => {
+ document.body.innerHTML = '';
+ initMrPage();
+ $('.content-wrapper .container-fluid').addClass('container-limited');
+ // recreate the instance so that `isFixedLayoutPreferred` is re-evaluated
+ testContext.class = new MergeRequestTabs({ stubLocation });
+ $('.content-wrapper .container-fluid').removeClass('container-limited');
+
+ testContext.class.resetViewContainer();
expect($('.content-wrapper .container-limited')).toHaveLength(1);
});
diff --git a/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js b/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
index 169e3666cbd..6a2453704db 100644
--- a/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
+++ b/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
@@ -8,7 +8,7 @@ import { createAlert } from '~/alert';
import FailedJobsApp from '~/pipelines/components/jobs/failed_jobs_app.vue';
import FailedJobsTable from '~/pipelines/components/jobs/failed_jobs_table.vue';
import GetFailedJobsQuery from '~/pipelines/graphql/queries/get_failed_jobs.query.graphql';
-import { mockFailedJobsQueryResponse, mockFailedJobsSummaryData } from '../../mock_data';
+import { mockFailedJobsQueryResponse } from '../../mock_data';
Vue.use(VueApollo);
@@ -27,15 +27,12 @@ describe('Failed Jobs App', () => {
return createMockApollo(requestHandlers);
};
- const createComponent = (resolver, failedJobsSummaryData = mockFailedJobsSummaryData) => {
+ const createComponent = (resolver) => {
wrapper = shallowMount(FailedJobsApp, {
provide: {
fullPath: 'root/ci-project',
pipelineIid: 1,
},
- propsData: {
- failedJobsSummary: failedJobsSummaryData,
- },
apolloProvider: createMockApolloProvider(resolver),
});
};
diff --git a/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js b/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
index 0ac3b6c9074..071977c9481 100644
--- a/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
+++ b/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
@@ -11,8 +11,8 @@ import RetryFailedJobMutation from '~/pipelines/graphql/mutations/retry_failed_j
import {
successRetryMutationResponse,
failedRetryMutationResponse,
- mockPreparedFailedJobsData,
- mockPreparedFailedJobsDataNoPermission,
+ mockFailedJobsData,
+ mockFailedJobsDataNoPermission,
} from '../../mock_data';
jest.mock('~/alert');
@@ -30,13 +30,15 @@ describe('Failed Jobs Table', () => {
const findRetryButton = () => wrapper.findComponent(GlButton);
const findJobLink = () => wrapper.findComponent(GlLink);
const findJobLog = () => wrapper.findByTestId('job-log');
+ const findSummary = (index) => wrapper.findAllByTestId('job-trace-summary').at(index);
+ const findFirstFailureMessage = () => wrapper.findAllByTestId('job-failure-message').at(0);
const createMockApolloProvider = (resolver) => {
const requestHandlers = [[RetryFailedJobMutation, resolver]];
return createMockApollo(requestHandlers);
};
- const createComponent = (resolver, failedJobsData = mockPreparedFailedJobsData) => {
+ const createComponent = (resolver, failedJobsData = mockFailedJobsData) => {
wrapper = mountExtended(FailedJobsTable, {
propsData: {
failedJobs: failedJobsData,
@@ -51,13 +53,31 @@ describe('Failed Jobs Table', () => {
expect(findJobsTable().exists()).toBe(true);
});
+ it('displays failed job summary', () => {
+ createComponent();
+
+ expect(findSummary(0).text()).toBe('Html Summary');
+ });
+
+ it('displays no job log when no trace', () => {
+ createComponent();
+
+ expect(findSummary(1).text()).toBe('No job log');
+ });
+
+ it('displays failure reason', () => {
+ createComponent();
+
+ expect(findFirstFailureMessage().text()).toBe('Job failed');
+ });
+
it('calls the retry failed job mutation correctly', () => {
createComponent(successRetryMutationHandler);
findRetryButton().trigger('click');
expect(successRetryMutationHandler).toHaveBeenCalledWith({
- id: mockPreparedFailedJobsData[0].id,
+ id: mockFailedJobsData[0].id,
});
});
@@ -90,7 +110,7 @@ describe('Failed Jobs Table', () => {
});
it('hides the job log and retry button if a user does not have permission', () => {
- createComponent([[]], mockPreparedFailedJobsDataNoPermission);
+ createComponent([[]], mockFailedJobsDataNoPermission);
expect(findJobLog().exists()).toBe(false);
expect(findRetryButton().exists()).toBe(false);
@@ -106,8 +126,6 @@ describe('Failed Jobs Table', () => {
it('job name links to the correct job', () => {
createComponent();
- expect(findJobLink().attributes('href')).toBe(
- mockPreparedFailedJobsData[0].detailedStatus.detailsPath,
- );
+ expect(findJobLink().attributes('href')).toBe(mockFailedJobsData[0].detailedStatus.detailsPath);
});
});
diff --git a/spec/frontend/pipelines/components/jobs/utils_spec.js b/spec/frontend/pipelines/components/jobs/utils_spec.js
deleted file mode 100644
index 720446cfda3..00000000000
--- a/spec/frontend/pipelines/components/jobs/utils_spec.js
+++ /dev/null
@@ -1,14 +0,0 @@
-import { prepareFailedJobs } from '~/pipelines/components/jobs/utils';
-import {
- mockFailedJobsData,
- mockFailedJobsSummaryData,
- mockPreparedFailedJobsData,
-} from '../../mock_data';
-
-describe('Utils', () => {
- it('prepares failed jobs data correctly', () => {
- expect(prepareFailedJobs(mockFailedJobsData, mockFailedJobsSummaryData)).toEqual(
- mockPreparedFailedJobsData,
- );
- });
-});
diff --git a/spec/frontend/pipelines/components/pipeline_tabs_spec.js b/spec/frontend/pipelines/components/pipeline_tabs_spec.js
index 337af6c1f60..fde13128662 100644
--- a/spec/frontend/pipelines/components/pipeline_tabs_spec.js
+++ b/spec/frontend/pipelines/components/pipeline_tabs_spec.js
@@ -19,7 +19,6 @@ describe('The Pipeline Tabs', () => {
const defaultProvide = {
defaultTabValue: '',
failedJobsCount: 1,
- failedJobsSummary: [],
totalJobCount: 10,
testsCount: 123,
};
diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/pipelines/mock_data.js
index dd7e81f3f22..a4b8d223a0c 100644
--- a/spec/frontend/pipelines/mock_data.js
+++ b/spec/frontend/pipelines/mock_data.js
@@ -1190,6 +1190,10 @@ export const mockFailedJobsQueryResponse = {
readBuild: true,
updateBuild: true,
},
+ trace: {
+ htmlSummary: '<span>Html Summary</span>',
+ },
+ failureMessage: 'Failed',
},
{
__typename: 'CiJob',
@@ -1218,6 +1222,8 @@ export const mockFailedJobsQueryResponse = {
readBuild: true,
updateBuild: true,
},
+ trace: null,
+ failureMessage: 'Failed',
},
],
},
@@ -1226,18 +1232,8 @@ export const mockFailedJobsQueryResponse = {
},
};
-export const mockFailedJobsSummaryData = [
- {
- id: 1848,
- failure: null,
- failure_summary:
- '<span>Pulling docker image node:latest ...<br/></span><span>Using docker image sha256:738d733448be00c72cb6618b7a06a1424806c6d239d8885e92f9b1e8727092b5 for node:latest with digest node@sha256:e5b7b349d517159246070bf14242027a9e220ffa8bd98a67ba1495d969c06c01 ...<br/></span><div class="section-start" data-timestamp="1651175313" data-section="prepare-script" role="button"></div><span class="term-fg-l-cyan term-bold section section-header js-s-prepare-script">Preparing environment</span><span class="section section-header js-s-prepare-script"><br/></span><span class="section line js-s-prepare-script">Running on runner-kvkqh24-project-20-concurrent-0 via 0706719b1b8d...<br/></span><div class="section-end" data-section="prepare-script"></div><div class="section-start" data-timestamp="1651175313" data-section="get-sources" role="button"></div><span class="term-fg-l-cyan term-bold section section-header js-s-get-sources">Getting source from Git repository</span><span class="section section-header js-s-get-sources"><br/></span><span class="term-fg-l-green term-bold section line js-s-get-sources">Fetching changes with git depth set to 50...</span><span class="section line js-s-get-sources"><br/>Reinitialized existing Git repository in /builds/root/ci-project/.git/<br/>fatal: couldn\'t find remote ref refs/heads/test<br/></span><div class="section-end" data-section="get-sources"></div><span class="term-fg-l-red term-bold">ERROR: Job failed: exit code 1<br/></span>',
- },
-];
-
export const mockFailedJobsData = [
{
- normalizedId: 1848,
__typename: 'CiJob',
status: 'FAILED',
detailedStatus: {
@@ -1260,13 +1256,25 @@ export const mockFailedJobsData = [
},
},
id: 'gid://gitlab/Ci::Build/1848',
- stage: { __typename: 'CiStage', id: 'gid://gitlab/Ci::Stage/358', name: 'build' },
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/358',
+ name: 'build',
+ },
name: 'wait_job',
retryable: true,
- userPermissions: { __typename: 'JobPermissions', readBuild: true, updateBuild: true },
+ userPermissions: {
+ __typename: 'JobPermissions',
+ readBuild: true,
+ updateBuild: true,
+ },
+ trace: {
+ htmlSummary: '<span>Html Summary</span>',
+ },
+ failureMessage: 'Job failed',
+ _showDetails: true,
},
{
- normalizedId: 1710,
__typename: 'CiJob',
status: 'FAILED',
detailedStatus: {
@@ -1281,52 +1289,27 @@ export const mockFailedJobsData = [
action: null,
},
id: 'gid://gitlab/Ci::Build/1710',
- stage: { __typename: 'CiStage', id: 'gid://gitlab/Ci::Stage/358', name: 'build' },
+ stage: {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/358',
+ name: 'build',
+ },
name: 'wait_job',
retryable: false,
- userPermissions: { __typename: 'JobPermissions', readBuild: true, updateBuild: true },
- },
-];
-
-export const mockPreparedFailedJobsData = [
- {
- __typename: 'CiJob',
- _showDetails: true,
- detailedStatus: {
- __typename: 'DetailedStatus',
- action: {
- __typename: 'StatusAction',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- id: 'Ci::Build-failed-1848',
- method: 'post',
- path: '/root/ci-project/-/jobs/1848/retry',
- title: 'Retry',
- },
- detailsPath: '/root/ci-project/-/jobs/1848',
- group: 'failed',
- icon: 'status_failed',
- id: 'failed-1848-1848',
- label: 'failed',
- text: 'failed',
- tooltip: 'failed - (script failure)',
+ userPermissions: {
+ __typename: 'JobPermissions',
+ readBuild: true,
+ updateBuild: true,
},
- failure: null,
- failureSummary:
- '<span>Pulling docker image node:latest ...<br/></span><span>Using docker image sha256:738d733448be00c72cb6618b7a06a1424806c6d239d8885e92f9b1e8727092b5 for node:latest with digest node@sha256:e5b7b349d517159246070bf14242027a9e220ffa8bd98a67ba1495d969c06c01 ...<br/></span><div class="section-start" data-timestamp="1651175313" data-section="prepare-script" role="button"></div><span class="term-fg-l-cyan term-bold section section-header js-s-prepare-script">Preparing environment</span><span class="section section-header js-s-prepare-script"><br/></span><span class="section line js-s-prepare-script">Running on runner-kvkqh24-project-20-concurrent-0 via 0706719b1b8d...<br/></span><div class="section-end" data-section="prepare-script"></div><div class="section-start" data-timestamp="1651175313" data-section="get-sources" role="button"></div><span class="term-fg-l-cyan term-bold section section-header js-s-get-sources">Getting source from Git repository</span><span class="section section-header js-s-get-sources"><br/></span><span class="term-fg-l-green term-bold section line js-s-get-sources">Fetching changes with git depth set to 50...</span><span class="section line js-s-get-sources"><br/>Reinitialized existing Git repository in /builds/root/ci-project/.git/<br/>fatal: couldn\'t find remote ref refs/heads/test<br/></span><div class="section-end" data-section="get-sources"></div><span class="term-fg-l-red term-bold">ERROR: Job failed: exit code 1<br/></span>',
- id: 'gid://gitlab/Ci::Build/1848',
- name: 'wait_job',
- normalizedId: 1848,
- retryable: true,
- stage: { __typename: 'CiStage', id: 'gid://gitlab/Ci::Stage/358', name: 'build' },
- status: 'FAILED',
- userPermissions: { __typename: 'JobPermissions', readBuild: true, updateBuild: true },
+ trace: null,
+ failureMessage: 'Job failed',
+ _showDetails: true,
},
];
-export const mockPreparedFailedJobsDataNoPermission = [
+export const mockFailedJobsDataNoPermission = [
{
- ...mockPreparedFailedJobsData[0],
+ ...mockFailedJobsData[0],
userPermissions: { __typename: 'JobPermissions', readBuild: false, updateBuild: false },
},
];
diff --git a/spec/frontend/pipelines/pipeline_tabs_spec.js b/spec/frontend/pipelines/pipeline_tabs_spec.js
index 099748a5cca..8d1cd98e981 100644
--- a/spec/frontend/pipelines/pipeline_tabs_spec.js
+++ b/spec/frontend/pipelines/pipeline_tabs_spec.js
@@ -25,7 +25,6 @@ describe('~/pipelines/pipeline_tabs.js', () => {
el.dataset.exposeSecurityDashboard = 'true';
el.dataset.exposeLicenseScanningData = 'true';
el.dataset.failedJobsCount = 1;
- el.dataset.failedJobsSummary = '[]';
el.dataset.graphqlResourceEtag = 'graphqlResourceEtag';
el.dataset.pipelineIid = '123';
el.dataset.pipelineProjectPath = 'pipelineProjectPath';
@@ -50,7 +49,6 @@ describe('~/pipelines/pipeline_tabs.js', () => {
exposeSecurityDashboard: true,
exposeLicenseScanningData: true,
failedJobsCount: '1',
- failedJobsSummary: [],
graphqlResourceEtag: 'graphqlResourceEtag',
pipelineIid: '123',
pipelineProjectPath: 'pipelineProjectPath',
diff --git a/spec/frontend/read_more_spec.js b/spec/frontend/read_more_spec.js
index e45405088b1..5f7bd32e231 100644
--- a/spec/frontend/read_more_spec.js
+++ b/spec/frontend/read_more_spec.js
@@ -1,9 +1,8 @@
-import { loadHTMLFixture, resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
+import htmlProjectsOverview from 'test_fixtures/projects/overview.html';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import initReadMore from '~/read_more';
describe('Read more click-to-expand functionality', () => {
- const fixtureName = 'projects/overview.html';
-
const findTrigger = () => document.querySelector('.js-read-more-trigger');
afterEach(() => {
@@ -12,7 +11,7 @@ describe('Read more click-to-expand functionality', () => {
describe('expands target element', () => {
beforeEach(() => {
- loadHTMLFixture(fixtureName);
+ setHTMLFixture(htmlProjectsOverview);
});
it('adds "is-expanded" class to target element', () => {
diff --git a/spec/frontend/work_items/components/work_item_description_spec.js b/spec/frontend/work_items/components/work_item_description_spec.js
index 099c45ac683..174dd520a61 100644
--- a/spec/frontend/work_items/components/work_item_description_spec.js
+++ b/spec/frontend/work_items/components/work_item_description_spec.js
@@ -12,17 +12,15 @@ import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue
import WorkItemDescription from '~/work_items/components/work_item_description.vue';
import WorkItemDescriptionRendered from '~/work_items/components/work_item_description_rendered.vue';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
-import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemDescriptionSubscription from '~/work_items/graphql/work_item_description.subscription.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import { autocompleteDataSources, markdownPreviewPath } from '~/work_items/utils';
import {
updateWorkItemMutationResponse,
+ workItemByIidResponseFactory,
workItemDescriptionSubscriptionResponse,
- workItemResponseFactory,
workItemQueryResponse,
- projectWorkItemResponse,
} from '../mock_data';
jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
@@ -37,7 +35,6 @@ describe('WorkItemDescription', () => {
const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
const subscriptionHandler = jest.fn().mockResolvedValue(workItemDescriptionSubscriptionResponse);
- const workItemByIidResponseHandler = jest.fn().mockResolvedValue(projectWorkItemResponse);
let workItemResponseHandler;
let workItemsMvc;
@@ -59,26 +56,23 @@ describe('WorkItemDescription', () => {
const createComponent = async ({
mutationHandler = mutationSuccessHandler,
canUpdate = true,
- workItemResponse = workItemResponseFactory({ canUpdate }),
+ workItemResponse = workItemByIidResponseFactory({ canUpdate }),
isEditing = false,
- queryVariables = { id: workItemId },
- fetchByIid = false,
+ queryVariables = { iid: '1' },
} = {}) => {
workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
const { id } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemDescription, {
apolloProvider: createMockApollo([
- [workItemQuery, workItemResponseHandler],
+ [workItemByIidQuery, workItemResponseHandler],
[updateWorkItemMutation, mutationHandler],
[workItemDescriptionSubscription, subscriptionHandler],
- [workItemByIidQuery, workItemByIidResponseHandler],
]),
propsData: {
workItemId: id,
fullPath: 'test-project-path',
queryVariables,
- fetchByIid,
},
provide: {
glFeatures: {
@@ -152,9 +146,7 @@ describe('WorkItemDescription', () => {
});
it('has a subscription', async () => {
- createComponent();
-
- await waitForPromises();
+ await createComponent();
expect(subscriptionHandler).toHaveBeenCalledWith({
issuableId: workItemQueryResponse.data.workItem.id,
@@ -170,10 +162,7 @@ describe('WorkItemDescription', () => {
};
await createComponent({
- workItemResponse: workItemResponseFactory({
- lastEditedAt,
- lastEditedBy,
- }),
+ workItemResponse: workItemByIidResponseFactory({ lastEditedAt, lastEditedBy }),
});
expect(findEditedAt().props()).toMatchObject({
@@ -309,25 +298,14 @@ describe('WorkItemDescription', () => {
});
});
- it('calls the global ID work item query when `fetchByIid` prop is false', async () => {
- createComponent({ fetchByIid: false });
- await waitForPromises();
+ it('calls the work item query', async () => {
+ await createComponent();
expect(workItemResponseHandler).toHaveBeenCalled();
- expect(workItemByIidResponseHandler).not.toHaveBeenCalled();
- });
-
- it('calls the IID work item query when when `fetchByIid` prop is true', async () => {
- createComponent({ fetchByIid: true });
- await waitForPromises();
-
- expect(workItemResponseHandler).not.toHaveBeenCalled();
- expect(workItemByIidResponseHandler).toHaveBeenCalled();
});
- it('skips calling the handlers when missing the needed queryVariables', async () => {
- createComponent({ queryVariables: {}, fetchByIid: false });
- await waitForPromises();
+ it('skips calling the work item query when missing queryVariables', async () => {
+ await createComponent({ queryVariables: {} });
expect(workItemResponseHandler).not.toHaveBeenCalled();
});
diff --git a/spec/helpers/ci/builds_helper_spec.rb b/spec/helpers/ci/builds_helper_spec.rb
index c215d7b4a78..eabd40f3dd4 100644
--- a/spec/helpers/ci/builds_helper_spec.rb
+++ b/spec/helpers/ci/builds_helper_spec.rb
@@ -3,51 +3,6 @@
require 'spec_helper'
RSpec.describe Ci::BuildsHelper do
- describe '#build_summary' do
- subject { helper.build_summary(build, skip: skip) }
-
- context 'when build has no trace' do
- let(:build) { instance_double(Ci::Build, has_trace?: false) }
-
- context 'when skip is false' do
- let(:skip) { false }
-
- it 'returns no job log' do
- expect(subject).to eq('No job log')
- end
- end
-
- context 'when skip is true' do
- let(:skip) { true }
-
- it 'returns no job log' do
- expect(subject).to eq('No job log')
- end
- end
- end
-
- context 'when build has trace' do
- let(:build) { create(:ci_build, :trace_live) }
-
- context 'when skip is true' do
- let(:skip) { true }
-
- it 'returns link to logs' do
- expect(subject).to include('View job log')
- expect(subject).to include(pipeline_job_url(build.pipeline, build))
- end
- end
-
- context 'when skip is false' do
- let(:skip) { false }
-
- it 'returns log lines' do
- expect(subject).to include(build.trace.html(last_lines: 10).html_safe)
- end
- end
- end
- end
-
describe '#sidebar_build_class' do
using RSpec::Parameterized::TableSyntax
@@ -97,20 +52,6 @@ RSpec.describe Ci::BuildsHelper do
end
end
- describe '#prepare_failed_jobs_summary_data' do
- let(:failed_build) { create(:ci_build, :failed, :trace_live) }
-
- subject { helper.prepare_failed_jobs_summary_data([failed_build]) }
-
- it 'returns array of failed jobs with id, failure and failure summary' do
- expect(subject).to eq([{
- id: failed_build.id,
- failure: failed_build.present.callout_failure_message,
- failure_summary: helper.build_summary(failed_build)
- }].to_json)
- end
- end
-
def assign_project
build(:project).tap do |project|
assign(:project, project)
diff --git a/spec/helpers/projects/pipeline_helper_spec.rb b/spec/helpers/projects/pipeline_helper_spec.rb
index ef52c4dd425..baebbb21aed 100644
--- a/spec/helpers/projects/pipeline_helper_spec.rb
+++ b/spec/helpers/projects/pipeline_helper_spec.rb
@@ -20,7 +20,6 @@ RSpec.describe Projects::PipelineHelper do
it 'returns pipeline tabs data' do
expect(pipeline_tabs_data).to include({
failed_jobs_count: pipeline.failed_builds.count,
- failed_jobs_summary: prepare_failed_jobs_summary_data(pipeline.failed_builds),
project_path: project.full_path,
graphql_resource_etag: graphql_etag_pipeline_path(pipeline),
metrics_path: namespace_project_ci_prometheus_metrics_histograms_path(namespace_id: project.namespace, project_id: project, format: :json),
diff --git a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
index f4b13033270..d9dd1b387dc 100644
--- a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
@@ -2,29 +2,12 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition, feature_category: :database do
+RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_category: :database do
include Gitlab::Database::DynamicModelHelpers
include Database::TableSchemaHelpers
+ include Database::InjectFailureHelpers
- let(:migration_context) do
- Gitlab::Database::Migration[2.0].new.tap do |migration|
- migration.extend Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers
- end
- end
-
- let(:connection) { migration_context.connection }
- let(:table_name) { '_test_table_to_partition' }
- let(:table_identifier) { "#{connection.current_schema}.#{table_name}" }
- let(:partitioning_column) { :partition_number }
- let(:partitioning_default) { 1 }
- let(:referenced_table_name) { '_test_referenced_table' }
- let(:other_referenced_table_name) { '_test_other_referenced_table' }
- let(:parent_table_name) { "#{table_name}_parent" }
- let(:lock_tables) { [] }
-
- let(:model) { define_batchable_model(table_name, connection: connection) }
-
- let(:parent_model) { define_batchable_model(parent_table_name, connection: connection) }
+ include_context 'with a table structure for converting a table to a list partition'
let(:converter) do
described_class.new(
@@ -37,45 +20,6 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition,
)
end
- before do
- # Suppress printing migration progress
- allow(migration_context).to receive(:puts)
- allow(migration_context.connection).to receive(:transaction_open?).and_return(false)
-
- connection.execute(<<~SQL)
- create table #{referenced_table_name} (
- id bigserial primary key not null
- )
- SQL
-
- connection.execute(<<~SQL)
- create table #{other_referenced_table_name} (
- id bigserial primary key not null
- )
- SQL
-
- connection.execute(<<~SQL)
- insert into #{referenced_table_name} default values;
- insert into #{other_referenced_table_name} default values;
- SQL
-
- connection.execute(<<~SQL)
- create table #{table_name} (
- id bigserial not null,
- #{partitioning_column} bigint not null default #{partitioning_default},
- referenced_id bigint not null references #{referenced_table_name} (id) on delete cascade,
- other_referenced_id bigint not null references #{other_referenced_table_name} (id) on delete set null,
- primary key (id, #{partitioning_column})
- )
- SQL
-
- connection.execute(<<~SQL)
- insert into #{table_name} (referenced_id, other_referenced_id)
- select #{referenced_table_name}.id, #{other_referenced_table_name}.id
- from #{referenced_table_name}, #{other_referenced_table_name};
- SQL
- end
-
describe "#prepare_for_partitioning" do
subject(:prepare) { converter.prepare_for_partitioning(async: async) }
@@ -211,6 +155,8 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition,
context 'when the primary key is incorrect' do
before do
connection.execute(<<~SQL)
+ alter table #{referencing_table_name} drop constraint fk_referencing; -- this depends on the primary key
+ alter table #{other_referencing_table_name} drop constraint fk_referencing_other; -- this does too
alter table #{table_name} drop constraint #{table_name}_pkey;
alter table #{table_name} add constraint #{table_name}_pkey PRIMARY KEY (id);
SQL
@@ -260,6 +206,8 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition,
parent_model.create!(partitioning_column => 2, :referenced_id => 1, :other_referenced_id => 1)
expect(parent_model.pluck(:id)).to match_array([1, 2, 3])
+
+ expect { referencing_model.create!(partitioning_column => 1, :ref_id => 1) }.not_to raise_error
end
context 'when the existing table is owned by a different user' do
@@ -288,53 +236,37 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition,
end
context 'when an error occurs during the conversion' do
- def fail_first_time
- # We can't directly use a boolean here, as we need something that will be passed by-reference to the proc
- fault_status = { faulted: false }
- proc do |m, *args, **kwargs|
- next m.call(*args, **kwargs) if fault_status[:faulted]
-
- fault_status[:faulted] = true
- raise 'fault!'
- end
- end
-
- def fail_sql_matching(regex)
- proc do
- allow(migration_context.connection).to receive(:execute).and_call_original
- allow(migration_context.connection).to receive(:execute).with(regex).and_wrap_original(&fail_first_time)
- end
+ before do
+ # Set up the fault that we'd like to inject
+ fault.call
end
- def fail_adding_fk(from_table, to_table)
- proc do
- allow(migration_context.connection).to receive(:add_foreign_key).and_call_original
- expect(migration_context.connection).to receive(:add_foreign_key).with(from_table, to_table, any_args)
- .and_wrap_original(&fail_first_time)
- end
+ let(:old_fks) do
+ Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(table_identifier).not_inherited
end
- where(:case_name, :fault) do
- [
- ["creating parent table", lazy { fail_sql_matching(/CREATE/i) }],
- ["adding the first foreign key", lazy { fail_adding_fk(parent_table_name, referenced_table_name) }],
- ["adding the second foreign key", lazy { fail_adding_fk(parent_table_name, other_referenced_table_name) }],
- ["attaching table", lazy { fail_sql_matching(/ATTACH/i) }]
- ]
+ let(:new_fks) do
+ Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(parent_table_identifier).not_inherited
end
- before do
- # Set up the fault that we'd like to inject
- fault.call
- end
+ context 'when partitioning fails the first time' do
+ where(:case_name, :fault) do
+ [
+ ["creating parent table", lazy { fail_sql_matching(/CREATE/i) }],
+ ["adding the first foreign key", lazy { fail_adding_fk(parent_table_name, referenced_table_name) }],
+ ["adding the second foreign key", lazy { fail_adding_fk(parent_table_name, other_referenced_table_name) }],
+ ["attaching table", lazy { fail_sql_matching(/ATTACH/i) }]
+ ]
+ end
- with_them do
- it 'recovers from a fault', :aggregate_failures do
- expect { converter.partition }.to raise_error(/fault/)
- expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(0)
+ with_them do
+ it 'recovers from a fault', :aggregate_failures do
+ expect { converter.partition }.to raise_error(/fault/)
+ expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(0)
- expect { converter.partition }.not_to raise_error
- expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
+ expect { converter.partition }.not_to raise_error
+ expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/partitioning/list/locking_configuration_spec.rb b/spec/lib/gitlab/database/partitioning/list/locking_configuration_spec.rb
new file mode 100644
index 00000000000..851add43e3c
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/list/locking_configuration_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::List::LockingConfiguration, feature_category: :database do
+ let(:migration_context) do
+ Gitlab::Database::Migration[2.1].new.tap do |migration|
+ migration.extend Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers
+ migration.extend Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
+ end
+ end
+
+ let(:locking_order) { %w[table_1 table_2 table_3] }
+
+ subject(:locking_configuration) { described_class.new(migration_context, table_locking_order: locking_order) }
+
+ describe '#locking_statement_for' do
+ it 'only includes locking information for tables in the locking specification' do
+ expect(subject.locking_statement_for(%w[table_1 table_other])).to eq(subject.locking_statement_for('table_1'))
+ end
+
+ it 'is nil when none of the tables match the lock configuration' do
+ expect(subject.locking_statement_for('table_other')).to be_nil
+ end
+
+ it 'is a lock tables statement' do
+ expect(subject.locking_statement_for(%w[table_3 table_2])).to eq(<<~SQL)
+ LOCK "table_2", "table_3" IN ACCESS EXCLUSIVE MODE
+ SQL
+ end
+
+ it 'raises if a table name with schema is passed' do
+ expect { subject.locking_statement_for('public.test') }.to raise_error(ArgumentError)
+ end
+ end
+
+ describe '#locking_order_for' do
+ it 'is the intersection with the locking specification, in the order of the specification' do
+ expect(subject.locking_order_for(%w[table_other table_3 table_1])).to eq(%w[table_1 table_3])
+ end
+
+ it 'raises if a table name with schema is passed' do
+ expect { subject.locking_order_for('public.test') }.to raise_error(ArgumentError)
+ end
+ end
+end
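
The expectations above pin down the quoting, the ordering, and the argument checking of the new locking helper. For orientation, a minimal sketch consistent with those expectations follows; the class and method names mirror Gitlab::Database::Partitioning::List::LockingConfiguration, but the structure is an assumption, not the real implementation.

class LockingConfigurationSketch
  def initialize(migration_context, table_locking_order:)
    @migration_context = migration_context
    # Tables are locked in one globally agreed order to avoid lock-ordering deadlocks.
    @table_locking_order = table_locking_order.map(&:to_s)
  end

  def locking_order_for(tables)
    requested = Array(tables).map(&:to_s)
    raise ArgumentError, 'table names must not be schema-qualified' if requested.any? { |name| name.include?('.') }

    # Keep only the tables covered by the specification, in the specification's order.
    @table_locking_order & requested
  end

  def locking_statement_for(tables)
    to_lock = locking_order_for(tables)
    return if to_lock.empty?

    quoted = to_lock.map { |name| %("#{name}") }.join(', ')
    <<~SQL
      LOCK #{quoted} IN ACCESS EXCLUSIVE MODE
    SQL
  end
end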
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 5120eb76f42..4a0a740f121 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -43,15 +43,15 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
context 'list partitioning conversion helpers' do
- shared_examples_for 'delegates to ConvertTableToFirstListPartition' do
+ shared_examples_for 'delegates to ConvertTable' do
let(:extra_options) { {} }
it 'throws an error if in a transaction' do
allow(migration).to receive(:transaction_open?).and_return(true)
expect { migrate }.to raise_error(/cannot be run inside a transaction/)
end
- it 'delegates to a method on ConvertTableToFirstListPartition' do
- expect_next_instance_of(Gitlab::Database::Partitioning::ConvertTableToFirstListPartition,
+ it 'delegates to a method on List::ConvertTable' do
+ expect_next_instance_of(Gitlab::Database::Partitioning::List::ConvertTable,
migration_context: migration,
table_name: source_table,
parent_table_name: partitioned_table,
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
describe '#convert_table_to_first_list_partition' do
- it_behaves_like 'delegates to ConvertTableToFirstListPartition' do
+ it_behaves_like 'delegates to ConvertTable' do
let(:lock_tables) { [source_table] }
let(:extra_options) { { lock_tables: lock_tables } }
let(:expected_method) { :partition }
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
describe '#revert_converting_table_to_first_list_partition' do
- it_behaves_like 'delegates to ConvertTableToFirstListPartition' do
+ it_behaves_like 'delegates to ConvertTable' do
let(:expected_method) { :revert_partitioning }
let(:migrate) do
migration.revert_converting_table_to_first_list_partition(table_name: source_table,
@@ -93,7 +93,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
describe '#prepare_constraint_for_list_partitioning' do
- it_behaves_like 'delegates to ConvertTableToFirstListPartition' do
+ it_behaves_like 'delegates to ConvertTable' do
let(:expected_method) { :prepare_for_partitioning }
let(:migrate) do
migration.prepare_constraint_for_list_partitioning(table_name: source_table,
@@ -106,7 +106,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
describe '#revert_preparing_constraint_for_list_partitioning' do
- it_behaves_like 'delegates to ConvertTableToFirstListPartition' do
+ it_behaves_like 'delegates to ConvertTable' do
let(:expected_method) { :revert_preparation_for_partitioning }
let(:migrate) do
migration.revert_preparing_constraint_for_list_partitioning(table_name: source_table,
diff --git a/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
index ef54cab5275..ea9d77bcfa4 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
@@ -126,6 +126,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
describe '.global' do
before do
described_class.remove_instance_variable(:@global_worker_router) if described_class.instance_variable_defined?(:@global_worker_router)
+ stub_config(sidekiq: { routing_rules: routing_rules })
end
after do
@@ -136,10 +137,6 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
include_context 'router examples setup'
with_them do
- before do
- stub_config(sidekiq: { routing_rules: routing_rules })
- end
-
it 'routes the worker to the correct queue' do
expect(described_class.global.route(worker)).to eql(expected_queue)
end
@@ -157,10 +154,6 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
end
end
- before do
- stub_config(sidekiq: { routing_rules: routing_rules })
- end
-
context 'invalid routing rules format' do
let(:routing_rules) { ['feature_category=a'] }
@@ -183,6 +176,26 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
end
end
end
+
+ context 'when routing rules is missing `*` as the last rule' do
+ let(:routing_rules) { [['resource_boundary=cpu', 'cpu']] }
+
+ it 'logs a warning' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(a_string_matching('sidekiq.routing_rules config is missing'))
+
+ described_class.global
+ end
+ end
+
+ context 'when routing rules has a `*` rule as the last rule' do
+ let(:routing_rules) { [['resource_boundary=cpu', 'cpu'], ['*', 'default']] }
+
+ it 'does not log any warning' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
+
+ described_class.global
+ end
+ end
end
describe '#route' do
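
The two new contexts document the guard applied to sidekiq.routing_rules: the last rule is expected to be a `['*', ...]` catch-all, and a missing catch-all only produces a warning rather than an error. A rough sketch of that check, consistent with the expectations above (the method name is an assumption, not the real WorkerRouter code):

# Sketch only: warn when the configured routing rules lack a final catch-all.
def warn_on_missing_catchall(routing_rules)
  last_query = routing_rules.last&.first

  return if last_query == '*'

  Gitlab::AppLogger.warn(
    'sidekiq.routing_rules config is missing a `*` catch-all as the last rule'
  )
end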
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
index 1b01793d80d..f65f7a645ea 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
@@ -40,10 +40,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_r
describe '#call' do
it 'removes the stored job from redis before execution' do
bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
- job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
+ job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'default')
expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
- .to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
+ .to receive(:new).with(a_hash_including(bare_job), 'default')
.and_return(job_definition).twice # once in client middleware
expect(job_definition).to receive(:delete!).ordered.and_call_original
@@ -59,10 +59,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_r
it 'removes the stored job from redis after execution' do
bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
- job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
+ job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'default')
expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
- .to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
+ .to receive(:new).with(a_hash_including(bare_job), 'default')
.and_return(job_definition).twice # once in client middleware
expect(TestDeduplicationWorker).to receive(:work).ordered.and_call_original
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index 9ed2a0642fc..c66e36c5621 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.migrate_set(set_name)).to eq(scanned: 3, migrated: 0)
expect(set_after.length).to eq(3)
- expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects',
+ expect(set_after.map(&:first)).to all(include('queue' => 'default',
'class' => 'AuthorizedProjectsWorker'))
end
end
@@ -73,7 +73,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
if item['class'] == 'AuthorizedProjectsWorker'
expect(item).to include('queue' => 'new_queue', 'args' => [i])
else
- expect(item).to include('queue' => 'post_receive', 'args' => [i])
+ expect(item).to include('queue' => 'default', 'args' => [i])
end
expect(score).to be_within(schedule_jitter).of(i.succ.hours.from_now.to_i)
@@ -134,7 +134,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.migrate_set(set_name)).to eq(scanned: 4, migrated: 0)
expect(set_after.length).to eq(3)
- expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects'))
+ expect(set_after.map(&:first)).to all(include('queue' => 'default'))
end
end
@@ -157,7 +157,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.migrate_set(set_name)).to eq(scanned: 4, migrated: 1)
expect(set_after.group_by { |job| job.first['queue'] }.transform_values(&:count))
- .to eq('authorized_projects' => 6, 'new_queue' => 1)
+ .to eq('default' => 6, 'new_queue' => 1)
end
it 'iterates through the entire set of jobs' do
diff --git a/spec/lib/product_analytics/settings_spec.rb b/spec/lib/product_analytics/settings_spec.rb
index 2cacd55b871..8e6ac3cf0ad 100644
--- a/spec/lib/product_analytics/settings_spec.rb
+++ b/spec/lib/product_analytics/settings_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics do
+ let_it_be(:project) { create(:project) }
+
+ subject { described_class.for_project(project) }
+
describe 'config settings' do
context 'when configured' do
before do
@@ -10,7 +14,7 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
end
it 'will be configured' do
- expect(described_class.configured?).to be_truthy
+ expect(subject.configured?).to be_truthy
end
end
@@ -20,7 +24,7 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
end
it 'will not be configured' do
- expect(described_class.configured?).to be_falsey
+ expect(subject.configured?).to be_falsey
end
end
@@ -32,7 +36,7 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
end
it 'will not be configured' do
- expect(described_class.configured?).to be_falsey
+ expect(subject.configured?).to be_falsey
end
end
@@ -40,14 +44,30 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
it "can read #{key}" do
expect(::Gitlab::CurrentSettings).to receive(key).and_return('test')
- expect(described_class.send(key)).to eq('test')
+ expect(subject.send(key)).to eq('test')
+ end
+
+ context 'with project' do
+ it "will override when provided a project #{key}" do
+ expect(::Gitlab::CurrentSettings).not_to receive(key)
+ expect(project.project_setting).to receive(key).and_return('test')
+
+ expect(subject.send(key)).to eq('test')
+ end
+
+ it "will will not override when provided a blank project #{key}" do
+ expect(::Gitlab::CurrentSettings).to receive(key).and_return('test')
+ expect(project.project_setting).to receive(key).and_return('')
+
+ expect(subject.send(key)).to eq('test')
+ end
end
end
end
describe '.enabled?' do
before do
- allow(described_class).to receive(:configured?).and_return(true)
+ allow(subject).to receive(:configured?).and_return(true)
end
context 'when enabled' do
@@ -56,7 +76,7 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
end
it 'will be enabled' do
- expect(described_class.enabled?).to be_truthy
+ expect(subject.enabled?).to be_truthy
end
end
@@ -66,7 +86,7 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
end
it 'will not be enabled' do
- expect(described_class.enabled?).to be_falsey
+ expect(subject.enabled?).to be_falsey
end
end
end
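
The project-scoped examples above encode a simple precedence rule: a project-level value overrides the instance-level setting unless it is blank. A hedged sketch of that lookup follows; the helper name is illustrative and not part of ProductAnalytics::Settings.

# Sketch only: resolve a setting with project-level override semantics.
def resolve_setting(project, key)
  project_value = project.project_setting.public_send(key)

  # A blank project value falls back to the instance-wide setting.
  project_value.presence || ::Gitlab::CurrentSettings.public_send(key)
end

# resolve_setting(project, :cube_api_key)
# => the project's value when present, otherwise Gitlab::CurrentSettings.cube_api_key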
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index f3d6d0ff006..0a2ead0aa6b 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectSetting, type: :model do
+RSpec.describe ProjectSetting, type: :model, feature_category: :projects do
using RSpec::Parameterized::TableSyntax
it { is_expected.to belong_to(:project) }
diff --git a/spec/requests/api/ci/runner/runners_delete_spec.rb b/spec/requests/api/ci/runner/runners_delete_spec.rb
index 65c287a9535..8d2b5677502 100644
--- a/spec/requests/api/ci/runner/runners_delete_spec.rb
+++ b/spec/requests/api/ci/runner/runners_delete_spec.rb
@@ -7,16 +7,19 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
include RedisHelpers
include WorkhorseHelpers
- let(:registration_token) { 'abcdefg123456' }
-
before do
stub_feature_flags(ci_enable_live_trace: true)
stub_gitlab_calls
- stub_application_setting(runners_registration_token: registration_token)
- allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ allow_next_instance_of(::Ci::Runner) { |runner| allow(runner).to receive(:cache_attributes) }
end
describe '/api/v4/runners' do
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_application_setting(runners_registration_token: registration_token)
+ end
+
describe 'DELETE /api/v4/runners' do
context 'when no token is provided' do
it 'returns 400 error' do
@@ -57,4 +60,75 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end
end
+
+ describe '/api/v4/runners/managers' do
+ describe 'DELETE /api/v4/runners/managers' do
+ subject(:delete_request) { delete api('/runners/managers'), params: delete_params }
+
+ context 'with created runner' do
+ let!(:runner) { create(:ci_runner, :with_runner_manager, registration_type: :authenticated_user) }
+
+ context 'with matching system_id' do
+ context 'when no token is provided' do
+ let(:delete_params) { { system_id: runner.runner_managers.first.system_xid } }
+
+ it 'returns 400 error' do
+ delete_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when invalid token is provided' do
+ let(:delete_params) { { token: 'invalid', system_id: runner.runner_managers.first.system_xid } }
+
+ it 'returns 403 error' do
+ delete_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+ end
+
+ context 'when valid token is provided' do
+ context 'with created runner' do
+ let!(:runner) { create(:ci_runner, :with_runner_manager, registration_type: :authenticated_user) }
+
+ context 'with matching system_id' do
+ let(:delete_params) { { token: runner.token, system_id: runner.runner_managers.first.system_xid } }
+
+ it 'deletes runner manager' do
+ expect do
+ delete_request
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { runner.runner_managers.count }.from(1).to(0)
+
+ expect(::Ci::Runner.count).to eq(1)
+ end
+
+ it_behaves_like '412 response' do
+ let(:request) { api('/runners/managers') }
+ let(:params) { delete_params }
+ end
+
+ it_behaves_like 'storing arguments in the application context for the API' do
+ let(:expected_params) { { client_id: "runner/#{runner.id}" } }
+ end
+ end
+
+ context 'without system_id' do
+ let(:delete_params) { { token: runner.token } }
+
+ it 'does not delete runner manager or runner' do
+ delete_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index bde6905da67..ecdc61694c6 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -167,6 +167,20 @@ project_setting:
- pages_unique_domain
- runner_registration_enabled
- product_analytics_instrumentation_key
+ - jitsu_host
+ - jitsu_project_xid
+ - jitsu_administrator_email
+ - jitsu_administrator_password
+ - encrypted_jitsu_administrator_password
+ - encrypted_jitsu_administrator_password_iv
+ - product_analytics_data_collector_host
+ - product_analytics_clickhouse_connection_string
+ - encrypted_product_analytics_clickhouse_connection_string
+ - encrypted_product_analytics_clickhouse_connection_string_iv
+ - cube_api_base_url
+ - cube_api_key
+ - encrypted_cube_api_key
+ - encrypted_cube_api_key_iv
build_service_desk_setting: # service_desk_setting
unexposed_attributes:
diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb
index 622e57edf6a..04d5f7ac20a 100644
--- a/spec/requests/api/protected_branches_spec.rb
+++ b/spec/requests/api/protected_branches_spec.rb
@@ -110,6 +110,21 @@ RSpec.describe API::ProtectedBranches, feature_category: :source_code_management
it_behaves_like 'protected branch'
end
+
+ context 'when a deploy key is present' do
+ let(:deploy_key) do
+ create(:deploy_key, deploy_keys_projects: [create(:deploy_keys_project, :write_access, project: project)])
+ end
+
+ it 'returns deploy key information' do
+ create(:protected_branch_push_access_level, protected_branch: protected_branch, deploy_key: deploy_key)
+ get api(route, user)
+
+ expect(json_response['push_access_levels']).to include(
+ a_hash_including('access_level_description' => 'Deploy key', 'deploy_key_id' => deploy_key.id)
+ )
+ end
+ end
end
context 'when authenticated as a developer' do
diff --git a/spec/requests/api/protected_tags_spec.rb b/spec/requests/api/protected_tags_spec.rb
index 5b128d4ec9e..c6398e624f8 100644
--- a/spec/requests/api/protected_tags_spec.rb
+++ b/spec/requests/api/protected_tags_spec.rb
@@ -84,6 +84,21 @@ RSpec.describe API::ProtectedTags, feature_category: :source_code_management do
it_behaves_like 'protected tag'
end
+
+ context 'when a deploy key is present' do
+ let(:deploy_key) do
+ create(:deploy_key, deploy_keys_projects: [create(:deploy_keys_project, :write_access, project: project)])
+ end
+
+ it 'returns deploy key information' do
+ create(:protected_tag_create_access_level, protected_tag: protected_tag, deploy_key: deploy_key)
+ get api(route, user)
+
+ expect(json_response['create_access_levels']).to include(
+ a_hash_including('access_level_description' => 'Deploy key', 'deploy_key_id' => deploy_key.id)
+ )
+ end
+ end
end
context 'when authenticated as a guest' do
diff --git a/spec/services/ci/runners/unregister_runner_manager_service_spec.rb b/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
new file mode 100644
index 00000000000..0c37011463f
--- /dev/null
+++ b/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', feature_category: :runner_fleet do
+ subject(:execute) { described_class.new(runner, 'some_token', system_id: system_id).execute }
+
+ context 'with runner registered with registration token' do
+ let!(:runner) { create(:ci_runner, registration_type: :registration_token) }
+ let(:system_id) { nil }
+
+ it 'does not destroy runner or runner managers' do
+ expect do
+ expect(execute).to be_error
+ end.to not_change { Ci::Runner.count }
+ .and not_change { Ci::RunnerManager.count }
+ expect(runner.errors).to be_empty
+ end
+ end
+
+ context 'with runner created in UI' do
+ let!(:runner_manager1) { create(:ci_runner_machine, runner: runner, system_xid: 'system_id_1') }
+ let!(:runner_manager2) { create(:ci_runner_machine, runner: runner, system_xid: 'system_id_2') }
+ let!(:runner) { create(:ci_runner, registration_type: :authenticated_user) }
+
+ context 'with system_id specified' do
+ let(:system_id) { runner_manager1.system_xid }
+
+ it 'destroys runner_manager1 and leaves runner', :aggregate_failures do
+ expect do
+ expect(execute).to be_success
+ end.to change { Ci::RunnerManager.count }.by(-1)
+ .and not_change { Ci::Runner.count }
+ expect(runner.errors).to be_empty
+ expect(runner.runner_managers).to contain_exactly(runner_manager2)
+ end
+ end
+
+ context 'with system_id missing' do
+ let(:system_id) { nil }
+
+ it 'returns error and leaves runner_manager1', :aggregate_failures do
+ expect do
+ expect(execute).to be_error
+ end.to not_change { Ci::Runner.count }
+ .and not_change { Ci::RunnerManager.count }
+ end
+ end
+ end
+end
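
These examples pin down the essentials of the new service: a system_id is required, it selects exactly one runner manager to destroy, and the runner itself is left intact. The sketch below captures that shape only; it is an assumption, not the real Ci::Runners::UnregisterRunnerManagerService, and it does not model the registration-token restriction exercised in the first context.

# Sketch only: destroy the single runner manager identified by system_xid.
def execute
  return ServiceResponse.error(message: 'system_id is required') if system_id.blank?

  runner_manager = runner.runner_managers.find_by!(system_xid: system_id)
  runner_manager.destroy!

  ServiceResponse.success
end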
diff --git a/spec/support/helpers/database/inject_failure_helpers.rb b/spec/support/helpers/database/inject_failure_helpers.rb
new file mode 100644
index 00000000000..df98f45e69f
--- /dev/null
+++ b/spec/support/helpers/database/inject_failure_helpers.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Database
+ module InjectFailureHelpers
+ # These methods are used by specs that inject faults into the migration procedure and then ensure
+ # that it migrates correctly when rerun
+ def fail_first_time
+ # We can't directly use a boolean here, as we need something that will be passed by-reference to the proc
+ fault_status = { faulted: false }
+ proc do |m, *args, **kwargs|
+ next m.call(*args, **kwargs) if fault_status[:faulted]
+
+ fault_status[:faulted] = true
+ raise 'fault!'
+ end
+ end
+
+ def fail_sql_matching(regex)
+ proc do
+ allow(migration_context.connection).to receive(:execute).and_call_original
+ allow(migration_context.connection).to receive(:execute).with(regex).and_wrap_original(&fail_first_time)
+ end
+ end
+
+ def fail_adding_fk(from_table, to_table)
+ proc do
+ allow(migration_context.connection).to receive(:add_foreign_key).and_call_original
+ expect(migration_context.connection).to receive(:add_foreign_key).with(from_table, to_table, any_args)
+ .and_wrap_original(&fail_first_time)
+ end
+ end
+
+ def fail_removing_fk(from_table, to_table)
+ proc do
+ allow(migration_context.connection).to receive(:remove_foreign_key).and_call_original
+ expect(migration_context.connection).to receive(:remove_foreign_key).with(from_table, to_table, any_args)
+ .and_wrap_original(&fail_first_time)
+ end
+ end
+ end
+end
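
The helpers return procs instead of installing stubs immediately, so a spec can declare the failure lazily inside a `where` table and install it from a `before` block, as the ConvertTable spec above does. A compact usage sketch for the one helper not exercised in the hunks shown here, fail_removing_fk (table names and the example expectation are placeholders):

where(:case_name, :fault) do
  [
    ["removing a foreign key", lazy { fail_removing_fk(:parent_table, :referenced_table) }]
  ]
end

with_them do
  before do
    fault.call # installs the stub; the first matching remove_foreign_key raises 'fault!'
  end

  it 'recovers when rerun' do
    expect { converter.revert_partitioning }.to raise_error(/fault/)
    expect { converter.revert_partitioning }.not_to raise_error
  end
end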
diff --git a/spec/support/helpers/filtered_search_helpers.rb b/spec/support/helpers/filtered_search_helpers.rb
index ecc749b1e45..60638eb06cd 100644
--- a/spec/support/helpers/filtered_search_helpers.rb
+++ b/spec/support/helpers/filtered_search_helpers.rb
@@ -213,7 +213,7 @@ module FilteredSearchHelpers
def submit_search_term(value)
click_filtered_search_bar
- send_keys(value, :enter)
+ send_keys(value, :enter, :enter)
end
def click_filtered_search_bar
diff --git a/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb
new file mode 100644
index 00000000000..e9cd1bdbbf5
--- /dev/null
+++ b/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'with a table structure for converting a table to a list partition' do
+ let(:migration_context) do
+ Gitlab::Database::Migration[2.1].new.tap do |migration|
+ migration.extend Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers
+ migration.extend Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
+ end
+ end
+
+ let(:connection) { migration_context.connection }
+ let(:table_name) { '_test_table_to_partition' }
+ let(:table_identifier) { "#{connection.current_schema}.#{table_name}" }
+ let(:partitioning_column) { :partition_number }
+ let(:partitioning_default) { 1 }
+ let(:referenced_table_name) { '_test_referenced_table' }
+ let(:other_referenced_table_name) { '_test_other_referenced_table' }
+ let(:referencing_table_name) { '_test_referencing_table' }
+ let(:other_referencing_table_name) { '_test_other_referencing_table' }
+ let(:parent_table_name) { "#{table_name}_parent" }
+ let(:parent_table_identifier) { "#{connection.current_schema}.#{parent_table_name}" }
+ let(:lock_tables) { [] }
+
+ let(:model) { define_batchable_model(table_name, connection: connection) }
+
+ let(:parent_model) { define_batchable_model(parent_table_name, connection: connection) }
+ let(:referencing_model) { define_batchable_model(referencing_table_name, connection: connection) }
+
+ before do
+ # Suppress printing migration progress
+ allow(migration_context).to receive(:puts)
+ allow(migration_context.connection).to receive(:transaction_open?).and_return(false)
+
+ connection.execute(<<~SQL)
+ create table #{referenced_table_name} (
+ id bigserial primary key not null
+ )
+ SQL
+
+ connection.execute(<<~SQL)
+ create table #{other_referenced_table_name} (
+ id bigserial primary key not null
+ )
+ SQL
+
+ connection.execute(<<~SQL)
+ insert into #{referenced_table_name} default values;
+ insert into #{other_referenced_table_name} default values;
+ SQL
+
+ connection.execute(<<~SQL)
+ create table #{table_name} (
+ id bigserial not null,
+ #{partitioning_column} bigint not null default #{partitioning_default},
+ referenced_id bigint not null references #{referenced_table_name} (id) on delete cascade,
+ other_referenced_id bigint not null references #{other_referenced_table_name} (id) on delete set null,
+ primary key (id, #{partitioning_column})
+ )
+ SQL
+
+ connection.execute(<<~SQL)
+ create table #{referencing_table_name} (
+ id bigserial primary key not null,
+ #{partitioning_column} bigint not null,
+ ref_id bigint not null,
+ constraint fk_referencing foreign key (#{partitioning_column}, ref_id) references #{table_name} (#{partitioning_column}, id) on delete cascade
+ )
+ SQL
+
+ connection.execute(<<~SQL)
+ create table #{other_referencing_table_name} (
+ id bigserial not null,
+ #{partitioning_column} bigint not null,
+ ref_id bigint not null,
+ primary key (#{partitioning_column}, id),
+ constraint fk_referencing_other foreign key (#{partitioning_column}, ref_id) references #{table_name} (#{partitioning_column}, id)
+ ) partition by hash(#{partitioning_column});
+
+ create table #{other_referencing_table_name}_1
+ partition of #{other_referencing_table_name} for values with (modulus 2, remainder 0);
+
+ create table #{other_referencing_table_name}_2
+ partition of #{other_referencing_table_name} for values with (modulus 2, remainder 1);
+ SQL
+
+ connection.execute(<<~SQL)
+ insert into #{table_name} (referenced_id, other_referenced_id)
+ select #{referenced_table_name}.id, #{other_referenced_table_name}.id
+ from #{referenced_table_name}, #{other_referenced_table_name};
+ SQL
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/status_shared_examples.rb b/spec/support/shared_examples/requests/api/status_shared_examples.rb
index fad5211fc59..ff3947c0e73 100644
--- a/spec/support/shared_examples/requests/api/status_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/status_shared_examples.rb
@@ -71,7 +71,7 @@ RSpec.shared_examples '412 response' do
let(:params) { nil }
let(:success_status) { 204 }
- context 'for a modified ressource' do
+ context 'for a modified resource' do
before do
delete request, params: params, headers: { 'HTTP_IF_UNMODIFIED_SINCE' => '1990-01-12T00:00:48-0600' }
end
@@ -82,7 +82,7 @@ RSpec.shared_examples '412 response' do
end
end
- context 'for an unmodified ressource' do
+ context 'for an unmodified resource' do
before do
delete request, params: params, headers: { 'HTTP_IF_UNMODIFIED_SINCE' => Time.now }
end