gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2023-09-22 03:11:47 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2023-09-22 03:11:47 +0300
commit    0bdb61ade7f12067dd524463af4f83994f1baa37 (patch)
tree      987819d2a9e915df73874f66c52d1d9cdc889583
parent    8746f541fbcf2b89b30c3d4a1b290f5679186400 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  GITALY_SERVER_VERSION | 2
-rw-r--r--  Gemfile | 3
-rw-r--r--  Gemfile.checksum | 2
-rw-r--r--  Gemfile.lock | 9
-rw-r--r--  app/assets/javascripts/alert.js | 54
-rw-r--r--  app/assets/javascripts/boards/components/boards_selector.vue | 271
-rw-r--r--  app/assets/javascripts/ci/artifacts/components/job_artifacts_table.vue | 66
-rw-r--r--  app/assets/javascripts/ci/job_details/components/job_header.vue | 4
-rw-r--r--  app/assets/javascripts/sentry/init_sentry.js | 18
-rw-r--r--  app/assets/javascripts/super_sidebar/components/nav_item.vue | 5
-rw-r--r--  app/assets/javascripts/vue_shared/components/ci_badge_link.vue | 2
-rw-r--r--  app/assets/stylesheets/_page_specific_files.scss | 1
-rw-r--r--  app/assets/stylesheets/framework/common.scss | 20
-rw-r--r--  app/assets/stylesheets/page_bundles/labels.scss (renamed from app/assets/stylesheets/pages/labels.scss) | 40
-rw-r--r--  app/helpers/application_helper.rb | 4
-rw-r--r--  app/helpers/blame_helper.rb | 15
-rw-r--r--  app/helpers/blob_helper.rb | 13
-rw-r--r--  app/helpers/merge_requests_helper.rb | 6
-rw-r--r--  app/models/concerns/bulk_users_by_email_load.rb | 2
-rw-r--r--  app/models/concerns/import_state/sidekiq_job_tracker.rb | 2
-rw-r--r--  app/models/concerns/issuable_link.rb | 23
-rw-r--r--  app/models/concerns/routable.rb | 2
-rw-r--r--  app/models/integrations/asana.rb | 18
-rw-r--r--  app/models/issue.rb | 9
-rw-r--r--  app/models/project.rb | 2
-rw-r--r--  app/models/project_import_data.rb | 5
-rw-r--r--  app/models/vulnerability.rb | 3
-rw-r--r--  app/models/work_item.rb | 6
-rw-r--r--  app/models/work_items/related_work_item_link.rb | 12
-rw-r--r--  app/services/import/bitbucket_server_service.rb | 7
-rw-r--r--  app/services/import/github_service.rb | 1
-rw-r--r--  app/services/issue_links/create_service.rb | 2
-rw-r--r--  app/validators/json_schemas/vulnerability_cvss_vectors.json | 22
-rw-r--r--  app/views/admin/labels/index.html.haml | 1
-rw-r--r--  app/views/groups/labels/index.html.haml | 1
-rw-r--r--  app/views/layouts/_head.html.haml | 6
-rw-r--r--  app/views/projects/issues/new.html.haml | 1
-rw-r--r--  app/views/projects/labels/index.html.haml | 1
-rw-r--r--  app/views/projects/merge_requests/_mr_title.html.haml | 2
-rw-r--r--  app/views/shared/boards/_show.html.haml | 1
-rw-r--r--  app/views/shared/issuable/_label_dropdown.html.haml | 1
-rw-r--r--  app/views/shared/issuable/_sidebar.html.haml | 1
-rw-r--r--  app/views/shared/issuable/form/_metadata.html.haml | 1
-rw-r--r--  app/views/shared/labels/_form.html.haml | 1
-rw-r--r--  app/views/shared/milestones/_labels_tab.html.haml | 2
-rw-r--r--  app/workers/concerns/limited_capacity/worker.rb | 36
-rw-r--r--  app/workers/gitlab/bitbucket_import/advance_stage_worker.rb | 6
-rw-r--r--  app/workers/gitlab/bitbucket_server_import/advance_stage_worker.rb | 6
-rw-r--r--  app/workers/gitlab/github_import/advance_stage_worker.rb | 6
-rw-r--r--  app/workers/gitlab/import/advance_stage.rb | 89
-rw-r--r--  app/workers/gitlab/jira_import/advance_stage_worker.rb | 6
-rw-r--r--  config/application.rb | 1
-rw-r--r--  config/feature_flags/development/advance_stage_timeout.yml | 8
-rw-r--r--  config/feature_flags/development/optimize_find_routable.yml (renamed from config/feature_flags/development/optimize_routable.yml) | 8
-rw-r--r--  config/feature_flags/development/page_specific_styles.yml | 8
-rw-r--r--  config/initializers/postgres_partitioning.rb | 3
-rw-r--r--  db/docs/p_ci_finished_build_ch_sync_events.yml | 10
-rw-r--r--  db/migrate/20230906122405_add_cvss_to_vulnerabilities.rb | 17
-rw-r--r--  db/migrate/20230915103259_create_ci_finished_build_ch_sync_events.rb | 26
-rw-r--r--  db/migrate/20230920162613_add_force_include_all_resources_to_workspaces.rb | 9
-rw-r--r--  db/post_migrate/20230920122059_add_ci_job_artifacts_file_final_path_index_synchronously.rb | 16
-rw-r--r--  db/schema_migrations/20230906122405 | 1
-rw-r--r--  db/schema_migrations/20230915103259 | 1
-rw-r--r--  db/schema_migrations/20230920122059 | 1
-rw-r--r--  db/schema_migrations/20230920162613 | 1
-rw-r--r--  db/structure.sql | 19
-rw-r--r--  doc/administration/integration/terminal.md | 2
-rw-r--r--  doc/api/commits.md | 1
-rw-r--r--  doc/api/import.md | 2
-rw-r--r--  doc/api/merge_requests.md | 3
-rw-r--r--  doc/api/repositories.md | 1
-rw-r--r--  doc/api/rest/index.md | 2
-rw-r--r--  doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md | 2
-rw-r--r--  doc/architecture/blueprints/gitlab_observability_backend/index.md | 4
-rw-r--r--  doc/architecture/blueprints/gitlab_steps/index.md | 2
-rw-r--r--  doc/architecture/blueprints/google_artifact_registry_integration/backend.md | 131
-rw-r--r--  doc/architecture/blueprints/google_artifact_registry_integration/index.md | 42
-rw-r--r--  doc/architecture/blueprints/modular_monolith/hexagonal_monolith/index.md | 2
-rw-r--r--  doc/architecture/blueprints/observability_tracing/index.md | 6
-rw-r--r--  doc/architecture/blueprints/permissions/index.md | 2
-rw-r--r--  doc/architecture/blueprints/remote_development/index.md | 3
-rw-r--r--  doc/ci/index.md | 2
-rw-r--r--  doc/ci/migration/circleci.md | 4
-rw-r--r--  doc/ci/yaml/signing_examples.md | 2
-rw-r--r--  doc/development/contributing/merge_request_workflow.md | 2
-rw-r--r--  doc/development/database/clickhouse/gitlab_activity_data.md | 6
-rw-r--r--  doc/development/database/clickhouse/index.md | 40
-rw-r--r--  doc/development/fe_guide/design_tokens.md | 2
-rw-r--r--  doc/development/feature_flags/controls.md | 12
-rw-r--r--  doc/development/feature_flags/index.md | 3
-rw-r--r--  doc/development/sidekiq/limited_capacity_worker.md | 22
-rw-r--r--  lib/api/commits.rb | 4
-rw-r--r--  lib/api/entities/diff.rb | 6
-rw-r--r--  lib/api/helpers/unidiff.rb | 17
-rw-r--r--  lib/api/import_bitbucket_server.rb | 2
-rw-r--r--  lib/api/import_github.rb | 2
-rw-r--r--  lib/api/merge_request_diffs.rb | 4
-rw-r--r--  lib/api/merge_requests.rb | 10
-rw-r--r--  lib/api/repositories.rb | 4
-rw-r--r--  lib/gitlab/bitbucket_server_import/project_creator.rb | 7
-rw-r--r--  lib/gitlab/diff/file.rb | 2
-rw-r--r--  lib/gitlab/git/diff.rb | 12
-rw-r--r--  lib/gitlab/github_import/settings.rb | 5
-rw-r--r--  locale/gitlab.pot | 18
-rw-r--r--  qa/qa/page/component/issue_board/show.rb | 5
-rw-r--r--  spec/controllers/import/bitbucket_server_controller_spec.rb | 7
-rw-r--r--  spec/db/schema_spec.rb | 1
-rw-r--r--  spec/features/invites_spec.rb | 24
-rw-r--r--  spec/frontend/alert_spec.js | 68
-rw-r--r--  spec/frontend/boards/components/boards_selector_spec.js | 79
-rw-r--r--  spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js | 32
-rw-r--r--  spec/frontend/ci/job_details/components/job_header_spec.js | 27
-rw-r--r--  spec/frontend/sentry/init_sentry_spec.js | 28
-rw-r--r--  spec/frontend/vue_shared/components/ci_badge_link_spec.js | 6
-rw-r--r--  spec/helpers/blob_helper_spec.rb | 55
-rw-r--r--  spec/lib/api/entities/diff_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/git/diff_spec.rb | 122
-rw-r--r--  spec/lib/gitlab/github_import/settings_spec.rb | 5
-rw-r--r--  spec/models/concerns/routable_spec.rb | 44
-rw-r--r--  spec/models/integrations/asana_spec.rb | 92
-rw-r--r--  spec/models/issue_link_spec.rb | 4
-rw-r--r--  spec/models/issue_spec.rb | 40
-rw-r--r--  spec/models/work_item_spec.rb | 23
-rw-r--r--  spec/models/work_items/related_work_item_link_spec.rb | 42
-rw-r--r--  spec/requests/api/commits_spec.rb | 10
-rw-r--r--  spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb | 2
-rw-r--r--  spec/requests/api/import_bitbucket_server_spec.rb | 32
-rw-r--r--  spec/requests/api/import_github_spec.rb | 13
-rw-r--r--  spec/requests/api/invitations_spec.rb | 14
-rw-r--r--  spec/requests/api/merge_request_diffs_spec.rb | 9
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 18
-rw-r--r--  spec/requests/api/repositories_spec.rb | 12
-rw-r--r--  spec/services/import/github_service_spec.rb | 49
-rw-r--r--  spec/services/members/invite_service_spec.rb | 6
-rw-r--r--  spec/support/helpers/listbox_helpers.rb | 4
-rw-r--r--  spec/support/helpers/stub_feature_flags.rb | 9
-rw-r--r--  spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb | 16
-rw-r--r--  spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb | 85
-rw-r--r--  spec/support/shared_examples/models/issuable_link_shared_examples.rb | 39
-rw-r--r--  spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb | 46
140 files changed, 1679 insertions, 776 deletions
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 6618e7f3c0b..dd7914a7999 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-ca0d131831136c26ada4460798d438466cbfa3a5
+9d00c51754d1199119e46d68db3b233115118afa
diff --git a/Gemfile b/Gemfile
index 71897781b1c..a7a132c61b5 100644
--- a/Gemfile
+++ b/Gemfile
@@ -287,9 +287,6 @@ gem 'slack-messenger', '~> 2.3.4'
# Hangouts Chat integration
gem 'hangouts-chat', '~> 0.0.5', require: 'hangouts_chat'
-# Asana integration
-gem 'asana', '~> 0.10.13'
-
# FogBugz integration
gem 'ruby-fogbugz', '~> 0.3.0'
diff --git a/Gemfile.checksum b/Gemfile.checksum
index a6e93952963..54e661e0838 100644
--- a/Gemfile.checksum
+++ b/Gemfile.checksum
@@ -23,7 +23,6 @@
{"name":"apollo_upload_server","version":"2.1.0","platform":"ruby","checksum":"e5f3c9dda0c2ca775d007072742b98d517dfd91a667111fedbcdc94dfabd904e"},
{"name":"app_store_connect","version":"0.29.0","platform":"ruby","checksum":"01d7a923825a4221892099acb5a72f86f6ee7d8aa95815d3c459ba6816ea430f"},
{"name":"arr-pm","version":"0.0.12","platform":"ruby","checksum":"fdff482f75239239201f4d667d93424412639aad0b3b0ad4d827e7c637e0ad39"},
-{"name":"asana","version":"0.10.13","platform":"ruby","checksum":"36d0d37f8dd6118a54580f1b80224875d7b6a9027598938e1722a508bfc2d7ac"},
{"name":"asciidoctor","version":"2.0.18","platform":"ruby","checksum":"bbd1e1d16deed8db94bf9624b9f4474fac32d9ca7225d377f076c08d9adde387"},
{"name":"asciidoctor-include-ext","version":"0.4.0","platform":"ruby","checksum":"406adb9d2fbfc25536609ca13b787ed704dc06a4e49d6709b83f3bad578f7878"},
{"name":"asciidoctor-kroki","version":"0.8.0","platform":"ruby","checksum":"e53b3f349167cebde990b0098863e8fe98fd235e35263a78c88cc4e0268b1a36"},
@@ -169,7 +168,6 @@
{"name":"faraday-retry","version":"1.0.3","platform":"ruby","checksum":"add154f4f399243cbe070806ed41b96906942e7f5259bb1fe6daf2ec8f497194"},
{"name":"faraday_middleware","version":"1.2.0","platform":"ruby","checksum":"ded15d574d50e92bd04448d5566913af5cb1a01b2fa311ceecc2464fa0ab88af"},
{"name":"faraday_middleware-aws-sigv4","version":"0.3.0","platform":"ruby","checksum":"744654bd5b15539a54aed39b806e2dfb45aa47708fa1e6f6766fedcda6c262be"},
-{"name":"faraday_middleware-multi_json","version":"0.0.6","platform":"ruby","checksum":"38fc4dab7a78916ad09827d5a164aab62fbf2cb8b9de0507763de1f561d7a118"},
{"name":"fast_blank","version":"1.0.1","platform":"java","checksum":"90d82106b0e4aa19ac24ba1604c79a0c5a4c471601e800c9b2b072938a6d9a92"},
{"name":"fast_blank","version":"1.0.1","platform":"ruby","checksum":"269fc30414fed4e6403bc4a49081e1ea539f8b9226e59276ed1efaefabaa17ea"},
{"name":"fast_gettext","version":"2.3.0","platform":"ruby","checksum":"0253e26423ccab68061c42387827e3b99243a1b15ad614df1c800ba870d64f84"},
diff --git a/Gemfile.lock b/Gemfile.lock
index 4fd9aa185c9..9b4f3cfd52e 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -240,11 +240,6 @@ GEM
activesupport (>= 6.0.0)
jwt (>= 1.4, <= 2.5.0)
arr-pm (0.0.12)
- asana (0.10.13)
- faraday (~> 1.0)
- faraday_middleware (~> 1.0)
- faraday_middleware-multi_json (~> 0.0)
- oauth2 (>= 1.4, < 3)
asciidoctor (2.0.18)
asciidoctor-include-ext (0.4.0)
asciidoctor (>= 1.5.6, < 3.0.0)
@@ -547,9 +542,6 @@ GEM
faraday_middleware-aws-sigv4 (0.3.0)
aws-sigv4 (~> 1.0)
faraday (>= 0.15)
- faraday_middleware-multi_json (0.0.6)
- faraday_middleware
- multi_json
fast_blank (1.0.1)
fast_gettext (2.3.0)
ffaker (2.10.0)
@@ -1735,7 +1727,6 @@ DEPENDENCIES
apollo_upload_server (~> 2.1.0)
app_store_connect
arr-pm (~> 0.0.12)
- asana (~> 0.10.13)
asciidoctor (~> 2.0.18)
asciidoctor-include-ext (~> 0.4.0)
asciidoctor-kroki (~> 0.8.0)
diff --git a/app/assets/javascripts/alert.js b/app/assets/javascripts/alert.js
index 006c4f50d09..4d724b17723 100644
--- a/app/assets/javascripts/alert.js
+++ b/app/assets/javascripts/alert.js
@@ -1,6 +1,7 @@
import * as Sentry from '@sentry/browser';
import Vue from 'vue';
-import { GlAlert } from '@gitlab/ui';
+import isEmpty from 'lodash/isEmpty';
+import { GlAlert, GlLink, GlSprintf } from '@gitlab/ui';
import { __ } from '~/locale';
export const VARIANT_SUCCESS = 'success';
@@ -32,6 +33,14 @@ export const VARIANT_TIP = 'tip';
* // Respond to the alert being dismissed
* createAlert({ message: 'Message', onDismiss: () => {} });
*
+ * @example
+ * // Add inline link in the message
+ * createAlert({ message: 'Read more at %{exampleLinkStart}example page%{exampleLinkEnd}.', messageLinks: { exampleLink: 'https://example.com' } });
+ *
+ * @example
+ * // Add inline links in the message with custom GlLink props
+ * createAlert({ message: 'Read more at %{exampleLinkStart}example page%{exampleLinkEnd}.', messageLinks: { exampleLink: { href: 'https://example.com', target: '_blank', isUnsafeLink: true }} });
+ *
* @param {object} options - Options to control the flash message
* @param {string} options.message - Alert message text
* @param {string} [options.title] - Alert title
@@ -48,6 +57,7 @@ export const VARIANT_TIP = 'tip';
* @param {string} [options.secondaryButton.link] - Href of secondary button
* @param {string} [options.secondaryButton.text] - Text of secondary button
* @param {Function} [options.secondaryButton.clickHandler] - Handler to call when secondary button is clicked on. The click event is sent as an argument.
+ * @param {object} [options.messageLinks] - Object containing mapping of sprintf tokens to URLs, used to format links within the message. If needed, you can pass a full props object for GlLink instead of a URL string
* @param {boolean} [options.captureError] - Whether to send error to Sentry
* @param {object} [options.error] - Error to be captured in Sentry
*/
@@ -63,6 +73,7 @@ export const createAlert = ({
onDismiss = null,
captureError = false,
error = null,
+ messageLinks = null,
}) => {
if (captureError && error) Sentry.captureException(error);
@@ -76,6 +87,45 @@ export const createAlert = ({
alertContainer.replaceChildren(el);
}
+ const createMessageNodes = (h) => {
+ if (isEmpty(messageLinks)) {
+ return message;
+ }
+
+ const normalizeLinkProps = (hrefOrProps) => {
+ const { href, ...otherLinkProps } =
+ typeof hrefOrProps === 'string' ? { href: hrefOrProps } : hrefOrProps;
+
+ return { href, linkProps: otherLinkProps };
+ };
+
+ return [
+ h(GlSprintf, {
+ props: {
+ message,
+ },
+ scopedSlots: Object.assign(
+ {},
+ ...Object.entries(messageLinks).map(([slotName, hrefOrProps]) => {
+ const { href, linkProps } = normalizeLinkProps(hrefOrProps);
+
+ return {
+ [slotName]: (props) =>
+ h(
+ GlLink,
+ {
+ props: linkProps,
+ attrs: { href },
+ },
+ props.content,
+ ),
+ };
+ }),
+ ),
+ }),
+ ];
+ };
+
return new Vue({
el,
components: {
@@ -130,7 +180,7 @@ export const createAlert = ({
},
on,
},
- message,
+ createMessageNodes(h),
);
},
});
diff --git a/app/assets/javascripts/boards/components/boards_selector.vue b/app/assets/javascripts/boards/components/boards_selector.vue
index cc6fde92f9b..7bc4c89699c 100644
--- a/app/assets/javascripts/boards/components/boards_selector.vue
+++ b/app/assets/javascripts/boards/components/boards_selector.vue
@@ -1,15 +1,7 @@
<script>
-import {
- GlLoadingIcon,
- GlSearchBoxByType,
- GlDropdown,
- GlDropdownDivider,
- GlDropdownSectionHeader,
- GlDropdownItem,
- GlModalDirective,
-} from '@gitlab/ui';
+import { GlButton, GlCollapsibleListbox, GlModalDirective } from '@gitlab/ui';
import { produce } from 'immer';
-import { throttle } from 'lodash';
+import { differenceBy, debounce } from 'lodash';
// eslint-disable-next-line no-restricted-imports
import { mapActions, mapState } from 'vuex';
@@ -18,7 +10,8 @@ import BoardForm from 'ee_else_ce/boards/components/board_form.vue';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { isMetaKey } from '~/lib/utils/common_utils';
import { updateHistory } from '~/lib/utils/url_utility';
-import { s__ } from '~/locale';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import { s__, __ } from '~/locale';
import eventHub from '../eventhub';
import groupBoardsQuery from '../graphql/group_boards.query.graphql';
@@ -34,15 +27,16 @@ export default {
name: 'BoardsSelector',
i18n: {
fetchBoardsError: s__('Boards|An error occurred while fetching boards. Please try again.'),
+ headerText: s__('IssueBoards|Switch board'),
+ noResultsText: s__('IssueBoards|No matching boards found'),
+ hiddenBoardsText: s__(
+ 'IssueBoards|Some of your boards are hidden, add a license to see them again.',
+ ),
},
components: {
BoardForm,
- GlLoadingIcon,
- GlSearchBoxByType,
- GlDropdown,
- GlDropdownDivider,
- GlDropdownSectionHeader,
- GlDropdownItem,
+ GlButton,
+ GlCollapsibleListbox,
},
directives: {
GlModalDirective,
@@ -60,11 +54,6 @@ export default {
'isApolloBoard',
],
props: {
- throttleDuration: {
- type: Number,
- default: 200,
- required: false,
- },
boardApollo: {
type: Object,
required: false,
@@ -78,13 +67,10 @@ export default {
},
data() {
return {
- hasScrollFade: false,
- scrollFadeInitialized: false,
boards: [],
recentBoards: [],
loadingBoards: false,
loadingRecentBoards: false,
- throttledSetScrollFade: throttle(this.setScrollFade, this.throttleDuration),
contentClientHeight: 0,
maxPosition: 0,
filterTerm: '',
@@ -97,6 +83,12 @@ export default {
boardToUse() {
return this.isApolloBoard ? this.boardApollo : this.board;
},
+ boardToUseName() {
+ return this.boardToUse?.name || s__('IssueBoards|Select board');
+ },
+ boardToUseId() {
+ return getIdFromGraphQLId(this.boardToUse.id) || '';
+ },
isBoardToUseLoading() {
return this.isApolloBoard ? this.isCurrentBoardLoading : this.isBoardLoading;
},
@@ -112,6 +104,26 @@ export default {
loading() {
return this.loadingRecentBoards || this.loadingBoards;
},
+ listBoxItems() {
+ const mapItems = ({ id, name }) => ({ text: name, value: id });
+
+ if (this.showRecentSection) {
+ const notRecent = differenceBy(this.filteredBoards, this.recentBoards, 'id');
+
+ return [
+ {
+ text: __('Recent'),
+ options: this.recentBoards.map(mapItems),
+ },
+ {
+ text: __('All'),
+ options: notRecent.map(mapItems),
+ },
+ ];
+ }
+
+ return this.filteredBoards.map(mapItems);
+ },
filteredBoards() {
return this.boards.filter((board) =>
board.name.toLowerCase().includes(this.filterTerm.toLowerCase()),
@@ -126,34 +138,25 @@ export default {
showDropdown() {
return this.showCreate || this.hasMissingBoards;
},
- scrollFadeClass() {
- return {
- 'fade-out': !this.hasScrollFade,
- };
- },
showRecentSection() {
return (
- this.recentBoards.length &&
+ this.recentBoards.length > 0 &&
this.boards.length > MIN_BOARDS_TO_VIEW_RECENT &&
!this.filterTerm.length
);
},
},
watch: {
- filteredBoards() {
- this.scrollFadeInitialized = false;
- this.$nextTick(this.setScrollFade);
- },
- recentBoards() {
- this.scrollFadeInitialized = false;
- this.$nextTick(this.setScrollFade);
- },
boardToUse(newBoard) {
document.title = newBoard.name;
},
},
created() {
eventHub.$on('showBoardModal', this.showPage);
+ this.handleSearch = debounce(this.setFilterTerm, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ },
+ destroyed() {
+ this.handleSearch.cancel();
},
beforeDestroy() {
eventHub.$off('showBoardModal', this.showPage);
@@ -248,34 +251,6 @@ export default {
this.$emit('switchBoard', board.id);
},
- isScrolledUp() {
- const { content } = this.$refs;
-
- if (!content) {
- return false;
- }
-
- const currentPosition = this.contentClientHeight + content.scrollTop;
-
- return currentPosition < this.maxPosition;
- },
- initScrollFade() {
- const { content } = this.$refs;
-
- if (!content) {
- return;
- }
-
- this.scrollFadeInitialized = true;
-
- this.contentClientHeight = content.clientHeight;
- this.maxPosition = content.scrollHeight;
- },
- setScrollFade() {
- if (!this.scrollFadeInitialized) this.initScrollFade();
-
- this.hasScrollFade = this.isScrolledUp();
- },
fetchCurrentBoard(boardId) {
this.fetchBoard({
fullPath: this.fullPath,
@@ -283,17 +258,24 @@ export default {
boardType: this.boardType,
});
},
- async switchBoard(boardId, e) {
+ setFilterTerm(value) {
+ this.filterTerm = value;
+ },
+ async switchBoardKeyEvent(boardId, e) {
if (isMetaKey(e)) {
+ e.stopPropagation();
window.open(`${this.boardBaseUrl}/${boardId}`, '_blank');
- } else if (this.isApolloBoard) {
+ }
+ },
+ switchBoardGroup(value) {
+ if (this.isApolloBoard) {
// Epic board ID is supported in EE version of this file
- this.$emit('switchBoard', this.fullBoardId(boardId));
- updateHistory({ url: `${this.boardBaseUrl}/${boardId}` });
+ this.$emit('switchBoard', this.fullBoardId(value));
+ updateHistory({ url: `${this.boardBaseUrl}/${value}` });
} else {
this.unsetActiveId();
- this.fetchCurrentBoard(boardId);
- updateHistory({ url: `${this.boardBaseUrl}/${boardId}` });
+ this.fetchCurrentBoard(value);
+ updateHistory({ url: `${this.boardBaseUrl}/${value}` });
}
},
},
@@ -303,105 +285,66 @@ export default {
<template>
<div class="boards-switcher gl-mr-3" data-testid="boards-selector">
<span class="boards-selector-wrapper">
- <gl-dropdown
+ <gl-collapsible-listbox
v-if="showDropdown"
+ block
data-testid="boards-dropdown"
data-qa-selector="boards_dropdown"
- toggle-class="dropdown-menu-toggle"
- menu-class="flex-column dropdown-extended-height"
+ searchable
+ :searching="loading"
+ toggle-class="gl-min-w-20"
+ :header-text="$options.i18n.headerText"
+ :no-results-text="$options.i18n.noResultsText"
:loading="isBoardToUseLoading"
- :text="boardToUse.name"
- @show="loadBoards"
+ :items="listBoxItems"
+ :toggle-text="boardToUseName"
+ :selected="boardToUseId"
+ @search="handleSearch"
+ @select="switchBoardGroup"
+ @shown="loadBoards"
>
- <p class="gl-dropdown-header-top" @mousedown.prevent>
- {{ s__('IssueBoards|Switch board') }}
- </p>
- <gl-search-box-by-type ref="searchBox" v-model="filterTerm" class="m-2" />
-
- <div
- v-if="!loading"
- ref="content"
- data-qa-selector="boards_dropdown_content"
- class="dropdown-content flex-fill"
- @scroll.passive="throttledSetScrollFade"
- >
- <gl-dropdown-item
- v-show="filteredBoards.length === 0"
- class="gl-pointer-events-none text-secondary"
- >
- {{ s__('IssueBoards|No matching boards found') }}
- </gl-dropdown-item>
-
- <gl-dropdown-section-header v-if="showRecentSection">
- {{ __('Recent') }}
- </gl-dropdown-section-header>
-
- <template v-if="showRecentSection">
- <gl-dropdown-item
- v-for="recentBoard in recentBoards"
- :key="`recent-${recentBoard.id}`"
- data-testid="dropdown-item"
- @click.prevent="switchBoard(recentBoard.id, $event)"
- >
- {{ recentBoard.name }}
- </gl-dropdown-item>
- </template>
-
- <gl-dropdown-divider v-if="showRecentSection" />
-
- <gl-dropdown-section-header v-if="showRecentSection">
- {{ __('All') }}
- </gl-dropdown-section-header>
-
- <gl-dropdown-item
- v-for="otherBoard in filteredBoards"
- :key="otherBoard.id"
- data-testid="dropdown-item"
- @click.prevent="switchBoard(otherBoard.id, $event)"
- >
- {{ otherBoard.name }}
- </gl-dropdown-item>
-
- <gl-dropdown-item v-if="hasMissingBoards" class="no-pointer-events">
+ <template #list-item="{ item }">
+ <div data-testid="dropdown-item-recent" @click="switchBoardKeyEvent(item.value, $event)">
+ {{ item.text }}
+ </div>
+ </template>
+
+ <template #footer>
+ <div v-if="hasMissingBoards" class="gl-border-t gl-font-sm gl-px-4 gl-pt-4 gl-pb-3">
{{
s__('IssueBoards|Some of your boards are hidden, add a license to see them again.')
}}
- </gl-dropdown-item>
- </div>
-
- <div
- v-show="filteredBoards.length > 0"
- class="dropdown-content-faded-mask"
- :class="scrollFadeClass"
- ></div>
-
- <gl-loading-icon v-if="loading" size="sm" />
-
- <div v-if="canAdminBoard">
- <gl-dropdown-divider />
-
- <gl-dropdown-item
- v-if="showCreate"
- v-gl-modal-directive="'board-config-modal'"
- data-qa-selector="create_new_board_button"
- data-track-action="click_button"
- data-track-label="create_new_board"
- data-track-property="dropdown"
- @click.prevent="showPage('new')"
- >
- {{ s__('IssueBoards|Create new board') }}
- </gl-dropdown-item>
-
- <gl-dropdown-item
- v-if="showDelete"
- v-gl-modal-directive="'board-config-modal'"
- class="text-danger"
- @click.prevent="showPage('delete')"
- >
- {{ s__('IssueBoards|Delete board') }}
- </gl-dropdown-item>
- </div>
- </gl-dropdown>
+ </div>
+ <div v-if="canAdminBoard" class="gl-border-t gl-py-2 gl-px-2">
+ <gl-button
+ v-if="showCreate"
+ v-gl-modal-directive="'board-config-modal'"
+ block
+ class="gl-justify-content-start!"
+ category="tertiary"
+ data-qa-selector="create_new_board_button"
+ data-track-action="click_button"
+ data-track-label="create_new_board"
+ data-track-property="dropdown"
+ @click="showPage('new')"
+ >
+ {{ s__('IssueBoards|Create new board') }}
+ </gl-button>
+
+ <gl-button
+ v-if="showDelete"
+ v-gl-modal-directive="'board-config-modal'"
+ block
+ category="tertiary"
+ variant="danger"
+ class="gl-mt-0! gl-justify-content-start!"
+ @click="showPage('delete')"
+ >
+ {{ s__('IssueBoards|Delete board') }}
+ </gl-button>
+ </div>
+ </template>
+ </gl-collapsible-listbox>
<board-form
v-if="currentPage"
diff --git a/app/assets/javascripts/ci/artifacts/components/job_artifacts_table.vue b/app/assets/javascripts/ci/artifacts/components/job_artifacts_table.vue
index e08470c62be..d2f63eef9ce 100644
--- a/app/assets/javascripts/ci/artifacts/components/job_artifacts_table.vue
+++ b/app/assets/javascripts/ci/artifacts/components/job_artifacts_table.vue
@@ -5,16 +5,15 @@ import {
GlLink,
GlButtonGroup,
GlButton,
- GlBadge,
GlIcon,
GlPagination,
GlFormCheckbox,
GlTooltipDirective,
} from '@gitlab/ui';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import { createAlert } from '~/alert';
import { getIdFromGraphQLId, convertToGraphQLId } from '~/graphql_shared/utils';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { TYPENAME_PROJECT } from '~/graphql_shared/constants';
import getJobArtifactsQuery from '../graphql/queries/get_job_artifacts.query.graphql';
import { totalArtifactsSizeForJob, mapArchivesToJobNodes, mapBooleansToJobNodes } from '../utils';
@@ -65,12 +64,11 @@ export default {
GlLink,
GlButtonGroup,
GlButton,
- GlBadge,
GlIcon,
GlPagination,
GlFormCheckbox,
- CiIcon,
TimeAgo,
+ CiBadgeLink,
JobCheckbox,
ArtifactsBulkDelete,
BulkDeleteModal,
@@ -328,7 +326,7 @@ export default {
{
key: 'artifacts',
label: I18N_ARTIFACTS,
- thClass: 'gl-w-quarter',
+ thClass: 'gl-w-eighth',
},
{
key: 'job',
@@ -350,7 +348,7 @@ export default {
{
key: 'actions',
label: '',
- thClass: 'gl-w-eighth',
+ thClass: 'gl-w-20p',
tdClass: 'gl-text-right',
},
],
@@ -441,45 +439,37 @@ export default {
</span>
</template>
<template #cell(job)="{ item }">
- <span class="gl-display-inline-flex gl-align-items-center gl-w-full gl-mb-4">
+ <div class="gl-display-inline-flex gl-align-items-center gl-mb-3 gl-gap-3">
<span data-testid="job-artifacts-job-status">
- <ci-icon v-if="item.succeeded" :status="item.detailedStatus" class="gl-mr-3" />
- <gl-badge
- v-else
- :icon="item.detailedStatus.icon"
- :variant="$options.STATUS_BADGE_VARIANTS[item.detailedStatus.group]"
- class="gl-mr-3"
- >
- {{ item.detailedStatus.label }}
- </gl-badge>
+ <ci-badge-link :status="item.detailedStatus" size="sm" :show-text="false" />
</span>
- <gl-link :href="item.webPath" class="gl-font-weight-bold">
+ <gl-link :href="item.webPath">
{{ item.name }}
</gl-link>
- </span>
- <span class="gl-display-inline-flex">
+ </div>
+ <div class="gl-mb-1">
<gl-icon name="pipeline" class="gl-mr-2" />
- <gl-link
- :href="item.pipeline.path"
- class="gl-text-black-normal gl-text-decoration-underline gl-mr-4"
- >
+ <gl-link :href="item.pipeline.path" class="gl-mr-2">
{{ pipelineId(item) }}
</gl-link>
- <gl-icon name="branch" class="gl-mr-2" />
- <gl-link
- :href="item.refPath"
- class="gl-text-black-normal gl-text-decoration-underline gl-mr-4"
- >
- {{ item.refName }}
- </gl-link>
- <gl-icon name="commit" class="gl-mr-2" />
- <gl-link
- :href="item.commitPath"
- class="gl-text-black-normal gl-text-decoration-underline gl-mr-4"
- >
- {{ item.shortSha }}
- </gl-link>
- </span>
+ <span class="gl-display-inline-block gl-rounded-base gl-px-2 gl-bg-gray-50">
+ <gl-icon name="commit" :size="12" class="gl-mr-2" />
+ <gl-link
+ :href="item.commitPath"
+ class="gl-text-black-normal gl-font-sm gl-font-monospace"
+ >
+ {{ item.shortSha }}
+ </gl-link>
+ </span>
+ </div>
+ <div>
+ <span class="gl-display-inline-block gl-rounded-base gl-px-2 gl-bg-gray-50">
+ <gl-icon name="branch" :size="12" class="gl-mr-1" />
+ <gl-link :href="item.refPath" class="gl-text-black-normal gl-font-sm gl-font-monospace">
+ {{ item.refName }}
+ </gl-link>
+ </span>
+ </div>
</template>
<template #cell(size)="{ item }">
<span data-testid="job-artifacts-size">{{ artifactsSize(item) }}</span>
diff --git a/app/assets/javascripts/ci/job_details/components/job_header.vue b/app/assets/javascripts/ci/job_details/components/job_header.vue
index 38bedb989c3..e30cd3a9474 100644
--- a/app/assets/javascripts/ci/job_details/components/job_header.vue
+++ b/app/assets/javascripts/ci/job_details/components/job_header.vue
@@ -141,10 +141,6 @@ export default {
</template>
</section>
- <!-- eslint-disable-next-line @gitlab/vue-prefer-dollar-scopedslots -->
- <section v-if="$slots.default" data-testid="job-header-action-buttons" class="gl-display-flex">
- <slot></slot>
- </section>
<gl-button
class="gl-md-display-none gl-ml-auto gl-align-self-start js-sidebar-build-toggle"
icon="chevron-double-lg-left"
diff --git a/app/assets/javascripts/sentry/init_sentry.js b/app/assets/javascripts/sentry/init_sentry.js
index 5b4e5be1f10..81ae46b8dd6 100644
--- a/app/assets/javascripts/sentry/init_sentry.js
+++ b/app/assets/javascripts/sentry/init_sentry.js
@@ -20,6 +20,8 @@ const initSentry = () => {
const hub = getCurrentHub();
+ const page = document?.body?.dataset?.page;
+
const client = new BrowserClient({
// Sentry.init(...) options
dsn: gon.sentry_dsn,
@@ -38,7 +40,19 @@ const initSentry = () => {
// https://github.com/getsentry/sentry-javascript/blob/7.66.0/MIGRATION.md#explicit-client-options
transport: makeFetchTransport,
stackParser: defaultStackParser,
- integrations: [...defaultIntegrations, new BrowserTracing()],
+ integrations: [
+ ...defaultIntegrations,
+ new BrowserTracing({
+ beforeNavigate(context) {
+ return {
+ ...context,
+ // `page` acts as transaction name for performance tracing.
+ // If missing, use default Sentry behavior: window.location.pathname
+ name: page || window?.location?.pathname,
+ };
+ },
+ }),
+ ],
});
hub.bindClient(client);
@@ -46,7 +60,7 @@ const initSentry = () => {
hub.setTags({
revision: gon.revision,
feature_category: gon.feature_category,
- page: document?.body?.dataset?.page,
+ page,
});
if (gon.current_user_id) {
diff --git a/app/assets/javascripts/super_sidebar/components/nav_item.vue b/app/assets/javascripts/super_sidebar/components/nav_item.vue
index 5e0f8fffb0e..305e2f94c20 100644
--- a/app/assets/javascripts/super_sidebar/components/nav_item.vue
+++ b/app/assets/javascripts/super_sidebar/components/nav_item.vue
@@ -145,6 +145,9 @@ export default {
hasAvatar() {
return Boolean(this.item.entity_id);
},
+ hasEndSpace() {
+ return this.hasPill || this.isPinnable || this.isFlyout;
+ },
avatarShape() {
return this.item.avatar_shape || 'rect';
},
@@ -236,7 +239,7 @@ export default {
</div>
</div>
<slot name="actions"></slot>
- <span v-if="hasPill || isPinnable" class="gl-text-right gl-relative gl-min-w-8">
+ <span v-if="hasEndSpace" class="gl-text-right gl-relative gl-min-w-6">
<gl-badge
v-if="hasPill"
size="sm"
diff --git a/app/assets/javascripts/vue_shared/components/ci_badge_link.vue b/app/assets/javascripts/vue_shared/components/ci_badge_link.vue
index 1f45b4c5c9d..101ebcd1c48 100644
--- a/app/assets/javascripts/vue_shared/components/ci_badge_link.vue
+++ b/app/assets/javascripts/vue_shared/components/ci_badge_link.vue
@@ -121,7 +121,7 @@ export default {
<template>
<gl-badge
v-gl-tooltip
- :class="{ 'gl-pl-2': isSmallBadgeSize }"
+ :class="{ 'gl-pl-2': isSmallBadgeSize, 'gl-p-2!': !showText }"
:title="title"
:href="detailsPath"
:size="size"
diff --git a/app/assets/stylesheets/_page_specific_files.scss b/app/assets/stylesheets/_page_specific_files.scss
index 47701d0490a..a6377480fa7 100644
--- a/app/assets/stylesheets/_page_specific_files.scss
+++ b/app/assets/stylesheets/_page_specific_files.scss
@@ -4,7 +4,6 @@
@import './pages/groups';
@import './pages/hierarchy';
@import './pages/issues';
-@import './pages/labels';
@import './pages/note_form';
@import './pages/notes';
@import './pages/pipelines';
diff --git a/app/assets/stylesheets/framework/common.scss b/app/assets/stylesheets/framework/common.scss
index 514247d2913..764a4169da0 100644
--- a/app/assets/stylesheets/framework/common.scss
+++ b/app/assets/stylesheets/framework/common.scss
@@ -567,3 +567,23 @@ See https://gitlab.com/gitlab-org/gitlab/issues/36857 for more details.
}
}
}
+
+// --- moved from labels.scss when moving to page_bundles ---
+// Fix scoped label padding in cases where old markdown uses the old label structure */
+.gl-label-text + .gl-label-text {
+ @include gl-pl-2;
+ @include gl-pr-3;
+}
+
+// used in the Markdown rendering of labels
+.scoped-label-tooltip-title {
+ color: var(--indigo-300, $indigo-300);
+}
+
+.gl-label-scoped {
+ box-shadow: 0 0 0 2px currentColor inset;
+
+ &.gl-label-sm {
+ box-shadow: 0 0 0 1px inset;
+ }
+}
diff --git a/app/assets/stylesheets/pages/labels.scss b/app/assets/stylesheets/page_bundles/labels.scss
index 423e25a900d..bc0bf4bc490 100644
--- a/app/assets/stylesheets/pages/labels.scss
+++ b/app/assets/stylesheets/page_bundles/labels.scss
@@ -1,3 +1,5 @@
+@import 'mixins_and_variables_and_functions';
+
.suggest-colors {
padding-top: 3px;
@@ -29,19 +31,19 @@
margin-bottom: -5px;
&:first-of-type {
- border-top-left-radius: $border-radius-base;
+ border-top-left-radius: $gl-border-radius-base;
}
&:nth-of-type(7) {
- border-top-right-radius: $border-radius-base;
+ border-top-right-radius: $gl-border-radius-base;
}
&:nth-last-child(7) {
- border-bottom-left-radius: $border-radius-base;
+ border-bottom-left-radius: $gl-border-radius-base;
}
&:last-of-type {
- border-bottom-right-radius: $border-radius-base;
+ border-bottom-right-radius: $gl-border-radius-base;
}
}
}
@@ -78,7 +80,7 @@
padding: 0 $grid-size;
line-height: 16px;
border-radius: $label-border-radius;
- color: $white;
+ color: var(--white, $white);
}
.manage-labels-list {
@@ -94,7 +96,7 @@
&:hover,
&:focus-within {
- background-color: $blue-50;
+ background-color: var(--blue-50, $blue-50);
}
&:active {
@@ -129,7 +131,7 @@
}
.label-badge {
- color: $gray-900;
+ color: var(--gray-900, $gray-900);
display: inline-block;
font-weight: $gl-font-weight-normal;
padding: $gl-padding-4 $gl-padding-8;
@@ -147,15 +149,15 @@
}
.label-action {
- color: $gray-700;
+ color: var(--gray-700, $gray-700);
cursor: pointer;
&:hover {
- color: $blue-600;
+ color: var(--blue-600, $blue-600);
}
&.hover-red:hover {
- color: $red-500;
+ color: var(--red-500, $red-500);
}
}
}
@@ -188,21 +190,3 @@
.priority-labels-empty-state .svg-content img {
max-width: $priority-label-empty-state-width;
}
-
-.scoped-label-tooltip-title {
- color: $indigo-300;
-}
-
-.gl-label-scoped {
- box-shadow: 0 0 0 2px currentColor inset;
-
- &.gl-label-sm {
- box-shadow: 0 0 0 1px inset;
- }
-}
-
-/* Fix scoped label padding in cases where old markdown uses the old label structure */
-.gl-label-text + .gl-label-text {
- @include gl-pl-2;
- @include gl-pr-3;
-}
diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb
index e3a630024d9..fdb04b711f5 100644
--- a/app/helpers/application_helper.rb
+++ b/app/helpers/application_helper.rb
@@ -404,6 +404,10 @@ module ApplicationHelper
end
def add_page_specific_style(path, defer: true)
+ @already_added_styles ||= Set.new
+ return if @already_added_styles.include?(path)
+
+ @already_added_styles.add(path)
content_for :page_specific_styles do
if defer
stylesheet_link_tag_defer path
diff --git a/app/helpers/blame_helper.rb b/app/helpers/blame_helper.rb
index 56d651a8b65..f00493ddf2a 100644
--- a/app/helpers/blame_helper.rb
+++ b/app/helpers/blame_helper.rb
@@ -1,13 +1,6 @@
# frozen_string_literal: true
module BlameHelper
- BODY_FONT_SIZE = "0.875rem"
- COMMIT_LINE_HEIGHT = 3 # 150% * 2 lines of text
- COMMIT_PADDING = "10px" # 5px from both top and bottom
- COMMIT_BLOCK_HEIGHT_EXP = "(#{BODY_FONT_SIZE} * #{COMMIT_LINE_HEIGHT}) + #{COMMIT_PADDING}"
- CODE_LINE_HEIGHT = 1.1875
- CODE_PADDING = "20px" # 10px from both top and bottom
-
def age_map_duration(blame_groups, project)
now = Time.zone.now
start_date = blame_groups.map { |blame_group| blame_group[:commit].committed_date }
@@ -32,14 +25,6 @@ module BlameHelper
end
end
- def intrinsic_row_css(line_count)
- # using rems here because the size of the row depends on the text size
- # which can be customized via user agent styles and browser preferences
- total_line_height_exp = "#{line_count * CODE_LINE_HEIGHT}rem + #{CODE_PADDING}"
- row_height_exp = line_count == 1 ? COMMIT_BLOCK_HEIGHT_EXP : total_line_height_exp
- "contain-intrinsic-size: 1px calc(#{row_height_exp})"
- end
-
def blame_pages_streaming_url(id, project)
namespace_project_blame_page_url(namespace_id: project.namespace, project_id: project, id: id, streaming: true)
end
diff --git a/app/helpers/blob_helper.rb b/app/helpers/blob_helper.rb
index 6746e6549ec..0d5b8755a37 100644
--- a/app/helpers/blob_helper.rb
+++ b/app/helpers/blob_helper.rb
@@ -268,14 +268,6 @@ module BlobHelper
}.compact
end
- def edit_modify_file_fork_params(action)
- {
- to: request.fullpath,
- notice: edit_in_new_fork_notice_action(action),
- notice_now: edit_in_new_fork_notice_now
- }
- end
-
def edit_fork_button_tag(common_classes, project, label, params, action = 'edit')
fork_path = project_forks_path(project, namespace_key: current_user.namespace.id, continue: params)
@@ -322,11 +314,6 @@ module BlobHelper
@project.team.human_max_access(current_user&.id).try(:downcase)
end
- def editing_ci_config?
- @path.to_s.end_with?(Ci::Pipeline::CONFIG_EXTENSION) ||
- @path.to_s == @project.ci_config_path_or_default
- end
-
def vue_blob_app_data(project, blob, ref)
{
blob_path: blob.path,
diff --git a/app/helpers/merge_requests_helper.rb b/app/helpers/merge_requests_helper.rb
index bdb2744be71..8f28af743e5 100644
--- a/app/helpers/merge_requests_helper.rb
+++ b/app/helpers/merge_requests_helper.rb
@@ -144,12 +144,6 @@ module MergeRequestsHelper
end
end
- def toggle_draft_merge_request_path(issuable)
- wip_event = issuable.draft? ? 'ready' : 'draft'
-
- issuable_path(issuable, { merge_request: { wip_event: wip_event } })
- end
-
def user_merge_requests_counts
@user_merge_requests_counts ||= begin
assigned_count = assigned_issuables_count(:merge_requests)
diff --git a/app/models/concerns/bulk_users_by_email_load.rb b/app/models/concerns/bulk_users_by_email_load.rb
index edbd3e21458..55143ead30a 100644
--- a/app/models/concerns/bulk_users_by_email_load.rb
+++ b/app/models/concerns/bulk_users_by_email_load.rb
@@ -7,7 +7,7 @@ module BulkUsersByEmailLoad
def users_by_emails(emails)
Gitlab::SafeRequestLoader.execute(resource_key: user_by_email_resource_key, resource_ids: emails) do |emails|
# have to consider all emails - even secondary, so use all_emails here
- grouped_users_by_email = User.by_any_email(emails).preload(:emails).group_by(&:all_emails)
+ grouped_users_by_email = User.by_any_email(emails, confirmed: true).preload(:emails).group_by(&:all_emails)
grouped_users_by_email.each_with_object({}) do |(found_emails, users), h|
found_emails.each { |e| h[e] = users.first if emails.include?(e) } # don't include all emails for an account, only the ones we want
diff --git a/app/models/concerns/import_state/sidekiq_job_tracker.rb b/app/models/concerns/import_state/sidekiq_job_tracker.rb
index b7d0ed0f51b..9c892acb158 100644
--- a/app/models/concerns/import_state/sidekiq_job_tracker.rb
+++ b/app/models/concerns/import_state/sidekiq_job_tracker.rb
@@ -19,7 +19,7 @@ module ImportState
end
def self.jid_by(project_id:, status:)
- select(:jid).where(status: status).find_by(project_id: project_id)
+ select(:id, :jid).where(status: status).find_by(project_id: project_id)
end
end
end
diff --git a/app/models/concerns/issuable_link.rb b/app/models/concerns/issuable_link.rb
index e884e5acecf..4a922d3c2ea 100644
--- a/app/models/concerns/issuable_link.rb
+++ b/app/models/concerns/issuable_link.rb
@@ -9,6 +9,7 @@
module IssuableLink
extend ActiveSupport::Concern
+ MAX_LINKS_COUNT = 100
TYPE_RELATES_TO = 'relates_to'
TYPE_BLOCKS = 'blocks' ## EE-only. Kept here to be used on link_type enum.
@@ -38,6 +39,7 @@ module IssuableLink
validates :source, uniqueness: { scope: :target_id, message: 'is already related' }
validate :check_self_relation
validate :check_opposite_relation
+ validate :validate_max_number_of_links, on: :create
scope :for_source_or_target, ->(issuable) { where(source: issuable).or(where(target: issuable)) }
@@ -60,6 +62,27 @@ module IssuableLink
errors.add(:source, "is already related to this #{self.class.issuable_name}")
end
end
+
+ def validate_max_number_of_links
+ return unless source && target
+
+ validate_max_number_of_links_for(source, :source)
+ validate_max_number_of_links_for(target, :target)
+ end
+
+ def validate_max_number_of_links_for(item, attribute_name)
+ return unless item.linked_items_count >= MAX_LINKS_COUNT
+
+ errors.add(
+ attribute_name,
+ format(
+ s_('This %{issuable} would exceed the maximum number of linked %{issuables} (%{limit}).'),
+ issuable: self.class.issuable_name,
+ issuables: self.class.issuable_name.pluralize,
+ limit: MAX_LINKS_COUNT
+ )
+ )
+ end
end
end
diff --git a/app/models/concerns/routable.rb b/app/models/concerns/routable.rb
index ef14ff5fbe2..aa2c7c08435 100644
--- a/app/models/concerns/routable.rb
+++ b/app/models/concerns/routable.rb
@@ -77,7 +77,7 @@ module Routable
# rubocop:enable Metrics/CyclomaticComplexity
def self.optimize_routable_enabled?
- Feature.enabled?(:optimize_routable)
+ Feature.enabled?(:optimize_find_routable, Feature.current_request)
end
included do
diff --git a/app/models/integrations/asana.rb b/app/models/integrations/asana.rb
index 859522670ef..77555996cd9 100644
--- a/app/models/integrations/asana.rb
+++ b/app/models/integrations/asana.rb
@@ -1,9 +1,10 @@
# frozen_string_literal: true
-require 'asana'
-
module Integrations
class Asana < Integration
+ TASK_URL_TEMPLATE = 'https://app.asana.com/api/1.0/tasks/%{task_gid}'
+ STORY_URL_TEMPLATE = 'https://app.asana.com/api/1.0/tasks/%{task_gid}/stories'
+
validates :api_key, presence: true, if: :activated?
field :api_key,
@@ -40,12 +41,6 @@ module Integrations
%w[push]
end
- def client
- @_client ||= ::Asana::Client.new do |c|
- c.authentication :access_token, api_key
- end
- end
-
def execute(data)
return unless supported_events.include?(data[:object_kind])
@@ -78,11 +73,12 @@ module Integrations
taskid = tuple[2] || tuple[1]
begin
- task = ::Asana::Resources::Task.find_by_id(client, taskid)
- task.add_comment(text: "#{push_msg} #{message}")
+ story_on_task_url = format(STORY_URL_TEMPLATE, task_gid: taskid)
+ Gitlab::HTTP.post(story_on_task_url, headers: { "Authorization" => "Bearer #{api_key}" }, body: { text: "#{push_msg} #{message}" })
if tuple[0]
- task.update(completed: true)
+ task_url = format(TASK_URL_TEMPLATE, task_gid: taskid)
+ Gitlab::HTTP.put(task_url, headers: { "Authorization" => "Bearer #{api_key}" }, body: { completed: true })
end
rescue StandardError => e
log_error(e.message)
diff --git a/app/models/issue.rb b/app/models/issue.rb
index 58383a6a329..89188870f0e 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -543,7 +543,9 @@ class Issue < ApplicationRecord
end
end
- def related_issues(current_user, preload: nil)
+ def related_issues(current_user = nil, authorize: true, preload: nil)
+ return [] if new_record?
+
related_issues =
linked_issues_select
.joins("INNER JOIN issue_links ON
@@ -554,6 +556,7 @@ class Issue < ApplicationRecord
.reorder('issue_link_id')
related_issues = yield related_issues if block_given?
+ return related_issues unless authorize
cross_project_filter = -> (issues) { issues.where(project: project) }
Ability.issues_readable_by_user(related_issues,
@@ -561,6 +564,10 @@ class Issue < ApplicationRecord
filters: { read_cross_project: cross_project_filter })
end
+ def linked_items_count
+ related_issues(authorize: false).size
+ end
+
def can_be_worked_on?
!self.closed? && !self.project.forked?
end
diff --git a/app/models/project.rb b/app/models/project.rb
index 68196f0a757..ffce5b6b8aa 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -3346,7 +3346,7 @@ class Project < ApplicationRecord
end
def merge_requests_allowing_collaboration(source_branch = nil)
- relation = source_of_merge_requests.opened.where(allow_collaboration: true)
+ relation = source_of_merge_requests.from_fork.opened.where(allow_collaboration: true)
relation = relation.where(source_branch: source_branch) if source_branch
relation
end
diff --git a/app/models/project_import_data.rb b/app/models/project_import_data.rb
index 7e0722ab68c..96c1ad7def8 100644
--- a/app/models/project_import_data.rb
+++ b/app/models/project_import_data.rb
@@ -5,6 +5,11 @@ require 'carrierwave/orm/activerecord'
class ProjectImportData < ApplicationRecord
prepend_mod_with('ProjectImportData') # rubocop: disable Cop/InjectEnterpriseEditionModule
+ # Timeout strategy can only be changed via API, currently only with GitHub and BitBucket Server
+ OPTIMISTIC_TIMEOUT = "optimistic"
+ PESSIMISTIC_TIMEOUT = "pessimistic"
+ TIMEOUT_STRATEGIES = [OPTIMISTIC_TIMEOUT, PESSIMISTIC_TIMEOUT].freeze
+
belongs_to :project, inverse_of: :import_data
attr_encrypted :credentials,
key: Settings.attr_encrypted_db_key_base,
diff --git a/app/models/vulnerability.rb b/app/models/vulnerability.rb
index 650e8942132..0e3fe2cc8ac 100644
--- a/app/models/vulnerability.rb
+++ b/app/models/vulnerability.rb
@@ -9,6 +9,9 @@ class Vulnerability < ApplicationRecord
scope :with_projects, -> { includes(:project) }
+ validates :cvss, json_schema: { filename: "vulnerability_cvss_vectors", draft: 7 }
+ attribute :cvss, :ind_jsonb
+
def self.link_reference_pattern
nil
end
diff --git a/app/models/work_item.rb b/app/models/work_item.rb
index 62b837eeeb6..0761a213532 100644
--- a/app/models/work_item.rb
+++ b/app/models/work_item.rb
@@ -148,6 +148,8 @@ class WorkItem < Issue
end
def linked_work_items(current_user = nil, authorize: true, preload: nil, link_type: nil)
+ return [] if new_record?
+
linked_work_items = linked_work_items_query(link_type).preload(preload).reorder('issue_link_id')
return linked_work_items unless authorize
@@ -159,6 +161,10 @@ class WorkItem < Issue
)
end
+ def linked_items_count
+ linked_work_items(authorize: false).size
+ end
+
private
override :parent_link_confidentiality
diff --git a/app/models/work_items/related_work_item_link.rb b/app/models/work_items/related_work_item_link.rb
index a911ef5f05d..34dbaeb49d8 100644
--- a/app/models/work_items/related_work_item_link.rb
+++ b/app/models/work_items/related_work_item_link.rb
@@ -11,8 +11,6 @@ module WorkItems
belongs_to :source, class_name: 'WorkItem'
belongs_to :target, class_name: 'WorkItem'
- validate :validate_max_number_of_links, on: :create
-
class << self
extend ::Gitlab::Utils::Override
@@ -27,15 +25,5 @@ module WorkItems
'work item'
end
end
-
- def validate_max_number_of_links
- if source && source.linked_work_items(authorize: false).size >= MAX_LINKS_COUNT
- errors.add :source, s_('WorkItems|This work item would exceed the maximum number of linked items.')
- end
-
- return unless target && target.linked_work_items(authorize: false).size >= MAX_LINKS_COUNT
-
- errors.add :target, s_('WorkItems|This work item would exceed the maximum number of linked items.')
- end
end
end
diff --git a/app/services/import/bitbucket_server_service.rb b/app/services/import/bitbucket_server_service.rb
index 3d961780889..e628e88eaa9 100644
--- a/app/services/import/bitbucket_server_service.rb
+++ b/app/services/import/bitbucket_server_service.rb
@@ -42,7 +42,8 @@ module Import
project_name,
target_namespace,
current_user,
- credentials
+ credentials,
+ timeout_strategy
).execute
end
@@ -74,6 +75,10 @@ module Import
@url ||= params[:bitbucket_server_url]
end
+ def timeout_strategy
+ @timeout_strategy ||= params[:timeout_strategy] || ProjectImportData::PESSIMISTIC_TIMEOUT
+ end
+
def allow_local_requests?
Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index 73e0c229a9c..86c62145a87 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -138,6 +138,7 @@ module Import
Gitlab::GithubImport::Settings
.new(project)
.write(
+ timeout_strategy: params[:timeout_strategy] || ProjectImportData::PESSIMISTIC_TIMEOUT,
optional_stages: params[:optional_stages],
additional_access_tokens: access_params[:additional_access_tokens]
)
diff --git a/app/services/issue_links/create_service.rb b/app/services/issue_links/create_service.rb
index db05920678e..3523e945d37 100644
--- a/app/services/issue_links/create_service.rb
+++ b/app/services/issue_links/create_service.rb
@@ -9,7 +9,7 @@ module IssueLinks
end
def previous_related_issuables
- @related_issues ||= issuable.related_issues(current_user).to_a
+ @related_issues ||= issuable.related_issues(authorize: false).to_a
end
private
diff --git a/app/validators/json_schemas/vulnerability_cvss_vectors.json b/app/validators/json_schemas/vulnerability_cvss_vectors.json
new file mode 100644
index 00000000000..7ec1339e974
--- /dev/null
+++ b/app/validators/json_schemas/vulnerability_cvss_vectors.json
@@ -0,0 +1,22 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Schema for cvss attribute of Vulnerability",
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "vendor": {
+ "type": "string",
+ "default": "unknown"
+ },
+ "vector_string": {
+ "type": "string",
+ "example": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H"
+ }
+ },
+ "required": [
+ "vendor",
+ "vector_string"
+ ]
+ }
+}
diff --git a/app/views/admin/labels/index.html.haml b/app/views/admin/labels/index.html.haml
index 3d392a86566..b3d04e4f576 100644
--- a/app/views/admin/labels/index.html.haml
+++ b/app/views/admin/labels/index.html.haml
@@ -1,4 +1,5 @@
- page_title _("Labels")
+- add_page_specific_style 'page_bundles/labels'
= render Pajamas::CardComponent.new(card_options: { class: 'gl-new-card labels other-labels js-toggle-container js-admin-labels-container' }, header_options: { class: 'gl-new-card-header' }, body_options: { class: 'gl-new-card-body gl-px-0' }) do |c|
- c.with_header do
diff --git a/app/views/groups/labels/index.html.haml b/app/views/groups/labels/index.html.haml
index 6b4832d81aa..e174d6318e9 100644
--- a/app/views/groups/labels/index.html.haml
+++ b/app/views/groups/labels/index.html.haml
@@ -3,6 +3,7 @@
- search = params[:search]
- subscribed = params[:subscribed]
- labels_or_filters = @labels.exists? || search.present? || subscribed.present?
+- add_page_specific_style 'page_bundles/labels'
- if labels_or_filters
#js-promote-label-modal
diff --git a/app/views/layouts/_head.html.haml b/app/views/layouts/_head.html.haml
index bbde5f2843b..bb492913346 100644
--- a/app/views/layouts/_head.html.haml
+++ b/app/views/layouts/_head.html.haml
@@ -1,6 +1,11 @@
- page_description brand_title unless page_description
- site_name = _('GitLab')
- omit_og = sign_in_with_redirect?
+
+-# This is a temporary place for the page specific style migrations to be included on all pages like page_specific_files
+- if Feature.disabled?(:page_specific_styles, current_user)
+ - add_page_specific_style('page_bundles/labels')
+
%head{ omit_og ? { } : { prefix: "og: http://ogp.me/ns#" } }
%meta{ charset: "utf-8" }
%meta{ 'http-equiv' => 'X-UA-Compatible', content: 'IE=edge' }
@@ -38,6 +43,7 @@
= stylesheet_link_tag_defer "highlight/themes/#{user_color_scheme}"
= stylesheet_link_tag 'performance_bar' if performance_bar_enabled?
+
= render 'layouts/snowplow'
= render 'layouts/loading_hints'
diff --git a/app/views/projects/issues/new.html.haml b/app/views/projects/issues/new.html.haml
index 64143502b77..57f78152159 100644
--- a/app/views/projects/issues/new.html.haml
+++ b/app/views/projects/issues/new.html.haml
@@ -1,4 +1,5 @@
- add_page_specific_style 'page_bundles/merge_request'
+- add_page_specific_style 'page_bundles/labels'
- add_to_breadcrumbs _("Issues"), project_issues_path(@project)
- breadcrumb_title _("New")
- page_title _("New Issue")
diff --git a/app/views/projects/labels/index.html.haml b/app/views/projects/labels/index.html.haml
index 4b27b344498..03086b19984 100644
--- a/app/views/projects/labels/index.html.haml
+++ b/app/views/projects/labels/index.html.haml
@@ -3,6 +3,7 @@
- search = params[:search]
- subscribed = params[:subscribed]
- labels_or_filters = @labels.exists? || @prioritized_labels.exists? || search.present? || subscribed.present?
+- add_page_specific_style 'page_bundles/labels'
- if labels_or_filters
#js-promote-label-modal
diff --git a/app/views/projects/merge_requests/_mr_title.html.haml b/app/views/projects/merge_requests/_mr_title.html.haml
index f0e7df8a379..9c20127e102 100644
--- a/app/views/projects/merge_requests/_mr_title.html.haml
+++ b/app/views/projects/merge_requests/_mr_title.html.haml
@@ -2,7 +2,7 @@
- can_reopen_merge_request = can?(current_user, :reopen_merge_request, @merge_request)
- are_close_and_open_buttons_hidden = merge_request_button_hidden?(@merge_request, true) && merge_request_button_hidden?(@merge_request, false)
- hide_gutter_toggle = local_assigns.fetch(:hide_gutter_toggle, false)
-- cache_key = [@project, @merge_request, can_update_merge_request, can_reopen_merge_request, are_close_and_open_buttons_hidden, current_user&.preferred_language, "1.1-updated_header", moved_mr_sidebar_enabled?, hide_gutter_toggle, fluid_layout]
+- cache_key = [@project, @merge_request, can_update_merge_request, can_reopen_merge_request, are_close_and_open_buttons_hidden, current_user&.preferred_language, "1.1-updated_header", moved_mr_sidebar_enabled?, hide_gutter_toggle, fluid_layout, Gitlab::CurrentSettings.gitpod_enabled, current_user&.gitpod_enabled]
= cache(cache_key, expires_in: 1.day) do
- if @merge_request.closed_or_merged_without_fork?
diff --git a/app/views/shared/boards/_show.html.haml b/app/views/shared/boards/_show.html.haml
index e5aa4c58da1..882730f536d 100644
--- a/app/views/shared/boards/_show.html.haml
+++ b/app/views/shared/boards/_show.html.haml
@@ -15,5 +15,6 @@
- page_title("#{board.name}", _("Boards"))
- add_page_specific_style 'page_bundles/boards'
+- add_page_specific_style 'page_bundles/labels'
#js-issuable-board-app{ data: board_data }
diff --git a/app/views/shared/issuable/_label_dropdown.html.haml b/app/views/shared/issuable/_label_dropdown.html.haml
index 3c4ee01d04f..e82e7fb0177 100644
--- a/app/views/shared/issuable/_label_dropdown.html.haml
+++ b/app/views/shared/issuable/_label_dropdown.html.haml
@@ -1,3 +1,4 @@
+- add_page_specific_style 'page_bundles/labels'
- project = @target_project || @project
- edit_context = local_assigns.fetch(:edit_context, nil) || project
- show_create = local_assigns.fetch(:show_create, true)
diff --git a/app/views/shared/issuable/_sidebar.html.haml b/app/views/shared/issuable/_sidebar.html.haml
index 93e1a53ccb4..fe7fbb1328c 100644
--- a/app/views/shared/issuable/_sidebar.html.haml
+++ b/app/views/shared/issuable/_sidebar.html.haml
@@ -11,6 +11,7 @@
- is_merge_request = issuable_type === 'merge_request'
- moved_sidebar_enabled = moved_mr_sidebar_enabled?
- is_merge_request_with_flag = is_merge_request && moved_sidebar_enabled
+- add_page_specific_style 'page_bundles/labels'
%aside.right-sidebar.js-right-sidebar.js-issuable-sidebar{ data: { always_show_toggle: true, signed: { in: signed_in }, issuable_type: issuable_type }, class: "#{sidebar_gutter_collapsed_class(is_merge_request_with_flag)} #{'right-sidebar-merge-requests' if is_merge_request_with_flag}", 'aria-live' => 'polite', 'aria-label': issuable_type }
.issuable-sidebar{ class: "#{'is-merge-request' if is_merge_request_with_flag}" }
diff --git a/app/views/shared/issuable/form/_metadata.html.haml b/app/views/shared/issuable/form/_metadata.html.haml
index 1da0b82b634..e7ddc47653e 100644
--- a/app/views/shared/issuable/form/_metadata.html.haml
+++ b/app/views/shared/issuable/form/_metadata.html.haml
@@ -3,6 +3,7 @@
- presenter = local_assigns.fetch(:presenter)
- has_due_date = issuable.has_attribute?(:due_date)
- form = local_assigns.fetch(:form)
+- add_page_specific_style 'page_bundles/labels'
- if @add_related_issue
.form-group
diff --git a/app/views/shared/labels/_form.html.haml b/app/views/shared/labels/_form.html.haml
index 53fbe3dac03..c6b00212e34 100644
--- a/app/views/shared/labels/_form.html.haml
+++ b/app/views/shared/labels/_form.html.haml
@@ -1,3 +1,4 @@
+- add_page_specific_style 'page_bundles/labels'
- show_lock_on_merge = local_assigns.fetch(:show_lock_on_merge, false)
= gitlab_ui_form_for @label, as: :label, url: url, html: { class: 'label-form js-quick-submit js-requires-input' } do |f|
diff --git a/app/views/shared/milestones/_labels_tab.html.haml b/app/views/shared/milestones/_labels_tab.html.haml
index 1e856bf4355..1abf4b46d09 100644
--- a/app/views/shared/milestones/_labels_tab.html.haml
+++ b/app/views/shared/milestones/_labels_tab.html.haml
@@ -1,3 +1,5 @@
+- add_page_specific_style 'page_bundles/labels'
+
%ul.bordered-list.manage-labels-list
- labels.each do |label|
- options = { milestone_title: @milestone.title, label_name: label.title }
diff --git a/app/workers/concerns/limited_capacity/worker.rb b/app/workers/concerns/limited_capacity/worker.rb
index af66d80b3e9..0a79c5c46d5 100644
--- a/app/workers/concerns/limited_capacity/worker.rb
+++ b/app/workers/concerns/limited_capacity/worker.rb
@@ -1,41 +1,5 @@
# frozen_string_literal: true
-# Usage:
-#
-# Worker that performs the tasks:
-#
-# class DummyWorker
-# include ApplicationWorker
-# include LimitedCapacity::Worker
-#
-# # For each job that raises any error, a worker instance will be disabled
-# # until the next schedule-run.
-# # If you wish to get around this, exceptions must by handled by the implementer.
-# #
-# def perform_work(*args)
-# end
-#
-# def remaining_work_count(*args)
-# 5
-# end
-#
-# def max_running_jobs
-# 25
-# end
-# end
-#
-# Cron worker to fill the pool of regular workers:
-#
-# class ScheduleDummyCronWorker
-# include ApplicationWorker
-# include CronjobQueue
-#
-# def perform(*args)
-# DummyWorker.perform_with_capacity(*args)
-# end
-# end
-#
-
module LimitedCapacity
module Worker
extend ActiveSupport::Concern
diff --git a/app/workers/gitlab/bitbucket_import/advance_stage_worker.rb b/app/workers/gitlab/bitbucket_import/advance_stage_worker.rb
index e5a1ab13562..cc1bf160ac8 100644
--- a/app/workers/gitlab/bitbucket_import/advance_stage_worker.rb
+++ b/app/workers/gitlab/bitbucket_import/advance_stage_worker.rb
@@ -24,10 +24,14 @@ module Gitlab
finish: Stage::FinishImportWorker
}.freeze
- def find_import_state(project_id)
+ def find_import_state_jid(project_id)
ProjectImportState.jid_by(project_id: project_id, status: :started)
end
+ def find_import_state(id)
+ ProjectImportState.find(id)
+ end
+
private
def next_stage_worker(next_stage)
diff --git a/app/workers/gitlab/bitbucket_server_import/advance_stage_worker.rb b/app/workers/gitlab/bitbucket_server_import/advance_stage_worker.rb
index 2c8db639725..1fc35725c9f 100644
--- a/app/workers/gitlab/bitbucket_server_import/advance_stage_worker.rb
+++ b/app/workers/gitlab/bitbucket_server_import/advance_stage_worker.rb
@@ -25,10 +25,14 @@ module Gitlab
finish: Stage::FinishImportWorker
}.freeze
- def find_import_state(project_id)
+ def find_import_state_jid(project_id)
ProjectImportState.jid_by(project_id: project_id, status: :started)
end
+ def find_import_state(id)
+ ProjectImportState.find(id)
+ end
+
private
def next_stage_worker(next_stage)
diff --git a/app/workers/gitlab/github_import/advance_stage_worker.rb b/app/workers/gitlab/github_import/advance_stage_worker.rb
index 45f4bf486d7..a012241e90c 100644
--- a/app/workers/gitlab/github_import/advance_stage_worker.rb
+++ b/app/workers/gitlab/github_import/advance_stage_worker.rb
@@ -33,10 +33,14 @@ module Gitlab
finish: Stage::FinishImportWorker
}.freeze
- def find_import_state(project_id)
+ def find_import_state_jid(project_id)
ProjectImportState.jid_by(project_id: project_id, status: :started)
end
+ def find_import_state(id)
+ ProjectImportState.find(id)
+ end
+
private
def next_stage_worker(next_stage)
diff --git a/app/workers/gitlab/import/advance_stage.rb b/app/workers/gitlab/import/advance_stage.rb
index 5d5abc88388..b1c83d0a553 100644
--- a/app/workers/gitlab/import/advance_stage.rb
+++ b/app/workers/gitlab/import/advance_stage.rb
@@ -4,6 +4,9 @@ module Gitlab
module Import
module AdvanceStage
INTERVAL = 30.seconds.to_i
+ TIMEOUT_DURATION = 2.hours
+
+ AdvanceStageTimeoutError = Class.new(StandardError)
# The number of seconds to wait (while blocking the thread) before
# continuing to the next waiter.
@@ -14,30 +17,37 @@ module Gitlab
# remaining jobs.
# next_stage - The name of the next stage to start when all jobs have been
# completed.
- def perform(project_id, waiters, next_stage)
- import_state = find_import_state(project_id)
+      # timeout_timer - Time at which the Sidekiq worker was first invoked with the current job_count
+ # previous_job_count - Number of jobs remaining on last invocation of this worker
+ def perform(project_id, waiters, next_stage, timeout_timer = Time.zone.now, previous_job_count = nil)
+ import_state_jid = find_import_state_jid(project_id)
# If the import state is nil the project may have been deleted or the import
# may have failed or been canceled. In this case we tidy up the cache data and no
# longer attempt to advance to the next stage.
- if import_state.nil?
+ if import_state_jid.nil?
clear_waiter_caches(waiters)
return
end
+ project = Project.find_by_id(project_id)
new_waiters = wait_for_jobs(waiters)
+ new_job_count = new_waiters.values.sum
+
+ # Reset the timeout timer as some jobs finished processing
+ if new_job_count != previous_job_count
+ timeout_timer = Time.zone.now
+ previous_job_count = new_job_count
+ end
if new_waiters.empty?
- # We refresh the import JID here so workers importing individual
- # resources (e.g. notes) don't have to do this all the time, reducing
- # the pressure on Redis. We _only_ do this once all jobs are done so
- # we don't get stuck forever if one or more jobs failed to notify the
- # JobWaiter.
- import_state.refresh_jid_expiration
-
- next_stage_worker(next_stage).perform_async(project_id)
+ proceed_to_next_stage(import_state_jid, next_stage, project_id)
+ elsif Feature.enabled?(:advance_stage_timeout, project) && timeout_reached?(timeout_timer) &&
+ new_job_count == previous_job_count
+
+ handle_timeout(import_state_jid, next_stage, project_id, new_waiters, new_job_count)
else
- self.class.perform_in(INTERVAL, project_id, new_waiters, next_stage)
+ self.class.perform_in(INTERVAL, project_id, new_waiters, next_stage, timeout_timer, previous_job_count)
end
end
@@ -55,12 +65,65 @@ module Gitlab
end
end
- def find_import_state(project_id)
+ def find_import_state_jid(project_id)
+ raise NotImplementedError
+ end
+
+ def find_import_state(id)
raise NotImplementedError
end
private
+ def proceed_to_next_stage(import_state_jid, next_stage, project_id)
+ # We refresh the import JID here so workers importing individual
+ # resources (e.g. notes) don't have to do this all the time, reducing
+ # the pressure on Redis. We _only_ do this once all jobs are done so
+ # we don't get stuck forever if one or more jobs failed to notify the
+ # JobWaiter.
+ import_state_jid.refresh_jid_expiration
+
+ next_stage_worker(next_stage).perform_async(project_id)
+ end
+
+ def handle_timeout(import_state_jid, next_stage, project_id, new_waiters, new_job_count)
+ project = Project.find_by_id(project_id)
+ strategy = project.import_data&.data&.dig("timeout_strategy") || ProjectImportData::PESSIMISTIC_TIMEOUT
+
+ Gitlab::Import::Logger.info(
+ message: 'Timeout reached, no longer retrying',
+ project_id: project_id,
+ jobs_remaining: new_job_count,
+ waiters: new_waiters,
+ timeout_strategy: strategy
+ )
+
+ clear_waiter_caches(new_waiters)
+
+ case strategy
+ when ProjectImportData::OPTIMISTIC_TIMEOUT
+ proceed_to_next_stage(import_state_jid, next_stage, project_id)
+ when ProjectImportData::PESSIMISTIC_TIMEOUT
+ import_state = find_import_state(import_state_jid.id)
+ fail_import_and_log_status(import_state)
+ end
+ end
+
+ def fail_import_and_log_status(import_state)
+ raise AdvanceStageTimeoutError, "Failing advance stage, timeout reached with pessimistic strategy"
+ rescue AdvanceStageTimeoutError => e
+ Gitlab::Import::ImportFailureService.track(
+ import_state: import_state,
+ exception: e,
+ error_source: self.class.name,
+ fail_import: true
+ )
+ end
+
+ def timeout_reached?(timeout_timer)
+ Time.zone.now > timeout_timer + TIMEOUT_DURATION
+ end
+
def next_stage_worker(next_stage)
raise NotImplementedError
end
diff --git a/app/workers/gitlab/jira_import/advance_stage_worker.rb b/app/workers/gitlab/jira_import/advance_stage_worker.rb
index 5fae7caf791..9641b55a584 100644
--- a/app/workers/gitlab/jira_import/advance_stage_worker.rb
+++ b/app/workers/gitlab/jira_import/advance_stage_worker.rb
@@ -20,10 +20,14 @@ module Gitlab
finish: Gitlab::JiraImport::Stage::FinishImportWorker
}.freeze
- def find_import_state(project_id)
+ def find_import_state_jid(project_id)
JiraImportState.jid_by(project_id: project_id, status: :started)
end
+ def find_import_state(id)
+ JiraImportState.find(id)
+ end
+
private
def next_stage_worker(next_stage)
diff --git a/config/application.rb b/config/application.rb
index 552a68f6046..459487f2af3 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -360,6 +360,7 @@ module Gitlab
config.assets.precompile << "page_bundles/wiki.css"
config.assets.precompile << "page_bundles/work_items.css"
config.assets.precompile << "page_bundles/xterm.css"
+ config.assets.precompile << "page_bundles/labels.css"
config.assets.precompile << "lazy_bundles/cropper.css"
config.assets.precompile << "lazy_bundles/gridstack.css"
config.assets.precompile << "performance_bar.css"
diff --git a/config/feature_flags/development/advance_stage_timeout.yml b/config/feature_flags/development/advance_stage_timeout.yml
new file mode 100644
index 00000000000..9bedf0b39f4
--- /dev/null
+++ b/config/feature_flags/development/advance_stage_timeout.yml
@@ -0,0 +1,8 @@
+---
+name: advance_stage_timeout
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131312
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/425490
+milestone: '16.5'
+type: development
+group: group::import and integrate
+default_enabled: false
diff --git a/config/feature_flags/development/optimize_routable.yml b/config/feature_flags/development/optimize_find_routable.yml
index 1191d389fa2..a78719cea01 100644
--- a/config/feature_flags/development/optimize_routable.yml
+++ b/config/feature_flags/development/optimize_find_routable.yml
@@ -1,8 +1,8 @@
---
-name: optimize_routable
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130842
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/424138
-milestone: '16.4'
+name: optimize_find_routable
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132324
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/425859
+milestone: '16.5'
type: development
group: group::tenant scale
default_enabled: false
diff --git a/config/feature_flags/development/page_specific_styles.yml b/config/feature_flags/development/page_specific_styles.yml
new file mode 100644
index 00000000000..5ca06e09689
--- /dev/null
+++ b/config/feature_flags/development/page_specific_styles.yml
@@ -0,0 +1,8 @@
+---
+name: page_specific_styles
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131606
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/425035
+milestone: '16.5'
+type: development
+group: group::project management
+default_enabled: false
diff --git a/config/initializers/postgres_partitioning.rb b/config/initializers/postgres_partitioning.rb
index a7728bf51b3..1cf40059a03 100644
--- a/config/initializers/postgres_partitioning.rb
+++ b/config/initializers/postgres_partitioning.rb
@@ -19,7 +19,8 @@ if Gitlab.ee?
IncidentManagement::PendingEscalations::Alert,
IncidentManagement::PendingEscalations::Issue,
Security::Finding,
- Analytics::ValueStreamDashboard::Count
+ Analytics::ValueStreamDashboard::Count,
+ Ci::FinishedBuildChSyncEvent
])
else
Gitlab::Database::Partitioning.register_tables(
diff --git a/db/docs/p_ci_finished_build_ch_sync_events.yml b/db/docs/p_ci_finished_build_ch_sync_events.yml
new file mode 100644
index 00000000000..09938f99b16
--- /dev/null
+++ b/db/docs/p_ci_finished_build_ch_sync_events.yml
@@ -0,0 +1,10 @@
+---
+table_name: p_ci_finished_build_ch_sync_events
+classes:
+- Ci::FinishedBuildChSyncEvent
+feature_categories:
+- runner_fleet
+description: Holds references to finished CI builds ready to be synced to ClickHouse
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/
+milestone: '16.5'
+gitlab_schema: gitlab_ci
diff --git a/db/migrate/20230906122405_add_cvss_to_vulnerabilities.rb b/db/migrate/20230906122405_add_cvss_to_vulnerabilities.rb
new file mode 100644
index 00000000000..6914bceef87
--- /dev/null
+++ b/db/migrate/20230906122405_add_cvss_to_vulnerabilities.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddCvssToVulnerabilities < Gitlab::Database::Migration[2.1]
+ disable_ddl_transaction!
+
+ def up
+ with_lock_retries do
+ add_column :vulnerabilities, :cvss, :jsonb, default: [], if_not_exists: true
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :vulnerabilities, :cvss, if_exists: true
+ end
+ end
+end
diff --git a/db/migrate/20230915103259_create_ci_finished_build_ch_sync_events.rb b/db/migrate/20230915103259_create_ci_finished_build_ch_sync_events.rb
new file mode 100644
index 00000000000..718fd49f5c0
--- /dev/null
+++ b/db/migrate/20230915103259_create_ci_finished_build_ch_sync_events.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+class CreateCiFinishedBuildChSyncEvents < Gitlab::Database::Migration[2.1]
+ def change
+ options = {
+ primary_key: [:build_id, :partition],
+ options: 'PARTITION BY LIST (partition)'
+ }
+
+ create_table(:p_ci_finished_build_ch_sync_events, **options) do |t|
+          # Do not bother with a foreign key as it provides no benefit and has a performance cost. These get cleaned up over
+ # time anyway.
+ t.bigint :build_id, null: false
+ t.bigint :partition, null: false, default: 1
+ # rubocop: disable Migration/Datetime
+ # The source for this field does not have a timezone
+ t.datetime :build_finished_at, null: false
+ # rubocop: enable Migration/Datetime
+ t.boolean :processed, null: false, default: false
+
+ t.index '(build_id % 100), build_id',
+ where: 'processed = FALSE',
+ name: 'index_ci_finished_build_ch_sync_events_for_partitioned_query'
+ end
+ end
+end
diff --git a/db/migrate/20230920162613_add_force_include_all_resources_to_workspaces.rb b/db/migrate/20230920162613_add_force_include_all_resources_to_workspaces.rb
new file mode 100644
index 00000000000..502732bc308
--- /dev/null
+++ b/db/migrate/20230920162613_add_force_include_all_resources_to_workspaces.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddForceIncludeAllResourcesToWorkspaces < Gitlab::Database::Migration[2.1]
+ enable_lock_retries!
+
+ def change
+ add_column :workspaces, :force_include_all_resources, :boolean, default: false, null: false
+ end
+end
diff --git a/db/post_migrate/20230920122059_add_ci_job_artifacts_file_final_path_index_synchronously.rb b/db/post_migrate/20230920122059_add_ci_job_artifacts_file_final_path_index_synchronously.rb
new file mode 100644
index 00000000000..e2b91bf8711
--- /dev/null
+++ b/db/post_migrate/20230920122059_add_ci_job_artifacts_file_final_path_index_synchronously.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class AddCiJobArtifactsFileFinalPathIndexSynchronously < Gitlab::Database::Migration[2.1]
+ disable_ddl_transaction!
+
+ INDEX_NAME = 'index_ci_job_artifacts_on_file_final_path'
+ WHERE_CLAUSE = 'file_final_path IS NOT NULL'
+
+ def up
+ add_concurrent_index :ci_job_artifacts, :file_final_path, name: INDEX_NAME, where: WHERE_CLAUSE
+ end
+
+ def down
+ remove_concurrent_index_by_name :ci_job_artifacts, INDEX_NAME
+ end
+end
diff --git a/db/schema_migrations/20230906122405 b/db/schema_migrations/20230906122405
new file mode 100644
index 00000000000..f4743a41b74
--- /dev/null
+++ b/db/schema_migrations/20230906122405
@@ -0,0 +1 @@
+8ef820d3702eb460400a7109655bfb4abd2c3ffb8c5bac2f4c58a72051109bd0 \ No newline at end of file
diff --git a/db/schema_migrations/20230915103259 b/db/schema_migrations/20230915103259
new file mode 100644
index 00000000000..2cbfa061f31
--- /dev/null
+++ b/db/schema_migrations/20230915103259
@@ -0,0 +1 @@
+d3dbc12fcadb285af3e4953addc76352c95bc6db8b20a43524627d8e6ed69b11 \ No newline at end of file
diff --git a/db/schema_migrations/20230920122059 b/db/schema_migrations/20230920122059
new file mode 100644
index 00000000000..18b212c9e85
--- /dev/null
+++ b/db/schema_migrations/20230920122059
@@ -0,0 +1 @@
+25c5f9c4d26da1f4007ea775890b6ac8146667c5afb71d56b4df00dd4fa42190 \ No newline at end of file
diff --git a/db/schema_migrations/20230920162613 b/db/schema_migrations/20230920162613
new file mode 100644
index 00000000000..9fb10e9fe35
--- /dev/null
+++ b/db/schema_migrations/20230920162613
@@ -0,0 +1 @@
+5547fc402692964dd08d384a5e31778ee6708fffd08b47cce48ca93b9c19e94b \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 468ee1b8c37..d2f3d5023df 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -646,6 +646,14 @@ CREATE TABLE p_batched_git_ref_updates_deletions (
)
PARTITION BY LIST (partition_id);
+CREATE TABLE p_ci_finished_build_ch_sync_events (
+ build_id bigint NOT NULL,
+ partition bigint DEFAULT 1 NOT NULL,
+ build_finished_at timestamp without time zone NOT NULL,
+ processed boolean DEFAULT false NOT NULL
+)
+PARTITION BY LIST (partition);
+
CREATE TABLE projects_visits (
id bigint NOT NULL,
entity_id bigint NOT NULL,
@@ -24696,7 +24704,8 @@ CREATE TABLE vulnerabilities (
resolved_on_default_branch boolean DEFAULT false NOT NULL,
present_on_default_branch boolean DEFAULT true NOT NULL,
detected_at timestamp with time zone DEFAULT now(),
- finding_id bigint
+ finding_id bigint,
+ cvss jsonb DEFAULT '[]'::jsonb
);
CREATE SEQUENCE vulnerabilities_id_seq
@@ -25441,6 +25450,7 @@ CREATE TABLE workspaces (
personal_access_token_id bigint,
config_version integer DEFAULT 1 NOT NULL,
force_full_reconciliation boolean DEFAULT false NOT NULL,
+ force_include_all_resources boolean DEFAULT false NOT NULL,
CONSTRAINT check_15543fb0fa CHECK ((char_length(name) <= 64)),
CONSTRAINT check_157d5f955c CHECK ((char_length(namespace) <= 64)),
CONSTRAINT check_2b401b0034 CHECK ((char_length(deployment_resource_version) <= 64)),
@@ -28615,6 +28625,9 @@ ALTER TABLE ONLY organizations
ALTER TABLE ONLY p_batched_git_ref_updates_deletions
ADD CONSTRAINT p_batched_git_ref_updates_deletions_pkey PRIMARY KEY (id, partition_id);
+ALTER TABLE ONLY p_ci_finished_build_ch_sync_events
+ ADD CONSTRAINT p_ci_finished_build_ch_sync_events_pkey PRIMARY KEY (build_id, partition);
+
ALTER TABLE ONLY p_ci_job_annotations
ADD CONSTRAINT p_ci_job_annotations_pkey PRIMARY KEY (id, partition_id);
@@ -31365,6 +31378,8 @@ CREATE INDEX index_ci_editor_ai_messages_on_user_project_and_created_at ON ci_ed
CREATE INDEX index_ci_editor_ai_messages_project_id ON ci_editor_ai_conversation_messages USING btree (project_id);
+CREATE INDEX index_ci_finished_build_ch_sync_events_for_partitioned_query ON ONLY p_ci_finished_build_ch_sync_events USING btree (((build_id % (100)::bigint)), build_id) WHERE (processed = false);
+
CREATE INDEX index_ci_freeze_periods_on_project_id ON ci_freeze_periods USING btree (project_id);
CREATE UNIQUE INDEX index_ci_group_variables_on_group_id_and_key_and_environment ON ci_group_variables USING btree (group_id, key, environment_scope);
@@ -31381,6 +31396,8 @@ CREATE INDEX index_ci_job_artifacts_id_for_terraform_reports ON ci_job_artifacts
CREATE INDEX index_ci_job_artifacts_on_expire_at_and_job_id ON ci_job_artifacts USING btree (expire_at, job_id);
+CREATE INDEX index_ci_job_artifacts_on_file_final_path ON ci_job_artifacts USING btree (file_final_path) WHERE (file_final_path IS NOT NULL);
+
CREATE INDEX index_ci_job_artifacts_on_file_store ON ci_job_artifacts USING btree (file_store);
CREATE INDEX index_ci_job_artifacts_on_file_type_for_devops_adoption ON ci_job_artifacts USING btree (file_type, project_id, created_at) WHERE (file_type = ANY (ARRAY[5, 6, 8, 23]));
diff --git a/doc/administration/integration/terminal.md b/doc/administration/integration/terminal.md
index 6e39ab8015c..baeabdc6964 100644
--- a/doc/administration/integration/terminal.md
+++ b/doc/administration/integration/terminal.md
@@ -79,7 +79,7 @@ guides document the necessary steps for a selection of popular reverse proxies:
- [Apache](https://httpd.apache.org/docs/2.4/mod/mod_proxy_wstunnel.html)
- [NGINX](https://www.nginx.com/blog/websocket-nginx/)
-- [HAProxy](https://www.haproxy.com/blog/websockets-load-balancing-with-haproxy/)
+- [HAProxy](https://www.haproxy.com/blog/websockets-load-balancing-with-haproxy)
- [Varnish](https://varnish-cache.org/docs/4.1/users-guide/vcl-example-websockets.html)
Workhorse doesn't let WebSocket requests through to non-WebSocket endpoints, so
diff --git a/doc/api/commits.md b/doc/api/commits.md
index 68c453aa317..94cdaaa191d 100644
--- a/doc/api/commits.md
+++ b/doc/api/commits.md
@@ -452,6 +452,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user
| `sha` | string | yes | The commit hash or name of a repository branch or tag |
+| `unidiff` | boolean | no | Present diffs in the [unified diff](https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html) format. Default is false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130610) in GitLab 16.5. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/repository/commits/main/diff"
diff --git a/doc/api/import.md b/doc/api/import.md
index b981c1b57da..677848a0ed3 100644
--- a/doc/api/import.md
+++ b/doc/api/import.md
@@ -35,6 +35,7 @@ POST /import/github
| `github_hostname` | string | no | Custom GitHub Enterprise hostname. Do not set for GitHub.com. |
| `optional_stages` | object | no | [Additional items to import](../user/project/import/github.md#select-additional-items-to-import). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/373705) in GitLab 15.5 |
| `additional_access_tokens` | string | no | Comma-separated list of [additional](#use-multiple-github-personal-access-tokens) GitHub personal access tokens. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/337232) in GitLab 16.2 |
+| `timeout_strategy` | string | no | Strategy for handling import timeouts. Valid values are `optimistic` (continue to next stage of import) or `pessimistic` (fail immediately). Defaults to `pessimistic`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/422979) in GitLab 16.5. |
```shell
curl --request POST \
@@ -206,6 +207,7 @@ POST /import/bitbucket_server
| `bitbucket_server_repo` | string | yes | Bitbucket Repository Name |
| `new_name` | string | no | New repository name |
| `target_namespace` | string | no | Namespace to import repository into. Supports subgroups like `/namespace/subgroup` |
+| `timeout_strategy` | string | no | Strategy for handling import timeouts. Valid values are `optimistic` (continue to next stage of import) or `pessimistic` (fail immediately). Defaults to `pessimistic`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/422979) in GitLab 16.5. |
```shell
curl --request POST \
diff --git a/doc/api/merge_requests.md b/doc/api/merge_requests.md
index 53a605c56f0..d9700b807d8 100644
--- a/doc/api/merge_requests.md
+++ b/doc/api/merge_requests.md
@@ -986,6 +986,7 @@ Supported attributes:
| `id` | integer or string | Yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user. |
| `merge_request_iid` | integer | Yes | The internal ID of the merge request. |
| `access_raw_diffs` | boolean | No | Retrieve change diffs via Gitaly. |
+| `unidiff` | boolean | No | Present change diffs in the [unified diff](https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html) format. Default is false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130610) in GitLab 16.5. |
```json
{
@@ -1110,6 +1111,7 @@ Supported attributes:
| `merge_request_iid` | integer | Yes | The internal ID of the merge request. |
| `page` | integer | no | The page of results to return. Defaults to 1. |
| `per_page` | integer | no | The number of results per page. Defaults to 20. |
+| `unidiff` | boolean | no | Present diffs in the [unified diff](https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html) format. Default is false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130610) in GitLab 16.5. |
If successful, returns [`200 OK`](rest/index.md#status-codes) and the
following response attributes:
@@ -2605,6 +2607,7 @@ GET /projects/:id/merge_requests/:merge_request_iid/versions/:version_id
| `id` | String | Yes | The ID of the project. |
| `merge_request_iid` | integer | Yes | The internal ID of the merge request. |
| `version_id` | integer | Yes | The ID of the merge request diff version. |
+| `unidiff` | boolean | No | Present diffs in the [unified diff](https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html) format. Default is false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130610) in GitLab 16.5. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/merge_requests/1/versions/1"
diff --git a/doc/api/repositories.md b/doc/api/repositories.md
index aae475a0356..e17044f1c38 100644
--- a/doc/api/repositories.md
+++ b/doc/api/repositories.md
@@ -187,6 +187,7 @@ Supported attributes:
| `to` | string | yes | The commit SHA or branch name. |
| `from_project_id` | integer | no | The ID to compare from. |
| `straight` | boolean | no | Comparison method: `true` for direct comparison between `from` and `to` (`from`..`to`), `false` to compare using merge base (`from`...`to`)'. Default is `false`. |
+| `unidiff` | boolean | no | Present diffs in the [unified diff](https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html) format. Default is false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130610) in GitLab 16.5. |
```plaintext
GET /projects/:id/repository/compare?from=main&to=feature
diff --git a/doc/api/rest/index.md b/doc/api/rest/index.md
index 17da691b720..059efa15602 100644
--- a/doc/api/rest/index.md
+++ b/doc/api/rest/index.md
@@ -808,7 +808,7 @@ For questions about these integrations, use the [GitLab community forum](https:/
### Haskell
-- [`gitlab-haskell`](http://hackage.haskell.org/package/gitlab-haskell)
+- [`gitlab-haskell`](https://hackage.haskell.org/package/gitlab-haskell)
### Java
diff --git a/doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md b/doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md
index 66089085d0d..9ce41b51b0c 100644
--- a/doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md
+++ b/doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md
@@ -45,7 +45,7 @@ ClickHouse is an online, analytical processing (OLAP) database that powers use-c
At GitLab, [our current and future ClickHouse uses/capabilities](https://gitlab.com/groups/gitlab-com/-/epics/2075) reference & describe multiple use-cases that could be facilitated by using ClickHouse as a backing datastore. A majority of these talk about the following two major areas of concern:
-1. Being able to leverage [ClickHouse's OLAP capabilities](https://clickhouse.com/docs/en/faq/general/olap/) enabling underlying systems to perform an aggregated analysis of data, both over short and long periods of time.
+1. Being able to leverage [ClickHouse's OLAP capabilities](https://clickhouse.com/docs/en/faq/general/olap) enabling underlying systems to perform an aggregated analysis of data, both over short and long periods of time.
1. The fact that executing these operations with our currently existing datasets primarily in Postgres, is starting to become challenging and non-performant.
Looking forward, assuming a larger volume of data being produced by our application(s) and the rate at which it gets produced, the ability to ingest it into a *more* capable system, both effectively and efficiently helps us scale our applications and prepare for business growth.
diff --git a/doc/architecture/blueprints/gitlab_observability_backend/index.md b/doc/architecture/blueprints/gitlab_observability_backend/index.md
index 5b99235e18c..3efdaa0c462 100644
--- a/doc/architecture/blueprints/gitlab_observability_backend/index.md
+++ b/doc/architecture/blueprints/gitlab_observability_backend/index.md
@@ -453,8 +453,8 @@ The following sections intend to deep-dive into specific characteristics of our
- table engines
- - [MergeTree](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/mergetree/)
- - [S3 Table Engine](https://clickhouse.com/docs/en/engines/table-engines/integrations/s3/)
+ - [MergeTree](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/mergetree)
+ - [S3 Table Engine](https://clickhouse.com/docs/en/engines/table-engines/integrations/s3)
- efficient partitioning and/or sharding
diff --git a/doc/architecture/blueprints/gitlab_steps/index.md b/doc/architecture/blueprints/gitlab_steps/index.md
index d7878445cd0..c14f6888470 100644
--- a/doc/architecture/blueprints/gitlab_steps/index.md
+++ b/doc/architecture/blueprints/gitlab_steps/index.md
@@ -15,7 +15,7 @@ participating-stages: [ ]
This document describes architecture of a new component called Step Runner, the GitLab Steps syntax it uses,
and how the GitHub Actions support will be achieved.
-The competitive CI products [drone.io](https://drone.io),
+The competitive CI products [drone.io](https://drone.io/),
[GitHub Actions](https://docs.github.com/en/actions/creating-actions)
have a composable CI jobs execution in form of steps, or actions.
diff --git a/doc/architecture/blueprints/google_artifact_registry_integration/backend.md b/doc/architecture/blueprints/google_artifact_registry_integration/backend.md
new file mode 100644
index 00000000000..8213e3ede32
--- /dev/null
+++ b/doc/architecture/blueprints/google_artifact_registry_integration/backend.md
@@ -0,0 +1,131 @@
+---
+stage: Package
+group: Container Registry
+description: 'Backend changes for Google Artifact Registry Integration'
+---
+
+# Backend changes for Google Artifact Registry Integration
+
+## Client SDK
+
+To interact with GAR we will make use of the official GAR [Ruby client SDK](https://cloud.google.com/ruby/docs/reference/google-cloud-artifact_registry/latest).
+By default, this client will use the [RPC](https://cloud.google.com/artifact-registry/docs/reference/rpc) version of the Artifact Registry API.
+
+To build the client, we will need the [service account key](index.md#authentication).
+
+### Interesting functions
+
+For the scope of this blueprint, we will need to use the following functions from the Ruby client:
+
+- [`#get_repository`](https://github.com/googleapis/google-cloud-ruby/blob/d0ce758a03335b60285a3d2783e4cca7089ee2ea/google-cloud-artifact_registry-v1/lib/google/cloud/artifact_registry/v1/artifact_registry/client.rb#L1244). [API documentation](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#getrepositoryrequest). This will return a single [`Repository`](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#repository).
+- [`#list_docker_images`](https://github.com/googleapis/google-cloud-ruby/blob/d0ce758a03335b60285a3d2783e4cca7089ee2ea/google-cloud-artifact_registry-v1/lib/google/cloud/artifact_registry/v1/artifact_registry/client.rb#L243). [API documentation](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#listdockerimagesrequest). This will return a list of [`DockerImage`](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#dockerimage).
+- [`#get_docker_image`](https://github.com/googleapis/google-cloud-ruby/blob/d0ce758a03335b60285a3d2783e4cca7089ee2ea/google-cloud-artifact_registry-v1/lib/google/cloud/artifact_registry/v1/artifact_registry/client.rb#L329). [API documentation](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#getdockerimagerequest). This will return a single [`DockerImage`](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#dockerimage).
+
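+As an illustration only, the sketch below exercises these three functions with the official client. The project, location, and repository names are placeholders, and the credential handling (a path to the service account JSON key) is an assumption based on the SDK documentation rather than the final integration code.
+
+```ruby
+require "google/cloud/artifact_registry"
+
+# Build a v1 client authenticated with the integration's service account key.
+# A key file path is one of the credential forms accepted by the SDK.
+client = Google::Cloud::ArtifactRegistry.artifact_registry_service do |config|
+  config.credentials = "service_account_key.json"
+end
+
+repository_name = "projects/my-gcp-project/locations/us-east1/repositories/my-repo"
+
+# Fetch the repository itself, for example to verify that its format is DOCKER.
+repository = client.get_repository(name: repository_name)
+puts repository.format
+
+# List the Docker images of the repository (paginated; ordering is supported).
+client.list_docker_images(parent: repository_name, page_size: 20).each do |image|
+  puts "#{image.uri} (#{image.image_size_bytes} bytes)"
+end
+```
+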
+### Limitations
+
+Filtering is not available in `#list_docker_images`. In other words, we can't filter the returned list (for example on a specific name). However, ordering on some columns is available.
+
+In addition, we can't point directly to a specific page. For example, we can't access page 3 of the list of Docker images without first going through pages 1 and 2.
+We can't build this feature on the GitLab side either, because it would require walking through all pages, and we could hit a situation where we need to go through a very large number of pages.
+
+### Exposing the client
+
+It would be better to centralize access to the official Ruby client. This way, it's easy to check permissions in a single place.
+
+We suggest having a custom client class located in `Integrations::GoogleCloudPlatform::ArtifactRegistry::Client`. That class will need to require a `User` and an `Integrations::GoogleCloudPlatform::ArtifactRegistry` (see [Project Integration](#project-integration)).
+
+The client will then need to expose three functions: `#repository`, `#docker_images` and `#docker_image`, which will be mapped to the similarly named functions of the official client.
+
+Before calling the official client, this class will need to check the user's permissions. The given `User` should have the `read_gcp_artifact_registry_repository` permission on the `Project` related to the `Integrations::GoogleCloudPlatform::ArtifactRegistry`.
+
+Lastly, to set up the official client, we will need to properly set:
+
+- the [timeout](https://github.com/googleapis/google-cloud-ruby/blob/a64ed1de61a6f1b5752e7c8e01d6a79365e6de67/google-cloud-artifact_registry-v1/lib/google/cloud/artifact_registry/v1/artifact_registry/operations.rb#L646).
+- the [retry_policy](https://github.com/googleapis/google-cloud-ruby/blob/a64ed1de61a6f1b5752e7c8e01d6a79365e6de67/google-cloud-artifact_registry-v1/lib/google/cloud/artifact_registry/v1/artifact_registry/operations.rb#L652).
+
+For these, we can either keep the default values if they are acceptable, or use fixed values.
+
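+A possible shape for this wrapper is sketched below, under the assumptions of this section. The nesting under the integration class, the error raised, and the fixed timeout value are illustrative choices, not a final implementation.
+
+```ruby
+# frozen_string_literal: true
+
+module Integrations
+  module GoogleCloudPlatform
+    class ArtifactRegistry < ::Integration
+      class Client
+        def initialize(integration:, user:)
+          @integration = integration
+          @user = user
+        end
+
+        def repository
+          with_access_check { gapic_client.get_repository(name: repository_path) }
+        end
+
+        def docker_images(page_size: 20, page_token: nil, order_by: nil)
+          with_access_check do
+            gapic_client.list_docker_images(
+              parent: repository_path, page_size: page_size,
+              page_token: page_token, order_by: order_by)
+          end
+        end
+
+        def docker_image(name:)
+          with_access_check { gapic_client.get_docker_image(name: name) }
+        end
+
+        private
+
+        # Deny the call unless the user holds the new permission on the
+        # project this integration belongs to.
+        def with_access_check
+          unless @user&.can?(:read_gcp_artifact_registry_repository, @integration.project)
+            raise ::Gitlab::Access::AccessDeniedError
+          end
+
+          yield
+        end
+
+        def gapic_client
+          @gapic_client ||= Google::Cloud::ArtifactRegistry.artifact_registry_service do |config|
+            config.credentials = ::Gitlab::Json.parse(@integration.json_key)
+            config.timeout = 10 # seconds; a fixed value, as discussed above
+          end
+        end
+
+        def repository_path
+          "projects/#{@integration.google_project_id}/" \
+            "locations/#{@integration.google_location}/" \
+            "repositories/#{@integration.repository}"
+        end
+      end
+    end
+  end
+end
+```
+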
+## New permission
+
+We will need a new permission on the [Project policy](https://gitlab.com/gitlab-org/gitlab/-/blob/1411076f1c8ec80dd32f5da7518f795014ea5a2b/app/policies/project_policy.rb):
+
+- `read_gcp_artifact_registry_repository` granted to at least reporter users.
+
+## Project Integration
+
+We will need to build a new [project integration](../../../development/integrations/index.md) with the following properties:
+
+- `google_project_id` - the Google project ID. A simple string.
+- `google_location` - the Google location. A simple string.
+- `repositories` - an array of repository names (see below).
+- `json_key` - the service account JSON. A string but displayed as a text area.
+- `json_key_base64` - the service account JSON, encoded with base64. Value set from `json_key`.
+
+We will also have derived properties:
+
+- `repository` - the repository name. Derived from `repositories`.
+
+`repositories` stores the repository name in an array. This is to prepare for a future follow-up where multiple repositories will need to be supported. We therefore also create a `repository` property that returns the first entry of the array. By having a single `repository` property, we can use the [frontend helpers](../../../development/integrations/index.md#customize-the-frontend-form), because array values are not supported in project integrations.
+
+We also need the base64 version of the `json_key`. This is required for the [`CI/CD variables`](#cicd-variables).
+
+Regarding the class name, we suggest using `Integrations::GoogleCloudPlatform::ArtifactRegistry`. The `Integrations::GoogleCloudPlatform` namespace allows us to have possible future other integrations for the other services of the Google Cloud Platform.
+
+Regarding the [configuration test](../../../development/integrations/index.md#define-configuration-test), we need to fetch the repository info from the official API (method `#get_repository`). The test is successful if, and only if, the call succeeds and the returned repository has the `DOCKER` format.
+
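+As a minimal sketch under these assumptions, the derived properties could look as follows. The plain `repositories` and `json_key` readers are assumed to be provided by the standard project integration machinery; everything else about the class is omitted.
+
+```ruby
+# frozen_string_literal: true
+
+require "base64"
+
+module Integrations
+  module GoogleCloudPlatform
+    class ArtifactRegistry < ::Integration
+      # First (and, for now, only) entry of the stored `repositories` array.
+      def repository
+        Array(repositories).first
+      end
+
+      # Base64 form of the service account key, matching the value expected by
+      # the `_json_key_base64` Docker login user.
+      def json_key_base64
+        Base64.strict_encode64(json_key.to_s)
+      end
+    end
+  end
+end
+```
+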
+## GraphQL APIs
+
+The [UI](ui_ux.md) will basically have two pages: listing Docker images out of the repository configured in the project integration and show details of a given Docker image.
+
+To support the other repository formats in follow-ups, we choose not to map the official client function names directly to GraphQL fields or methods, but rather to take a more reusable approach.
+
+All GraphQL changes should be marked as [`alpha`](../../../development/api_graphql_styleguide.md#mark-schema-items-as-alpha).
+
+First, on the [`ProjectType`](../../../api/graphql/reference/index.md#project), we will need a new field `google_cloud_platform_artifact_registry_repository_artifacts`. This will return a list of a new [abstract](../../../api/graphql/reference/index.md#abstract-types) type: `Integrations::GoogleCloudPlatform::ArtifactRegistry::ArtifactType`. This list will have pagination support. Ordering options will be available.
+
+We will have `Integrations::GoogleCloudPlatform::ArtifactRegistry::DockerImage` as a concrete type of `Integrations::GoogleCloudPlatform::ArtifactRegistry::ArtifactType` with the following fields:
+
+- `name`. A string.
+- `uri`. A string.
+- `image_size_bytes`. An integer.
+- `upload_time`. A timestamp.
+
+Then, we will need a new query `Query.google_cloud_platform_registry_registry_artifact_details` that, given the name of an `Integrations::GoogleCloudPlatform::ArtifactRegistry::DockerImage`, will return a single `Integrations::GoogleCloudPlatform::ArtifactRegistry::ArtifacDetailsType` with the following fields:
+
+- all fields of `Integrations::GoogleCloudPlatform::ArtifactRegistry::ArtifactType`.
+- `tags`. An array of strings.
+- `media_type`. A string.
+- `build_time`. A timestamp.
+- `updated_time`. A timestamp.
+
+All GraphQL changes will require users to have the [`read_gcp_artifact_registry_repository` permission](#new-permission).
+
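+Sketched with the `graphql-ruby` conventions used elsewhere in the codebase, the abstract type could be an interface with the Docker image as its first concrete implementation. Field names follow this section; the `GoogleCloudArtifactRegistryDockerImage` GraphQL name and the base classes are assumptions for illustration.
+
+```ruby
+# frozen_string_literal: true
+
+module Types
+  module GoogleCloudPlatform
+    module ArtifactRegistry
+      # Abstract artifact type, realized as a GraphQL interface so that other
+      # repository formats can implement it in follow-ups.
+      module ArtifactType
+        include Types::BaseInterface
+
+        field :name, GraphQL::Types::String, null: false, description: 'Name of the artifact.'
+        field :uri, GraphQL::Types::String, null: false, description: 'URI to access the artifact.'
+        field :image_size_bytes, GraphQL::Types::BigInt, null: true, description: 'Size of the image, in bytes.'
+        field :upload_time, Types::TimeType, null: true, description: 'Time the artifact was uploaded.'
+      end
+
+      # Concrete type returned when the configured repository has the DOCKER format.
+      class DockerImageType < ::Types::BaseObject
+        graphql_name 'GoogleCloudArtifactRegistryDockerImage'
+
+        implements ArtifactType
+
+        authorize :read_gcp_artifact_registry_repository
+      end
+    end
+  end
+end
+```
+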
+## CI/CD variables
+
+Similar to the [Harbor](../../../user/project/integrations/harbor.md#configure-gitlab) integration, once users activate the GAR integration, additional CI/CD variables will be automatically available. These will be set according to the requirements described in the [documentation](https://cloud.google.com/artifact-registry/docs/docker/authentication#json-key):
+
+- `GCP_ARTIFACT_REGISTRY_URL`: This will be set to `https://LOCATION-docker.pkg.dev`, where `LOCATION` is the GCP project location configured for the integration.
+- `GCP_ARTIFACT_REGISTRY_PROJECT_URI`: This will be set to `LOCATION-docker.pkg.dev/PROJECT-ID`. `PROJECT-ID` is the GCP project ID of the GAR repository configured for the integration.
+- `GCP_ARTIFACT_REGISTRY_PASSWORD`: This will be set to the base64-encoded version of the service account JSON key file configured for the integration.
+- `GCP_ARTIFACT_REGISTRY_USER`: This will be set to `_json_key_base64`.
+
+These can then be used to log in using `docker login`:
+
+```shell
+docker login -u $GCP_ARTIFACT_REGISTRY_USER -p $GCP_ARTIFACT_REGISTRY_PASSWORD $GCP_ARTIFACT_REGISTRY_URL
+```
+
+Similarly, these can be used to download images from the repository with `docker pull`:
+
+```shell
+docker pull $GCP_ARTIFACT_REGISTRY_PROJECT_URI/REPOSITORY/myapp:latest
+```
+
+Finally, provided that the configured service account has the `Artifact Registry Writer` role, one can also push images to GAR:
+
+```shell
+docker build -t $GCP_ARTIFACT_REGISTRY_PROJECT_URI/REPOSITORY/myapp:latest .
+docker push $GCP_ARTIFACT_REGISTRY_PROJECT_URI/REPOSITORY/myapp:latest
+```
+
+For forward compatibility reasons, the repository name (`REPOSITORY` in the command above) must be appended to `GCP_ARTIFACT_REGISTRY_PROJECT_URI` by the user. In the first iteration we will only support a single GAR repository, and therefore we could technically provide a variable like `GCP_ARTIFACT_REGISTRY_REPOSITORY_URI` with the repository name already included. However, once we add support for multiple repositories, there is no way we can tell what repository a user will want to target for a specific instruction.
diff --git a/doc/architecture/blueprints/google_artifact_registry_integration/index.md b/doc/architecture/blueprints/google_artifact_registry_integration/index.md
index adde0f7f587..4c2bfe95c5e 100644
--- a/doc/architecture/blueprints/google_artifact_registry_integration/index.md
+++ b/doc/architecture/blueprints/google_artifact_registry_integration/index.md
@@ -88,49 +88,11 @@ Among the proprietary GAR APIs, the [REST API](https://cloud.google.com/artifact
Last but not least, there is also an [RPC API](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1), backed by gRPC and Protocol Buffers. This API provides the most functionality, covering all GAR features. From the available operations, we can make use of the [`ListDockerImagesRequest`](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#listdockerimagesrequest) and [`GetDockerImageRequest`](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#google.devtools.artifactregistry.v1.GetDockerImageRequest) operations. As with the REST API, both responses are composed of [`DockerImage`](https://cloud.google.com/artifact-registry/docs/reference/rpc/google.devtools.artifactregistry.v1#google.devtools.artifactregistry.v1.DockerImage) objects.
-Between the two proprietary API options, we chose the RPC one because it provides support not only for the operations we need today but also offers better coverage of all GAR features, which will be beneficial in future iterations. Finally, we do not intend to make direct use of this API but rather use it through the official Ruby client SDK. Please see [Client SDK](#client-sdk) below for more details.
+Between the two proprietary API options, we chose the RPC one because it not only supports the operations we need today, but also offers better coverage of all GAR features, which will be beneficial in future iterations. Finally, we do not intend to use this API directly, but rather through the official Ruby client SDK. See [Client SDK](backend.md#client-sdk) for more details.
#### Backend Integration
-##### Client SDK
-
-To interact with GAR we will make use of the official GAR [Ruby client SDK](https://cloud.google.com/ruby/docs/reference/google-cloud-artifact_registry/latest).
-
-*TODO: Add more details about the client SDK integration and its limitations (no filtering for example).*
-
-##### Database Changes
-
-*TODO: Describe any necessary changes to the database to support this integration.*
-
-##### CI/CD variables
-
-Similar to the [Harbor](../../../user/project/integrations/harbor.md#configure-gitlab) integration, once users activates the GAR integration, additional CI/CD variables will be automatically available if the integration is enabled. These will be set according to the requirements described in the [documentation](https://cloud.google.com/artifact-registry/docs/docker/authentication#json-key):
-
-- `GCP_ARTIFACT_REGISTRY_URL`: This will be set to `https://LOCATION-docker.pkg.dev`, where `LOCATION` is the GCP project location configured for the integration.
-- `GCP_ARTIFACT_REGISTRY_PROJECT_URI`: This will be set to `LOCATION-docker.pkg.dev/PROJECT-ID`. `PROJECT-ID` is the GCP project ID of the GAR repository configured for the integration.
-- `GCP_ARTIFACT_REGISTRY_PASSWORD`: This will be set to the base64-encode version of the service account JSON key file configured for the integration.
-- `GCP_ARTIFACT_REGISTRY_USER`: This will be set to `_json_key_base64`.
-
-These can then be used to log in using `docker login`:
-
-```shell
-docker login -u $GCP_ARTIFACT_REGISTRY_USER -p $GCP_ARTIFACT_REGISTRY_PASSWORD $GCP_ARTIFACT_REGISTRY_URL
-```
-
-Similarly, these can be used to download images from the repository with `docker pull`:
-
-```shell
-docker pull $GCP_ARTIFACT_REGISTRY_PROJECT_URI/REPOSITORY/myapp:latest
-```
-
-Finally, provided that the configured service account has the `Artifact Registry Writer` role, one can also push images to GAR:
-
-```shell
-docker build -t $GCP_ARTIFACT_REGISTRY_REPOSITORY_URI/myapp:latest .
-docker push $GCP_ARTIFACT_REGISTRY_REPOSITORY_URI/myapp:latest
-```
-
-For forward compatibility reasons, the repository name (`REPOSITORY` in the command above) must be appended to `GCP_ARTIFACT_REGISTRY_PROJECT_URI` by the user. In the first iteration we will only support a single GAR repository, and therefore we could technically provide an e.g. `GCP_ARTIFACT_REGISTRY_REPOSITORY_URI` variable with the repository name already included. However, once we add support for multiple repositories, there is no way we can tell what repository a user will want to target for a specific instruction. So it must be the user to tell that.
+This integration will need several changes on the backend side of the Rails project. See the [backend](backend.md) page for additional details.
#### UI/UX
diff --git a/doc/architecture/blueprints/modular_monolith/hexagonal_monolith/index.md b/doc/architecture/blueprints/modular_monolith/hexagonal_monolith/index.md
index f0f689d48ca..f8003a3dd56 100644
--- a/doc/architecture/blueprints/modular_monolith/hexagonal_monolith/index.md
+++ b/doc/architecture/blueprints/modular_monolith/hexagonal_monolith/index.md
@@ -12,7 +12,7 @@ owning-stage: ""
## Summary
**TL;DR:** Change the Rails monolith from a [big ball of mud](https://en.wikipedia.org/wiki/Big_ball_of_mud) state to
-a [modular monolith](https://www.thereformedprogrammer.net/my-experience-of-using-modular-monolith-and-ddd-architectures)
+a [modular monolith](https://www.thereformedprogrammer.net/my-experience-of-using-modular-monolith-and-ddd-architectures/)
that uses an [Hexagonal architecture](https://en.wikipedia.org/wiki/Hexagonal_architecture_(software)) (or ports and adapters architecture).
Extract cohesive functional domains into separate directory structure using Domain-Driven Design practices.
Extract infrastructure code (logging, database tools, instrumentation, etc.) into gems, essentially remove the need for `lib/` directory.
diff --git a/doc/architecture/blueprints/observability_tracing/index.md b/doc/architecture/blueprints/observability_tracing/index.md
index 71e03d81bcf..4c95d23e6bd 100644
--- a/doc/architecture/blueprints/observability_tracing/index.md
+++ b/doc/architecture/blueprints/observability_tracing/index.md
@@ -45,14 +45,14 @@ To release a generally available distributed tracing feature as part of GitLab.c
Specific goals:
-- An HTTPS write API implemented in the [GitLab Observability Backend](https://GitLab.com/GitLab-org/opstrace/opstrace) project which receives spans sent to GitLab using [OTLP (OpenTelemetry Protocol)](https://opentelemetry.io/docs/specs/otel/protocol/). Users can collect and send distributed traces using either the [OpenTelemetry SDK](https://opentelemetry.io/docs/collector/deployment/no-collector/) or the [OpenTelemetry Collector](https://opentelemetry.io/docs/collector/).
+- An HTTPS write API implemented in the [GitLab Observability Backend](https://gitlab.com/gitlab-org/opstrace/opstrace) project which receives spans sent to GitLab using [OTLP (OpenTelemetry Protocol)](https://opentelemetry.io/docs/specs/otel/protocol/). Users can collect and send distributed traces using either the [OpenTelemetry SDK](https://opentelemetry.io/docs/collector/deployment/no-collector/) or the [OpenTelemetry Collector](https://opentelemetry.io/docs/collector/).
- UI to list and filter/search for traces by ID, service, attributes or time
- UI to show a detail view of a trace and its corresponding spans
- Apply sensible ingestion and storage limits per top-level namespace for all GitLab tiers
## Timeline
-In order to achieve the group objectives, the following timelines must be met for [GitLab phased rollout](https://about.GitLab.com/handbook/product/GitLab-the-product/#experiment-beta-ga) of Tracing.
+In order to achieve the group objectives, the following timelines must be met for [GitLab phased rollout](https://about.gitlab.com/handbook/product/gitlab-the-product/#experiment-beta-ga) of Tracing.
- **Tracing Experiment Release**: 16.2
- **Tracing Beta Release**: 16.3
@@ -114,7 +114,7 @@ The scope of effort for GA would include two APIs:
### Authentication and Authorization
<!-- markdownlint-disable-next-line MD044 -->
-GitLab Observability Backend utilizes an [instance-wide trusted GitLab OAuth](https://docs.GitLab.com/ee/integration/OAuth_provider.html#create-an-instance-wide-application) token to perform a seamless OAuth flow that authenticates the GitLab user against the GitLab Observability Backend (GOB). GOB creates an auth session and stores the session identifier in an http-only, secure cookie. This mechanism has already been examined and approved by AppSec. Now that the Observability UI will be native within the UI hosted at GitLab.com, a few small adjustments must be made for authentication to work against the new UI domain vs the embedded iframe that we previously relied upon (GitLab.com instead of observe.gitLab.com).
+GitLab Observability Backend utilizes an [instance-wide trusted GitLab OAuth](../../../integration/oauth_provider.md#create-an-instance-wide-application) token to perform a seamless OAuth flow that authenticates the GitLab user against the GitLab Observability Backend (GOB). GOB creates an auth session and stores the session identifier in an http-only, secure cookie. This mechanism has already been examined and approved by AppSec. Now that the Observability UI will be native within the UI hosted at GitLab.com, a few small adjustments must be made for authentication to work against the new UI domain vs the embedded iframe that we previously relied upon (GitLab.com instead of observe.gitLab.com).
A hidden iframe will be embedded in the GitLab UI only on pages where GOB authenticated APIs must be consumed. This allows GitLab.com UI to directly communicate with GOB APIs without the need for an intermediate proxy layer in rails and without relying on the less secure shared token between proxy and GOB. This iframe will be hidden and its sole purpose is to perform the OAuth flow and assign the http-only secure cookie containing the GOB user session. This flow is seamless and can be fully hidden from the user since its a **trusted** GitLab OAuth flow. Sessions currently expire after 30 days which is configurable in GOB deployment terraform.
diff --git a/doc/architecture/blueprints/permissions/index.md b/doc/architecture/blueprints/permissions/index.md
index ab66733803d..c131c372550 100644
--- a/doc/architecture/blueprints/permissions/index.md
+++ b/doc/architecture/blueprints/permissions/index.md
@@ -179,6 +179,6 @@ Cons:
## Resources
-- [Custom Roles MVC announcement](https://github.blog/changelog/2021-10-27-enterprise-organizations-can-now-create-custom-repository-roles)
+- [Custom Roles MVC announcement](https://github.blog/changelog/2021-10-27-enterprise-organizations-can-now-create-custom-repository-roles/)
- [Custom Roles lunch and learn notes](https://docs.google.com/document/d/1x2ExhGJl2-nEibTaQE_7e5w2sDCRRHiakrBYDspPRqw/edit#)
- [Discovery on auto-generating documentation for permissions](https://gitlab.com/gitlab-org/gitlab/-/issues/352891#note_989392294).
diff --git a/doc/architecture/blueprints/remote_development/index.md b/doc/architecture/blueprints/remote_development/index.md
index d64fbfc8b55..cc66c3b5416 100644
--- a/doc/architecture/blueprints/remote_development/index.md
+++ b/doc/architecture/blueprints/remote_development/index.md
@@ -747,7 +747,7 @@ You can read more about this decision in this [issue](https://gitlab.com/gitlab-
## Links
-- [Remote Development direction](https://about.gitlab.com/direction/create/editor/remote_development)
+- [Remote Development direction](https://about.gitlab.com/direction/create/ide/remote_development/)
- [Remote Development presentation](https://docs.google.com/presentation/d/1XHH_ZilZPufQoWVWViv3evipI-BnAvRQrdvzlhBuumw/edit#slide=id.g131f2bb72e4_0_8)
- [Category Strategy epic](https://gitlab.com/groups/gitlab-org/-/epics/7419)
- [Minimal Maturity epic](https://gitlab.com/groups/gitlab-org/-/epics/9189)
@@ -760,5 +760,4 @@ You can read more about this decision in this [issue](https://gitlab.com/gitlab-
- [Browser runtime](https://gitlab.com/groups/gitlab-org/-/epics/8291)
- [GitLab-hosted infrastructure](https://gitlab.com/groups/gitlab-org/-/epics/8292)
- [Browser runtime spike](https://gitlab.com/gitlab-org/gitlab-web-ide/-/merge_requests/58)
-- [Ideal user journey](https://about.gitlab.com/direction/create/editor/remote_development/#ideal-user-journey)
- [Building container images for workspaces](https://gitlab.com/gitlab-org/gitlab/-/issues/396300#note_1375061754)
diff --git a/doc/ci/index.md b/doc/ci/index.md
index 8d9108e4fc5..4b93bec9c34 100644
--- a/doc/ci/index.md
+++ b/doc/ci/index.md
@@ -84,7 +84,7 @@ They can be hard-coded in your `.gitlab-ci.yml` file, project settings, or dynam
## Related topics
- [Five teams that made the switch to GitLab CI/CD](https://about.gitlab.com/blog/2019/04/25/5-teams-that-made-the-switch-to-gitlab-ci-cd/).
-- [Make the case for CI/CD in your organization](https://about.gitlab.com/devops-tools/github-vs-gitlab/).
+- [Make the case for CI/CD in your organization](https://about.gitlab.com/why-gitlab/).
- Learn how [Verizon reduced rebuilds](https://about.gitlab.com/blog/2019/02/14/verizon-customer-story/) from 30 days to under 8 hours with GitLab.
- Use the [GitLab Workflow VS Code extension](../user/project/repository/vscode.md) to
[validate your configuration](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#validate-gitlab-ci-configuration)
diff --git a/doc/ci/migration/circleci.md b/doc/ci/migration/circleci.md
index d70794639d1..5b171a646f5 100644
--- a/doc/ci/migration/circleci.md
+++ b/doc/ci/migration/circleci.md
@@ -8,9 +8,7 @@ type: index, howto
# Migrating from CircleCI **(FREE ALL)**
If you are currently using CircleCI, you can migrate your CI/CD pipelines to [GitLab CI/CD](../introduction/index.md),
-and start making use of all its powerful features. Check out our
-[CircleCI vs GitLab](https://about.gitlab.com/devops-tools/circle-ci-vs-gitlab/)
-comparison to see what's different.
+and start making use of all its powerful features.
We have collected several resources that you may find useful before starting to migrate.
diff --git a/doc/ci/yaml/signing_examples.md b/doc/ci/yaml/signing_examples.md
index e97ade891c4..3ace367dfd0 100644
--- a/doc/ci/yaml/signing_examples.md
+++ b/doc/ci/yaml/signing_examples.md
@@ -169,7 +169,7 @@ needed to make it safer to distribute and use open source software.
**Related topics**:
- [SLSA Provenance definition](https://slsa.dev/provenance/v1)
-- [npm Docs](https://docs.npmjs.com/generating-provenance-statements)
+- [npm Docs](https://docs.npmjs.com/generating-provenance-statements/)
- [npm Provenance RFC](https://github.com/npm/rfcs/blob/main/accepted/0049-link-packages-to-source-and-build.md#detailed-steps-to-publish)
### Generating provenance in GitLab CI/CD
diff --git a/doc/development/contributing/merge_request_workflow.md b/doc/development/contributing/merge_request_workflow.md
index 12862cb0993..273fa898c00 100644
--- a/doc/development/contributing/merge_request_workflow.md
+++ b/doc/development/contributing/merge_request_workflow.md
@@ -197,7 +197,7 @@ the contribution acceptance criteria below:
## Definition of done
If you contribute to GitLab, please know that changes involve more than just
-code. We use the following [definition of done](https://www.agilealliance.org/glossary/definition-of-done).
+code. We use the following [definition of done](https://www.agilealliance.org/glossary/definition-of-done/).
To reach the definition of done, the merge request must create no regressions and meet all these criteria:
- Verified as working in production on GitLab.com.
diff --git a/doc/development/database/clickhouse/gitlab_activity_data.md b/doc/development/database/clickhouse/gitlab_activity_data.md
index 6ba11b8afaf..7c30703a016 100644
--- a/doc/development/database/clickhouse/gitlab_activity_data.md
+++ b/doc/development/database/clickhouse/gitlab_activity_data.md
@@ -246,9 +246,9 @@ ORDER BY id;
A few changes compared to the PostgreSQL version:
-- `target_type` uses [an optimization](https://clickhouse.com/docs/en/sql-reference/data-types/lowcardinality/) for low-cardinality column values.
+- `target_type` uses [an optimization](https://clickhouse.com/docs/en/sql-reference/data-types/lowcardinality) for low-cardinality column values.
- `fingerprint` becomes an integer and leverages a performant integer-based hashing function such as xxHash64.
-- All columns get a default value, the 0 default value for the integer columns means no value. See the related [best practices](https://clickhouse.com/docs/en/cloud/bestpractices/avoid-nullable-columns/).
+- All columns get a default value, the 0 default value for the integer columns means no value. See the related [best practices](https://clickhouse.com/docs/en/cloud/bestpractices/avoid-nullable-columns).
- `NOT NULL` to ensure that we always use the default values when data is missing (different behavior compared to PostgreSQL).
- The "primary" key automatically becomes the `id` column due to the `ORDER BY` clause.
@@ -276,7 +276,7 @@ ClickHouse will eventually "replace" the rows with the same primary key in the b
SELECT * FROM events FINAL
```
-Adding `FINAL` to a query can have significant performance consequences, some of the issues are documented in the [ClickHouse documentation](https://clickhouse.com/docs/en/sql-reference/statements/select/from/#final-modifier).
+Adding `FINAL` to a query can have significant performance consequences, some of the issues are documented in the [ClickHouse documentation](https://clickhouse.com/docs/en/sql-reference/statements/select/from#final-modifier).
We should always expect duplicated values in the table, so we must take care of deduplication at query time.
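For context, the query-time deduplication described above can be exercised against a local ClickHouse instance over its HTTP interface. A minimal Ruby sketch, assuming a server on `localhost:8123` (the default HTTP port) and the `events` table from the example:

```ruby
# Illustration only: issues the deduplicating query over ClickHouse's HTTP API.
require 'net/http'
require 'uri'

uri = URI('http://localhost:8123/')
query = 'SELECT count() FROM events FINAL FORMAT JSON'

response = Net::HTTP.post(uri, query)
puts response.body # JSON payload with the deduplicated row count
```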
diff --git a/doc/development/database/clickhouse/index.md b/doc/development/database/clickhouse/index.md
index 8ca6240e0f1..52e9aecce4a 100644
--- a/doc/development/database/clickhouse/index.md
+++ b/doc/development/database/clickhouse/index.md
@@ -8,10 +8,10 @@ info: To determine the technical writer assigned to the Stage/Group associated w
## How it differs from PostgreSQL
-The [intro](https://clickhouse.com/docs/en/intro/) page is quite good to give an overview of ClickHouse.
+The [intro](https://clickhouse.com/docs/en/intro) page gives a good overview of ClickHouse.
ClickHouse has a lot of differences from traditional OLTP (online transaction processing) databases like PostgreSQL. The underlying architecture is a bit different, and the processing is a lot more CPU-bound than in traditional databases.
-ClickHouse is a log-centric database where immutability is a key component. The advantages of such approaches are well documented [[1]](https://www.odbms.org/2015/10/the-rise-of-immutable-data-stores/) however it also makes updates much harder. See ClickHouse [documentation](https://clickhouse.com/docs/en/guides/developer/mutations/) for operations that provide UPDATE/DELETE support. It is noticeable that these operations are supposed to be non-frequent.
+ClickHouse is a log-centric database where immutability is a key component. The advantages of such approaches are well documented [[1]](https://www.odbms.org/2015/10/the-rise-of-immutable-data-stores/), but they also make updates much harder. See the ClickHouse [documentation](https://clickhouse.com/docs/en/guides/developer/mutations) for operations that provide UPDATE/DELETE support. Note that these operations are expected to be infrequent.
This distinction is important while designing tables. Either:
@@ -20,7 +20,7 @@ This distinction is important while designing tables. Either:
## ACID compatibility
-ClickHouse has a slightly different overview of Transactional support, where the guarantees are applicable only up to a block of inserted data to a specific table. See [the Transactional (ACID) support](https://clickhouse.com/docs/en/guides/developer/transactional/) documentation for details.
+ClickHouse takes a slightly different approach to transactional support: the guarantees apply only to a single block of data inserted into a specific table. See [the Transactional (ACID) support](https://clickhouse.com/docs/en/guides/developer/transactional) documentation for details.
Multiple insertions in a single write should be avoided as transactional support across multiple tables is only covered in materialized views.
@@ -31,19 +31,19 @@ ClickHouse has some good blog posts covering [details of aggregations](https://a
It is highly recommended to read ["A practical introduction to primary indexes in ClickHouse"](https://clickhouse.com/docs/en/guides/improving-query-performance/sparse-primary-indexes/sparse-primary-indexes-intro) to get an understanding of indexes in ClickHouse.
-Particularly how database index design in ClickHouse [differs](https://clickhouse.com/docs/en/guides/improving-query-performance/sparse-primary-indexes/sparse-primary-indexes-design/#an-index-design-for-massive-data-scales) from those in transactional databases like PostgreSQL.
+In particular, note how database index design in ClickHouse [differs](https://clickhouse.com/docs/en/guides/improving-query-performance/sparse-primary-indexes/sparse-primary-indexes-design#an-index-design-for-massive-data-scales) from that in transactional databases like PostgreSQL.
Primary index design plays a very important role in query performance and should be chosen carefully. Almost all queries should rely on the primary index, as full data scans are bound to take longer.
-Read the documentation for [primary keys and indexes in queries](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/mergetree/#primary-keys-and-indexes-in-queries) to learn how indexes can affect query performance in MergeTree Table engines (default table engine in ClickHouse).
+Read the documentation for [primary keys and indexes in queries](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/mergetree#primary-keys-and-indexes-in-queries) to learn how indexes can affect query performance in MergeTree Table engines (default table engine in ClickHouse).
Secondary indexes in ClickHouse are different from what is available in other systems. They are also called data-skipping indexes as they are used to skip over a block of data. See the documentation for [data-skipping indexes](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/mergetree#table_engine-mergetree-data_skipping-indexes).
-ClickHouse also offers ["Dictionaries"](https://clickhouse.com/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts/) which can be used as external indexes. Dictionaries are loaded from memory and can be used to look up values on query runtime.
+ClickHouse also offers ["Dictionaries"](https://clickhouse.com/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts) which can be used as external indexes. Dictionaries are loaded into memory and can be used to look up values at query runtime.
## Data types & Partitioning
-ClickHouse offers SQL-compatible [data types](https://clickhouse.com/docs/en/sql-reference/data-types/) and few specialized data types like:
+ClickHouse offers SQL-compatible [data types](https://clickhouse.com/docs/en/sql-reference/data-types) and a few specialized data types, such as:
- [`LowCardinality`](https://clickhouse.com/docs/en/sql-reference/data-types/lowcardinality)
- [UUID](https://clickhouse.com/docs/en/sql-reference/data-types/uuid)
@@ -61,13 +61,13 @@ Suggested reads:
Sharding is a feature that allows splitting the data into multiple ClickHouse nodes to increase throughput and decrease latency. The sharding feature uses a distributed engine that is backed by local tables. The distributed engine is a "virtual" table that does not store any data. It is used as an interface to insert and query data.
-See [the ClickHouse documentation](https://clickhouse.com/docs/en/engines/table-engines/special/distributed/) and this section on [replication and sharding](https://clickhouse.com/docs/en/manage/replication-and-sharding/). ClickHouse can use either Zookeeper or its own compatible API via a component called [ClickHouse Keeper](https://clickhouse.com/docs/en/operations/clickhouse-keeper) to maintain consensus.
+See [the ClickHouse documentation](https://clickhouse.com/docs/en/engines/table-engines/special/distributed) and this section on [replication and sharding](https://clickhouse.com/docs/en/manage/replication-and-sharding/). ClickHouse can use either Zookeeper or its own compatible API via a component called [ClickHouse Keeper](https://clickhouse.com/docs/en/operations/clickhouse-keeper) to maintain consensus.
After nodes are set up, the cluster topology becomes transparent to clients, and both write and read queries can be issued to any node.
In most cases, clusters start with a fixed number of nodes (~ shards). [Rebalancing shards](https://clickhouse.com/docs/en/guides/sre/scaling-clusters) is operationally heavy and requires rigorous testing.
-Replication is supported by MergeTree Table engine, see the [replication section](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/) in documentation for details on how to define them.
+Replication is supported by the MergeTree table engine; see the [replication section](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication) of the documentation for details on how to define replicated tables.
ClickHouse relies on a distributed coordination component (either Zookeeper or ClickHouse Keeper) to track the participating nodes in the quorum. Replication is asynchronous and multi-leader. Inserts can be issued to any node and they can appear on other nodes with some latency. If desired, stickiness to a specific node can be used to make sure that reads observe the latest written data.
## Materialized views
@@ -94,12 +94,12 @@ Files: `users.xml` and `config.xml`.
| Topic | Security Requirement | Reason |
| ----- | -------------------- | ------ |
-| [`user_name/password`](https://clickhouse.com/docs/en/operations/settings/settings-users/#user-namepassword) | Usernames **must not** be blank. Passwords **must** use `password_sha256_hex` and **must not** be blank. | `plaintext` and `password_double_sha1_hex` are insecure. If username isn't specified, [`default` is used with no password](https://clickhouse.com/docs/en/operations/settings/settings-users/). |
-| [`access_management`](https://clickhouse.com/docs/en/operations/settings/settings-users/#access_management-user-setting) | Use Server [configuration files](https://clickhouse.com/docs/en/operations/configuration-files) `users.xml` and `config.xml`. Avoid SQL-driven workflow. | SQL-driven workflow implies that at least one user has `access_management` which can be avoided via configuration files. These files are easier to audit and monitor too, considering that ["You can't manage the same access entity by both configuration methods simultaneously."](https://clickhouse.com/docs/en/operations/access-rights/#access-control). |
-| [`user_name/networks`](https://clickhouse.com/docs/en/operations/settings/settings-users/#user-namenetworks) | At least one of `<ip>`, `<host>`, `<host_regexp>` **must** be set. Do not use `<ip>::/0</ip>` to open access for any network. | Network controls. ([Trust cautiously](https://about.gitlab.com/handbook/security/architecture/#trust-cautiously) principle) |
-| [`user_name/profile`](https://clickhouse.com/docs/en/operations/settings/settings-users/#user-nameprofile) | Use profiles to set similar properties across multiple users and set limits (from the user interface). | [Least privilege](https://about.gitlab.com/handbook/security/architecture/#assign-the-least-privilege-possible) principle and limits. |
-| [`user_name/quota`](https://clickhouse.com/docs/en/operations/settings/settings-users/#user-namequota) | Set quotas for users whenever possible. | Limit resource usage over a period of time or track the use of resources. |
-| [`user_name/databases`](https://clickhouse.com/docs/en/operations/settings/settings-users/#user-namedatabases) | Restrict access to data, and avoid users with full access. | [Least privilege](https://about.gitlab.com/handbook/security/architecture/#assign-the-least-privilege-possible) principle. |
+| [`user_name/password`](https://clickhouse.com/docs/en/operations/settings/settings-users#user-namepassword) | Usernames **must not** be blank. Passwords **must** use `password_sha256_hex` and **must not** be blank. | `plaintext` and `password_double_sha1_hex` are insecure. If username isn't specified, [`default` is used with no password](https://clickhouse.com/docs/en/operations/settings/settings-users). |
+| [`access_management`](https://clickhouse.com/docs/en/operations/settings/settings-users#access_management-user-setting) | Use Server [configuration files](https://clickhouse.com/docs/en/operations/configuration-files) `users.xml` and `config.xml`. Avoid SQL-driven workflow. | SQL-driven workflow implies that at least one user has `access_management` which can be avoided via configuration files. These files are easier to audit and monitor too, considering that ["You can't manage the same access entity by both configuration methods simultaneously."](https://clickhouse.com/docs/en/operations/access-rights#access-control). |
+| [`user_name/networks`](https://clickhouse.com/docs/en/operations/settings/settings-users#user-namenetworks) | At least one of `<ip>`, `<host>`, `<host_regexp>` **must** be set. Do not use `<ip>::/0</ip>` to open access for any network. | Network controls. ([Trust cautiously](https://about.gitlab.com/handbook/security/architecture/#trust-cautiously) principle) |
+| [`user_name/profile`](https://clickhouse.com/docs/en/operations/settings/settings-users#user-nameprofile) | Use profiles to set similar properties across multiple users and set limits (from the user interface). | [Least privilege](https://about.gitlab.com/handbook/security/architecture/#assign-the-least-privilege-possible) principle and limits. |
+| [`user_name/quota`](https://clickhouse.com/docs/en/operations/settings/settings-users#user-namequota) | Set quotas for users whenever possible. | Limit resource usage over a period of time or track the use of resources. |
+| [`user_name/databases`](https://clickhouse.com/docs/en/operations/settings/settings-users#user-namedatabases) | Restrict access to data, and avoid users with full access. | [Least privilege](https://about.gitlab.com/handbook/security/architecture/#assign-the-least-privilege-possible) principle. |
### Network
@@ -107,10 +107,10 @@ Files: `config.xml`
| Topic | Security Requirement | Reason |
| ----- | -------------------- | ------ |
-| [`mysql_port`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings/#server_configuration_parameters-mysql_port) | Disable MySQL access unless strictly necessary:<br/> `<!-- <mysql_port>9004</mysql_port> -->`. | Close unnecessary ports and features exposure. ([Defense in depth](https://about.gitlab.com/handbook/security/architecture/#implement-defense-in-depth) principle) |
-| [`postgresql_port`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings/#server_configuration_parameters-postgresql_port) | Disable PostgreSQL access unless strictly necessary:<br/> `<!-- <mysql_port>9005</mysql_port> -->` | Close unnecessary ports and features exposure. ([Defense in depth](https://about.gitlab.com/handbook/security/architecture/#implement-defense-in-depth) principle) |
-| [`http_port/https_port`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings/#http-porthttps-port) & [`tcp_port/tcp_port_secure`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings/#http-porthttps-port) | Configure [SSL-TLS](https://clickhouse.com/docs/en/guides/sre/configuring-ssl), and disable non SSL ports:<br/>`<!-- <http_port>8123</http_port> -->`<br/>`<!-- <tcp_port>9000</tcp_port> -->`<br/>and enable secure ports:<br/>`<https_port>8443</https_port>`<br/>`<tcp_port_secure>9440</tcp_port_secure>` | Encrypt data in transit. ([Defense in depth](https://about.gitlab.com/handbook/security/architecture/#implement-defense-in-depth) principle) |
-| [`interserver_http_host`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings/#interserver-http-host) | Disable `interserver_http_host` in favor of `interserver_https_host` (`<interserver_https_port>9010</interserver_https_port>`) if ClickHouse is configured as a cluster. | Encrypt data in transit. ([Defense in depth](https://about.gitlab.com/handbook/security/architecture/#implement-defense-in-depth) principle) |
+| [`mysql_port`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#server_configuration_parameters-mysql_port) | Disable MySQL access unless strictly necessary:<br/> `<!-- <mysql_port>9004</mysql_port> -->`. | Close unnecessary ports and features exposure. ([Defense in depth](https://about.gitlab.com/handbook/security/architecture/#implement-defense-in-depth) principle) |
+| [`postgresql_port`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#server_configuration_parameters-postgresql_port) | Disable PostgreSQL access unless strictly necessary:<br/> `<!-- <postgresql_port>9005</postgresql_port> -->` | Close unnecessary ports and features exposure. ([Defense in depth](https://about.gitlab.com/handbook/security/architecture/#implement-defense-in-depth) principle) |
+| [`http_port/https_port`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#http-porthttps-port) & [`tcp_port/tcp_port_secure`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#http-porthttps-port) | Configure [SSL-TLS](https://clickhouse.com/docs/en/guides/sre/configuring-ssl), and disable non SSL ports:<br/>`<!-- <http_port>8123</http_port> -->`<br/>`<!-- <tcp_port>9000</tcp_port> -->`<br/>and enable secure ports:<br/>`<https_port>8443</https_port>`<br/>`<tcp_port_secure>9440</tcp_port_secure>` | Encrypt data in transit. ([Defense in depth](https://about.gitlab.com/handbook/security/architecture/#implement-defense-in-depth) principle) |
+| [`interserver_http_host`](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#interserver-http-host) | Disable `interserver_http_host` in favor of `interserver_https_host` (`<interserver_https_port>9010</interserver_https_port>`) if ClickHouse is configured as a cluster. | Encrypt data in transit. ([Defense in depth](https://about.gitlab.com/handbook/security/architecture/#implement-defense-in-depth) principle) |
### Storage
@@ -125,7 +125,7 @@ Files: `config.xml`
| ----- | -------------------- | ------ |
| `logger` | `Log` and `errorlog` **must** be defined and writable by `clickhouse`. | Make sure logs are stored. |
| SIEM | If hosted on GitLab.com, the ClickHouse instance or cluster **must** report [logs to our SIEM](https://internal-handbook.gitlab.io/handbook/security/infrastructure_security_logging/tooling/devo/) (internal link). | [GitLab logs critical information system activity](https://about.gitlab.com/handbook/security/audit-logging-policy.html). |
-| Log sensitive data | Query masking rules **must** be used if sensitive data can be logged. See [example masking rules](#example-masking-rules). | [Column level encryption](https://clickhouse.com/docs/en/sql-reference/functions/encryption-functions/) can be used and leak sensitive data (keys) in logs. |
+| Log sensitive data | Query masking rules **must** be used if sensitive data can be logged. See [example masking rules](#example-masking-rules). | [Column level encryption](https://clickhouse.com/docs/en/sql-reference/functions/encryption-functions) can be used and may leak sensitive data (keys) in logs. |
#### Example masking rules
diff --git a/doc/development/fe_guide/design_tokens.md b/doc/development/fe_guide/design_tokens.md
index 9a1cc48c68f..78219809b05 100644
--- a/doc/development/fe_guide/design_tokens.md
+++ b/doc/development/fe_guide/design_tokens.md
@@ -95,7 +95,7 @@ The Design Tokens Format Module promotes a `*.token.json` extension standard for
### Transformations
-Our design tokens use [style-dictionary](https://amzn.github.io/style-dictionary) to convert design tokens into consumable file formats (CSS/SCSS/JavaScript/JSON).
+Our design tokens use [style-dictionary](https://amzn.github.io/style-dictionary/) to convert design tokens into consumable file formats (CSS/SCSS/JavaScript/JSON).
A parser makes [design tokens format properties](https://tr.designtokens.org/format/#design-token-properties) compatible with [style-dictionary design token attributes](https://amzn.github.io/style-dictionary/#/tokens?id=design-token-attributes).
diff --git a/doc/development/feature_flags/controls.md b/doc/development/feature_flags/controls.md
index d341cb3f1ba..b721707e07b 100644
--- a/doc/development/feature_flags/controls.md
+++ b/doc/development/feature_flags/controls.md
@@ -70,6 +70,8 @@ there for any exceptions while testing your feature after enabling the feature f
For these pre-production environments, it's strongly encouraged to run the command in
`#staging`, `#production`, or `#chatops-ops-test`, for improved visibility.
+#### Enabling the feature flag with percentage of time
+
To enable a feature for 25% of the time, run the following in Slack:
```shell
@@ -77,6 +79,11 @@ To enable a feature for 25% of the time, run the following in Slack:
/chatops run feature set new_navigation_bar 25 --random --staging
```
+NOTE:
+Percentage of time feature flags are deprecated in favor of [percentage of actors](#percentage-based-actor-selection).
+If you understand the consequences of using percentage of time feature flags, you can force it using
+`--ignore-random-deprecation-check`.
+
### Enabling a feature for GitLab.com
When a feature has successfully been
@@ -130,6 +137,11 @@ To enable a feature for 25% of the time, run the following in Slack:
/chatops run feature set new_navigation_bar 25 --random
```
+NOTE:
+Percentage of time feature flags are deprecated in favor of [percentage of actors](#percentage-based-actor-selection).
+If you understand the consequences of using percentage of time feature flags, you can force it using
+`--ignore-random-deprecation-check`.
+
This sets a feature flag to `true` based on the following formula:
```ruby
diff --git a/doc/development/feature_flags/index.md b/doc/development/feature_flags/index.md
index 13dccc485d1..f966395edb4 100644
--- a/doc/development/feature_flags/index.md
+++ b/doc/development/feature_flags/index.md
@@ -15,6 +15,9 @@ view [this feature flags information](../../operations/feature_flags.md) instead
WARNING:
All newly-introduced feature flags should be [disabled by default](https://about.gitlab.com/handbook/product-development-flow/feature-flag-lifecycle/#feature-flags-in-gitlab-development).
+WARNING:
+All newly-introduced feature flags should be [used with an actor](controls.md#percentage-based-actor-selection).
+
This document is the subject of continued work as part of an epic to [improve internal usage of feature flags](https://gitlab.com/groups/gitlab-org/-/epics/3551). Raise any suggestions as new issues and attach them to the epic.
For an [overview of the feature flag lifecycle](https://about.gitlab.com/handbook/product-development-flow/feature-flag-lifecycle/#feature-flag-lifecycle), or if you need help deciding [if you should use a feature flag](https://about.gitlab.com/handbook/product-development-flow/feature-flag-lifecycle/#when-to-use-feature-flags) or not, please see the [feature flag lifecycle](https://about.gitlab.com/handbook/product-development-flow/feature-flag-lifecycle/) handbook page.
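For illustration, an actor-scoped check in application code looks roughly like the following; the flag name and the `project` actor are placeholders, not part of this change:

```ruby
# Hypothetical flag; scoping the check to an actor (here, a project) is what
# makes percentage-of-actors rollouts deterministic per project.
def render_new_navigation?(project)
  Feature.enabled?(:my_feature_flag, project)
end
```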
diff --git a/doc/development/sidekiq/limited_capacity_worker.md b/doc/development/sidekiq/limited_capacity_worker.md
index 5efb9b16725..b1aff829d4d 100644
--- a/doc/development/sidekiq/limited_capacity_worker.md
+++ b/doc/development/sidekiq/limited_capacity_worker.md
@@ -34,17 +34,29 @@ class MyDummyWorker
end
```
-Additional to the regular worker, a cron worker must be defined as well to
-backfill the queue with jobs. the arguments passed to `perform_with_capacity`
-are passed to the `perform_work` method.
+To queue this worker, use
+`MyDummyWorker.perform_with_capacity(*args)`. The `*args` passed here are
+forwarded to the `perform_work` method. Because this job throttles and
+requeues itself, you are expected to pass the same `*args` on every
+invocation. In practice, this type of worker is usually run without
+arguments and instead consumes a workload stored elsewhere
+(for example, in PostgreSQL). This also means it is unsuitable for
+converting a normal Sidekiq workload that relies on arguments into a
+`LimitedCapacity::Worker`; to adopt it, you might need to re-architect
+your queue so the pending work is stored elsewhere.
+
+A common use case for this kind of worker is one that periodically consumes
+a separate queue of work (for example, rows in PostgreSQL). In that case,
+you need an additional cron worker to kick off the worker on a schedule.
+For example, with the following scheduler:
```ruby
class ScheduleMyDummyCronWorker
include ApplicationWorker
include CronjobQueue
- def perform(*args)
- MyDummyWorker.perform_with_capacity(*args)
+ def perform
+ MyDummyWorker.perform_with_capacity
end
end
```
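For context, a minimal sketch of how the doc's `MyDummyWorker` might consume a PostgreSQL-backed workload; the `WorkQueueItem` model is hypothetical, while `perform_work`, `remaining_work_count`, and `max_running_jobs` are the methods the `LimitedCapacity::Worker` contract expects:

```ruby
# Sketch only: WorkQueueItem stands in for whatever table holds the pending work.
class MyDummyWorker
  include ApplicationWorker
  include LimitedCapacity::Worker

  # Processes a single unit of work per running job.
  def perform_work(*)
    item = WorkQueueItem.pending.first
    return unless item

    item.process!
  end

  # Non-zero counts cause the framework to re-enqueue the worker.
  def remaining_work_count(*)
    WorkQueueItem.pending.limit(max_running_jobs + 1).count
  end

  # Upper bound on jobs running concurrently.
  def max_running_jobs
    5
  end
end
```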
diff --git a/lib/api/commits.rb b/lib/api/commits.rb
index 069d117db17..7256d9c6d93 100644
--- a/lib/api/commits.rb
+++ b/lib/api/commits.rb
@@ -4,6 +4,7 @@ require 'mime/types'
module API
class Commits < ::API::Base
include PaginationParams
+ include Helpers::Unidiff
feature_category :source_code_management
@@ -274,6 +275,7 @@ module API
params do
requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag'
use :pagination
+ use :with_unidiff
end
get ':id/repository/commits/:sha/diff', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS, urgency: :low do
commit = user_project.commit(params[:sha])
@@ -282,7 +284,7 @@ module API
raw_diffs = ::Kaminari.paginate_array(commit.diffs(expanded: true).diffs.to_a)
- present paginate(raw_diffs), with: Entities::Diff
+ present paginate(raw_diffs), with: Entities::Diff, enable_unidiff: declared_params[:unidiff]
end
desc "Get a commit's comments" do
diff --git a/lib/api/entities/diff.rb b/lib/api/entities/diff.rb
index b9538893d32..cc53736a5b1 100644
--- a/lib/api/entities/diff.rb
+++ b/lib/api/entities/diff.rb
@@ -3,10 +3,12 @@
module API
module Entities
class Diff < Grape::Entity
- expose :json_safe_diff, as: :diff, documentation: {
+ expose :diff, documentation: {
type: 'string',
example: '@@ -71,6 +71,8 @@\n...'
- }
+ } do |instance, options|
+ options[:enable_unidiff] == true ? instance.unidiff : instance.json_safe_diff
+ end
expose :new_path, documentation: { type: 'string', example: 'doc/update/5.4-to-6.0.md' }
expose :old_path, documentation: { type: 'string', example: 'doc/update/5.4-to-6.0.md' }
expose :a_mode, documentation: { type: 'string', example: '100755' }
diff --git a/lib/api/helpers/unidiff.rb b/lib/api/helpers/unidiff.rb
new file mode 100644
index 00000000000..aabc0acd454
--- /dev/null
+++ b/lib/api/helpers/unidiff.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module API
+ module Helpers
+ module Unidiff
+ extend ActiveSupport::Concern
+
+ included do
+ helpers do
+ params :with_unidiff do
+ optional :unidiff, type: ::Grape::API::Boolean, default: false, desc: 'A diff in a Unified diff format'
+ end
+ end
+ end
+ end
+ end
+end
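Taken together with the endpoint changes above and below, the opt-in pattern for any diff-returning endpoint is: include the helper, declare the parameter, and forward the flag to the entity. A condensed, hypothetical sketch:

```ruby
# Condensed sketch of the pattern; SomeDiffEndpoint and find_diffs are placeholders.
module API
  class SomeDiffEndpoint < ::API::Base
    include Helpers::Unidiff

    params do
      use :with_unidiff # adds the optional `unidiff` boolean parameter
    end
    get ':id/some_diffs' do
      diffs = find_diffs # placeholder for the real lookup
      present diffs, with: Entities::Diff, enable_unidiff: declared_params[:unidiff]
    end
  end
end
```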
diff --git a/lib/api/import_bitbucket_server.rb b/lib/api/import_bitbucket_server.rb
index f315ae5afff..1635e5ab07b 100644
--- a/lib/api/import_bitbucket_server.rb
+++ b/lib/api/import_bitbucket_server.rb
@@ -40,6 +40,8 @@ module API
requires :bitbucket_server_repo, type: String, desc: 'BitBucket Server Repository Name'
optional :new_name, type: String, desc: 'New repo name'
optional :new_namespace, type: String, desc: 'Namespace to import repo into'
+ optional :timeout_strategy, type: String, values: ::ProjectImportData::TIMEOUT_STRATEGIES,
+ desc: 'Strategy for behavior on timeouts'
end
post 'import/bitbucket_server' do
diff --git a/lib/api/import_github.rb b/lib/api/import_github.rb
index ab7ac6624a8..475a03621e8 100644
--- a/lib/api/import_github.rb
+++ b/lib/api/import_github.rb
@@ -62,6 +62,8 @@ module API
requires :target_namespace, type: String, allow_blank: false, desc: 'Namespace or group to import repository into'
optional :github_hostname, type: String, desc: 'Custom GitHub enterprise hostname'
optional :optional_stages, type: Hash, desc: 'Optional stages of import to be performed'
+ optional :timeout_strategy, type: String, values: ::ProjectImportData::TIMEOUT_STRATEGIES,
+ desc: 'Strategy for behavior on timeouts'
optional :additional_access_tokens,
type: Array[String],
coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce,
diff --git a/lib/api/merge_request_diffs.rb b/lib/api/merge_request_diffs.rb
index e7193035ce0..9b8468b6efb 100644
--- a/lib/api/merge_request_diffs.rb
+++ b/lib/api/merge_request_diffs.rb
@@ -4,6 +4,7 @@ module API
# MergeRequestDiff API
class MergeRequestDiffs < ::API::Base
include PaginationParams
+ include Helpers::Unidiff
before { authenticate! }
@@ -39,12 +40,13 @@ module API
params do
requires :merge_request_iid, type: Integer, desc: 'The internal ID of the merge request'
requires :version_id, type: Integer, desc: 'The ID of the merge request diff version'
+ use :with_unidiff
end
get ":id/merge_requests/:merge_request_iid/versions/:version_id", urgency: :low do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
- present_cached merge_request.merge_request_diffs.find(params[:version_id]), with: Entities::MergeRequestDiffFull, cache_context: nil
+ present_cached merge_request.merge_request_diffs.find(params[:version_id]), with: Entities::MergeRequestDiffFull, cache_context: nil, enable_unidiff: declared_params[:unidiff]
end
end
end
diff --git a/lib/api/merge_requests.rb b/lib/api/merge_requests.rb
index 1c0b9c56aa7..6b67e3aba19 100644
--- a/lib/api/merge_requests.rb
+++ b/lib/api/merge_requests.rb
@@ -3,6 +3,7 @@
module API
class MergeRequests < ::API::Base
include PaginationParams
+ include Helpers::Unidiff
CONTEXT_COMMITS_POST_LIMIT = 20
@@ -505,6 +506,9 @@ module API
]
tags %w[merge_requests]
end
+ params do
+ use :with_unidiff
+ end
get ':id/merge_requests/:merge_request_iid/changes', feature_category: :code_review_workflow, urgency: :low do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
@@ -512,7 +516,8 @@ module API
with: Entities::MergeRequestChanges,
current_user: current_user,
project: user_project,
- access_raw_diffs: to_boolean(params.fetch(:access_raw_diffs, false))
+ access_raw_diffs: to_boolean(params.fetch(:access_raw_diffs, false)),
+ enable_unidiff: declared_params[:unidiff]
end
desc 'Get the merge request diffs' do
@@ -526,11 +531,12 @@ module API
end
params do
use :pagination
+ use :with_unidiff
end
get ':id/merge_requests/:merge_request_iid/diffs', feature_category: :code_review_workflow, urgency: :low do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
- present paginate(merge_request.merge_request_diff.paginated_diffs(params[:page], params[:per_page])).diffs, with: Entities::Diff
+ present paginate(merge_request.merge_request_diff.paginated_diffs(params[:page], params[:per_page])).diffs, with: Entities::Diff, enable_unidiff: declared_params[:unidiff]
end
desc 'Get single merge request pipelines' do
diff --git a/lib/api/repositories.rb b/lib/api/repositories.rb
index 98316bf1d4b..0f1426dde99 100644
--- a/lib/api/repositories.rb
+++ b/lib/api/repositories.rb
@@ -5,6 +5,7 @@ require 'mime/types'
module API
class Repositories < ::API::Base
include PaginationParams
+ include Helpers::Unidiff
content_type :txt, 'text/plain'
@@ -202,6 +203,7 @@ module API
documentation: { example: 'feature' }
optional :from_project_id, type: Integer, desc: 'The project to compare from', documentation: { example: 1 }
optional :straight, type: Boolean, desc: 'Comparison method, `true` for direct comparison between `from` and `to` (`from`..`to`), `false` to compare using merge base (`from`...`to`)', default: false
+ use :with_unidiff
end
get ':id/repository/compare', urgency: :low do
target_project = fetch_target_project(current_user, user_project, params)
@@ -220,7 +222,7 @@ module API
compare = CompareService.new(user_project, params[:to]).execute(target_project, params[:from], straight: params[:straight])
if compare
- present compare, with: Entities::Compare, current_user: current_user
+ present compare, with: Entities::Compare, current_user: current_user, enable_unidiff: declared_params[:unidiff]
else
not_found!("Ref")
end
diff --git a/lib/gitlab/bitbucket_server_import/project_creator.rb b/lib/gitlab/bitbucket_server_import/project_creator.rb
index ddc678abdd8..be60e431b80 100644
--- a/lib/gitlab/bitbucket_server_import/project_creator.rb
+++ b/lib/gitlab/bitbucket_server_import/project_creator.rb
@@ -3,9 +3,9 @@
module Gitlab
module BitbucketServerImport
class ProjectCreator
- attr_reader :project_key, :repo_slug, :repo, :name, :namespace, :current_user, :session_data
+ attr_reader :project_key, :repo_slug, :repo, :name, :namespace, :current_user, :session_data, :timeout_strategy
- def initialize(project_key, repo_slug, repo, name, namespace, current_user, session_data)
+ def initialize(project_key, repo_slug, repo, name, namespace, current_user, session_data, timeout_strategy)
@project_key = project_key
@repo_slug = repo_slug
@repo = repo
@@ -13,6 +13,7 @@ module Gitlab
@namespace = namespace
@current_user = current_user
@session_data = session_data
+ @timeout_strategy = timeout_strategy
end
def execute
@@ -28,7 +29,7 @@ module Gitlab
import_url: repo.clone_url,
import_data: {
credentials: session_data,
- data: { project_key: project_key, repo_slug: repo_slug }
+ data: { project_key: project_key, repo_slug: repo_slug, timeout_strategy: timeout_strategy }
},
skip_wiki: true
).execute
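For illustration, the creator is now constructed with a trailing `timeout_strategy` argument, matching the updated controller spec further down; the variable names here are placeholders:

```ruby
# Sketch: only the trailing timeout_strategy argument is new.
Gitlab::BitbucketServerImport::ProjectCreator.new(
  project_key,
  repo_slug,
  repo,
  project_name,
  current_user.namespace,
  current_user,
  session_data,
  'pessimistic' # timeout_strategy; 'pessimistic' is the value used in the spec below
).execute
```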
diff --git a/lib/gitlab/diff/file.rb b/lib/gitlab/diff/file.rb
index d5c0b187f92..de7be6efd72 100644
--- a/lib/gitlab/diff/file.rb
+++ b/lib/gitlab/diff/file.rb
@@ -7,7 +7,7 @@ module Gitlab
attr_reader :diff, :repository, :diff_refs, :fallback_diff_refs, :unique_identifier
- delegate :new_file?, :deleted_file?, :renamed_file?,
+ delegate :new_file?, :deleted_file?, :renamed_file?, :unidiff,
:old_path, :new_path, :a_mode, :b_mode, :mode_changed?,
:submodule?, :expanded?, :too_large?, :collapsed?, :line_count, :has_binary_notice?, to: :diff, prefix: false
diff --git a/lib/gitlab/git/diff.rb b/lib/gitlab/git/diff.rb
index 0694e4d0f78..743bac62764 100644
--- a/lib/gitlab/git/diff.rb
+++ b/lib/gitlab/git/diff.rb
@@ -33,7 +33,7 @@ module Gitlab
SERIALIZE_KEYS = %i[diff new_path old_path a_mode b_mode new_file renamed_file deleted_file too_large].freeze
- BINARY_NOTICE_PATTERN = %r{Binary files a\/(.*) and b\/(.*) differ}
+ BINARY_NOTICE_PATTERN = %r{Binary files (.*) and (.*) differ}
class << self
def between(repo, head, base, options = {}, *paths)
@@ -183,6 +183,16 @@ module Gitlab
a_mode == '160000' || b_mode == '160000'
end
+ def unidiff
+ return diff if diff.blank?
+ return json_safe_diff if detect_binary?(@diff) || has_binary_notice?
+
+ old_path_header = new_file? ? '/dev/null' : "a/#{old_path}"
+ new_path_header = deleted_file? ? '/dev/null' : "b/#{new_path}"
+
+ "--- #{old_path_header}\n+++ #{new_path_header}\n" + diff
+ end
+
def line_count
@line_count ||= Util.count_lines(@diff)
end
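To make the header logic of the new `unidiff` method concrete, here is a small standalone illustration (a plain method standing in for `Gitlab::Git::Diff`, not the class itself):

```ruby
# Standalone illustration of the header-prefixing behaviour added above.
def unidiff_for(diff:, old_path:, new_path:, new_file: false, deleted_file: false)
  old_path_header = new_file ? '/dev/null' : "a/#{old_path}"
  new_path_header = deleted_file ? '/dev/null' : "b/#{new_path}"

  "--- #{old_path_header}\n+++ #{new_path_header}\n" + diff
end

puts unidiff_for(
  diff: "@@ -1 +1 @@\n-old line\n+new line\n",
  old_path: 'README.md',
  new_path: 'README.md'
)
# Output:
# --- a/README.md
# +++ b/README.md
# @@ -1 +1 @@
# -old line
# +new line
```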
diff --git a/lib/gitlab/github_import/settings.rb b/lib/gitlab/github_import/settings.rb
index 73a5f49a9e3..edf40cb6afb 100644
--- a/lib/gitlab/github_import/settings.rb
+++ b/lib/gitlab/github_import/settings.rb
@@ -62,7 +62,10 @@ module Gitlab
)
import_data = project.create_or_update_import_data(
- data: { optional_stages: optional_stages },
+ data: {
+ optional_stages: optional_stages,
+ timeout_strategy: user_settings[:timeout_strategy]
+ },
credentials: credentials
)
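For reference, the data this method now persists has roughly the following shape; only the keys come from this change, and the concrete values are placeholders:

```ruby
# Placeholder values; timeout_strategy is expected to be one of
# ProjectImportData::TIMEOUT_STRATEGIES, and the stage flags are illustrative.
project.create_or_update_import_data(
  data: {
    optional_stages: { attachments_import: false },
    timeout_strategy: 'pessimistic'
  },
  credentials: credentials
)
```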
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 32645b52ce7..f98cb46dd78 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -25968,6 +25968,9 @@ msgstr ""
msgid "IssueBoards|No matching boards found"
msgstr ""
+msgid "IssueBoards|Select board"
+msgstr ""
+
msgid "IssueBoards|Some of your boards are hidden, add a license to see them again."
msgstr ""
@@ -48103,6 +48106,9 @@ msgstr ""
msgid "This %{issuable} is locked. Only project members can comment."
msgstr ""
+msgid "This %{issuable} would exceed the maximum number of linked %{issuables} (%{limit})."
+msgstr ""
+
msgid "This %{noteableTypeText} is %{confidentialLinkStart}confidential%{confidentialLinkEnd} and %{lockedLinkStart}locked%{lockedLinkEnd}."
msgstr ""
@@ -48304,9 +48310,6 @@ msgstr ""
msgid "This epic does not exist or you don't have sufficient permission."
msgstr ""
-msgid "This epic would exceed maximum number of related epics."
-msgstr ""
-
msgid "This feature requires local storage to be enabled"
msgstr ""
@@ -52568,6 +52571,9 @@ msgstr ""
msgid "Vulnerability|Explain this vulnerability and how to mitigate it with AI"
msgstr ""
+msgid "Vulnerability|Explain vulnerability"
+msgstr ""
+
msgid "Vulnerability|External Security Report"
msgstr ""
@@ -52715,9 +52721,6 @@ msgstr ""
msgid "Vulnerability|Training not available for this vulnerability."
msgstr ""
-msgid "Vulnerability|Try it out"
-msgstr ""
-
msgid "Vulnerability|URL:"
msgstr ""
@@ -53531,9 +53534,6 @@ msgstr ""
msgid "Work items are already linked"
msgstr ""
-msgid "WorkItems|This work item would exceed the maximum number of linked items."
-msgstr ""
-
msgid "WorkItem|%{count} more assignees"
msgstr ""
diff --git a/qa/qa/page/component/issue_board/show.rb b/qa/qa/page/component/issue_board/show.rb
index 2259a65b546..41bb33ed943 100644
--- a/qa/qa/page/component/issue_board/show.rb
+++ b/qa/qa/page/component/issue_board/show.rb
@@ -20,7 +20,6 @@ module QA
view 'app/assets/javascripts/boards/components/boards_selector.vue' do
element :boards_dropdown
- element :boards_dropdown_content
element :create_new_board_button
end
@@ -47,10 +46,6 @@ module QA
find_element(:boards_dropdown)
end
- def boards_dropdown_content
- find_element(:boards_dropdown_content)
- end
-
def boards_list_cards_area_with_index(index)
wait_boards_list_finish_loading do
within_element_by_index(:board_list, index) do
diff --git a/spec/controllers/import/bitbucket_server_controller_spec.rb b/spec/controllers/import/bitbucket_server_controller_spec.rb
index b2a56423253..3266c4d4d39 100644
--- a/spec/controllers/import/bitbucket_server_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_server_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
let(:repo_slug) { 'some-repo' }
let(:repo_id) { "#{project_key}/#{repo_slug}" }
let(:client) { instance_double(BitbucketServer::Client) }
+ let(:timeout_strategy) { "pessimistic" }
def assign_session_tokens
session[:bitbucket_server_url] = 'http://localhost:7990'
@@ -44,7 +45,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
it 'returns the new project' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
post :create, params: { repo_id: repo_id }, format: :json
@@ -57,7 +58,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
it 'successfully creates a project' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
post :create, params: { repo_id: repo_id }, format: :json
@@ -88,7 +89,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
it 'returns an error when the project cannot be saved' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: build(:project)))
post :create, params: { repo_id: repo_id }, format: :json
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index e4e1772f08e..153f466fa57 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -89,6 +89,7 @@ RSpec.describe 'Database schema', feature_category: :database do
oauth_applications: %w[owner_id],
p_ci_builds: %w[erased_by_id trigger_request_id partition_id],
p_batched_git_ref_updates_deletions: %w[project_id partition_id],
+ p_ci_finished_build_ch_sync_events: %w[build_id],
product_analytics_events_experimental: %w[event_id txn_id user_id],
project_build_artifacts_size_refreshes: %w[last_job_artifact_id],
project_data_transfers: %w[project_id namespace_id],
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index 61f91adee32..c86d4c260ee 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -292,5 +292,29 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
+
+ context 'when inviting a registered user by a secondary email address' do
+ let(:user) { create(:user) }
+ let(:secondary_email) { create(:email, user: user) }
+
+ before do
+ create(:group_member, :invited, group: group, invite_email: secondary_email.email, created_by: owner)
+ gitlab_sign_in(user)
+ end
+
+ it 'does not accept the pending invitation and does not redirect to the groups activity path' do
+ expect(page).not_to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(group.reload.users).not_to include(user)
+ end
+
+ context 'when the secondary email address is confirmed' do
+ let(:secondary_email) { create(:email, :confirmed, user: user) }
+
+ it 'accepts the pending invitation and redirects to the groups activity path' do
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(group.reload.users).to include(user)
+ end
+ end
+ end
end
end
diff --git a/spec/frontend/alert_spec.js b/spec/frontend/alert_spec.js
index 1ae8373016b..de3093c6c19 100644
--- a/spec/frontend/alert_spec.js
+++ b/spec/frontend/alert_spec.js
@@ -271,6 +271,74 @@ describe('Flash', () => {
expect(findTextContent()).toBe('message 1 message 2');
});
});
+
+ describe('with message links', () => {
+ const findAlertMessageLinks = () =>
+ Array.from(document.querySelectorAll('.flash-container a'));
+
+ it('creates a link', () => {
+ alert = createAlert({
+ message: 'Read more at %{exampleLinkStart}example site%{exampleLinkEnd}.',
+ messageLinks: {
+ exampleLink: 'https://example.com',
+ },
+ });
+ const messageLinks = findAlertMessageLinks();
+
+ expect(messageLinks).toHaveLength(1);
+ const link = messageLinks.at(0);
+ expect(link.textContent).toBe('example site');
+ expect(link.getAttribute('href')).toBe('https://example.com');
+ });
+
+ it('creates multiple links', () => {
+ alert = createAlert({
+ message:
+ 'Read more at %{exampleLinkStart}example site%{exampleLinkEnd}, or on %{docsLinkStart}the documentation%{docsLinkEnd}.',
+ messageLinks: {
+ exampleLink: 'https://example.com',
+ docsLink: 'https://docs.example.com',
+ },
+ });
+ const messageLinks = findAlertMessageLinks();
+
+ expect(messageLinks).toHaveLength(2);
+ const [firstLink, secondLink] = messageLinks;
+ expect(firstLink.textContent).toBe('example site');
+ expect(firstLink.getAttribute('href')).toBe('https://example.com');
+ expect(secondLink.textContent).toBe('the documentation');
+ expect(secondLink.getAttribute('href')).toBe('https://docs.example.com');
+ });
+
+ it('allows passing more props to gl-link', () => {
+ alert = createAlert({
+ message: 'Read more at %{exampleLinkStart}example site%{exampleLinkEnd}.',
+ messageLinks: {
+ exampleLink: {
+ href: 'https://example.com',
+ target: '_blank',
+ },
+ },
+ });
+ const messageLinks = findAlertMessageLinks();
+
+ expect(messageLinks).toHaveLength(1);
+ const link = messageLinks.at(0);
+ expect(link.textContent).toBe('example site');
+ expect(link.getAttribute('href')).toBe('https://example.com');
+ expect(link.getAttribute('target')).toBe('_blank');
+ });
+
+ it('does not create any links when given an empty messageLinks object', () => {
+ alert = createAlert({
+ message: 'Read more at %{exampleLinkStart}example site%{exampleLinkEnd}.',
+ messageLinks: {},
+ });
+ const messageLinks = findAlertMessageLinks();
+
+ expect(messageLinks).toHaveLength(0);
+ });
+ });
});
});
});
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index fa18b47cf54..0a628af9939 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlLoadingIcon, GlDropdownSectionHeader } from '@gitlab/ui';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
// eslint-disable-next-line no-restricted-imports
@@ -13,7 +13,7 @@ import projectRecentBoardsQuery from '~/boards/graphql/project_recent_boards.que
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
mockBoard,
mockGroupAllBoardsResponse,
@@ -47,17 +47,11 @@ describe('BoardsSelector', () => {
});
};
- const fillSearchBox = (filterTerm) => {
- const searchBox = wrapper.findComponent({ ref: 'searchBox' });
- const searchBoxInput = searchBox.find('input');
- searchBoxInput.setValue(filterTerm);
- searchBoxInput.trigger('input');
- };
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
- const getDropdownItems = () => wrapper.findAllByTestId('dropdown-item');
- const getDropdownHeaders = () => wrapper.findAllComponents(GlDropdownSectionHeader);
- const getLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const fillSearchBox = async (filterTerm) => {
+ await findDropdown().vm.$emit('search', filterTerm);
+ };
const projectBoardsQueryHandlerSuccess = jest
.fn()
@@ -96,7 +90,7 @@ describe('BoardsSelector', () => {
[groupRecentBoardsQuery, groupRecentBoardsQueryHandlerSuccess],
]);
- wrapper = mountExtended(BoardsSelector, {
+ wrapper = shallowMountExtended(BoardsSelector, {
store,
apolloProvider: fakeApollo,
propsData: {
@@ -142,13 +136,19 @@ describe('BoardsSelector', () => {
});
it('shows loading spinner', async () => {
+ createComponent({
+ provide: {
+ isApolloBoard: true,
+ },
+ props: {
+ isCurrentBoardLoading: true,
+ },
+ });
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await nextTick();
- expect(getLoadingIcon().exists()).toBe(true);
- expect(getDropdownHeaders()).toHaveLength(0);
- expect(getDropdownItems()).toHaveLength(0);
+ expect(findDropdown().props('loading')).toBe(true);
});
});
@@ -158,7 +158,7 @@ describe('BoardsSelector', () => {
await nextTick();
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await nextTick();
});
@@ -167,9 +167,8 @@ describe('BoardsSelector', () => {
expect(projectBoardsQueryHandlerSuccess).toHaveBeenCalled();
});
- it('hides loading spinner', async () => {
- await nextTick();
- expect(getLoadingIcon().exists()).toBe(false);
+ it('hides loading spinner', () => {
+ expect(findDropdown().props('loading')).toBe(false);
});
describe('filtering', () => {
@@ -178,25 +177,26 @@ describe('BoardsSelector', () => {
});
it('shows all boards without filtering', () => {
- expect(getDropdownItems()).toHaveLength(boards.length + recentIssueBoards.length);
+ expect(findDropdown().props('items')[0].text).toBe('Recent');
+ expect(findDropdown().props('items')[0].options).toHaveLength(recentIssueBoards.length);
+ expect(findDropdown().props('items')[1].text).toBe('All');
+ expect(findDropdown().props('items')[1].options).toHaveLength(
+ boards.length - recentIssueBoards.length,
+ );
});
it('shows only matching boards when filtering', async () => {
const filterTerm = 'board1';
const expectedCount = boards.filter((board) => board.name.includes(filterTerm)).length;
- fillSearchBox(filterTerm);
-
- await nextTick();
- expect(getDropdownItems()).toHaveLength(expectedCount);
+ await fillSearchBox(filterTerm);
+ expect(findDropdown().props('items')).toHaveLength(expectedCount);
});
it('shows message if there are no matching boards', async () => {
- fillSearchBox('does not exist');
+ await fillSearchBox('does not exist');
- await nextTick();
- expect(getDropdownItems()).toHaveLength(0);
- expect(wrapper.text().includes('No matching boards found')).toBe(true);
+ expect(findDropdown().props('noResultsText')).toBe('No matching boards found');
});
});
@@ -204,14 +204,18 @@ describe('BoardsSelector', () => {
it('shows only when boards are greater than 10', async () => {
await nextTick();
expect(projectRecentBoardsQueryHandlerSuccess).toHaveBeenCalled();
- expect(getDropdownHeaders()).toHaveLength(2);
+
+ expect(findDropdown().props('items')).toHaveLength(2);
+ expect(findDropdown().props('items')[0].text).toBe('Recent');
+ expect(findDropdown().props('items')[1].text).toBe('All');
});
it('does not show when boards are less than 10', async () => {
createComponent({ projectBoardsQueryHandler: smallBoardsQueryHandlerSuccess });
await nextTick();
- expect(getDropdownHeaders()).toHaveLength(0);
+
+ expect(findDropdown().props('items')).toHaveLength(0);
});
it('does not show when recentIssueBoards api returns empty array', async () => {
@@ -220,14 +224,14 @@ describe('BoardsSelector', () => {
});
await nextTick();
- expect(getDropdownHeaders()).toHaveLength(0);
+ expect(findDropdown().props('items')).toHaveLength(0);
});
it('does not show when search is active', async () => {
fillSearchBox('Random string');
await nextTick();
- expect(getDropdownHeaders()).toHaveLength(0);
+ expect(findDropdown().props('items')).toHaveLength(0);
});
});
});
@@ -248,7 +252,7 @@ describe('BoardsSelector', () => {
await nextTick();
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await nextTick();
@@ -272,7 +276,7 @@ describe('BoardsSelector', () => {
await nextTick();
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await waitForPromises();
@@ -286,6 +290,7 @@ describe('BoardsSelector', () => {
createStore();
createComponent({ provide: { multipleIssueBoardsAvailable: true } });
expect(findDropdown().exists()).toBe(true);
+ expect(findDropdown().props('toggleText')).toBe('Select board');
});
});
@@ -296,6 +301,7 @@ describe('BoardsSelector', () => {
provide: { multipleIssueBoardsAvailable: false, hasMissingBoards: true },
});
expect(findDropdown().exists()).toBe(true);
+ expect(findDropdown().props('toggleText')).toBe('Select board');
});
});
@@ -317,6 +323,7 @@ describe('BoardsSelector', () => {
provide: { isApolloBoard: true },
});
expect(findDropdown().props('loading')).toBe(true);
+ expect(findDropdown().props('toggleText')).toBe('Select board');
});
});
});
diff --git a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
index 1cbb1a714c9..3628af31aa1 100644
--- a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
+++ b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
@@ -1,16 +1,8 @@
-import {
- GlLoadingIcon,
- GlTable,
- GlLink,
- GlBadge,
- GlPagination,
- GlModal,
- GlFormCheckbox,
-} from '@gitlab/ui';
+import { GlLoadingIcon, GlTable, GlLink, GlPagination, GlModal, GlFormCheckbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import getJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import waitForPromises from 'helpers/wait_for_promises';
import JobArtifactsTable from '~/ci/artifacts/components/job_artifacts_table.vue';
import ArtifactsTableRowDetails from '~/ci/artifacts/components/artifacts_table_row_details.vue';
@@ -59,13 +51,13 @@ describe('JobArtifactsTable component', () => {
const findStatuses = () => wrapper.findAllByTestId('job-artifacts-job-status');
const findSuccessfulJobStatus = () => findStatuses().at(0);
- const findFailedJobStatus = () => findStatuses().at(1);
+ const findCiBadgeLink = () => findSuccessfulJobStatus().findComponent(CiBadgeLink);
const findLinks = () => wrapper.findAllComponents(GlLink);
const findJobLink = () => findLinks().at(0);
const findPipelineLink = () => findLinks().at(1);
- const findRefLink = () => findLinks().at(2);
- const findCommitLink = () => findLinks().at(3);
+ const findCommitLink = () => findLinks().at(2);
+ const findRefLink = () => findLinks().at(3);
const findSize = () => wrapper.findByTestId('job-artifacts-size');
const findCreated = () => wrapper.findByTestId('job-artifacts-created');
@@ -209,13 +201,13 @@ describe('JobArtifactsTable component', () => {
});
it('shows the job status as an icon for a successful job', () => {
- expect(findSuccessfulJobStatus().findComponent(CiIcon).exists()).toBe(true);
- expect(findSuccessfulJobStatus().findComponent(GlBadge).exists()).toBe(false);
- });
-
- it('shows the job status as a badge for other job statuses', () => {
- expect(findFailedJobStatus().findComponent(GlBadge).exists()).toBe(true);
- expect(findFailedJobStatus().findComponent(CiIcon).exists()).toBe(false);
+ expect(findCiBadgeLink().props()).toMatchObject({
+ status: {
+ group: 'success',
+ },
+ size: 'sm',
+ showText: false,
+ });
});
it('shows links to the job, pipeline, ref, and commit', () => {
diff --git a/spec/frontend/ci/job_details/components/job_header_spec.js b/spec/frontend/ci/job_details/components/job_header_spec.js
index c37d44d67ba..39b130ef414 100644
--- a/spec/frontend/ci/job_details/components/job_header_spec.js
+++ b/spec/frontend/ci/job_details/components/job_header_spec.js
@@ -34,17 +34,15 @@ describe('Header CI Component', () => {
const findUserLink = () => wrapper.findComponent(GlAvatarLink);
const findSidebarToggleBtn = () => wrapper.findComponent(GlButton);
const findStatusTooltip = () => wrapper.findComponent(GlTooltip);
- const findActionButtons = () => wrapper.findByTestId('job-header-action-buttons');
const findJobName = () => wrapper.findByTestId('job-name');
- const createComponent = (props, slots) => {
+ const createComponent = (props) => {
wrapper = extendedWrapper(
shallowMount(JobHeader, {
propsData: {
...defaultProps,
...props,
},
- ...slots,
}),
);
};
@@ -66,10 +64,6 @@ describe('Header CI Component', () => {
expect(findSidebarToggleBtn().exists()).toBe(true);
});
- it('should not render header action buttons when slot is empty', () => {
- expect(findActionButtons().exists()).toBe(false);
- });
-
it('renders the correct job name', () => {
expect(findJobName().text()).toBe(defaultProps.name);
});
@@ -128,25 +122,6 @@ describe('Header CI Component', () => {
});
});
- describe('job name', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('should render the job name', () => {
- expect(findJobName().text()).toBe('build_job');
- });
- });
-
- describe('slot', () => {
- it('should render header action buttons', () => {
- createComponent({}, { slots: { default: 'Test Actions' } });
-
- expect(findActionButtons().exists()).toBe(true);
- expect(findActionButtons().text()).toBe('Test Actions');
- });
- });
-
describe('shouldRenderTriggeredLabel', () => {
it('should render created keyword when the shouldRenderTriggeredLabel is false', () => {
createComponent({ shouldRenderTriggeredLabel: false });
diff --git a/spec/frontend/sentry/init_sentry_spec.js b/spec/frontend/sentry/init_sentry_spec.js
index 4196664e8d6..6baf9a4860c 100644
--- a/spec/frontend/sentry/init_sentry_spec.js
+++ b/spec/frontend/sentry/init_sentry_spec.js
@@ -102,6 +102,12 @@ describe('SentryConfig', () => {
);
});
+ it('uses data-page to set BrowserTracing transaction name', () => {
+ const context = BrowserTracing.mock.calls[0][0].beforeNavigate();
+
+ expect(context).toMatchObject({ name: mockPage });
+ });
+
it('binds the BrowserClient to the hub', () => {
expect(mockBindClient).toHaveBeenCalledTimes(1);
expect(mockBindClient).toHaveBeenCalledWith(expect.any(BrowserClient));
@@ -174,5 +180,27 @@ describe('SentryConfig', () => {
expect(window._Sentry).toBe(undefined);
});
});
+
+ describe('when data-page is not defined in the body', () => {
+ beforeEach(() => {
+ delete document.body.dataset.page;
+ initSentry();
+ });
+
+ it('calls Sentry.setTags with gon values', () => {
+ expect(mockSetTags).toHaveBeenCalledTimes(1);
+ expect(mockSetTags).toHaveBeenCalledWith(
+ expect.objectContaining({
+ page: undefined,
+ }),
+ );
+ });
+
+ it('uses location.pathname to set BrowserTracing transaction name', () => {
+ const context = BrowserTracing.mock.calls[0][0].beforeNavigate({ op: 'pageload' });
+
+ expect(context).toEqual({ op: 'pageload', name: window.location.pathname });
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/ci_badge_link_spec.js b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
index c74964c13f5..fa42e87ec70 100644
--- a/spec/frontend/vue_shared/components/ci_badge_link_spec.js
+++ b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
@@ -149,4 +149,10 @@ describe('CI Badge Link Component', () => {
expect(findBadge().props('size')).toBe('lg');
});
+
+ it('should have class `gl-p-2!` when `showText` is false', () => {
+ createComponent({ status: statuses.success, size: 'lg', showText: false });
+
+ expect(findBadge().classes()).toContain('gl-p-2!');
+ });
});
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 6d97afd4c78..e832fa2718a 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -469,61 +469,6 @@ RSpec.describe BlobHelper do
end
end
- describe '#editing_ci_config?' do
- let(:project) { build(:project) }
-
- subject { helper.editing_ci_config? }
-
- before do
- assign(:project, project)
- assign(:path, path)
- end
-
- context 'when path is nil' do
- let(:path) { nil }
-
- it { is_expected.to be_falsey }
- end
-
- context 'when path is not a ci file' do
- let(:path) { 'some-file.txt' }
-
- it { is_expected.to be_falsey }
- end
-
- context 'when path ends is gitlab-ci.yml' do
- let(:path) { '.gitlab-ci.yml' }
-
- it { is_expected.to be_truthy }
- end
-
- context 'when path ends with gitlab-ci.yml' do
- let(:path) { 'template.gitlab-ci.yml' }
-
- it { is_expected.to be_truthy }
- end
-
- context 'with custom ci paths' do
- let(:path) { 'path/to/ci.yaml' }
-
- before do
- project.ci_config_path = 'path/to/ci.yaml'
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'with custom ci config and path' do
- let(:path) { 'path/to/template.gitlab-ci.yml' }
-
- before do
- project.ci_config_path = 'ci/path/.gitlab-ci.yml@another-group/another-project'
- end
-
- it { is_expected.to be_truthy }
- end
- end
-
describe '#vue_blob_app_data' do
let(:blob) { fake_blob(path: 'file.md', size: 2.megabytes) }
let(:project) { build_stubbed(:project) }
diff --git a/spec/lib/api/entities/diff_spec.rb b/spec/lib/api/entities/diff_spec.rb
new file mode 100644
index 00000000000..27d9ed44c98
--- /dev/null
+++ b/spec/lib/api/entities/diff_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::API::Entities::Diff, feature_category: :source_code_management do
+ subject(:json) { entity.as_json }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository }
+ let_it_be(:diff) { repository.diff('HEAD~1', 'HEAD').first }
+
+ let(:entity) { described_class.new(diff, options) }
+ let(:options) { {} }
+
+ it 'returns expected data' do
+ expect(entity.as_json).to eq(
+ {
+ diff: diff.diff,
+ new_path: diff.new_path,
+ old_path: diff.old_path,
+ a_mode: diff.a_mode,
+ b_mode: diff.b_mode,
+ new_file: diff.new_file?,
+ renamed_file: diff.renamed_file?,
+ deleted_file: diff.deleted_file?
+ }
+ )
+ end
+
+ context 'when enable_unidiff option is set' do
+ let(:options) { { enable_unidiff: true } }
+
+ it 'returns expected data' do
+ expect(entity.as_json).to include(diff: diff.unidiff)
+ end
+ end
+
+ context 'when enable_unidiff option is false' do
+ let(:options) { { enable_unidiff: false } }
+
+ it 'returns expected data' do
+ expect(entity.as_json).to include(diff: diff.diff)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 4d78e194da8..6b3630d7a1f 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Diff do
+RSpec.describe Gitlab::Git::Diff, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:repository) { project.repository }
@@ -336,6 +336,121 @@ EOT
end
end
+ describe '#unidiff' do
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:repository) { project.repository }
+ let_it_be(:user) { project.first_owner }
+
+ let(:commits) { repository.commits('master', limit: 10) }
+ let(:diffs) { commits.map(&:diffs).map(&:diffs).flat_map(&:to_a).reverse }
+
+ before_all do
+ create_commit(
+ project,
+ user,
+ commit_message: "Create file",
+ actions: [{ action: 'create', content: 'foo', file_path: 'a.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Update file",
+ actions: [{ action: 'update', content: 'foo2', file_path: 'a.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Rename file without change",
+ actions: [{ action: 'move', previous_path: 'a.txt', file_path: 'b.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Rename file with change",
+ actions: [{ action: 'move', content: 'foo3', previous_path: 'b.txt', file_path: 'c.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Delete file",
+ actions: [{ action: 'delete', file_path: 'c.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Create empty file",
+ actions: [{ action: 'create', file_path: 'empty.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Create binary file",
+ actions: [{ action: 'create', content: 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABAQMAAAAl21bKAAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAApJREFUCNdjYAAAAAIAAeIhvDMAAAAASUVORK5CYII=', file_path: 'test%2Ebin', encoding: 'base64' }]
+ )
+ end
+
+ context 'when file was created' do
+ it 'returns a correct header' do
+ diff = diffs[0]
+
+ expect(diff.unidiff).to start_with("--- /dev/null\n+++ b/a.txt\n")
+ end
+ end
+
+ context 'when file was changed' do
+ it 'returns a correct header' do
+ diff = diffs[1]
+
+ expect(diff.unidiff).to start_with("--- a/a.txt\n+++ b/a.txt\n")
+ end
+ end
+
+ context 'when file was moved without content change' do
+ it 'returns an empty header' do
+ diff = diffs[2]
+
+ expect(diff.unidiff).to eq('')
+ end
+ end
+
+ context 'when file was moved with content change' do
+ it 'returns a correct header' do
+ expect(diffs[3].unidiff).to start_with("--- /dev/null\n+++ b/c.txt\n")
+ expect(diffs[4].unidiff).to start_with("--- a/b.txt\n+++ /dev/null\n")
+ end
+ end
+
+ context 'when file was deleted' do
+ it 'returns a correct header' do
+ diff = diffs[5]
+
+ expect(diff.unidiff).to start_with("--- a/c.txt\n+++ /dev/null\n")
+ end
+ end
+
+ context 'when empty file was created' do
+ it 'returns an empty header' do
+ diff = diffs[6]
+
+ expect(diff.unidiff).to eq('')
+ end
+ end
+
+ context 'when file is binary' do
+ it 'returns a binary files message' do
+ diff = diffs[7]
+
+ expect(diff.unidiff).to eq("Binary files /dev/null and b/test%2Ebin differ\n")
+ end
+ end
+ end
+
describe '#submodule?' do
let(:gitaly_submodule_diff) do
Gitlab::GitalyClient::Diff.new(
@@ -445,4 +560,9 @@ EOT
# rugged will not detect this as binary, but we can fake it
described_class.between(project.repository, 'add-pdf-text-binary', 'add-pdf-text-binary^').first
end
+
+ def create_commit(project, user, params)
+ params = { start_branch: 'master', branch_name: 'master' }.merge(params)
+ Files::MultiService.new(project, user, params).execute.fetch(:result)
+ end
end
diff --git a/spec/lib/gitlab/github_import/settings_spec.rb b/spec/lib/gitlab/github_import/settings_spec.rb
index d670aaea482..bfadc8e1fac 100644
--- a/spec/lib/gitlab/github_import/settings_spec.rb
+++ b/spec/lib/gitlab/github_import/settings_spec.rb
@@ -62,17 +62,20 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
collaborators_import: false,
foo: :bar
},
+ timeout_strategy: "optimistic",
additional_access_tokens: %w[foo bar]
}.stringify_keys
end
- it 'puts optional steps & access tokens into projects import_data' do
+ it 'puts optional steps, timeout strategy & access tokens into projects import_data' do
project.create_or_update_import_data(credentials: { user: 'token' })
settings.write(data_input)
expect(project.import_data.data['optional_stages'])
.to eq optional_stages.stringify_keys
+ expect(project.import_data.data['timeout_strategy'])
+ .to eq("optimistic")
expect(project.import_data.credentials.fetch(:additional_access_tokens))
.to eq(data_input['additional_access_tokens'])
end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 2b6f8535743..725a081df1a 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -3,14 +3,14 @@
require 'spec_helper'
RSpec.shared_examples 'routable resource' do
- shared_examples_for '.find_by_full_path' do
+ shared_examples_for '.find_by_full_path' do |has_cross_join: false|
it 'finds records by their full path' do
expect(described_class.find_by_full_path(record.full_path)).to eq(record)
expect(described_class.find_by_full_path(record.full_path.upcase)).to eq(record)
end
- it 'checks if `optimize_routable` is enabled only once' do
- expect(Routable).to receive(:optimize_routable_enabled?).once
+ it 'checks if `optimize_find_routable` is enabled only once' do
+ expect(Routable).to receive(:optimize_routable_enabled?).and_call_original
described_class.find_by_full_path(record.full_path)
end
@@ -51,17 +51,47 @@ RSpec.shared_examples 'routable resource' do
end
end
end
+
+ if has_cross_join
+ it 'has a cross-join' do
+ expect(Gitlab::Database).to receive(:allow_cross_joins_across_databases)
+
+ described_class.find_by_full_path(record.full_path)
+ end
+ else
+ it 'does not have cross-join' do
+ expect(Gitlab::Database).not_to receive(:allow_cross_joins_across_databases)
+
+ described_class.find_by_full_path(record.full_path)
+ end
+ end
end
it_behaves_like '.find_by_full_path', :aggregate_failures
- context 'when the `optimize_routable` feature flag is turned OFF' do
+ context 'when the `optimize_find_routable` feature flag is enabled for the current request', :request_store do
before do
- stub_feature_flags(optimize_routable: false)
+ stub_feature_flags(optimize_find_routable: Feature.current_request)
end
it_behaves_like '.find_by_full_path', :aggregate_failures
+ context 'for a different request' do
+ before do
+ stub_with_new_feature_current_request
+ end
+
+ it_behaves_like '.find_by_full_path', :aggregate_failures, has_cross_join: true
+ end
+ end
+
+ context 'when the `optimize_find_routable` feature flag is turned OFF' do
+ before do
+ stub_feature_flags(optimize_find_routable: false)
+ end
+
+ it_behaves_like '.find_by_full_path', :aggregate_failures, has_cross_join: true
+
it 'includes route information when loading a record' do
control_count = ActiveRecord::QueryRecorder.new do
described_class.find_by_full_path(record.full_path)
@@ -280,9 +310,9 @@ RSpec.describe Routable, feature_category: :groups_and_projects do
it { is_expected.to eq(true) }
- context 'when the `optimize_routable` feature flag is turned OFF' do
+ context 'when the `optimize_find_routable` feature flag is turned OFF' do
before do
- stub_feature_flags(optimize_routable: false)
+ stub_feature_flags(optimize_find_routable: false)
end
it { is_expected.to eq(false) }
diff --git a/spec/models/integrations/asana_spec.rb b/spec/models/integrations/asana_spec.rb
index 376aec1088e..70c56d35a04 100644
--- a/spec/models/integrations/asana_spec.rb
+++ b/spec/models/integrations/asana_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let_it_be(:project) { build(:project) }
let(:gid) { "123456789ABCD" }
- let(:asana_task) { double(::Asana::Resources::Task) }
+ let(:asana_task) { double(data: { gid: gid }) }
let(:asana_integration) { described_class.new }
let(:ref) { 'main' }
let(:restrict_to_branch) { nil }
@@ -41,6 +41,15 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
}
end
+ let(:completed_message) do
+ {
+ body: {
+ completed: true
+ },
+ headers: { "Authorization" => "Bearer verySecret" }
+ }
+ end
+
before do
allow(asana_integration).to receive_messages(
project: project,
@@ -60,9 +69,10 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let(:ref) { 'main' }
it 'calls the Asana integration' do
- expect(asana_task).to receive(:add_comment)
- expect(asana_task).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '456789').once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/456789/stories", anything).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/456789", completed_message).once.and_return(asana_task)
execute_integration
end
@@ -72,8 +82,8 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let(:ref) { 'mai' }
it 'does not call the Asana integration' do
- expect(asana_task).not_to receive(:add_comment)
- expect(::Asana::Resources::Task).not_to receive(:find_by_id)
+ expect(Gitlab::HTTP).not_to receive(:post)
+ expect(Gitlab::HTTP).not_to receive(:put)
execute_integration
end
@@ -83,12 +93,17 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
context 'when creating a story' do
let(:message) { "Message from commit. related to ##{gid}" }
let(:expected_message) do
- "#{user.name} pushed to branch main of #{project.full_name} ( https://gitlab.com/ ): #{message}"
+ {
+ body: {
+ text: "#{user.name} pushed to branch main of #{project.full_name} ( https://gitlab.com/ ): #{message}"
+ },
+ headers: { "Authorization" => "Bearer verySecret" }
+ }
end
it 'calls Asana integration to create a story' do
- expect(asana_task).to receive(:add_comment).with(text: expected_message)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, gid).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/#{gid}/stories", expected_message).once.and_return(asana_task)
execute_integration
end
@@ -98,9 +113,10 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let(:message) { 'fix #456789' }
it 'calls Asana integration to create a story and close a task' do
- expect(asana_task).to receive(:add_comment)
- expect(asana_task).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '456789').once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/456789/stories", anything).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/456789", completed_message).once.and_return(asana_task)
execute_integration
end
@@ -110,9 +126,10 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let(:message) { 'closes https://app.asana.com/19292/956299/42' }
it 'calls Asana integration to close via url' do
- expect(asana_task).to receive(:add_comment)
- expect(asana_task).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '42').once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/42/stories", anything).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/42", completed_message).once.and_return(asana_task)
execute_integration
end
@@ -127,27 +144,30 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
end
it 'allows multiple matches per line' do
- expect(asana_task).to receive(:add_comment)
- expect(asana_task).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '123').once.and_return(asana_task)
-
- asana_task_2 = double(Asana::Resources::Task)
- expect(asana_task_2).to receive(:add_comment)
- expect(asana_task_2).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '456').once.and_return(asana_task_2)
-
- asana_task_3 = double(Asana::Resources::Task)
- expect(asana_task_3).to receive(:add_comment)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '789').once.and_return(asana_task_3)
-
- asana_task_4 = double(Asana::Resources::Task)
- expect(asana_task_4).to receive(:add_comment)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '42').once.and_return(asana_task_4)
-
- asana_task_5 = double(Asana::Resources::Task)
- expect(asana_task_5).to receive(:add_comment)
- expect(asana_task_5).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '12').once.and_return(asana_task_5)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/123/stories", anything).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/123", completed_message).once.and_return(asana_task)
+
+ asana_task_2 = double(data: { gid: 456 })
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/456/stories", anything).once.and_return(asana_task_2)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/456", completed_message).once.and_return(asana_task_2)
+
+ asana_task_3 = double(data: { gid: 789 })
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/789/stories", anything).once.and_return(asana_task_3)
+
+ asana_task_4 = double(data: { gid: 42 })
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/42/stories", anything).once.and_return(asana_task_4)
+
+ asana_task_5 = double(data: { gid: 12 })
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/12/stories", anything).once.and_return(asana_task_5)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/12", completed_message).once.and_return(asana_task_5)
execute_integration
end
diff --git a/spec/models/issue_link_spec.rb b/spec/models/issue_link_spec.rb
index 9af667c2960..24f0b9f2a5c 100644
--- a/spec/models/issue_link_spec.rb
+++ b/spec/models/issue_link_spec.rb
@@ -7,7 +7,9 @@ RSpec.describe IssueLink, feature_category: :portfolio_management do
it_behaves_like 'issuable link' do
let_it_be_with_reload(:issuable_link) { create(:issue_link) }
- let_it_be(:issuable) { create(:issue) }
+ let_it_be(:issuable) { create(:issue, project: project) }
+ let_it_be(:issuable2) { create(:issue, project: project) }
+ let_it_be(:issuable3) { create(:issue, project: project) }
let(:issuable_class) { 'Issue' }
let(:issuable_link_factory) { :issue_link }
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 9c63dfb1de2..f240670c514 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -867,6 +867,29 @@ RSpec.describe Issue, feature_category: :team_planning do
.to contain_exactly(authorized_issue_b, authorized_incident_a)
end
end
+
+ context 'when authorize argument is false' do
+ it 'returns all related issues' do
+ expect(authorized_issue_a.related_issues(authorize: false))
+ .to contain_exactly(authorized_issue_b, authorized_issue_c, authorized_incident_a, unauthorized_issue)
+ end
+ end
+
+ context 'when current_user argument is nil' do
+ let_it_be(:public_issue) { create(:issue, project: create(:project, :public)) }
+
+ it 'returns public linked issues only' do
+ create(:issue_link, source: authorized_issue_a, target: public_issue)
+
+ expect(authorized_issue_a.related_issues).to contain_exactly(public_issue)
+ end
+ end
+
+ context 'when issue is a new record' do
+ let(:new_issue) { build(:issue, project: authorized_project) }
+
+ it { expect(new_issue.related_issues(user)).to be_empty }
+ end
end
describe '#can_move?' do
@@ -2038,4 +2061,21 @@ RSpec.describe Issue, feature_category: :team_planning do
expect(issue.search_data.namespace_id).to eq(issue.namespace_id)
end
end
+
+ describe '#linked_items_count' do
+ let_it_be(:issue1) { create(:issue, project: reusable_project) }
+ let_it_be(:issue2) { create(:issue, project: reusable_project) }
+ let_it_be(:issue3) { create(:issue, project: reusable_project) }
+ let_it_be(:issue4) { build(:issue, project: reusable_project) }
+
+ it 'returns number of issues linked to the issue' do
+ create(:issue_link, source: issue1, target: issue2)
+ create(:issue_link, source: issue1, target: issue3)
+
+ expect(issue1.linked_items_count).to eq(2)
+ expect(issue2.linked_items_count).to eq(1)
+ expect(issue3.linked_items_count).to eq(1)
+ expect(issue4.linked_items_count).to eq(0)
+ end
+ end
end
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 75c14adecb5..3294d53e364 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -713,5 +713,28 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
.to contain_exactly(authorized_item_b, authorized_item_c, unauthorized_item)
end
end
+
+ context 'when work item is a new record' do
+ let(:new_work_item) { build(:work_item, project: authorized_project) }
+
+ it { expect(new_work_item.linked_work_items(user)).to be_empty }
+ end
+ end
+
+ describe '#linked_items_count' do
+ let_it_be(:item1) { create(:work_item, :issue, project: reusable_project) }
+ let_it_be(:item2) { create(:work_item, :issue, project: reusable_project) }
+ let_it_be(:item3) { create(:work_item, :issue, project: reusable_project) }
+ let_it_be(:item4) { build(:work_item, :issue, project: reusable_project) }
+
+ it 'returns number of items linked to the work item' do
+ create(:work_item_link, source: item1, target: item2)
+ create(:work_item_link, source: item1, target: item3)
+
+ expect(item1.linked_items_count).to eq(2)
+ expect(item2.linked_items_count).to eq(1)
+ expect(item3.linked_items_count).to eq(1)
+ expect(item4.linked_items_count).to eq(0)
+ end
end
end
diff --git a/spec/models/work_items/related_work_item_link_spec.rb b/spec/models/work_items/related_work_item_link_spec.rb
index 3217ac52489..be0a147abc4 100644
--- a/spec/models/work_items/related_work_item_link_spec.rb
+++ b/spec/models/work_items/related_work_item_link_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe WorkItems::RelatedWorkItemLink, type: :model, feature_category: :
it_behaves_like 'issuable link' do
let_it_be_with_reload(:issuable_link) { create(:work_item_link) }
let_it_be(:issuable) { issue }
+ let_it_be(:issuable2) { create(:work_item, :issue, project: project) }
+ let_it_be(:issuable3) { create(:work_item, :issue, project: project) }
let(:issuable_class) { 'WorkItem' }
let(:issuable_link_factory) { :work_item_link }
end
@@ -21,46 +23,6 @@ RSpec.describe WorkItems::RelatedWorkItemLink, type: :model, feature_category: :
let_it_be(:item_type) { described_class.issuable_name }
end
- describe 'validations' do
- let_it_be(:task1) { create(:work_item, :task, project: project) }
- let_it_be(:task2) { create(:work_item, :task, project: project) }
- let_it_be(:task3) { create(:work_item, :task, project: project) }
-
- subject(:link) { build(:work_item_link, source_id: task1.id, target_id: task2.id) }
-
- describe '#validate_max_number_of_links' do
- shared_examples 'invalid due to exceeding max number of links' do
- let(:error_msg) { 'This work item would exceed the maximum number of linked items.' }
-
- before do
- create(:work_item_link, source: source, target: target)
- stub_const("#{described_class}::MAX_LINKS_COUNT", 1)
- end
-
- specify do
- is_expected.to be_invalid
- expect(link.errors.messages[error_item]).to include(error_msg)
- end
- end
-
- context 'when source exceeds max' do
- let(:source) { task1 }
- let(:target) { task3 }
- let(:error_item) { :source }
-
- it_behaves_like 'invalid due to exceeding max number of links'
- end
-
- context 'when target exceeds max' do
- let(:source) { task2 }
- let(:target) { task3 }
- let(:error_item) { :target }
-
- it_behaves_like 'invalid due to exceeding max number of links'
- end
- end
- end
-
describe '.issuable_type' do
it { expect(described_class.issuable_type).to eq(:issue) }
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 8d04b87ac3d..08850b18c78 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -1640,6 +1640,16 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
it_behaves_like 'ref diff'
end
+
+ context 'when unidiff format is requested' do
+ it 'returns the diff in Unified format' do
+ get api(route, current_user), params: { unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_limited_pagination_headers
+ expect(json_response.dig(0, 'diff')).to eq(commit.diffs.diffs.first.unidiff)
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
index b3d25155a6f..e3a7442ffe6 100644
--- a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
+++ b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues, feature_cate
raise 'Not enqueued!' if Sidekiq::Queue.new(queue).size.zero?
end
- it 'returns info about the deleted jobs' do
+ it 'returns info about the deleted jobs', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/425824' do
add_job(admin, [1])
add_job(admin, [2])
add_job(create(:user), [3])
diff --git a/spec/requests/api/import_bitbucket_server_spec.rb b/spec/requests/api/import_bitbucket_server_spec.rb
index 7c2df52fdf3..9a9ccc867a3 100644
--- a/spec/requests/api/import_bitbucket_server_spec.rb
+++ b/spec/requests/api/import_bitbucket_server_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
let(:secret) { "sekrettt" }
let(:project_key) { 'TES' }
let(:repo_slug) { 'vim' }
+ let(:timeout_strategy) { 'pessimistic' }
let(:repo) do
double('repo',
name: repo_slug,
@@ -52,7 +53,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 201 response when the project is imported successfully' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, repo_slug, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, repo_slug, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
post api("/import/bitbucket_server", user), params: {
@@ -87,7 +88,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 201 response when the project is imported successfully with a new project name' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything, 'pessimistic')
.and_return(double(execute: project))
post api("/import/bitbucket_server", user), params: {
@@ -96,7 +97,8 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
personal_access_token: token,
bitbucket_server_project: project_key,
bitbucket_server_repo: repo_slug,
- new_name: 'new-name'
+ new_name: 'new-name',
+ timeout_strategy: 'pessimistic'
}
expect(response).to have_gitlab_http_status(:created)
@@ -123,7 +125,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 400 response due to a blocked URL' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
allow(Gitlab::UrlBlocker)
@@ -142,6 +144,24 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
end
end
+ context 'with an invalid timeout strategy' do
+ let_it_be(:project) { create(:project, name: 'new-name') }
+
+ it 'returns 400 response due to an invalid timeout strategy' do
+ post api("/import/bitbucket_server", user), params: {
+ bitbucket_server_url: base_uri,
+ bitbucket_server_username: user,
+ personal_access_token: token,
+ bitbucket_server_project: project_key,
+ bitbucket_server_repo: repo_slug,
+ timeout_strategy: 'no-strategy'
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response["error"]).to eq("timeout_strategy does not have a valid value")
+ end
+ end
+
context 'with a new namespace' do
let(:bitbucket_client) { instance_double(BitbucketServer::Client) }
@@ -159,7 +179,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 201 response when the project is imported successfully to a new namespace' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, repo_slug, an_instance_of(Group), user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, repo_slug, an_instance_of(Group), user, anything, timeout_strategy)
.and_return(double(execute: create(:project, name: repo_slug)))
post api("/import/bitbucket_server", user), params: {
@@ -195,7 +215,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 401 response when user can not create projects in the chosen namespace' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, repo_slug, an_instance_of(Group), user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, repo_slug, an_instance_of(Group), user, anything, timeout_strategy)
.and_return(double(execute: build(:project)))
other_namespace = create(:group, :private, name: 'private-group')
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index e394b92c0a2..9a42b11dc76 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -122,6 +122,19 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
end
end
+ context 'with invalid timeout strategy' do
+ it 'returns 400 response' do
+ post api("/import/github", user), params: {
+ target_namespace: user.namespace_path,
+ personal_access_token: token,
+ repo_id: non_existing_record_id,
+ timeout_strategy: "invalid_strategy"
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
context 'when additional access tokens are provided' do
let(:additional_access_tokens) { 'token1,token2' }
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index bb0f557cfee..5d605303f06 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -130,24 +130,24 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
end.to change { source.members.non_invite.count }.by(1)
end
- it 'adds a new member by unconfirmed primary email' do
+ it 'adds a new member by confirmed secondary email' do
+ secondary_email = create(:email, :confirmed, email: 'secondary@example.com', user: stranger)
+
expect do
post invitations_url(source, maintainer),
- params: { email: unconfirmed_stranger.email, access_level: Member::DEVELOPER }
+ params: { email: secondary_email.email, access_level: Member::DEVELOPER }
expect(response).to have_gitlab_http_status(:created)
end.to change { source.members.non_invite.count }.by(1)
end
- it 'adds a new member by confirmed secondary email' do
- secondary_email = create(:email, :confirmed, email: 'secondary@example.com', user: stranger)
-
+ it 'adds a new member as an invite for unconfirmed primary email' do
expect do
post invitations_url(source, maintainer),
- params: { email: secondary_email.email, access_level: Member::DEVELOPER }
+ params: { email: unconfirmed_stranger.email, access_level: Member::DEVELOPER }
expect(response).to have_gitlab_http_status(:created)
- end.to change { source.members.non_invite.count }.by(1)
+ end.to change { source.members.invite.count }.by(1).and change { source.members.non_invite.count }.by(0)
end
it 'adds a new member as an invite for unconfirmed secondary email' do
diff --git a/spec/requests/api/merge_request_diffs_spec.rb b/spec/requests/api/merge_request_diffs_spec.rb
index 4f812e5d8eb..53cef226ad8 100644
--- a/spec/requests/api/merge_request_diffs_spec.rb
+++ b/spec/requests/api/merge_request_diffs_spec.rb
@@ -55,6 +55,15 @@ RSpec.describe API::MergeRequestDiffs, 'MergeRequestDiffs', feature_category: :s
expect(json_response['diffs'].size).to eq(merge_request_diff.diffs.size)
end
+ context 'when unidiff format is requested' do
+ it 'returns a diff in Unified format' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/versions/#{merge_request_diff.id}", user), params: { unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('diffs', 0, 'diff')).to eq(merge_request_diff.diffs.diffs.first.unidiff)
+ end
+ end
+
it 'returns a 404 when merge_request id is used instead of the iid' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/versions/#{merge_request_diff.id}", user)
expect(response).to have_gitlab_http_status(:not_found)
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index d3f8aeb3e76..4a11cddc79f 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1829,6 +1829,15 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
expect(json_response['overflow']).to be_falsy
end
+ context 'when unidiff format is requested' do
+ it 'returns the diff in Unified format' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/changes", user), params: { unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('changes', 0, 'diff')).to eq(merge_request.diffs.diffs.first.unidiff)
+ end
+ end
+
context 'when using DB-backed diffs' do
it_behaves_like 'find an existing merge request'
@@ -1902,6 +1911,15 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
expect(json_response.size).to eq(merge_request.diffs.size)
end
+ context 'when unidiff format is requested' do
+ it 'returns the diff in Unified format' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/diffs", user), params: { unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig(0, 'diff')).to eq(merge_request.diffs.diffs.first.unidiff)
+ end
+ end
+
context 'when pagination params are present' do
it 'returns limited diffs' do
get(
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index a94ed63bf47..22239f1d23f 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -538,6 +538,18 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
expect(json_response['compare_same_ref']).to be_truthy
end
+ context 'when unidiff format is requested' do
+ let(:commit) { project.repository.commit('feature') }
+ let(:diff) { commit.diffs.diffs.first }
+
+ it 'returns a diff in Unified format' do
+ get api(route, current_user), params: { from: 'master', to: 'feature', unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('diffs', 0, 'diff')).to eq(diff.unidiff)
+ end
+ end
+
it "returns an empty string when the diff overflows" do
allow(Gitlab::Git::DiffCollection)
.to receive(:default_limits)
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 982b8b11383..99cc5ad9874 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -15,12 +15,14 @@ RSpec.describe Import::GithubService, feature_category: :importers do
let(:settings) { instance_double(Gitlab::GithubImport::Settings) }
let(:user_namespace_path) { user.namespace_path }
let(:optional_stages) { nil }
+ let(:timeout_strategy) { "optimistic" }
let(:params) do
{
repo_id: 123,
new_name: 'new_repo',
target_namespace: user_namespace_path,
- optional_stages: optional_stages
+ optional_stages: optional_stages,
+ timeout_strategy: timeout_strategy
}
end
@@ -36,7 +38,8 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to receive(:write)
.with(
optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens]
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
)
end
@@ -95,7 +98,10 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(subject.execute(access_params, :github)).to include(status: :success)
expect(settings)
.to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
+ .with(optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
expect_snowplow_event(
category: 'Import::GithubService',
action: 'create',
@@ -117,7 +123,11 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(subject.execute(access_params, :github)).to include(status: :success)
expect(settings)
.to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
+ .with(
+ optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
expect_snowplow_event(
category: 'Import::GithubService',
action: 'create',
@@ -146,7 +156,11 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(subject.execute(access_params, :github)).to include(status: :success)
expect(settings)
.to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
+ .with(
+ optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
expect_snowplow_event(
category: 'Import::GithubService',
action: 'create',
@@ -181,7 +195,24 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens]
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
+ end
+ end
+
+ context 'when timeout strategy param is present' do
+ let(:timeout_strategy) { 'pessimistic' }
+
+ it 'saves timeout strategy to import_data' do
+ subject.execute(access_params, :github)
+
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
)
end
end
@@ -192,7 +223,11 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(settings)
.to have_received(:write)
- .with(optional_stages: optional_stages, additional_access_tokens: %w[foo bar])
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: %w[foo bar],
+ timeout_strategy: timeout_strategy
+ )
end
end
end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index 76cd5d6c89e..0b1a6f77e6b 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -321,11 +321,11 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
let(:params) { { email: unconfirmed_user.email } }
- it 'adds an existing user to members' do
+ it 'adds a new member as an invite for unconfirmed primary email' do
expect_to_create_members(count: 1)
expect(result[:status]).to eq(:success)
- expect(project.users).to include unconfirmed_user
- expect(project.members.last).not_to be_invite
+ expect(project.users).not_to include unconfirmed_user
+ expect(project.members.last).to be_invite
end
end
diff --git a/spec/support/helpers/listbox_helpers.rb b/spec/support/helpers/listbox_helpers.rb
index e943790fc65..7a734d2b097 100644
--- a/spec/support/helpers/listbox_helpers.rb
+++ b/spec/support/helpers/listbox_helpers.rb
@@ -10,6 +10,10 @@ module ListboxHelpers
find('.gl-new-dropdown-item[role="option"]', text: text, exact_text: exact_text).click
end
+ def select_disclosure_dropdown_item(text, exact_text: false)
+ find('.gl-new-dropdown-item', text: text, exact_text: exact_text).click
+ end
+
def expect_listbox_item(text)
expect(page).to have_css('.gl-new-dropdown-item[role="option"]', text: text)
end
diff --git a/spec/support/helpers/stub_feature_flags.rb b/spec/support/helpers/stub_feature_flags.rb
index e301e29afc2..63be85acef3 100644
--- a/spec/support/helpers/stub_feature_flags.rb
+++ b/spec/support/helpers/stub_feature_flags.rb
@@ -24,6 +24,15 @@ module StubFeatureFlags
Feature.stub = false
end
+ def stub_with_new_feature_current_request
+ return unless Gitlab::SafeRequestStore.active?
+
+ new_request = Feature::FlipperRequest.new
+ allow(new_request).to receive(:id).and_return(SecureRandom.uuid)
+
+ allow(Feature).to receive(:current_request).and_return(new_request)
+ end
+
# Stub Feature flags with `flag_name: true/false`
#
# @param [Hash] features where key is feature name and value is boolean whether enabled or not.
diff --git a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
index ddd3bbd636a..c86fcf5ae20 100644
--- a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'multiple issue boards' do
+ include ListboxHelpers
+
context 'authorized user' do
before do
stub_feature_flags(apollo_boards: false)
@@ -27,7 +29,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'switches current board' do
in_boards_switcher_dropdown do
- click_button board2.name
+ select_listbox_item(board2.name)
end
wait_for_requests
@@ -67,7 +69,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'adds a list to the none default board' do
in_boards_switcher_dropdown do
- click_button board2.name
+ select_listbox_item(board2.name)
end
wait_for_requests
@@ -89,7 +91,7 @@ RSpec.shared_examples 'multiple issue boards' do
expect(page).to have_selector('.board', count: 3)
in_boards_switcher_dropdown do
- click_button board.name
+ select_listbox_item(board.name)
end
wait_for_requests
@@ -101,7 +103,7 @@ RSpec.shared_examples 'multiple issue boards' do
assert_boards_nav_active
in_boards_switcher_dropdown do
- click_button board2.name
+ select_listbox_item(board2.name)
end
assert_boards_nav_active
@@ -109,7 +111,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'switches current board back' do
in_boards_switcher_dropdown do
- click_button board.name
+ select_listbox_item(board.name)
end
wait_for_requests
@@ -142,7 +144,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'switches current board' do
in_boards_switcher_dropdown do
- click_button board2.name
+ select_listbox_item(board2.name)
end
wait_for_requests
@@ -165,7 +167,7 @@ RSpec.shared_examples 'multiple issue boards' do
wait_for_requests
- dropdown_selector = '[data-testid="boards-selector"] .dropdown-menu'
+ dropdown_selector = '[data-testid="boards-selector"] .gl-new-dropdown'
page.within(dropdown_selector) do
yield
end
diff --git a/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
index 0fef5269ab6..c1b3eca8b7b 100644
--- a/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
@@ -17,16 +17,18 @@ RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
context 'when there are remaining jobs' do
it 'reschedules itself' do
- expect(worker)
- .to receive(:wait_for_jobs)
- .with({ '123' => 2 })
- .and_return({ '123' => 1 })
+ freeze_time do
+ expect(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({ '123' => 1 })
- expect(described_class)
- .to receive(:perform_in)
- .with(described_class::INTERVAL, project.id, { '123' => 1 }, next_stage)
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 1 }, next_stage, Time.zone.now, 1)
- worker.perform(project.id, { '123' => 2 }, next_stage)
+ worker.perform(project.id, { '123' => 2 }, next_stage)
+ end
end
context 'when the project import is not running' do
@@ -74,6 +76,73 @@ RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
.to raise_error(KeyError)
end
end
+
+ context 'on worker timeouts' do
+ it 'refreshes timeout and updates counter if jobs have been processed' do
+ freeze_time do
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 2 }, next_stage, Time.zone.now, 2)
+
+ worker.perform(project.id, { '123' => 2 }, next_stage, 3.hours.ago, 5)
+ end
+ end
+
+ context 'with an optimistic strategy' do
+ before do
+ project.create_or_update_import_data(data: { timeout_strategy: "optimistic" })
+ project.save!
+ end
+
+ it 'advances to next stage' do
+ freeze_time do
+ next_worker = described_class::STAGES[next_stage]
+
+ expect(next_worker).to receive(:perform_async).with(project.id)
+
+ stuck_start_time = 3.hours.ago
+
+ worker.perform(project.id, { '123' => 2 }, next_stage, stuck_start_time, 2)
+ end
+ end
+ end
+
+ context 'with a pessimistic strategy' do
+ let(:expected_error_message) { "Failing advance stage, timeout reached with pessimistic strategy" }
+
+ it 'logs error and fails import' do
+ freeze_time do
+ next_worker = described_class::STAGES[next_stage]
+
+ expect(next_worker).not_to receive(:perform_async).with(project.id)
+ expect_next_instance_of(described_class) do |klass|
+ expect(klass).to receive(:find_import_state).and_call_original
+ end
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ import_state: import_state,
+ exception: Gitlab::Import::AdvanceStage::AdvanceStageTimeoutError,
+ error_source: described_class.name,
+ fail_import: true
+ )
+ .and_call_original
+
+ stuck_start_time = 3.hours.ago
+
+ worker.perform(project.id, { '123' => 2 }, next_stage, stuck_start_time, 2)
+
+ expect(import_state.reload.status).to eq("failed")
+
+ if import_state.is_a?(ProjectImportState)
+ expect(import_state.reload.last_error).to eq(expected_error_message)
+ else
+ expect(import_state.reload.error_message).to eq(expected_error_message)
+ end
+ end
+ end
+ end
+ end
end
describe '#wait_for_jobs' do
diff --git a/spec/support/shared_examples/models/issuable_link_shared_examples.rb b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
index af96b77edaf..f28abb35128 100644
--- a/spec/support/shared_examples/models/issuable_link_shared_examples.rb
+++ b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
@@ -52,6 +52,45 @@ RSpec.shared_examples 'issuable link' do
end
end
+ context 'when max number of links is exceeded' do
+ subject(:link) { create_issuable_link(issuable, issuable2) }
+
+ shared_examples 'invalid due to exceeding max number of links' do
+ let(:stubbed_limit) { 1 }
+ let(:issuable_name) { described_class.issuable_name }
+ let(:error_msg) do
+ "This #{issuable_name} would exceed the maximum number of " \
+ "linked #{issuable_name.pluralize} (#{stubbed_limit})."
+ end
+
+ before do
+ create(issuable_link_factory, source: source, target: target)
+ stub_const("IssuableLink::MAX_LINKS_COUNT", stubbed_limit)
+ end
+
+ specify do
+ is_expected.to be_invalid
+ expect(link.errors.messages[error_item]).to include(error_msg)
+ end
+ end
+
+ context 'when source exceeds max' do
+ let(:source) { issuable }
+ let(:target) { issuable3 }
+ let(:error_item) { :source }
+
+ it_behaves_like 'invalid due to exceeding max number of links'
+ end
+
+ context 'when target exceeds max' do
+ let(:source) { issuable2 }
+ let(:target) { issuable3 }
+ let(:error_item) { :target }
+
+ it_behaves_like 'invalid due to exceeding max number of links'
+ end
+ end
+
def create_issuable_link(source, target)
build(issuable_link_factory, source: source, target: target)
end
diff --git a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
index 16e3a3dc481..c04ccafdcf8 100644
--- a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
@@ -19,28 +19,30 @@ RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, :clean_gitlab_redis_
context 'when there are remaining jobs' do
before do
allow(worker)
- .to receive(:find_import_state)
+ .to receive(:find_import_state_jid)
.and_return(import_state)
end
it 'reschedules itself' do
- expect(worker)
- .to receive(:wait_for_jobs)
- .with({ '123' => 2 })
- .and_return({ '123' => 1 })
-
- expect(described_class)
- .to receive(:perform_in)
- .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish)
-
- worker.perform(project.id, { '123' => 2 }, :finish)
+ freeze_time do
+ expect(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({ '123' => 1 })
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish, Time.zone.now, 1)
+
+ worker.perform(project.id, { '123' => 2 }, :finish)
+ end
end
end
context 'when there are no remaining jobs' do
before do
allow(worker)
- .to receive(:find_import_state)
+ .to receive(:find_import_state_jid)
.and_return(import_state)
allow(worker)
@@ -98,18 +100,30 @@ RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, :clean_gitlab_redis_
end
end
- describe '#find_import_state' do
- it 'returns a ProjectImportState' do
+ describe '#find_import_state_jid' do
+ it 'returns a ProjectImportState with only id and jid' do
import_state.update_column(:status, 'started')
- found = worker.find_import_state(project.id)
+ found = worker.find_import_state_jid(project.id)
expect(found).to be_an_instance_of(ProjectImportState)
expect(found.attributes.keys).to match_array(%w[id jid])
end
it 'returns nil if the project import is not running' do
- expect(worker.find_import_state(project.id)).to be_nil
+ expect(worker.find_import_state_jid(project.id)).to be_nil
+ end
+ end
+
+ describe '#find_import_state' do
+ it 'returns a ProjectImportState' do
+ import_state.update_column(:status, 'started')
+
+ found_partial = worker.find_import_state_jid(project.id)
+ found = worker.find_import_state(found_partial.id)
+
+ expect(found).to be_an_instance_of(ProjectImportState)
+ expect(found.attributes.keys).to include('id', 'project_id', 'status', 'last_error')
end
end
end