Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-01-26 18:09:04 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2023-01-26 18:09:04 +0300
commitee24c7d68f57a67754a5d1e2ea99f688029d14bd (patch)
tree8391744a26dd3f77c4bb1bbb55672ba0e066d969
parenta1c0b634f78f51389fd3ec390a1803afa3de49a2 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--.gitlab/ci/rules.gitlab-ci.yml6
-rw-r--r--.gitlab/ci/vendored-gems.gitlab-ci.yml8
-rw-r--r--app/assets/javascripts/invite_members/components/invite_modal_base.vue9
-rw-r--r--app/assets/javascripts/invite_members/constants.js1
-rw-r--r--app/assets/javascripts/issues/list/components/empty_state_without_any_issues.vue6
-rw-r--r--app/assets/javascripts/issues/list/components/issues_list_app.vue6
-rw-r--r--app/assets/javascripts/issues/list/queries/search_projects.query.graphql3
-rw-r--r--app/assets/javascripts/pages/dashboard/issues/index.js4
-rw-r--r--app/assets/javascripts/vue_shared/components/new_issue_dropdown/graphql/search_user_projects.query.graphql11
-rw-r--r--app/assets/javascripts/vue_shared/components/new_issue_dropdown/init_new_issue_dropdown.js30
-rw-r--r--app/assets/javascripts/vue_shared/components/new_resource_dropdown/constants.js26
-rw-r--r--app/assets/javascripts/vue_shared/components/new_resource_dropdown/graphql/search_user_projects_with_issues_enabled.query.graphql15
-rw-r--r--app/assets/javascripts/vue_shared/components/new_resource_dropdown/init_new_resource_dropdown.js46
-rw-r--r--app/assets/javascripts/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue (renamed from app/assets/javascripts/vue_shared/components/new_issue_dropdown/new_issue_dropdown.vue)59
-rw-r--r--app/graphql/types/permission_types/work_item.rb2
-rw-r--r--app/models/ci/build.rb14
-rw-r--r--app/models/project.rb12
-rw-r--r--app/models/protected_branch.rb23
-rw-r--r--app/services/ci/create_pipeline_service.rb1
-rw-r--r--app/services/projects/protect_default_branch_service.rb6
-rw-r--r--app/views/dashboard/issues.html.haml2
-rw-r--r--app/views/projects/artifacts/browse.html.haml1
-rw-r--r--app/views/shared/_new_project_item_vue_select.html.haml2
-rw-r--r--app/workers/ci/initial_pipeline_process_worker.rb2
-rw-r--r--config/feature_flags/development/move_create_deployments_to_worker.yml8
-rw-r--r--config/feature_flags/development/only_allow_merge_if_all_status_checks_passed.yml2
-rw-r--r--doc/administration/reference_architectures/index.md14
-rw-r--r--doc/api/graphql/reference/index.md4
-rw-r--r--doc/api/projects.md2
-rw-r--r--doc/development/cached_queries.md8
-rw-r--r--doc/development/caching.md6
-rw-r--r--doc/development/ee_features.md4
-rw-r--r--doc/development/pipelines/index.md211
-rw-r--r--doc/development/rake_tasks.md8
-rw-r--r--doc/development/sidekiq/worker_attributes.md26
-rw-r--r--doc/development/uploads/working_with_uploads.md28
-rw-r--r--doc/development/workhorse/gitlab_features.md2
-rw-r--r--doc/install/docker.md8
-rw-r--r--doc/user/application_security/dast_api/index.md32
-rw-r--r--doc/user/group/saml_sso/troubleshooting.md2
-rw-r--r--doc/user/packages/maven_repository/index.md2
-rw-r--r--doc/user/project/deploy_keys/index.md4
-rw-r--r--doc/user/project/import/index.md2
-rw-r--r--doc/user/project/integrations/slack.md10
-rw-r--r--doc/user/project/merge_requests/status_checks.md18
-rw-r--r--doc/user/project/settings/index.md1
-rw-r--r--lib/gitlab.rb15
-rw-r--r--lib/gitlab/ci/config/external/file/project.rb28
-rw-r--r--lib/gitlab/ci/parsers/instrumentation.rb2
-rw-r--r--lib/gitlab/ci/pipeline/chain/create_deployments.rb29
-rw-r--r--locale/gitlab.pot16
-rw-r--r--spec/frontend/invite_members/components/invite_modal_base_spec.js9
-rw-r--r--spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js10
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/new_resource_dropdown/mock_data.js (renamed from spec/frontend/vue_shared/components/new_issue_dropdown/mock_data.js)3
-rw-r--r--spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js (renamed from spec/frontend/vue_shared/components/new_issue_dropdown/new_issue_dropdown_spec.js)145
-rw-r--r--spec/graphql/types/permission_types/work_item_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/external/file/project_spec.rb58
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb50
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/parsers/instrumentation_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb72
-rw-r--r--spec/lib/gitlab_spec.rb82
-rw-r--r--spec/models/ci/build_spec.rb20
-rw-r--r--spec/models/project_spec.rb48
-rw-r--r--spec/models/protected_branch_spec.rb88
-rw-r--r--spec/requests/api/branches_spec.rb2
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb7
-rw-r--r--spec/services/projects/protect_default_branch_service_spec.rb32
-rw-r--r--spec/support/shared_examples/finders/issues_finder_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb6
-rw-r--r--tooling/danger/config_files.rb2
-rw-r--r--vendor/gems/gitlab_active_record/.gitignore11
-rw-r--r--vendor/gems/gitlab_active_record/.gitlab-ci.yml28
-rw-r--r--vendor/gems/gitlab_active_record/.rspec3
-rw-r--r--vendor/gems/gitlab_active_record/Gemfile6
-rw-r--r--vendor/gems/gitlab_active_record/Gemfile.lock54
-rw-r--r--vendor/gems/gitlab_active_record/LICENSE7
-rw-r--r--vendor/gems/gitlab_active_record/Rakefile8
-rwxr-xr-xvendor/gems/gitlab_active_record/bin/console15
-rwxr-xr-xvendor/gems/gitlab_active_record/bin/setup8
-rw-r--r--vendor/gems/gitlab_active_record/gitlab_active_record.gemspec29
-rw-r--r--vendor/gems/gitlab_active_record/lib/gitlab_active_record.rb7
-rw-r--r--vendor/gems/gitlab_active_record/lib/gitlab_active_record/version.rb5
-rw-r--r--vendor/gems/gitlab_active_record/spec/gitlab_active_record_spec.rb7
-rw-r--r--vendor/gems/gitlab_active_record/spec/spec_helper.rb15
87 files changed, 1093 insertions, 566 deletions
diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml
index 36a22ea8bb5..58a394e160a 100644
--- a/.gitlab/ci/rules.gitlab-ci.yml
+++ b/.gitlab/ci/rules.gitlab-ci.yml
@@ -1800,6 +1800,12 @@
changes: ["vendor/gems/devise-pbkdf2-encryptable/**/*"]
- <<: *if-merge-request-labels-run-all-rspec
+.vendor:rules:gitlab_active_record:
+ rules:
+ - <<: *if-merge-request
+ changes: ["vendor/gems/gitlab_active_record/**/*"]
+ - <<: *if-merge-request-labels-run-all-rspec
+
.vendor:rules:bundler-checksum:
rules:
- <<: *if-merge-request
diff --git a/.gitlab/ci/vendored-gems.gitlab-ci.yml b/.gitlab/ci/vendored-gems.gitlab-ci.yml
index a22ac5337f6..1086d9074d2 100644
--- a/.gitlab/ci/vendored-gems.gitlab-ci.yml
+++ b/.gitlab/ci/vendored-gems.gitlab-ci.yml
@@ -85,3 +85,11 @@ vendor bundler-checksum:
trigger:
include: vendor/gems/bundler-checksum/.gitlab-ci.yml
strategy: depend
+
+vendor gitlab_active_record:
+ extends:
+ - .vendor:rules:gitlab_active_record
+ needs: []
+ trigger:
+ include: vendor/gems/gitlab_active_record/.gitlab-ci.yml
+ strategy: depend
diff --git a/app/assets/javascripts/invite_members/components/invite_modal_base.vue b/app/assets/javascripts/invite_members/components/invite_modal_base.vue
index 1b9ac10d8e4..a5e0db8179a 100644
--- a/app/assets/javascripts/invite_members/components/invite_modal_base.vue
+++ b/app/assets/javascripts/invite_members/components/invite_modal_base.vue
@@ -12,6 +12,7 @@ import {
CANCEL_BUTTON_TEXT,
HEADER_CLOSE_LABEL,
ON_SHOW_TRACK_LABEL,
+ ON_CELEBRATION_TRACK_LABEL,
} from '../constants';
const DEFAULT_SLOT = 'default';
@@ -208,7 +209,7 @@ export default {
},
onShowModal() {
if (this.isCelebration) {
- this.track('render');
+ this.track('render', { label: ON_CELEBRATION_TRACK_LABEL });
}
if (this.usersLimitDataset.reachedLimit) {
@@ -217,7 +218,7 @@ export default {
},
onCancel(e) {
if (this.isCelebration) {
- this.track('click_cancel');
+ this.track('click_cancel', { label: ON_CELEBRATION_TRACK_LABEL });
}
if (this.preventCancelDefault) {
@@ -231,7 +232,7 @@ export default {
},
onSubmit(e) {
if (this.isCelebration) {
- this.track('click_invite');
+ this.track('click_invite', { label: ON_CELEBRATION_TRACK_LABEL });
}
// We never want to hide when submitting
@@ -244,7 +245,7 @@ export default {
},
onClose() {
if (this.isCelebration) {
- this.track('click_x');
+ this.track('click_x', { label: ON_CELEBRATION_TRACK_LABEL });
}
},
},
diff --git a/app/assets/javascripts/invite_members/constants.js b/app/assets/javascripts/invite_members/constants.js
index edc0ebff083..fc01d80994c 100644
--- a/app/assets/javascripts/invite_members/constants.js
+++ b/app/assets/javascripts/invite_members/constants.js
@@ -138,6 +138,7 @@ export const GROUP_MODAL_LABELS = {
export const LEARN_GITLAB = 'learn_gitlab';
export const ON_SHOW_TRACK_LABEL = 'over_limit_modal_viewed';
+export const ON_CELEBRATION_TRACK_LABEL = 'invite_celebration_modal';
export const INFO_ALERT_TITLE = s__(
'InviteMembersModal|Your top-level group %{namespaceName} is over the %{dashboardLimit} user limit.',
diff --git a/app/assets/javascripts/issues/list/components/empty_state_without_any_issues.vue b/app/assets/javascripts/issues/list/components/empty_state_without_any_issues.vue
index 251a6680ed3..652d4e0fb42 100644
--- a/app/assets/javascripts/issues/list/components/empty_state_without_any_issues.vue
+++ b/app/assets/javascripts/issues/list/components/empty_state_without_any_issues.vue
@@ -2,7 +2,7 @@
import { GlButton, GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
import { helpPagePath } from '~/helpers/help_page_helper';
import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue';
-import NewIssueDropdown from '~/vue_shared/components/new_issue_dropdown/new_issue_dropdown.vue';
+import NewResourceDropdown from '~/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue';
import { i18n } from '../constants';
import { hasNewIssueDropdown } from '../has_new_issue_dropdown_mixin';
@@ -15,7 +15,7 @@ export default {
GlEmptyState,
GlLink,
GlSprintf,
- NewIssueDropdown,
+ NewResourceDropdown,
},
mixins: [hasNewIssueDropdown()],
inject: [
@@ -77,7 +77,7 @@ export default {
:export-csv-path="exportCsvPathWithQuery"
:issuable-count="currentTabCount"
/>
- <new-issue-dropdown
+ <new-resource-dropdown
v-if="showNewIssueDropdown"
class="gl-align-self-center"
:query="$options.searchProjectsQuery"
diff --git a/app/assets/javascripts/issues/list/components/issues_list_app.vue b/app/assets/javascripts/issues/list/components/issues_list_app.vue
index ce4eafe7c8d..03a49d5233f 100644
--- a/app/assets/javascripts/issues/list/components/issues_list_app.vue
+++ b/app/assets/javascripts/issues/list/components/issues_list_app.vue
@@ -49,7 +49,7 @@ import {
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import { IssuableListTabs, IssuableStates } from '~/vue_shared/issuable/list/constants';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
-import NewIssueDropdown from '~/vue_shared/components/new_issue_dropdown/new_issue_dropdown.vue';
+import NewResourceDropdown from '~/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue';
import {
CREATED_DESC,
defaultTypeTokenOptions,
@@ -114,7 +114,7 @@ export default {
IssuableList,
IssueCardStatistics,
IssueCardTimeInfo,
- NewIssueDropdown,
+ NewResourceDropdown,
},
directives: {
GlTooltip: GlTooltipDirective,
@@ -859,7 +859,7 @@ export default {
{{ $options.i18n.newIssueLabel }}
</gl-button>
<slot name="new-objective-button"></slot>
- <new-issue-dropdown
+ <new-resource-dropdown
v-if="showNewIssueDropdown"
:query="$options.searchProjectsQuery"
:query-variables="newIssueDropdownQueryVariables"
diff --git a/app/assets/javascripts/issues/list/queries/search_projects.query.graphql b/app/assets/javascripts/issues/list/queries/search_projects.query.graphql
index bd2f9bc2340..2fd37489234 100644
--- a/app/assets/javascripts/issues/list/queries/search_projects.query.graphql
+++ b/app/assets/javascripts/issues/list/queries/search_projects.query.graphql
@@ -1,10 +1,9 @@
query searchProjects($fullPath: ID!, $search: String) {
group(fullPath: $fullPath) {
id
- projects(search: $search, includeSubgroups: true) {
+ projects(search: $search, withIssuesEnabled: true, includeSubgroups: true) {
nodes {
id
- issuesEnabled
name
nameWithNamespace
webUrl
diff --git a/app/assets/javascripts/pages/dashboard/issues/index.js b/app/assets/javascripts/pages/dashboard/issues/index.js
index 865634f5d76..659982fcc3a 100644
--- a/app/assets/javascripts/pages/dashboard/issues/index.js
+++ b/app/assets/javascripts/pages/dashboard/issues/index.js
@@ -4,7 +4,7 @@ import initManualOrdering from '~/issues/manual_ordering';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import projectSelect from '~/project_select';
-import { initNewIssueDropdown } from '~/vue_shared/components/new_issue_dropdown/init_new_issue_dropdown';
+import { initNewResourceDropdown } from '~/vue_shared/components/new_resource_dropdown/init_new_resource_dropdown';
initFilteredSearch({
page: FILTERED_SEARCH.ISSUES,
@@ -13,7 +13,7 @@ initFilteredSearch({
});
projectSelect();
-initNewIssueDropdown();
+initNewResourceDropdown();
initManualOrdering();
mountIssuesDashboardApp();
diff --git a/app/assets/javascripts/vue_shared/components/new_issue_dropdown/graphql/search_user_projects.query.graphql b/app/assets/javascripts/vue_shared/components/new_issue_dropdown/graphql/search_user_projects.query.graphql
deleted file mode 100644
index 28ed3e8d1b9..00000000000
--- a/app/assets/javascripts/vue_shared/components/new_issue_dropdown/graphql/search_user_projects.query.graphql
+++ /dev/null
@@ -1,11 +0,0 @@
-query searchUserProjects($search: String) {
- projects(search: $search, membership: true, sort: "latest_activity_desc") {
- nodes {
- id
- issuesEnabled
- name
- nameWithNamespace
- webUrl
- }
- }
-}
diff --git a/app/assets/javascripts/vue_shared/components/new_issue_dropdown/init_new_issue_dropdown.js b/app/assets/javascripts/vue_shared/components/new_issue_dropdown/init_new_issue_dropdown.js
deleted file mode 100644
index 373878a9a26..00000000000
--- a/app/assets/javascripts/vue_shared/components/new_issue_dropdown/init_new_issue_dropdown.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import createDefaultClient from '~/lib/graphql';
-import NewIssueDropdown from './new_issue_dropdown.vue';
-
-Vue.use(VueApollo);
-
-const apolloProvider = new VueApollo({
- defaultClient: createDefaultClient(),
-});
-
-export const initNewIssueDropdown = () => {
- const el = document.querySelector('.js-new-issue-dropdown');
-
- if (!el) {
- return false;
- }
-
- return new Vue({
- el,
- apolloProvider,
- render(createElement) {
- return createElement(NewIssueDropdown, {
- props: {
- withLocalStorage: true,
- },
- });
- },
- });
-};
diff --git a/app/assets/javascripts/vue_shared/components/new_resource_dropdown/constants.js b/app/assets/javascripts/vue_shared/components/new_resource_dropdown/constants.js
new file mode 100644
index 00000000000..e5dca170965
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/new_resource_dropdown/constants.js
@@ -0,0 +1,26 @@
+import { __ } from '~/locale';
+
+export const RESOURCE_TYPE_ISSUE = 'issue';
+export const RESOURCE_TYPE_MERGE_REQUEST = 'merge-request';
+export const RESOURCE_TYPE_MILESTONE = 'milestone';
+
+export const RESOURCE_TYPES = [
+ RESOURCE_TYPE_ISSUE,
+ RESOURCE_TYPE_MERGE_REQUEST,
+ RESOURCE_TYPE_MILESTONE,
+];
+
+export const RESOURCE_OPTIONS = {
+ [RESOURCE_TYPE_ISSUE]: {
+ path: 'issues/new',
+ label: __('issue'),
+ },
+ [RESOURCE_TYPE_MERGE_REQUEST]: {
+ path: 'merge_requests/new',
+ label: __('merge request'),
+ },
+ [RESOURCE_TYPE_MILESTONE]: {
+ path: 'milestones/new',
+ label: __('milestone'),
+ },
+};
diff --git a/app/assets/javascripts/vue_shared/components/new_resource_dropdown/graphql/search_user_projects_with_issues_enabled.query.graphql b/app/assets/javascripts/vue_shared/components/new_resource_dropdown/graphql/search_user_projects_with_issues_enabled.query.graphql
new file mode 100644
index 00000000000..a630c885d28
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/new_resource_dropdown/graphql/search_user_projects_with_issues_enabled.query.graphql
@@ -0,0 +1,15 @@
+query searchUserProjectsWithIssuesEnabled($search: String) {
+ projects(
+ search: $search
+ membership: true
+ withIssuesEnabled: true
+ sort: "latest_activity_desc"
+ ) {
+ nodes {
+ id
+ name
+ nameWithNamespace
+ webUrl
+ }
+ }
+}
diff --git a/app/assets/javascripts/vue_shared/components/new_resource_dropdown/init_new_resource_dropdown.js b/app/assets/javascripts/vue_shared/components/new_resource_dropdown/init_new_resource_dropdown.js
new file mode 100644
index 00000000000..f3905dabedd
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/new_resource_dropdown/init_new_resource_dropdown.js
@@ -0,0 +1,46 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createDefaultClient from '~/lib/graphql';
+import NewResourceDropdown from './new_resource_dropdown.vue';
+
+Vue.use(VueApollo);
+
+const apolloProvider = new VueApollo({
+ defaultClient: createDefaultClient(),
+});
+
+export const initNewResourceDropdown = (props = {}) => {
+ const el = document.querySelector('.js-new-resource-dropdown');
+
+ if (!el) {
+ return false;
+ }
+
+ const { groupId, fullPath, username } = el.dataset;
+
+ return new Vue({
+ el,
+ apolloProvider,
+ render(createElement) {
+ return createElement(NewResourceDropdown, {
+ props: {
+ withLocalStorage: true,
+ groupId,
+ queryVariables: {
+ ...(fullPath
+ ? {
+ fullPath,
+ }
+ : {}),
+ ...(username
+ ? {
+ username,
+ }
+ : {}),
+ },
+ ...props,
+ },
+ });
+ },
+ });
+};
diff --git a/app/assets/javascripts/vue_shared/components/new_issue_dropdown/new_issue_dropdown.vue b/app/assets/javascripts/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue
index b787cee3f01..b079181bd10 100644
--- a/app/assets/javascripts/vue_shared/components/new_issue_dropdown/new_issue_dropdown.vue
+++ b/app/assets/javascripts/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue
@@ -12,11 +12,11 @@ import { __, sprintf } from '~/locale';
import { DEBOUNCE_DELAY } from '~/vue_shared/components/filtered_search_bar/constants';
import AccessorUtilities from '~/lib/utils/accessor';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
-import searchUserProjects from './graphql/search_user_projects.query.graphql';
+import searchUserProjectsWithIssuesEnabled from './graphql/search_user_projects_with_issues_enabled.query.graphql';
+import { RESOURCE_TYPE_ISSUE, RESOURCE_TYPES, RESOURCE_OPTIONS } from './constants';
export default {
i18n: {
- defaultDropdownText: __('Select project to create issue'),
noMatchesFound: __('No matches found'),
toggleButtonLabel: __('Toggle project select'),
},
@@ -29,10 +29,21 @@ export default {
LocalStorageSync,
},
props: {
+ resourceType: {
+ type: String,
+ required: false,
+ default: RESOURCE_TYPE_ISSUE,
+ validator: (value) => RESOURCE_TYPES.includes(value),
+ },
query: {
type: Object,
required: false,
- default: () => searchUserProjects,
+ default: () => searchUserProjectsWithIssuesEnabled,
+ },
+ groupId: {
+ type: String,
+ required: false,
+ default: '',
},
queryVariables: {
type: Object,
@@ -86,24 +97,33 @@ export default {
},
},
computed: {
+ localStorageKey() {
+ return `group-${this.groupId}-new-${this.resourceType}-recent-project`;
+ },
+ resourceOptions() {
+ return RESOURCE_OPTIONS[this.resourceType];
+ },
+ defaultDropdownText() {
+ return sprintf(__('Select project to create %{type}'), { type: this.resourceOptions.label });
+ },
dropdownHref() {
return this.hasSelectedProject
- ? joinPaths(this.selectedProject.webUrl, DASH_SCOPE, 'issues/new')
+ ? joinPaths(this.selectedProject.webUrl, DASH_SCOPE, this.resourceOptions.path)
: undefined;
},
dropdownText() {
return this.hasSelectedProject
- ? sprintf(__('New issue in %{project}'), { project: this.selectedProject.name })
- : this.$options.i18n.defaultDropdownText;
+ ? sprintf(__('New %{type} in %{project}'), {
+ type: this.resourceOptions.label,
+ project: this.selectedProject.name,
+ })
+ : this.defaultDropdownText;
},
hasSelectedProject() {
return this.selectedProject.webUrl;
},
- projectsWithIssuesEnabled() {
- return this.projects.filter((project) => project.issuesEnabled);
- },
showNoSearchResultsText() {
- return !this.projectsWithIssuesEnabled.length && this.search;
+ return !this.projects.length && this.search;
},
canUseLocalStorage() {
return this.withLocalStorage && AccessorUtilities.canUseLocalStorage();
@@ -137,22 +157,23 @@ export default {
// The select2 implementation used to include the resource path in the local storage. We
// need to clean this up so that we can then re-build a fresh URL in the computed prop.
- const path = 'issues/new';
- webUrl = webUrl.endsWith(path) ? webUrl.slice(0, webUrl.length - path.length) : webUrl;
+ webUrl = webUrl.endsWith(this.resourceOptions.path)
+ ? webUrl.slice(0, webUrl.length - this.resourceOptions.path.length)
+ : webUrl;
+ const dashSuffix = `${DASH_SCOPE}/`;
+ webUrl = webUrl.endsWith(dashSuffix)
+ ? webUrl.slice(0, webUrl.length - dashSuffix.length)
+ : webUrl;
this.selectedProject = { webUrl, name: storedProject.name };
},
},
- // This key is hardcoded for now as we'll only be using the localStorage capability in the
- // instance-level issues dashboard. If we want to make this feature available in the groups'
- // issues lists, we should make this key dynamic.
- localStorageKey: 'group--new-issue-recent-project',
};
</script>
<template>
<local-storage-sync
- :storage-key="$options.localStorageKey"
+ :storage-key="localStorageKey"
:value="selectedProjectForLocalStorage"
@input="initFromLocalStorage"
>
@@ -172,11 +193,11 @@ export default {
<gl-loading-icon v-if="$apollo.queries.projects.loading" />
<template v-else>
<gl-dropdown-item
- v-for="project of projectsWithIssuesEnabled"
+ v-for="project of projects"
:key="project.id"
@click="selectProject(project)"
>
- {{ project.nameWithNamespace }}
+ {{ project.nameWithNamespace || project.name }}
</gl-dropdown-item>
<gl-dropdown-text v-if="showNoSearchResultsText">
{{ $options.i18n.noMatchesFound }}
diff --git a/app/graphql/types/permission_types/work_item.rb b/app/graphql/types/permission_types/work_item.rb
index bae1dae4834..f35f42001e0 100644
--- a/app/graphql/types/permission_types/work_item.rb
+++ b/app/graphql/types/permission_types/work_item.rb
@@ -6,7 +6,7 @@ module Types
graphql_name 'WorkItemPermissions'
description 'Check permissions for the current user on a work item'
- abilities :read_work_item, :update_work_item, :delete_work_item
+ abilities :read_work_item, :update_work_item, :delete_work_item, :admin_work_item
end
end
end
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 0139b025d98..770b6d8d723 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -34,11 +34,11 @@ module Ci
DEPLOYMENT_NAMES = %w[deploy release rollout].freeze
has_one :deployment, as: :deployable, class_name: 'Deployment', inverse_of: :deployable
- has_one :pending_state, class_name: 'Ci::BuildPendingState', inverse_of: :build
+ has_one :pending_state, class_name: 'Ci::BuildPendingState', foreign_key: :build_id, inverse_of: :build
has_one :queuing_entry, class_name: 'Ci::PendingBuild', foreign_key: :build_id
has_one :runtime_metadata, class_name: 'Ci::RunningBuild', foreign_key: :build_id
has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, inverse_of: :build
- has_many :report_results, class_name: 'Ci::BuildReportResult', inverse_of: :build
+ has_many :report_results, class_name: 'Ci::BuildReportResult', foreign_key: :build_id, inverse_of: :build
has_one :namespace, through: :project
# Projects::DestroyService destroys Ci::Pipelines, which use_fast_destroy on :job_artifacts
@@ -49,16 +49,16 @@ module Ci
has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id, inverse_of: :job
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_job_id
- has_many :pages_deployments, inverse_of: :ci_build
+ has_many :pages_deployments, foreign_key: :ci_build_id, inverse_of: :ci_build
Ci::JobArtifact.file_types.each do |key, value|
- has_one :"job_artifacts_#{key}", -> { where(file_type: value) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
+ has_one :"job_artifacts_#{key}", -> { where(file_type: value) }, class_name: 'Ci::JobArtifact', foreign_key: :job_id, inverse_of: :job
end
- has_one :runner_session, class_name: 'Ci::BuildRunnerSession', validate: true, inverse_of: :build
- has_one :trace_metadata, class_name: 'Ci::BuildTraceMetadata', inverse_of: :build
+ has_one :runner_session, class_name: 'Ci::BuildRunnerSession', validate: true, foreign_key: :build_id, inverse_of: :build
+ has_one :trace_metadata, class_name: 'Ci::BuildTraceMetadata', foreign_key: :build_id, inverse_of: :build
- has_many :terraform_state_versions, class_name: 'Terraform::StateVersion', inverse_of: :build, foreign_key: :ci_build_id
+ has_many :terraform_state_versions, class_name: 'Terraform::StateVersion', foreign_key: :ci_build_id, inverse_of: :build
accepts_nested_attributes_for :runner_session, update_only: true
accepts_nested_attributes_for :job_variables
diff --git a/app/models/project.rb b/app/models/project.rb
index 31b1d597055..8c931310614 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -2796,6 +2796,18 @@ class Project < ApplicationRecord
protected_branches.limit(limit)
end
+ def group_protected_branches
+ root_namespace.is_a?(Group) ? root_namespace.protected_branches : ProtectedBranch.none
+ end
+
+ def all_protected_branches
+ if Feature.enabled?(:group_protected_branches)
+ @all_protected_branches ||= ProtectedBranch.from_union([protected_branches, group_protected_branches])
+ else
+ protected_branches
+ end
+ end
+
def self_monitoring?
Gitlab::CurrentSettings.self_monitoring_project_id == id
end
diff --git a/app/models/protected_branch.rb b/app/models/protected_branch.rb
index d4793beffb9..cab35304778 100644
--- a/app/models/protected_branch.rb
+++ b/app/models/protected_branch.rb
@@ -3,6 +3,7 @@
class ProtectedBranch < ApplicationRecord
include ProtectedRef
include Gitlab::SQL::Pattern
+ include FromUnion
belongs_to :group, foreign_key: :namespace_id, touch: true, inverse_of: :protected_branches
@@ -12,6 +13,8 @@ class ProtectedBranch < ApplicationRecord
scope :allowing_force_push, -> { where(allow_force_push: true) }
scope :sorted_by_name, -> { order(name: :asc) }
+ scope :for_group, ->(group) { where(group: group) }
+
protected_ref_access_levels :merge, :push
def self.get_ids_by_name(name)
@@ -64,7 +67,19 @@ class ProtectedBranch < ApplicationRecord
# End of deprecation --------------------------------------------
def self.allow_force_push?(project, ref_name)
- project.protected_branches.allowing_force_push.matching(ref_name).any?
+ if Feature.enabled?(:group_protected_branches)
+ protected_branches = project.all_protected_branches.matching(ref_name)
+
+ project_protected_branches, group_protected_branches = protected_branches.partition(&:project_id)
+
+ # Group owner can be able to enforce the settings
+ return group_protected_branches.any?(&:allow_force_push) if group_protected_branches.present?
+ return project_protected_branches.any?(&:allow_force_push) if project_protected_branches.present?
+
+ false
+ else
+ project.protected_branches.allowing_force_push.matching(ref_name).any?
+ end
end
def self.any_protected?(project, ref_names)
@@ -76,7 +91,11 @@ class ProtectedBranch < ApplicationRecord
end
def self.protected_refs(project)
- project.protected_branches
+ if Feature.enabled?(:group_protected_branches)
+ project.all_protected_branches
+ else
+ project.protected_branches
+ end
end
# overridden in EE
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index eb25aeaf5a5..390675ab80b 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -33,7 +33,6 @@ module Ci
Gitlab::Ci::Pipeline::Chain::EnsureEnvironments,
Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups,
Gitlab::Ci::Pipeline::Chain::Create,
- Gitlab::Ci::Pipeline::Chain::CreateDeployments,
Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations,
Gitlab::Ci::Pipeline::Chain::Limit::Activity,
Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines,
diff --git a/app/services/projects/protect_default_branch_service.rb b/app/services/projects/protect_default_branch_service.rb
index 03d1c49657d..5360902038b 100644
--- a/app/services/projects/protect_default_branch_service.rb
+++ b/app/services/projects/protect_default_branch_service.rb
@@ -45,7 +45,11 @@ module Projects
end
def protected_branch_exists?
- project.protected_branches.find_by_name(default_branch).present?
+ if Feature.enabled?(:group_protected_branches)
+ project.all_protected_branches.find_by_name(default_branch).present?
+ else
+ project.protected_branches.find_by_name(default_branch).present?
+ end
end
def default_branch
diff --git a/app/views/dashboard/issues.html.haml b/app/views/dashboard/issues.html.haml
index 54bb807aca2..0933f6d6a94 100644
--- a/app/views/dashboard/issues.html.haml
+++ b/app/views/dashboard/issues.html.haml
@@ -13,7 +13,7 @@
- if current_user
.page-title-controls
- = render 'shared/new_project_item_vue_select', path: 'issues/new', label: _("issue"), with_feature_enabled: 'issues', type: :issues
+ = render 'shared/new_project_item_vue_select'
- if ::Feature.enabled?(:vue_issues_dashboard)
.js-issues-dashboard{ data: dashboard_issues_list_data(current_user) }
diff --git a/app/views/projects/artifacts/browse.html.haml b/app/views/projects/artifacts/browse.html.haml
index c91dfe6d28e..3359ea5f63b 100644
--- a/app/views/projects/artifacts/browse.html.haml
+++ b/app/views/projects/artifacts/browse.html.haml
@@ -1,6 +1,7 @@
- breadcrumb_title _('Artifacts')
- page_title @path.presence, _('Artifacts'), "#{@build.name} (##{@build.id})", _('Jobs')
- add_page_specific_style 'page_bundles/tree'
+- add_page_specific_style 'page_bundles/ci_status'
= render "projects/jobs/header"
diff --git a/app/views/shared/_new_project_item_vue_select.html.haml b/app/views/shared/_new_project_item_vue_select.html.haml
index 55c09274b22..24d275c4975 100644
--- a/app/views/shared/_new_project_item_vue_select.html.haml
+++ b/app/views/shared/_new_project_item_vue_select.html.haml
@@ -1,2 +1,2 @@
- if any_projects?(@projects)
- .js-new-issue-dropdown
+ .js-new-resource-dropdown
diff --git a/app/workers/ci/initial_pipeline_process_worker.rb b/app/workers/ci/initial_pipeline_process_worker.rb
index 734755f176a..52a4f075cf0 100644
--- a/app/workers/ci/initial_pipeline_process_worker.rb
+++ b/app/workers/ci/initial_pipeline_process_worker.rb
@@ -17,7 +17,7 @@ module Ci
def perform(pipeline_id)
Ci::Pipeline.find_by_id(pipeline_id).try do |pipeline|
- create_deployments!(pipeline) if Feature.enabled?(:move_create_deployments_to_worker, pipeline.project)
+ create_deployments!(pipeline)
Ci::PipelineCreation::StartPipelineService
.new(pipeline)
diff --git a/config/feature_flags/development/move_create_deployments_to_worker.yml b/config/feature_flags/development/move_create_deployments_to_worker.yml
deleted file mode 100644
index e428a7510de..00000000000
--- a/config/feature_flags/development/move_create_deployments_to_worker.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: move_create_deployments_to_worker
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108042
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/386903
-milestone: '15.8'
-type: development
-group: group::pipeline execution
-default_enabled: false
diff --git a/config/feature_flags/development/only_allow_merge_if_all_status_checks_passed.yml b/config/feature_flags/development/only_allow_merge_if_all_status_checks_passed.yml
index b5fd39354ec..b8d1b1f4175 100644
--- a/config/feature_flags/development/only_allow_merge_if_all_status_checks_passed.yml
+++ b/config/feature_flags/development/only_allow_merge_if_all_status_checks_passed.yml
@@ -5,4 +5,4 @@ rollout_issue_url: "https://gitlab.com/gitlab-org/gitlab/-/issues/372340"
milestone: '15.5'
type: development
group: group::compliance
-default_enabled: false
+default_enabled: true
diff --git a/doc/administration/reference_architectures/index.md b/doc/administration/reference_architectures/index.md
index 60258fb5a09..6dfa4dc1bd0 100644
--- a/doc/administration/reference_architectures/index.md
+++ b/doc/administration/reference_architectures/index.md
@@ -47,9 +47,9 @@ The following Cloud Native Hybrid reference architectures, where select recommen
The Reference Architectures are designed to strike a balance between two important factors--performance and resilience.
-While they are designed to make it easier to set up GitLab at scale, it can still be a challenge to know which one will meet your requirements.
+While they are designed to make it easier to set up GitLab at scale, it can still be a challenge to know which one meets your requirements.
-As a general guide, **the more performant and/or resilient you want your environment to be, the more involved it will be**.
+As a general guide, **the more performant and/or resilient you want your environment to be, the more involved it is**.
This section explains the designs you can choose from. It begins with the least complexity, goes to the most, and ends with a decision tree.
@@ -63,9 +63,9 @@ Backups can provide a good level of RPO / RTO while avoiding the complexities th
High Availability ensures every component in the GitLab setup can handle failures through various mechanisms. To achieve this however is involved, and the environments required can be sizable.
-For environments serving 3,000 or more users we generally recommend that a HA strategy is used as at this level outages will have a bigger impact against more users. All the architectures in this range have HA built in by design for this reason.
+For environments serving 3,000 or more users we generally recommend that an HA strategy is used as at this level outages have a bigger impact on more users. All the architectures in this range have HA built in by design for this reason.
-For users who still need to have HA for a lower number of users this can also be achieved with an [adjusted 3K architecture as detailed here](3k_users.md#supported-modifications-for-lower-user-counts-ha).
+For users who still need to have HA for a lower number of users this can also be achieved with an adjusted [3K architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
#### Do you need High Availability (HA)?
@@ -90,7 +90,7 @@ In most cases the downtime required for doing an upgrade in general shouldn't be
As an additional layer of HA resilience you can deploy select components in Kubernetes, known as a Cloud Native Hybrid Reference Architecture.
-Note that this is an alternative and more **advanced** setup compared to a standard Reference Architecture. Running services in Kubernetes is well known to be complex. **This setup is only recommended** if you have strong working knowledge and experience in Kubernetes.
+This is an alternative and more **advanced** setup compared to a standard Reference Architecture. Running services in Kubernetes is well known to be complex. **This setup is only recommended** if you have strong working knowledge and experience in Kubernetes.
### GitLab Geo (Cross Regional Distribution / Disaster Recovery)
@@ -208,7 +208,7 @@ Several cloud provider services are known not to support the above or have been
- [Amazon Aurora](https://aws.amazon.com/rds/aurora/) is incompatible and not supported. See [14.4.0](../../update/index.md#1440) for more details.
- [Azure Database for PostgreSQL Single Server](https://azure.microsoft.com/en-gb/products/postgresql/#overview) (Single / Flexible) is **strongly not recommended** for use due to notable performance / stability issues or missing functionality. See [Recommendation Notes for Azure](#recommendation-notes-for-azure) for more details.
- [Google AlloyDB](https://cloud.google.com/alloydb) and [Amazon RDS Multi-AZ DB cluster](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/multi-az-db-clusters-concepts.html) have not been tested and are not recommended. Both solutions are specifically not expected to work with GitLab Geo.
- - Note that [Amazon RDS Multi-AZ DB instance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZSingleStandby.html) is a separate product and is supported.
+ - [Amazon RDS Multi-AZ DB instance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZSingleStandby.html) is a separate product and is supported.
### Recommendation notes for Azure
@@ -241,7 +241,7 @@ Testing occurs against all reference architectures and cloud providers in an aut
- The [GitLab Environment Toolkit](https://gitlab.com/gitlab-org/gitlab-environment-toolkit) for building the environments.
- The [GitLab Performance Tool](https://gitlab.com/gitlab-org/quality/performance) for performance testing.
-Network latency on the test environments between components on all Cloud Providers were measured at <5 ms. Note that this is shared as an observation and not as an implicit recommendation.
+Network latency on the test environments between components on all Cloud Providers was measured at <5 ms. This is shared as an observation and not as an implicit recommendation.
We aim to have a "test smart" approach where architectures tested have a good range that can also apply to others. Testing focuses on 10k Omnibus on GCP as the testing has shown this is a good bellwether for the other architectures and cloud providers as well as Cloud Native Hybrids.
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 11f4ddac94c..be6a7e129eb 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -10630,6 +10630,7 @@ Describes a rule for who can approve merge requests.
| <a id="approvalruleapprovalsrequired"></a>`approvalsRequired` | [`Int`](#int) | Number of required approvals. |
| <a id="approvalruleapproved"></a>`approved` | [`Boolean`](#boolean) | Indicates if the rule is satisfied. |
| <a id="approvalruleapprovedby"></a>`approvedBy` | [`UserCoreConnection`](#usercoreconnection) | List of users defined in the rule that approved the merge request. (see [Connections](#connections)) |
+| <a id="approvalrulecommentedby"></a>`commentedBy` | [`UserCoreConnection`](#usercoreconnection) | List of users, defined in the rule, who commented on the merge request. (see [Connections](#connections)) |
| <a id="approvalrulecontainshiddengroups"></a>`containsHiddenGroups` | [`Boolean`](#boolean) | Indicates if the rule contains approvers from a hidden group. |
| <a id="approvalruleeligibleapprovers"></a>`eligibleApprovers` | [`[UserCore!]`](#usercore) | List of all users eligible to approve the merge request (defined explicitly and from associated groups). |
| <a id="approvalrulegroups"></a>`groups` | [`GroupConnection`](#groupconnection) | List of groups added as approvers for the rule. (see [Connections](#connections)) |
@@ -15421,6 +15422,7 @@ Information relating to rules that must be satisfied to merge this merge request
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestapprovalstateapprovalrulesoverwritten"></a>`approvalRulesOverwritten` | [`Boolean`](#boolean) | Indicates if the merge request approval rules are overwritten for the merge request. |
+| <a id="mergerequestapprovalstateinvalidapproversrules"></a>`invalidApproversRules` | [`[ApprovalRule!]`](#approvalrule) | List of approval rules that are associated with the merge request, but invalid. |
| <a id="mergerequestapprovalstaterules"></a>`rules` | [`[ApprovalRule!]`](#approvalrule) | List of approval rules associated with the merge request. |
### `MergeRequestAssignee`
@@ -20649,6 +20651,7 @@ Represents a vulnerability.
| <a id="vulnerabilityscanner"></a>`scanner` | [`VulnerabilityScanner`](#vulnerabilityscanner) | Scanner metadata for the vulnerability. |
| <a id="vulnerabilityseverity"></a>`severity` | [`VulnerabilitySeverity`](#vulnerabilityseverity) | Severity of the vulnerability (INFO, UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL). |
| <a id="vulnerabilitystate"></a>`state` | [`VulnerabilityState`](#vulnerabilitystate) | State of the vulnerability (DETECTED, CONFIRMED, RESOLVED, DISMISSED). |
+| <a id="vulnerabilitystatecomment"></a>`stateComment` | [`String`](#string) | Comment given for the vulnerability state change. |
| <a id="vulnerabilitytitle"></a>`title` | [`String`](#string) | Title of the vulnerability. |
| <a id="vulnerabilityupdatedat"></a>`updatedAt` | [`Time`](#time) | Timestamp of when the vulnerability was last updated. |
| <a id="vulnerabilityusernotescount"></a>`userNotesCount` | [`Int!`](#int) | Number of user notes attached to the vulnerability. |
@@ -21229,6 +21232,7 @@ Check permissions for the current user on a work item.
| Name | Type | Description |
| ---- | ---- | ----------- |
+| <a id="workitempermissionsadminworkitem"></a>`adminWorkItem` | [`Boolean!`](#boolean) | Indicates the user can perform `admin_work_item` on this resource. |
| <a id="workitempermissionsdeleteworkitem"></a>`deleteWorkItem` | [`Boolean!`](#boolean) | Indicates the user can perform `delete_work_item` on this resource. |
| <a id="workitempermissionsreadworkitem"></a>`readWorkItem` | [`Boolean!`](#boolean) | Indicates the user can perform `read_work_item` on this resource. |
| <a id="workitempermissionsupdateworkitem"></a>`updateWorkItem` | [`Boolean!`](#boolean) | Indicates the user can perform `update_work_item` on this resource. |
diff --git a/doc/api/projects.md b/doc/api/projects.md
index 5b9ce203141..41e4ffcf2c5 100644
--- a/doc/api/projects.md
+++ b/doc/api/projects.md
@@ -1412,7 +1412,7 @@ Supported attributes:
|-------------------------------------------------------------|----------------|------------------------|-------------|
| `id` | integer or string | **{check-circle}** Yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding). |
| `allow_merge_on_skipped_pipeline` | boolean | **{dotted-circle}** No | Set whether or not merge requests can be merged with skipped jobs. |
-| `only_allow_merge_if_all_status_checks_passed` **(ULTIMATE)** | boolean | **{dotted-circle}** No | Indicates that merges of merge requests should be blocked unless all status checks have passed. Defaults to false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/369859) in GitLab 15.5 with feature flag `only_allow_merge_if_all_status_checks_passed` disabled by default. |
+| `only_allow_merge_if_all_status_checks_passed` **(ULTIMATE)** | boolean | **{dotted-circle}** No | Indicates that merges of merge requests should be blocked unless all status checks have passed. Defaults to false.<br/><br/>[Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/369859) in GitLab 15.5 with feature flag `only_allow_merge_if_all_status_checks_passed` disabled by default. The feature flag was enabled by default in GitLab 15.9. |
| `analytics_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private` or `enabled` |
| `approvals_before_merge` **(PREMIUM)** | integer | **{dotted-circle}** No | How many approvers should approve merge request by default. To configure approval rules, see [Merge request approvals API](merge_request_approvals.md). |
| `auto_cancel_pending_pipelines` | string | **{dotted-circle}** No | Auto-cancel pending pipelines. This isn't a boolean, but enabled/disabled. |
diff --git a/doc/development/cached_queries.md b/doc/development/cached_queries.md
index 1b590d68d18..c8fa3c40f4a 100644
--- a/doc/development/cached_queries.md
+++ b/doc/development/cached_queries.md
@@ -149,16 +149,16 @@ the following statistics:
- Total retained: 757595 bytes (6070 objects)
- `db_count`: 144
- `db_cached_count`: 55
-- `db_duration`: 303ms
+- `db_duration`: 303 ms
The fix reduced the allocated memory, and the number of cached queries. These
factors help improve the overall execution time:
-- Total allocated: 5313899 bytes (65290 objects), 1810KB (25%) less
-- Total retained: 685593 bytes (5278 objects), 72KB (9%) less
+- Total allocated: 5313899 bytes (65290 objects), 1810 KB (25%) less
+- Total retained: 685593 bytes (5278 objects), 72 KB (9%) less
- `db_count`: 95 (34% less)
- `db_cached_count`: 6 (89% less)
-- `db_duration`: 162ms (87% faster)
+- `db_duration`: 162 ms (87% faster)
## For more information
diff --git a/doc/development/caching.md b/doc/development/caching.md
index 58ec7a77591..9b3f9a4215e 100644
--- a/doc/development/caching.md
+++ b/doc/development/caching.md
@@ -22,11 +22,11 @@ A faster store for data, which is:
## What is fast?
-The goal for every web page should be to return in under 100ms:
+The goal for every web page should be to return in under 100 ms:
- This is achievable, but you need caching on a modern application.
- Larger responses take longer to build, and caching becomes critical to maintaining a constant speed.
-- Cache reads are typically sub-1ms. There is very little that this doesn't improve.
+- Cache reads are typically sub-1 ms. There is very little that this doesn't improve.
- It's no good only being fast on subsequent page loads, as the initial experience
is important too, so this isn't a complete solution.
- User-specific data makes this challenging, and presents the biggest challenge
@@ -219,7 +219,7 @@ Use conditional GET caching when the entire response is cacheable:
- Users and API libraries can ignore the cache.
- Sometimes Chrome does weird things with caches.
-- You will forget it exists in development mode and get angry when your changes aren't appearing.
+- You forget it exists in development mode and get angry when your changes aren't appearing.
- In theory using conditional GET caching makes sense everywhere, but in practice it can
sometimes cause odd issues.
diff --git a/doc/development/ee_features.md b/doc/development/ee_features.md
index 707ec6d1b71..4eb5bedef1c 100644
--- a/doc/development/ee_features.md
+++ b/doc/development/ee_features.md
@@ -139,8 +139,8 @@ To do so:
### Simulate a SaaS instance
-If you run GitLab in development or have a license installed issued to a `@gitlab.com` email
-and you need your instance to simulate the SaaS (GitLab.com) version of the product:
+If you're developing locally and need your instance to simulate the SaaS (GitLab.com)
+version of the product:
1. Export this environment variable:
diff --git a/doc/development/pipelines/index.md b/doc/development/pipelines/index.md
index 1797e082aea..07c5db908b0 100644
--- a/doc/development/pipelines/index.md
+++ b/doc/development/pipelines/index.md
@@ -207,83 +207,13 @@ If you want to force all the RSpec jobs to run regardless of your changes, you c
WARNING:
Forcing all jobs on docs only related MRs would not have the prerequisite jobs and would lead to errors
-### Test suite parallelization
-
-Our current RSpec tests parallelization setup is as follows:
-
-1. The `retrieve-tests-metadata` job in the `prepare` stage ensures we have a
- `knapsack/report-master.json` file:
- - The `knapsack/report-master.json` file is fetched from the latest `main` pipeline which runs `update-tests-metadata`
- (for now it's the 2-hourly `maintenance` scheduled master pipeline), if it's not here we initialize the file with `{}`.
-1. Each `[rspec|rspec-ee] [migration|unit|integration|system|geo] n m` job are run with
- `knapsack rspec` and should have an evenly distributed share of tests:
- - It works because the jobs have access to the `knapsack/report-master.json`
- since the "artifacts from all previous stages are passed by default".
- - the jobs set their own report path to
- `"knapsack/${TEST_TOOL}_${TEST_LEVEL}_${DATABASE}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json"`.
- - if knapsack is doing its job, test files that are run should be listed under
- `Report specs`, not under `Leftover specs`.
-1. The `update-tests-metadata` job (which only runs on scheduled pipelines for
- [the canonical project](https://gitlab.com/gitlab-org/gitlab) takes all the
- `knapsack/rspec*.json` files and merge them all together into a single
- `knapsack/report-master.json` file that is saved as artifact.
-
-After that, the next pipeline uses the up-to-date `knapsack/report-master.json` file.
-
-### Flaky tests
-
-#### Automatic skipping of flaky tests
-
-Tests that are [known to be flaky](../testing_guide/flaky_tests.md#automatic-retries-and-flaky-tests-detection) are
-skipped unless the `$SKIP_FLAKY_TESTS_AUTOMATICALLY` variable is set to `false` or if the `~"pipeline:run-flaky-tests"`
-label is set on the MR.
-
-See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1069).
-
-#### Automatic retry of failing tests in a separate process
-
-Unless `$RETRY_FAILED_TESTS_IN_NEW_PROCESS` variable is set to `false` (`true` by default), RSpec tests that failed are automatically retried once in a separate
-RSpec process. The goal is to get rid of most side-effects from previous tests that may lead to a subsequent test failure.
-
-We keep track of retried tests in the `$RETRIED_TESTS_REPORT_FILE` file saved as artifact by the `rspec:flaky-tests-report` job.
-
-See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1148).
-
-### Compatibility testing
-
-By default, we run all tests with the versions that runs on GitLab.com.
-
-Other versions (usually one back-compatible version, and one forward-compatible version) should be running in nightly scheduled pipelines.
-
-Exceptions to this general guideline should be motivated and documented.
-
-#### Single database testing
-
-By default, all tests run with [multiple databases](../database/multiple_databases.md).
-
-We also run tests with a single database in nightly scheduled pipelines, and in merge requests that touch database-related files.
-
-If you want to force tests to run with a single database, you can add the `pipeline:run-single-db` label to the merge request.
-
-### Monitoring
-
-The GitLab test suite is [monitored](../performance.md#rspec-profiling) for the `main` branch, and any branch
-that includes `rspec-profile` in their name.
-
-### Logging
-
-- Rails logging to `log/test.log` is disabled by default in CI
- [for performance reasons](https://jtway.co/speed-up-your-rails-test-suite-by-6-in-1-line-13fedb869ec4).
- To override this setting, provide the
- `RAILS_ENABLE_TEST_LOG` environment variable.
-
-## Review app jobs
+### Review app jobs
Consult the [Review Apps](../testing_guide/review_apps.md) dedicated page for more information.
If you want to force a Review App to be deployed regardless of your changes, you can add the `pipeline:run-review-app` label to the merge request.
-## As-if-FOSS jobs
+### As-if-FOSS jobs
The `* as-if-foss` jobs run the GitLab test suite "as if FOSS", meaning as if the jobs would run in the context
of `gitlab-org/gitlab-foss`. These jobs are only created in the following cases:
@@ -297,7 +227,7 @@ set and get the `ee/` folder removed before the tests start running.
The intent is to ensure that a change doesn't introduce a failure after `gitlab-org/gitlab` is synced to `gitlab-org/gitlab-foss`.
-## As-if-JH cross project downstream pipeline
+### As-if-JH cross project downstream pipeline
The `start-as-if-jh` job triggers a cross project downstream pipeline which
runs the GitLab test suite "as if JiHu", meaning as if the pipeline would run
@@ -321,13 +251,13 @@ The intent is to ensure that a change doesn't introduce a failure after
[GitLab](https://gitlab.com/gitlab-org/gitlab) is synchronized to
[GitLab JH](https://jihulab.com/gitlab-cn/gitlab).
-### When to consider applying `pipeline:run-as-if-jh` label
+#### When to consider applying `pipeline:run-as-if-jh` label
If a Ruby file is renamed and there's a corresponding [`prepend_mod` line](../jh_features_review.md#jh-features-based-on-ce-or-ee-features),
it's likely that GitLab JH is relying on it and requires a corresponding
change to rename the module or class it's prepending.
-### Corresponding JH branch
+#### Corresponding JH branch
You can create a corresponding JH branch on [GitLab JH](https://jihulab.com/gitlab-cn/gitlab) by
appending `-jh` to the branch name. If a corresponding JH branch is found,
@@ -344,7 +274,7 @@ it does not include any corresponding JH branch beside the default `main-jh`.
This is why when we want to fetch corresponding JH branch we should fetch it
from the main mirror, rather than the validation project.
-### How as-if-JH pipeline was configured
+#### How as-if-JH pipeline was configured
The whole process looks like this:
@@ -373,14 +303,14 @@ flowchart TD
JH --"pull mirror with corresponding JH branches"--> Mirror
```
-#### Tokens set in the project variables
+##### Tokens set in the project variables
- `ADD_JH_FILES_TOKEN`: This is a [GitLab JH mirror](https://gitlab.com/gitlab-org/gitlab-jh-mirrors/gitlab)
project token with `read_api` permission, to be able to download JiHu files.
- `AS_IF_JH_TOKEN`: This is a [GitLab JH validation](https://gitlab.com/gitlab-org-sandbox/gitlab-jh-validation)
project token with `write_repository` permission, to push generated `as-if-jh/*` branch.
-#### How we generate the as-if-JH branch
+##### How we generate the as-if-JH branch
First `add-jh-files` job will download the required JiHu files from the
corresponding JH branch, saving in artifacts. Next `prepare-as-if-jh-branch`
@@ -388,13 +318,13 @@ job will create a new branch from the merge request branch, commit the
changes, and finally push the branch to the
[validation project](https://gitlab.com/gitlab-org-sandbox/gitlab-jh-validation).
-#### How we trigger and run the as-if-JH pipeline
+##### How we trigger and run the as-if-JH pipeline
After having the `as-if-jh/*` branch, `start-as-if-jh` job will trigger a pipeline
in the [validation project](https://gitlab.com/gitlab-org-sandbox/gitlab-jh-validation)
to run the cross-project downstream pipeline.
-#### How the GitLab JH mirror project is set up
+##### How the GitLab JH mirror project is set up
The [GitLab JH mirror](https://gitlab.com/gitlab-org/gitlab-jh-mirrors/gitlab) project is private and CI is disabled.
@@ -408,7 +338,7 @@ engineering vault.
No password is used from mirroring because GitLab JH is a public project.
-#### How the GitLab JH validation project is set up
+##### How the GitLab JH validation project is set up
This [GitLab JH validation](https://gitlab.com/gitlab-org-sandbox/gitlab-jh-validation) project is public and CI is enabled, without any project variables.
@@ -432,24 +362,7 @@ running every day, updating cache.
The default CI/CD configuration file is also set at `jh/.gitlab-ci.yml` so it
runs exactly like [GitLab JH](https://jihulab.com/gitlab-cn/gitlab/-/blob/main-jh/jh/.gitlab-ci.yml).
-## Ruby 2.7 jobs
-
-We're running Ruby 3.0 for the merge requests and the default branch. However,
-we're still running Ruby 2.7 for GitLab.com and there are older versions that
-we need to maintain. We need a way to still try out Ruby 2.7 in merge requests.
-
-You can add the `pipeline:run-in-ruby2` label to the merge request to switch
-the Ruby version used for running the whole test suite to 2.7. When you do
-this, the test suite will no longer run in Ruby 3.0 (default), and an
-additional job `verify-ruby-3.0` will also run and always fail to remind us to
-remove the label and run in Ruby 3.0 before merging the merge request.
-
-This should let us:
-
-- Test changes for Ruby 2.7
-- Make sure it will not break anything when it's merged into the default branch
-
-## `undercover` RSpec test
+### `rspec:undercoverage` job
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74859) in GitLab 14.6.
@@ -463,7 +376,7 @@ In the event of an emergency, or false positive from this job, add the
`pipeline:skip-undercoverage` label to the merge request to allow this job to
fail.
-### Troubleshooting `rspec:undercoverage` failures
+#### Troubleshooting `rspec:undercoverage` failures
The `rspec:undercoverage` job has [known bugs](https://gitlab.com/groups/gitlab-org/-/epics/8254)
that can cause false positive failures. You can test coverage locally to determine if it's
@@ -475,13 +388,75 @@ test causing the failure:
If these commands return `undercover: ✅ No coverage is missing in latest changes` then you can apply `~"pipeline:skip-undercoverage"` to bypass pipeline failures.
-## Ruby versions testing
+## Test suite parallelization
+
+Our current RSpec tests parallelization setup is as follows:
+
+1. The `retrieve-tests-metadata` job in the `prepare` stage ensures we have a
+ `knapsack/report-master.json` file:
+ - The `knapsack/report-master.json` file is fetched from the latest `main` pipeline which runs `update-tests-metadata`
+ (for now it's the 2-hourly `maintenance` scheduled master pipeline), if it's not here we initialize the file with `{}`.
+1. Each `[rspec|rspec-ee] [migration|unit|integration|system|geo] n m` job is run with
+ `knapsack rspec` and should have an evenly distributed share of tests:
+ - It works because the jobs have access to the `knapsack/report-master.json`
+ since the "artifacts from all previous stages are passed by default".
+ - the jobs set their own report path to
+ `"knapsack/${TEST_TOOL}_${TEST_LEVEL}_${DATABASE}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json"`.
+ - if knapsack is doing its job, test files that are run should be listed under
+ `Report specs`, not under `Leftover specs`.
+1. The `update-tests-metadata` job (which only runs on scheduled pipelines for
+   [the canonical project](https://gitlab.com/gitlab-org/gitlab)) takes all the
+   `knapsack/rspec*.json` files and merges them all together into a single
+ `knapsack/report-master.json` file that is saved as artifact.
+
+After that, the next pipeline uses the up-to-date `knapsack/report-master.json` file.
+
+## Flaky tests
+
+### Automatic skipping of flaky tests
+
+Tests that are [known to be flaky](../testing_guide/flaky_tests.md#automatic-retries-and-flaky-tests-detection) are
+skipped unless the `$SKIP_FLAKY_TESTS_AUTOMATICALLY` variable is set to `false` or if the `~"pipeline:run-flaky-tests"`
+label is set on the MR.
+
+See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1069).
+
+### Automatic retry of failing tests in a separate process
+
+Unless `$RETRY_FAILED_TESTS_IN_NEW_PROCESS` variable is set to `false` (`true` by default), RSpec tests that failed are automatically retried once in a separate
+RSpec process. The goal is to get rid of most side-effects from previous tests that may lead to a subsequent test failure.
+
+We keep track of retried tests in the `$RETRIED_TESTS_REPORT_FILE` file saved as artifact by the `rspec:flaky-tests-report` job.
+
+See the [experiment issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1148).
+
+## Compatibility testing
+
+By default, we run all tests with the versions that run on GitLab.com.
+
+Other versions (usually one back-compatible version, and one forward-compatible version) should be running in nightly scheduled pipelines.
+
+Exceptions to this general guideline should be motivated and documented.
-Our test suite runs against Ruby 3 in merge requests and default branch pipelines.
+### Ruby versions testing
-We also run our test suite against Ruby 2.7 on another 2-hourly scheduled pipelines, as GitLab.com still runs on Ruby 2.7.
+We're running Ruby 3.0 for the merge requests and the default branch. However,
+we're still running Ruby 2.7 for GitLab.com and there are older versions that
+we need to maintain, so we also run our test suite against Ruby 2.7 on a
+dedicated 2-hourly scheduled pipeline.
-## PostgreSQL versions testing
+For merge requests, you can add the `pipeline:run-in-ruby2` label to switch
+the Ruby version used for running the whole test suite to 2.7. When you do
+this, the test suite will no longer run in Ruby 3.0 (default), and an
+additional job `verify-ruby-3.0` will also run and always fail to remind us to
+remove the label and run in Ruby 3.0 before merging the merge request.
+
+This should let us:
+
+- Test changes for Ruby 2.7
+- Make sure it will not break anything when it's merged into the default branch
+
+### PostgreSQL versions testing
Our test suite runs against PG12 as GitLab.com runs on PG12 and
[Omnibus defaults to PG12 for new installs and upgrades](../../administration/package_information/postgresql_versions.md).
@@ -490,7 +465,7 @@ We do run our test suite against PG11 and PG13 on nightly scheduled pipelines.
We also run our test suite against PG11 upon specific database library changes in MRs and `main` pipelines (with the `rspec db-library-code pg11` job).
-### Current versions testing
+#### Current versions testing
| Where? | PostgreSQL version | Ruby version |
|------------------------------------------------------------------------------------------------|-------------------------------------------------|--------------|
@@ -515,7 +490,7 @@ Previously, `ruby2-sync` was using a project token stored in `RUBY2_SYNC_TOKEN`
permissions issues, we ended up using an access token from `gitlab-bot` so now
`RUBY2_SYNC_TOKEN` is actually an access token from `gitlab-bot`.
-### Long-term plan
+#### Long-term plan
We follow the [PostgreSQL versions shipped with Omnibus GitLab](../../administration/package_information/postgresql_versions.md):
@@ -525,14 +500,14 @@ We follow the [PostgreSQL versions shipped with Omnibus GitLab](../../administra
| PG11 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
| PG13 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
-## Redis versions testing
+### Redis versions testing
Our test suite runs against Redis 6 as GitLab.com runs on Redis 6 and
[Omnibus defaults to Redis 6 for new installs and upgrades](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/master/config/software/redis.rb).
We do run our test suite against Redis 5 on `nightly` scheduled pipelines, specifically when running backward-compatible and forward-compatible PostgreSQL jobs.
-### Current versions testing
+#### Current versions testing
| Where? | Redis version |
| ------ | ------------------ |
@@ -540,6 +515,26 @@ We do run our test suite against Redis 5 on `nightly` scheduled pipelines, speci
| `default branch` (non-scheduled pipelines) | 6 |
| `nightly` scheduled pipelines | 5 |
+### Single database testing
+
+By default, all tests run with [multiple databases](../database/multiple_databases.md).
+
+We also run tests with a single database in nightly scheduled pipelines, and in merge requests that touch database-related files.
+
+If you want to force tests to run with a single database, you can add the `pipeline:run-single-db` label to the merge request.
+
+## Monitoring
+
+The GitLab test suite is [monitored](../performance.md#rspec-profiling) for the `main` branch, and any branch
+that includes `rspec-profile` in their name.
+
+## Logging
+
+- Rails logging to `log/test.log` is disabled by default in CI
+ [for performance reasons](https://jtway.co/speed-up-your-rails-test-suite-by-6-in-1-line-13fedb869ec4).
+ To override this setting, provide the
+ `RAILS_ENABLE_TEST_LOG` environment variable.
+
## Pipelines types for merge requests
In general, pipelines for an MR fall into one of the following types (from shorter to longer), depending on the changes made in the MR:
diff --git a/doc/development/rake_tasks.md b/doc/development/rake_tasks.md
index caea2cecf57..82e96befd11 100644
--- a/doc/development/rake_tasks.md
+++ b/doc/development/rake_tasks.md
@@ -10,7 +10,7 @@ Rake tasks are available for developers and others contributing to GitLab.
## Set up database with developer seeds
-Note that if your database user does not have advanced privileges, you must create the database manually before running this command.
+If your database user does not have advanced privileges, you must create the database manually before running this command.
```shell
bundle exec rake setup
@@ -154,7 +154,7 @@ seeds, you can set the `FORCE` environment variable to `yes`:
FORCE=yes bundle exec rake setup
```
-This will skip the action confirmation/safety check, saving you from answering
+This skips the action confirmation/safety check, saving you from answering
`yes` manually.
### Discard `stdout`
@@ -168,7 +168,7 @@ it to a file. If we don't care about the output, we could just redirect it to
echo 'yes' | bundle exec rake setup > /dev/null
```
-Note that since you can't see the questions from `stdout`, you might just want
+Because you can't see the questions from `stdout`, you might just want
to `echo 'yes'` to keep it running. It would still print the errors on `stderr`
so no worries about missing errors.
@@ -182,7 +182,7 @@ There are a few environment flags you can pass to change how projects are seeded
## Run tests
-In order to run the test you can use the following commands:
+To run the test you can use the following commands:
- `bin/rake spec` to run the RSpec suite
- `bin/rake spec:unit` to run only the unit tests
diff --git a/doc/development/sidekiq/worker_attributes.md b/doc/development/sidekiq/worker_attributes.md
index 4fcd8e33d5c..a3bfe5f27cc 100644
--- a/doc/development/sidekiq/worker_attributes.md
+++ b/doc/development/sidekiq/worker_attributes.md
@@ -37,7 +37,7 @@ end
### Latency sensitive jobs
If a large number of background jobs get scheduled at once, queueing of jobs may
-occur while jobs wait for a worker node to be become available. This is normal
+occur while jobs wait for a worker node to become available. This is standard
and gives the system resilience by allowing it to gracefully handle spikes in
traffic. Some jobs, however, are more sensitive to latency than others.
@@ -79,7 +79,7 @@ On GitLab.com, we run Sidekiq in several
each of which represents a particular type of workload.
When changing a queue's urgency, or adding a new queue, we need to take
-into account the expected workload on the new shard. Note that, if we're
+into account the expected workload on the new shard. If we're
changing an existing queue, there is also an effect on the old shard,
but that always reduces work.
@@ -108,7 +108,7 @@ shard_consumption = shard_rps * shard_duration_avg
If we expect an increase of **less than 5%**, then no further action is needed.
-Otherwise, please ping `@gitlab-org/scalability` on the merge request and ask
+Otherwise, ping `@gitlab-org/scalability` on the merge request and ask
for a review.
## Jobs with External Dependencies
@@ -121,7 +121,7 @@ However, some jobs are dependent on external services to complete
successfully. Some examples include:
1. Jobs which call web-hooks configured by a user.
-1. Jobs which deploy an application to a k8s cluster configured by a user.
+1. Jobs which deploy an application to a Kubernetes cluster configured by a user.
These jobs have "external dependencies". This is important for the operation of
the background processing cluster in several ways:
@@ -179,8 +179,8 @@ performance.
Likewise, if a worker uses large amounts of memory, we can run these on a
bespoke low concurrency, high memory fleet.
-Note that memory-bound workers create heavy GC workloads, with pauses of
-10-50ms. This has an impact on the latency requirements for the
+Memory-bound workers create heavy GC workloads, with pauses of
+10-50 ms. This has an impact on the latency requirements for the
worker. For this reason, `memory` bound, `urgency :high` jobs are not
permitted and fail CI. In general, `memory` bound workers are
discouraged, and alternative approaches to processing the work should be
@@ -219,7 +219,7 @@ We use the following approach to determine whether a worker is CPU-bound:
- Divide `cpu_s` by `duration` to get the percentage time spent on-CPU.
- If this ratio exceeds 33%, the worker is considered CPU-bound and should be
annotated as such.
-- Note that these values should not be used over small sample sizes, but
+- These values should not be used over small sample sizes, but
rather over fairly large aggregates.
## Feature category
@@ -254,7 +254,7 @@ When setting this field, consider the following trade-off:
- Prefer read replicas to add relief to the primary, but increase the likelihood of stale reads that have to be retried.
To maintain the same behavior compared to before this field was introduced, set it to `:always`, so
-database operations will only target the primary. Reasons for having to do so include workers
+database operations only target the primary. Reasons for having to do so include workers
that mostly or exclusively perform writes, or workers that read their own writes and who might run
into data consistency issues should a stale record be read back from a replica. **Try to avoid
these scenarios, since `:always` should be considered the exception, not the rule.**
@@ -270,10 +270,10 @@ The difference is in what happens when there is still replication lag after the
switch over to the primary right away, whereas `delayed` workers fail fast and are retried once.
If they still encounter replication lag, they also switch to the primary instead.
**If your worker never performs any writes, it is strongly advised to apply one of these consistency settings,
-since it will never need to rely on the primary database node.**
+since it never needs to rely on the primary database node.**
The table below shows the `data_consistency` attribute and its values, ordered by the degree to which
-they prefer read replicas and will wait for replicas to catch up:
+they prefer read replicas and wait for replicas to catch up:
| **Data Consistency** | **Description** |
|--------------|-----------------------------|
@@ -300,14 +300,14 @@ end
The `feature_flag` property allows you to toggle a job's `data_consistency`,
which permits you to safely toggle load balancing capabilities for a specific job.
-When `feature_flag` is disabled, the job defaults to `:always`, which means that the job will always use the primary database.
+When `feature_flag` is disabled, the job defaults to `:always`, which means that the job always uses the primary database.
The `feature_flag` property does not allow the use of
[feature gates based on actors](../feature_flags/index.md).
This means that the feature flag cannot be toggled only for particular
projects, groups, or users, but instead, you can safely use [percentage of time rollout](../feature_flags/index.md).
-Note that since we check the feature flag on both Sidekiq client and server, rolling out a 10% of the time,
-will likely results in 1% (`0.1` `[from client]*0.1` `[from server]`) of effective jobs using replicas.
+Since we check the feature flag on both Sidekiq client and server, rolling out 10% of the time
+likely results in 1% (`0.1` `[from client]*0.1` `[from server]`) of effective jobs using replicas.
Example:
diff --git a/doc/development/uploads/working_with_uploads.md b/doc/development/uploads/working_with_uploads.md
index a3951fb4c7e..6955f9c31cd 100644
--- a/doc/development/uploads/working_with_uploads.md
+++ b/doc/development/uploads/working_with_uploads.md
@@ -38,7 +38,7 @@ is:
File.join(model.class.underscore, mounted_as.to_s, model.id.to_s)
```
-If you look around in the GitLab code base you will find quite a few
+If you look around in the GitLab code base, you can find quite a few
Uploaders that have their own storage location. For object storage,
this means Uploaders have their own buckets. We now **discourage**
adding new buckets for the following reasons:
@@ -53,7 +53,7 @@ and friction. The `Gitlab.config.uploads` storage location, which is what
## Implementing Direct Upload support
-Below we will outline how to implement [direct upload](#direct-upload-via-workhorse) support.
+Below we outline how to implement [direct upload](#direct-upload-via-workhorse) support.
Using direct upload is not always necessary but it is usually a good
idea. Unless the uploads handled by your feature are both infrequent
@@ -107,7 +107,7 @@ You should also manually verify that when you perform an upload
request for your new feature, Workhorse makes a pre-authorization
request. You can check this by looking at the Rails access logs. This
is necessary because if you make a mistake in your routing rule you
-won't get a hard failure: you just end up using the less efficient
+don't get a hard failure: you just end up using the less efficient
default path.
### Adding a pre-authorization endpoint
@@ -123,8 +123,8 @@ Consider accepting your file upload via Grape instead.
For Grape pre-authorization endpoints, look for existing examples that
implement `/authorize` routes. One example is the
[POST `:id/uploads/authorize` endpoint](https://gitlab.com/gitlab-org/gitlab/-/blob/9ad53d623eecebb799ce89eada951e4f4a59c116/lib/api/projects.rb#L642-651).
-Note that this particular example is using FileUploader, which means
-that the upload will be stored in the storage location (bucket) of
+This particular example is using FileUploader, which means
+that the upload is stored in the storage location (bucket) of
that Uploader class.
For Rails endpoints you can use the
@@ -154,7 +154,7 @@ scale.
GitLab uses a modified version of
[CarrierWave](https://github.com/carrierwaveuploader/carrierwave) to
-manage uploads. Below we will describe how we use CarrierWave and how
+manage uploads. Below we describe how we use CarrierWave and how
we modified it.
The central concept of CarrierWave is the **Uploader** class. The
@@ -197,15 +197,15 @@ particular, you currently cannot use the `version` mechanism of
CarrierWave. Things you can do include:
- Filename validation
-- **Incompatible with direct upload:** One time pre-processing of file contents, e.g. image resizing
+- **Incompatible with direct upload:** One time pre-processing of file contents, for example, image resizing
- **Incompatible with direct upload:** Encryption at rest
-Note that CarrierWave pre-processing behaviors such as image resizing
+CarrierWave pre-processing behaviors such as image resizing
or encryption require local access to the uploaded file. This forces
you to upload the processed file from Ruby. This flies against direct
upload, which is all about _not_ doing the upload in Ruby. If you use
direct upload with an Uploader with pre-processing behaviors then the
-pre-processing behaviors will be skipped silently.
+pre-processing behaviors are skipped silently.
### CarrierWave Storage engines
@@ -218,7 +218,7 @@ CarrierWave has 2 storage engines:
GitLab uses both of these engines, depending on configuration.
-The normal way to choose a storage engine in CarrierWave is to use the
+The typical way to choose a storage engine in CarrierWave is to use the
`Uploader.storage` class method. In GitLab we do not do this; we have
overridden `Uploader#storage` instead. This allows us to vary the
storage engine file by file.
@@ -228,13 +228,13 @@ storage engine file by file.
An Uploader is associated with two storage areas: regular storage and
cache storage. Each has its own storage engine. If you assign a file
to a mount point setter (`project.avatar = File.open('/tmp/tanuki.png')`)
-you will copy/move the file to cache
+the file is copied/moved to cache
storage as a side effect via the `cache!` method. To persist the file
you must somehow call the `store!` method. This either happens via
[ActiveRecord callbacks](https://github.com/carrierwaveuploader/carrierwave/blob/v1.3.2/lib/carrierwave/orm/activerecord.rb#L55)
or by calling `store!` on an Uploader instance.
-Normally you do not need to interact with `cache!` and `store!` but if
+Typically you do not need to interact with `cache!` and `store!` but if
you need to debug GitLab CarrierWave modifications it is useful to
know that they are there and that they always get called.
Specifically, it is good to know that CarrierWave pre-processing
@@ -278,7 +278,7 @@ Direct upload works as follows.
1. Rails deletes the temporary upload
1. Workhorse deletes the temporary upload a second time in case Rails timed out
-Normally, `cache!` returns an instance of
+Typically, `cache!` returns an instance of
`CarrierWave::SanitizedFile`, and `store!` then
[uploads that file using Fog](https://github.com/carrierwaveuploader/carrierwave/blob/v1.3.2/lib/carrierwave/storage/fog.rb#L327-L335).
@@ -294,7 +294,7 @@ this file to its intended location.
## Tables
-The Scalability::Frameworks team is going to make object storage and uploads more easy to use and more robust. If you add or change uploaders, it helps us if you update this table too. This helps us keep an overview of where and how uploaders are used.
+The Scalability::Frameworks team is making object storage and uploads easier to use and more robust. If you add or change uploaders, it helps us if you update this table too. This helps us keep an overview of where and how uploaders are used.
### Feature bucket details
diff --git a/doc/development/workhorse/gitlab_features.md b/doc/development/workhorse/gitlab_features.md
index 824ebda0441..b4146e8b62c 100644
--- a/doc/development/workhorse/gitlab_features.md
+++ b/doc/development/workhorse/gitlab_features.md
@@ -12,7 +12,7 @@ GitLab that would not work efficiently without Workhorse.
To put the efficiency benefit in context, consider that in 2020Q3 on
GitLab.com [we see](https://thanos-query.ops.gitlab.net/graph?g0.range_input=1h&g0.max_source_resolution=0s&g0.expr=sum(ruby_process_resident_memory_bytes%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)%20%2F%20sum(puma_max_threads%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)&g0.tab=1&g1.range_input=1h&g1.max_source_resolution=0s&g1.expr=sum(go_memstats_sys_bytes%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)%2Fsum(go_goroutines%7Bapp%3D%22webservice%22%2Cenv%3D%22gprd%22%2Crelease%3D%22gitlab%22%7D)&g1.tab=1)
Rails application threads using on average
-about 200MB of RSS vs about 200KB for Workhorse goroutines.
+about 200 MB of RSS vs about 200 KB for Workhorse goroutines.
Examples of features that rely on Workhorse:
diff --git a/doc/install/docker.md b/doc/install/docker.md
index c417ab9ef47..40eb3a9796e 100644
--- a/doc/install/docker.md
+++ b/doc/install/docker.md
@@ -345,7 +345,7 @@ sudo docker run --detach \
gitlab/gitlab-ee:latest
```
-Note that every time you execute a `docker run` command, you need to provide
+Every time you execute a `docker run` command, you need to provide
the `GITLAB_OMNIBUS_CONFIG` option. The content of `GITLAB_OMNIBUS_CONFIG` is
_not_ preserved between subsequent runs.
@@ -623,7 +623,7 @@ sudo docker exec -it gitlab /bin/bash
```
From within the container you can administer the GitLab container as you would
-normally administer an
+usually administer an
[Omnibus installation](https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/README.md)
### 500 Internal Error
@@ -702,7 +702,7 @@ variety of statistics on the health and performance of GitLab. The files
required for this get written to a temporary file system (like `/run` or
`/dev/shm`).
-By default, Docker allocates 64MB to the shared memory directory (mounted at
+By default, Docker allocates 64 MB to the shared memory directory (mounted at
`/dev/shm`). This is insufficient to hold all the Prometheus metrics related
files generated, and will generate error logs like the following:
@@ -717,7 +717,7 @@ writing value to /dev/shm/gitlab/sidekiq/histogram_sidekiq_0-0.db failed with un
```
Other than disabling the Prometheus Metrics from the Admin Area, the recommended
-solution to fix this problem is to increase the size of shared memory to at least 256MB.
+solution to fix this problem is to increase the size of shared memory to at least 256 MB.
If using `docker run`, this can be done by passing the flag `--shm-size 256m`.
If using a `docker-compose.yml` file, the `shm_size` key can be used for this
purpose.
diff --git a/doc/user/application_security/dast_api/index.md b/doc/user/application_security/dast_api/index.md
index 0144c37c3ff..ca021e9f924 100644
--- a/doc/user/application_security/dast_api/index.md
+++ b/doc/user/application_security/dast_api/index.md
@@ -1161,7 +1161,7 @@ Example usage for setting a `body-json` override:
}
```
-Note that each JSON property name in the object `body-json` is set to a [JSON Path](https://goessner.net/articles/JsonPath/)
+Each JSON property name in the object `body-json` is set to a [JSON Path](https://goessner.net/articles/JsonPath/)
expression. The JSON Path expression `$.credentials.access-token` identifies the node to be
overridden with the value `iddqd!42.$`. The override engine uses `body-json` when the request body
has only [JSON](https://www.json.org/json-en.html) content.
@@ -1200,7 +1200,7 @@ the second entry overrides an XML element:
}
```
-Note that each JSON property name in the object `body-xml` is set to an
+Each JSON property name in the object `body-xml` is set to an
[XPath v2](https://www.w3.org/TR/xpath20/)
expression. The XPath expression `/credentials/@isEnabled` identifies the attribute node to override
with the value `true`. The XPath expression `/credentials/access-token/text()` identifies the
@@ -1340,7 +1340,7 @@ By default the output of the overrides command is hidden. If the overrides comma
It is also possible to write messages from your script to a log file that is collected when the job completes or fails. The log file must be created in a specific location and follow a naming convention.
-Adding some basic logging to your overrides script is useful in case the script fails unexpectedly during normal running of the job. The log file is automatically included as an artifact of the job, allowing you to download it after the job has finished.
+Adding some basic logging to your overrides script is useful in case the script fails unexpectedly during standard running of the job. The log file is automatically included as an artifact of the job, allowing you to download it after the job has finished.
Following our example, we provided `renew_token.py` in the environment variable `DAST_API_OVERRIDES_CMD`. Notice two things in the script:
@@ -1449,7 +1449,7 @@ logging.info("Override file has been updated")
# end
```
-In the overrides command example, the Python script depends on the `backoff` library. To make sure the library is installed before executing the Python script, the `DAST_API_PRE_SCRIPT` is set to a script that will install the dependencies of your overrides command.
+In the overrides command example, the Python script depends on the `backoff` library. To make sure the library is installed before executing the Python script, the `DAST_API_PRE_SCRIPT` is set to a script that installs the dependencies of your overrides command.
For example, the following script `user-pre-scan-set-up.sh`:
```shell
@@ -1495,7 +1495,7 @@ In the previous sample, you could use the script `user-pre-scan-set-up.sh` to al
The request headers feature lets you specify fixed values for the headers during the scan session. For example, you can use the configuration variable `DAST_API_REQUEST_HEADERS` to set a fixed value in the `Cache-Control` header. If the headers you need to set include sensitive values like the `Authorization` header, use the [masked variable](../../../ci/variables/index.md#mask-a-cicd-variable) feature along with the [variable `DAST_API_REQUEST_HEADERS_BASE64`](#base64).
-Note that if the `Authorization` header or any other header needs to get updated while the scan is in progress, consider using the [overrides](#overrides) feature.
+If the `Authorization` header or any other header needs to get updated while the scan is in progress, consider using the [overrides](#overrides) feature.
The variable `DAST_API_REQUEST_HEADERS` lets you specify a comma-separated (`,`) list of headers. These headers are included on each request that the scanner performs. Each header entry in the list consists of a name followed by a colon (`:`) and then by its value. Whitespace before the key or value is ignored. For example, to declare a header name `Cache-Control` with the value `max-age=604800`, the header entry is `Cache-Control: max-age=604800`. To use two headers, `Cache-Control: max-age=604800` and `Age: 100`, set `DAST_API_REQUEST_HEADERS` variable to `Cache-Control: max-age=604800, Age: 100`.
@@ -1606,7 +1606,7 @@ variables:
While testing an API you might want to exclude a parameter (query string, header, or body element) from testing. This may be needed because a parameter always causes a failure, slows down testing, or for other reasons. To exclude parameters, you can set one of the following variables: `DAST_API_EXCLUDE_PARAMETER_ENV` or `DAST_API_EXCLUDE_PARAMETER_FILE`.
-The `DAST_API_EXCLUDE_PARAMETER_ENV` allows providing a JSON string containing excluded parameters. This is a good option if the JSON is short and will not often change. Another option is the variable `DAST_API_EXCLUDE_PARAMETER_FILE`. This variable is set to a file path that can be checked into the repository, created by another job as an artifact, or generated at runtime with a pre-script using `DAST_API_PRE_SCRIPT`.
+The `DAST_API_EXCLUDE_PARAMETER_ENV` allows providing a JSON string containing excluded parameters. This is a good option if the JSON is short and does not change often. Another option is the variable `DAST_API_EXCLUDE_PARAMETER_FILE`. This variable is set to a file path that can be checked into the repository, created by another job as an artifact, or generated at runtime with a pre-script using `DAST_API_PRE_SCRIPT`.
#### Exclude parameters using a JSON document
@@ -1833,7 +1833,7 @@ The `dast-api-exclude-parameters.json` is a JSON document that follows the struc
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/357195) in GitLab 14.10.
-As an alternative to excluding by paths, you can filter by any other component in the URL by using the `DAST_API_EXCLUDE_URLS` CI/CD variable. This variable can be set in your `.gitlab-ci.yml` file. The variable can store multiple values, separated by commas (`,`). Each value is a regular expression. Because each entry is a regular expression, an entry like `.*` will exclude all URLs because it is a regular expression that matches everything.
+As an alternative to excluding by paths, you can filter by any other component in the URL by using the `DAST_API_EXCLUDE_URLS` CI/CD variable. This variable can be set in your `.gitlab-ci.yml` file. The variable can store multiple values, separated by commas (`,`). Each value is a regular expression. Because each entry is a regular expression, an entry like `.*` excludes all URLs because it is a regular expression that matches everything.
In your job output you can check if any URLs matched any provided regular expression from `DAST_API_EXCLUDE_URLS`. Matching operations are listed in the **Excluded Operations** section. Operations listed in the **Excluded Operations** should not be listed in the **Tested Operations** section. For example the following portion of a job output:
@@ -1888,7 +1888,7 @@ variables:
##### Excluding two URLs and their child resources
-In order to exclude the URLs: `http://target/api/buy` and `http://target/api/sell`, and their child resources. To provide multiple URLs we use the `,` character as follows:
+To exclude the URLs `http://target/api/buy` and `http://target/api/sell`, and their child resources, provide multiple URLs separated by the `,` character as follows:
```yaml
stages:
@@ -1905,7 +1905,7 @@ variables:
##### Excluding URL using regular expressions
-In order to exclude exactly `https://target/api/v1/user/create` and `https://target/api/v2/user/create` or any other version (`v3`,`v4`, and more). We could use `https://target/api/v.*/user/create$`, in the previous regular expression `.` indicates any character and `*` indicates zero or more times, additionally `$` indicates that the URL should end there.
+To exclude exactly `https://target/api/v1/user/create` and `https://target/api/v2/user/create` or any other version (`v3`, `v4`, and more), we could use `https://target/api/v.*/user/create$`. In this regular expression, `.` indicates any character, `*` indicates zero or more times, and `$` indicates that the URL should end there.
```yaml
stages:
@@ -1922,7 +1922,7 @@ variables:
## Running your first scan
-When configured correctly, a CI/CD pipeline contains a `dast` stage and an `dast_api` job. The job only fails when an invalid configuration is provided. During normal operation, the job always succeeds even if vulnerabilities are identified during testing.
+When configured correctly, a CI/CD pipeline contains a `dast` stage and a `dast_api` job. The job only fails when an invalid configuration is provided. During typical operation, the job always succeeds even if vulnerabilities are identified during testing.
Vulnerabilities are displayed on the **Security** pipeline tab with the suite name. When testing against the repository's default branch, the DAST API vulnerabilities are also shown on the Security & Compliance's Vulnerability Report page.
@@ -1957,7 +1957,7 @@ Follow these steps to view details of a vulnerability:
| Method | HTTP method used to detect the vulnerability. |
| URL | URL at which the vulnerability was detected. |
| Request | The HTTP request that caused the vulnerability. |
- | Unmodified Response | Response from an unmodified request. This is what a normal working response looks like. |
+ | Unmodified Response | Response from an unmodified request. This is what a typical working response looks like.|
| Actual Response | Response received from test request. |
| Evidence | How we determined a vulnerability occurred. |
| Identifiers | The DAST API check used to find this vulnerability. |
@@ -2274,7 +2274,7 @@ dast_api_v2:
In the case of one or two slow operations, the team might decide to skip testing the operations, or exclude them from feature branch tests, but include them for default branch tests. Excluding the operation is done using the `DAST_API_EXCLUDE_PATHS` configuration [variable as explained in this section.](#exclude-paths)
-In this example, we have an operation that returns a large amount of data. The operation is `GET http://target:7777/api/large_response_json`. To exclude it we provide the `DAST_API_EXCLUDE_PATHS` configuration variable with the path portion of our operation URL `/api/large_response_json`. Our configuration disables the main `dast_api` job and creates two new jobs `dast_api_main` and `dast_api_branch`. The `dast_api_branch` is set up to exclude the long operation and only run on non-default branches (e.g. feature branches). The `dast_api_main` branch is set up to only execute on the default branch (`main` in this example). The `dast_api_branch` jobs run faster, allowing for quick development cycles, while the `dast_api_main` job which only runs on default branch builds, takes longer to run.
+In this example, we have an operation that returns a large amount of data. The operation is `GET http://target:7777/api/large_response_json`. To exclude it we provide the `DAST_API_EXCLUDE_PATHS` configuration variable with the path portion of our operation URL `/api/large_response_json`. Our configuration disables the main `dast_api` job and creates two new jobs `dast_api_main` and `dast_api_branch`. The `dast_api_branch` is set up to exclude the long operation and only run on non-default branches (for example, feature branches). The `dast_api_main` branch is set up to only execute on the default branch (`main` in this example). The `dast_api_branch` jobs run faster, allowing for quick development cycles, while the `dast_api_main` job which only runs on default branch builds, takes longer to run.
To verify the operation is excluded, run the DAST API job and review the job console output. It includes a list of included and excluded operations at the end of the test.
@@ -2368,7 +2368,7 @@ The DAST API engine outputs an error message when it cannot establish a connecti
**Solution**
-- Remove the `DAST_API_API` variable from the `.gitlab-ci.yml` file. The value will be inherited from the DAST API CI/CD template. We recommend this method instead of manually setting a value.
+- Remove the `DAST_API_API` variable from the `.gitlab-ci.yml` file. The value inherits from the DAST API CI/CD template. We recommend this method instead of manually setting a value.
- If removing the variable is not possible, check to see if this value has changed in the latest version of the [DAST API CI/CD template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml). If so, update the value in the `.gitlab-ci.yml` file.
### `Failed to start session with scanner. Please retry, and if the problem persists reach out to support.`
@@ -2412,9 +2412,9 @@ Once you have confirmed the issue was produced because the port was already take
The DAST API engine outputs an error message when it cannot determine the target API after inspecting the OpenAPI document. This error message is shown when the target API has not been set in the `.gitlab-ci.yml` file, it is not available in the `environment_url.txt` file, and it could not be computed using the OpenAPI document.
-There is a order of precedence in which the DAST API engine tries to get the target API when checking the different sources. First, it will try to use the `DAST_API_TARGET_URL`. If the environment variable has not been set, then the DAST API engine will attempt to use the `environment_url.txt` file. If there is no file `environment_url.txt`, then the DAST API engine will use the OpenAPI document contents and the URL provided in `DAST_API_OPENAPI` (if a URL is provided) to try to compute the target API.
+There is an order of precedence in which the DAST API engine tries to get the target API when checking the different sources. First, it tries to use the `DAST_API_TARGET_URL`. If the environment variable has not been set, then the DAST API engine attempts to use the `environment_url.txt` file. If there is no file `environment_url.txt`, then the DAST API engine uses the OpenAPI document contents and the URL provided in `DAST_API_OPENAPI` (if a URL is provided) to try to compute the target API.
-The best-suited solution will depend on whether or not your target API changes for each deployment. In static environments, the target API is the same for each deployment, in this case refer to the [static environment solution](#static-environment-solution). If the target API changes for each deployment a [dynamic environment solution](#dynamic-environment-solutions) should be applied.
+The best-suited solution depends on whether or not your target API changes for each deployment. In static environments, the target API is the same for each deployment, in this case refer to the [static environment solution](#static-environment-solution). If the target API changes for each deployment a [dynamic environment solution](#dynamic-environment-solutions) should be applied.
#### Static environment solution
@@ -2498,7 +2498,7 @@ variables:
### `No operation in the OpenAPI document is consuming any supported media type`
-DAST API uses the specified media types in the OpenAPI document to generate requests. If no request can be created due to the lack of supported media types, then an error will be thrown.
+DAST API uses the specified media types in the OpenAPI document to generate requests. If no request can be created due to the lack of supported media types, then an error is thrown.
**Error message**
diff --git a/doc/user/group/saml_sso/troubleshooting.md b/doc/user/group/saml_sso/troubleshooting.md
index 0151ae52bb6..eadf385feb3 100644
--- a/doc/user/group/saml_sso/troubleshooting.md
+++ b/doc/user/group/saml_sso/troubleshooting.md
@@ -207,7 +207,7 @@ Alternatively, the SAML response may be missing the `InResponseTo` attribute in
The identity provider administrator should ensure that the login is
initiated by the service provider and not only the identity provider.
-### Message: "Sign in to GitLab to connect your organization's account" **(PREMIUM SAAS)**
+### Message: "There is already a GitLab account associated with this email address. Sign in with your existing credentials to connect your organization's account" **(PREMIUM SAAS)**
A user can see this message when they are trying to [manually link SAML to their existing GitLab.com account](index.md#linking-saml-to-your-existing-gitlabcom-account).
diff --git a/doc/user/packages/maven_repository/index.md b/doc/user/packages/maven_repository/index.md
index 2d1efd024a0..1899cdc213f 100644
--- a/doc/user/packages/maven_repository/index.md
+++ b/doc/user/packages/maven_repository/index.md
@@ -232,7 +232,7 @@ to [Maven Central](https://search.maven.org/).
When the feature flag is enabled, administrators can disable this behavior in the
[Continuous Integration settings](../../admin_area/settings/continuous_integration.md).
-There are many ways to configure your Maven project so that it will request packages
+There are many ways to configure your Maven project so that it requests packages
in Maven Central from GitLab. Maven repositories are queried in a
[specific order](https://maven.apache.org/guides/mini/guide-multiple-repositories.html#repository-order).
By default, maven-central is usually checked first through the
diff --git a/doc/user/project/deploy_keys/index.md b/doc/user/project/deploy_keys/index.md
index e87c5f57fc1..fc88535dc77 100644
--- a/doc/user/project/deploy_keys/index.md
+++ b/doc/user/project/deploy_keys/index.md
@@ -99,7 +99,7 @@ To create a public deploy key:
1. Select **New deploy key**.
1. Complete the fields.
- Use a meaningful description for **Name**. For example, include the name of the external host
- or application that will use the public deploy key.
+ or application that uses the public deploy key.
You can modify only a public deploy key's name.
@@ -148,7 +148,7 @@ What happens to the deploy key when it is disabled depends on the following:
### Deploy key cannot push to a protected branch
-There are a few scenarios where a deploy key will fail to push to a
+There are a few scenarios where a deploy key fails to push to a
[protected branch](../protected_branches.md).
- The owner associated to a deploy key does not have access to the protected branch.
diff --git a/doc/user/project/import/index.md b/doc/user/project/import/index.md
index b5f70fe9d0d..9ea78631ee1 100644
--- a/doc/user/project/import/index.md
+++ b/doc/user/project/import/index.md
@@ -117,7 +117,7 @@ to create a new project from a template.
## LFS authentication
-When importing a project that contains LFS objects, if the project has an [`.lfsconfig`](https://github.com/git-lfs/git-lfs/blob/master/docs/man/git-lfs-config.5.ronn)
+When importing a project that contains LFS objects, if the project has an [`.lfsconfig`](https://github.com/git-lfs/git-lfs/blob/main/docs/man/git-lfs-config.adoc)
file with a URL host (`lfs.url`) different from the repository URL host, LFS files are not downloaded.
## Project aliases **(PREMIUM SELF)**
diff --git a/doc/user/project/integrations/slack.md b/doc/user/project/integrations/slack.md
index 5c1006b9044..d35c682b5d3 100644
--- a/doc/user/project/integrations/slack.md
+++ b/doc/user/project/integrations/slack.md
@@ -58,13 +58,13 @@ The following triggers are available for Slack notifications:
| Trigger name | Trigger event |
|--------------------------------------------------------------------------|------------------------------------------------------|
| **Push** | A push to the repository. |
-| **Issue** | An issue is created, updated, or closed. |
-| **Incident** | An incident is created, updated, or closed. |
-| **Confidential issue** | A confidential issue is created, updated, or closed. |
-| **Merge request** | A merge request is created, updated, or merged. |
+| **Issue** | An issue is created or closed. |
+| **Incident** | An incident is created or closed. |
+| **Confidential issue** | A confidential issue is created or closed. |
+| **Merge request** | A merge request is created, merged, or closed. |
| **Note** | A comment is added. |
| **Confidential note** | A confidential note is added. |
-| **Tag push** | A new tag is pushed to the repository. |
+| **Tag push** | A new tag is pushed to the repository or removed. |
| **Pipeline** | A pipeline status changed. |
| **Wiki page** | A wiki page is created or updated. |
| **Deployment** | A deployment starts or finishes. |
diff --git a/doc/user/project/merge_requests/status_checks.md b/doc/user/project/merge_requests/status_checks.md
index fc3240f3889..dadb1b392d9 100644
--- a/doc/user/project/merge_requests/status_checks.md
+++ b/doc/user/project/merge_requests/status_checks.md
@@ -22,8 +22,6 @@ respond with an associated status. This status is then displayed as a non-blocki
widget within the merge request to surface this status to the merge request author or reviewers
at the merge request level itself.
-The lack of a status check response does not block the merging of a merge request.
-
You can configure merge request status checks for each individual project. These are not shared between projects.
To learn more about use cases, feature discovery, and development timelines,
@@ -33,14 +31,18 @@ see the [external status checks epic](https://gitlab.com/groups/gitlab-org/-/epi
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/369859) in GitLab 15.5 [with a flag](../../../administration/feature_flags.md) named `only_allow_merge_if_all_status_checks_passed`. Disabled by default.
> - [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/372340) in GitLab 15.8.
+> - Enabled on self-managed in GitLab 15.9.
FLAG:
-On self-managed GitLab, by default this feature is not available. To make it available per project or for your entire instance, ask an administrator to
-[enable the feature flag](../../../administration/feature_flags.md) named `only_allow_merge_if_all_status_checks_passed`. On GitLab.com, this feature is available but can be configured by GitLab.com administrators only.
+On self-managed GitLab, this feature is available by default. To disable it per project or for your entire instance, ask an administrator to
+[disable the feature flag](../../../administration/feature_flags.md) named `only_allow_merge_if_all_status_checks_passed`. On GitLab.com, this feature is available but can be configured by GitLab.com administrators only.
+
+By default, merge requests in projects can be merged even if external status checks fail. To block the merging of merge requests when external checks fail:
-By default, merge requests in projects can be merged even if external status checks fail. To block the merging of merge requests when external checks fail, enable this feature
-using the [project API](../../../api/projects.md#edit-project). You must also [enable the feature flag](../../../administration/feature_flags.md) named
-`only_allow_merge_if_all_status_checks_passed` on self-managed GitLab.
+1. On the top bar, select **Main menu > Projects** and find your project.
+1. On the left sidebar, select **Settings > Merge requests**.
+1. Select the **Status checks must succeed** checkbox.
+1. Select **Save changes**.
## Lifecycle
@@ -63,7 +65,7 @@ Merge requests return a `409 Conflict` error to any responses that do not refer
External status checks have the following states:
-- `pending` - The default state. No response can been received by the merge request from the external service.
+- `pending` - The default state. No response has been received by the merge request from the external service.
- `passed` - A response from the external service has been received and approved by it.
- `failed` - A response from the external service has been received and denied by it.
diff --git a/doc/user/project/settings/index.md b/doc/user/project/settings/index.md
index 3798643549d..39c14b333c2 100644
--- a/doc/user/project/settings/index.md
+++ b/doc/user/project/settings/index.md
@@ -241,6 +241,7 @@ Prerequisites:
- The group must allow creation of new projects.
- The project must not contain any [container images](../../packages/container_registry/index.md#move-or-rename-container-registry-repositories).
- Remove any npm packages. If you transfer a project to a different root namespace, the project must not contain any npm packages. When you update the path of a user or group, or transfer a subgroup or project, you must remove any npm packages first. You cannot update the root namespace of a project with npm packages. Make sure you update your .npmrc files to follow the naming convention and run npm publish if necessary.
+- If a security policy is assigned to the project, it is automatically unassigned during the transfer.
To transfer a project:
diff --git a/lib/gitlab.rb b/lib/gitlab.rb
index 2eff78efbec..1190c92ce17 100644
--- a/lib/gitlab.rb
+++ b/lib/gitlab.rb
@@ -4,11 +4,8 @@ require 'pathname'
require 'forwardable'
require_relative 'gitlab_edition'
-require_relative 'gitlab/utils'
module Gitlab
- GITLAB_SIMULATE_SAAS = Gitlab::Utils.to_boolean(ENV['GITLAB_SIMULATE_SAAS'], default: false)
-
class << self
extend Forwardable
@@ -52,18 +49,10 @@ module Gitlab
INSTALLATION_TYPE = File.read(root.join("INSTALLATION_TYPE")).strip.freeze
HTTP_PROXY_ENV_VARS = %w(http_proxy https_proxy HTTP_PROXY HTTPS_PROXY).freeze
- # We allow GitLab instances to "pretend" they are SaaS to test SaaS-specific code
- # paths, but only when in development mode or when running on production instances
- # with a license issued to a GitLab team member.
def self.simulate_com?
- return false unless GITLAB_SIMULATE_SAAS
- return false if Rails.env.test?
-
- Rails.env.development? || licensed_to_gitlab_team_member?
- end
+ return false unless Rails.env.development?
- def self.licensed_to_gitlab_team_member?
- ee? && ::License.current&.issued_to_gitlab_team_member?
+ Gitlab::Utils.to_boolean(ENV['GITLAB_SIMULATE_SAAS'])
end
def self.com?
diff --git a/lib/gitlab/ci/config/external/file/project.rb b/lib/gitlab/ci/config/external/file/project.rb
index ba1a56754eb..a3d464232bd 100644
--- a/lib/gitlab/ci/config/external/file/project.rb
+++ b/lib/gitlab/ci/config/external/file/project.rb
@@ -12,7 +12,13 @@ module Gitlab
attr_reader :project_name, :ref_name
def initialize(params, context)
- @location = params[:file]
+ @location = if ::Feature.enabled?(:ci_batch_request_for_local_and_project_includes, context.project)
+ # `Repository#blobs_at` does not support files with the `/` prefix.
+ Gitlab::Utils.remove_leading_slashes(params[:file])
+ else
+ params[:file]
+ end
+
@project_name = get_project_name(params[:project])
@ref_name = params[:ref] || 'HEAD'
@@ -70,6 +76,26 @@ module Gitlab
end
def fetch_local_content
+ if ::Feature.disabled?(:ci_batch_request_for_local_and_project_includes, context.project)
+ return legacy_fetch_local_content
+ end
+
+ return unless can_access_local_content?
+ return unless sha
+
+ BatchLoader.for([sha, location])
+ .batch(key: project) do |locations, loader, args|
+ context.logger.instrument(:config_file_fetch_project_content) do
+ args[:key].repository.blobs_at(locations).each do |blob|
+ loader.call([blob.commit_id, blob.path], blob.data)
+ end
+ end
+ rescue GRPC::NotFound, GRPC::Internal
+ # no-op
+ end
+ end
+
+ def legacy_fetch_local_content
return unless can_access_local_content?
return unless sha
diff --git a/lib/gitlab/ci/parsers/instrumentation.rb b/lib/gitlab/ci/parsers/instrumentation.rb
index ab4a923d9aa..5e97b22ecdf 100644
--- a/lib/gitlab/ci/parsers/instrumentation.rb
+++ b/lib/gitlab/ci/parsers/instrumentation.rb
@@ -6,7 +6,7 @@ module Gitlab
module Instrumentation
BUCKETS = [0.25, 1, 5, 10].freeze
- def parse!(*args)
+ def parse!(...)
parser_result = nil
duration = Benchmark.realtime do
diff --git a/lib/gitlab/ci/pipeline/chain/create_deployments.rb b/lib/gitlab/ci/pipeline/chain/create_deployments.rb
deleted file mode 100644
index 99e438ddbae..00000000000
--- a/lib/gitlab/ci/pipeline/chain/create_deployments.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Ci
- module Pipeline
- module Chain
- class CreateDeployments < Chain::Base
- def perform!
- create_deployments! if Feature.disabled?(:move_create_deployments_to_worker, pipeline.project)
- end
-
- def break?
- false
- end
-
- private
-
- def create_deployments!
- pipeline.stages.map(&:statuses).flatten.map(&method(:create_deployment))
- end
-
- def create_deployment(build)
- ::Deployments::CreateForBuildService.new.execute(build)
- end
- end
- end
- end
- end
-end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 41b6a377cc3..61a1c029e80 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -27793,9 +27793,6 @@ msgstr ""
msgid "New issue"
msgstr ""
-msgid "New issue in %{project}"
-msgstr ""
-
msgid "New issue title"
msgstr ""
@@ -36937,10 +36934,10 @@ msgstr ""
msgid "SAML|Sign in to %{groupName}"
msgstr ""
-msgid "SAML|Sign in to GitLab to connect your organization's account"
+msgid "SAML|The %{strongOpen}%{group_path}%{strongClose} group allows you to sign in using single sign-on."
msgstr ""
-msgid "SAML|The %{strongOpen}%{group_path}%{strongClose} group allows you to sign in using single sign-on."
+msgid "SAML|There is already a GitLab account associated with this email address. Sign in with your existing credentials to connect your organization's account"
msgstr ""
msgid "SAML|To access %{groupName}, you must sign in using single sign-on through an external sign-in page."
@@ -38657,9 +38654,6 @@ msgstr ""
msgid "Select project to create %{type}"
msgstr ""
-msgid "Select project to create issue"
-msgstr ""
-
msgid "Select projects"
msgstr ""
@@ -40551,6 +40545,9 @@ msgstr ""
msgid "StatusCheck|Apply this status check to all branches or a specific protected branch."
msgstr ""
+msgid "StatusCheck|Check for a status response in merge requests. %{link_start}Learn more%{link_end}."
+msgstr ""
+
msgid "StatusCheck|Check for a status response in merge requests. Failures do not block merges. %{link_start}Learn more%{link_end}."
msgstr ""
@@ -50400,6 +50397,9 @@ msgstr ""
msgid "metric_id must be unique across a project"
msgstr ""
+msgid "milestone"
+msgstr ""
+
msgid "milestone should belong either to a project or a group."
msgstr ""
diff --git a/spec/frontend/invite_members/components/invite_modal_base_spec.js b/spec/frontend/invite_members/components/invite_modal_base_spec.js
index e1b90332f6c..4c7022c8684 100644
--- a/spec/frontend/invite_members/components/invite_modal_base_spec.js
+++ b/spec/frontend/invite_members/components/invite_modal_base_spec.js
@@ -18,6 +18,7 @@ import {
INVITE_BUTTON_TEXT_DISABLED,
INVITE_BUTTON_TEXT,
ON_SHOW_TRACK_LABEL,
+ ON_CELEBRATION_TRACK_LABEL,
} from '~/invite_members/constants';
import { propsData, membersPath, purchasePath } from '../mock_data/modal_base';
@@ -237,16 +238,16 @@ describe('InviteModalBase', () => {
const mockEvent = { preventDefault: jest.fn() };
modal.vm.$emit('shown');
- expectTracking('render');
+ expectTracking('render', ON_CELEBRATION_TRACK_LABEL);
modal.vm.$emit('primary', mockEvent);
- expectTracking('click_invite');
+ expectTracking('click_invite', ON_CELEBRATION_TRACK_LABEL);
modal.vm.$emit('cancel', mockEvent);
- expectTracking('click_cancel');
+ expectTracking('click_cancel', ON_CELEBRATION_TRACK_LABEL);
modal.vm.$emit('close');
- expectTracking('click_x');
+ expectTracking('click_x', ON_CELEBRATION_TRACK_LABEL);
unmockTracking();
});
diff --git a/spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js b/spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js
index f2d12511707..0a2e4e7c671 100644
--- a/spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js
+++ b/spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js
@@ -2,7 +2,7 @@ import { GlEmptyState, GlLink } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue';
import EmptyStateWithoutAnyIssues from '~/issues/list/components/empty_state_without_any_issues.vue';
-import NewIssueDropdown from '~/vue_shared/components/new_issue_dropdown/new_issue_dropdown.vue';
+import NewResourceDropdown from '~/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue';
import { i18n } from '~/issues/list/constants';
describe('EmptyStateWithoutAnyIssues component', () => {
@@ -32,7 +32,7 @@ describe('EmptyStateWithoutAnyIssues component', () => {
wrapper.findByRole('link', { name: i18n.noIssuesDescription });
const findJiraDocsLink = () =>
wrapper.findByRole('link', { name: 'Enable the Jira integration' });
- const findNewIssueDropdown = () => wrapper.findComponent(NewIssueDropdown);
+ const findNewResourceDropdown = () => wrapper.findComponent(NewResourceDropdown);
const findNewIssueLink = () => wrapper.findByRole('link', { name: i18n.newIssueLabel });
const findNewProjectLink = () => wrapper.findByRole('link', { name: i18n.newProjectLabel });
@@ -47,7 +47,7 @@ describe('EmptyStateWithoutAnyIssues component', () => {
...provide,
},
stubs: {
- NewIssueDropdown: true,
+ NewResourceDropdown: true,
},
});
};
@@ -156,7 +156,7 @@ describe('EmptyStateWithoutAnyIssues component', () => {
it('renders', () => {
mountComponent({ props: { showNewIssueDropdown: true } });
- expect(findNewIssueDropdown().exists()).toBe(true);
+ expect(findNewResourceDropdown().exists()).toBe(true);
});
});
@@ -164,7 +164,7 @@ describe('EmptyStateWithoutAnyIssues component', () => {
it('does not render', () => {
mountComponent({ props: { showNewIssueDropdown: false } });
- expect(findNewIssueDropdown().exists()).toBe(false);
+ expect(findNewResourceDropdown().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index c1e2a460c08..dc01f37df27 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -30,7 +30,7 @@ import { IssuableListTabs, IssuableStates } from '~/vue_shared/issuable/list/con
import EmptyStateWithAnyIssues from '~/issues/list/components/empty_state_with_any_issues.vue';
import EmptyStateWithoutAnyIssues from '~/issues/list/components/empty_state_without_any_issues.vue';
import IssuesListApp from '~/issues/list/components/issues_list_app.vue';
-import NewIssueDropdown from '~/vue_shared/components/new_issue_dropdown/new_issue_dropdown.vue';
+import NewResourceDropdown from '~/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue';
import {
CREATED_DESC,
RELATIVE_POSITION,
@@ -130,7 +130,7 @@ describe('CE IssuesListApp component', () => {
const findGlButtons = () => wrapper.findAllComponents(GlButton);
const findGlButtonAt = (index) => findGlButtons().at(index);
const findIssuableList = () => wrapper.findComponent(IssuableList);
- const findNewIssueDropdown = () => wrapper.findComponent(NewIssueDropdown);
+ const findNewResourceDropdown = () => wrapper.findComponent(NewResourceDropdown);
const findLabelsToken = () =>
findIssuableList()
@@ -320,13 +320,13 @@ describe('CE IssuesListApp component', () => {
it('does not render in a project context', () => {
wrapper = mountComponent({ provide: { isProject: true }, mountFn: mount });
- expect(findNewIssueDropdown().exists()).toBe(false);
+ expect(findNewResourceDropdown().exists()).toBe(false);
});
it('renders in a group context', () => {
wrapper = mountComponent({ provide: { isProject: false }, mountFn: mount });
- expect(findNewIssueDropdown().exists()).toBe(true);
+ expect(findNewResourceDropdown().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/components/new_issue_dropdown/mock_data.js b/spec/frontend/vue_shared/components/new_resource_dropdown/mock_data.js
index 792506cda7a..19b1453e8ac 100644
--- a/spec/frontend/vue_shared/components/new_issue_dropdown/mock_data.js
+++ b/spec/frontend/vue_shared/components/new_resource_dropdown/mock_data.js
@@ -17,7 +17,6 @@ export const emptySearchProjectsWithinGroupQueryResponse = {
export const project1 = {
id: 'gid://gitlab/Group/26',
- issuesEnabled: true,
name: 'Super Mario Project',
nameWithNamespace: 'Mushroom Kingdom / Super Mario Project',
webUrl: 'https://127.0.0.1:3000/mushroom-kingdom/super-mario-project',
@@ -25,7 +24,6 @@ export const project1 = {
export const project2 = {
id: 'gid://gitlab/Group/59',
- issuesEnabled: false,
name: 'Mario Kart Project',
nameWithNamespace: 'Mushroom Kingdom / Mario Kart Project',
webUrl: 'https://127.0.0.1:3000/mushroom-kingdom/mario-kart-project',
@@ -33,7 +31,6 @@ export const project2 = {
export const project3 = {
id: 'gid://gitlab/Group/103',
- issuesEnabled: true,
name: 'Mario Party Project',
nameWithNamespace: 'Mushroom Kingdom / Mario Party Project',
webUrl: 'https://127.0.0.1:3000/mushroom-kingdom/mario-party-project',
diff --git a/spec/frontend/vue_shared/components/new_issue_dropdown/new_issue_dropdown_spec.js b/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js
index be9529202a7..31320b1d2a6 100644
--- a/spec/frontend/vue_shared/components/new_issue_dropdown/new_issue_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/new_resource_dropdown/new_resource_dropdown_spec.js
@@ -4,8 +4,9 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import NewIssueDropdown from '~/vue_shared/components/new_issue_dropdown/new_issue_dropdown.vue';
-import searchUserProjectsQuery from '~/vue_shared/components/new_issue_dropdown/graphql/search_user_projects.query.graphql';
+import NewResourceDropdown from '~/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue';
+import searchUserProjectsWithIssuesEnabledQuery from '~/vue_shared/components/new_resource_dropdown/graphql/search_user_projects_with_issues_enabled.query.graphql';
+import { RESOURCE_TYPES } from '~/vue_shared/components/new_resource_dropdown/constants';
import searchProjectsWithinGroupQuery from '~/issues/list/queries/search_projects.query.graphql';
import { DASH_SCOPE, joinPaths } from '~/lib/utils/url_utility';
import { DEBOUNCE_DELAY } from '~/vue_shared/components/filtered_search_bar/constants';
@@ -14,6 +15,7 @@ import {
emptySearchProjectsQueryResponse,
emptySearchProjectsWithinGroupQueryResponse,
project1,
+ project2,
project3,
searchProjectsQueryResponse,
searchProjectsWithinGroupQueryResponse,
@@ -21,7 +23,7 @@ import {
jest.mock('~/flash');
-describe('NewIssueDropdown component', () => {
+describe('NewResourceDropdown component', () => {
useLocalStorageSpy();
let wrapper;
@@ -37,7 +39,7 @@ describe('NewIssueDropdown component', () => {
const mountComponent = ({
search = '',
- query = searchUserProjectsQuery,
+ query = searchUserProjectsWithIssuesEnabledQuery,
queryResponse = searchProjectsQueryResponse,
mountFn = shallowMount,
propsData = {},
@@ -45,7 +47,7 @@ describe('NewIssueDropdown component', () => {
const requestHandlers = [[query, jest.fn().mockResolvedValue(queryResponse)]];
const apolloProvider = createMockApollo(requestHandlers);
- return mountFn(NewIssueDropdown, {
+ return mountFn(NewResourceDropdown, {
apolloProvider,
propsData,
data() {
@@ -76,7 +78,9 @@ describe('NewIssueDropdown component', () => {
it('renders a label for the dropdown toggle button', () => {
wrapper = mountComponent();
- expect(findDropdown().attributes('toggle-text')).toBe(NewIssueDropdown.i18n.toggleButtonLabel);
+ expect(findDropdown().attributes('toggle-text')).toBe(
+ NewResourceDropdown.i18n.toggleButtonLabel,
+ );
});
it('focuses on input when dropdown is shown', async () => {
@@ -90,18 +94,19 @@ describe('NewIssueDropdown component', () => {
});
describe.each`
- description | propsData | query | queryResponse | emptyResponse
- ${'by default'} | ${undefined} | ${searchUserProjectsQuery} | ${searchProjectsQueryResponse} | ${emptySearchProjectsQueryResponse}
- ${'within a group'} | ${withinGroupProps} | ${searchProjectsWithinGroupQuery} | ${searchProjectsWithinGroupQueryResponse} | ${emptySearchProjectsWithinGroupQueryResponse}
+ description | propsData | query | queryResponse | emptyResponse
+ ${'by default'} | ${undefined} | ${searchUserProjectsWithIssuesEnabledQuery} | ${searchProjectsQueryResponse} | ${emptySearchProjectsQueryResponse}
+ ${'within a group'} | ${withinGroupProps} | ${searchProjectsWithinGroupQuery} | ${searchProjectsWithinGroupQueryResponse} | ${emptySearchProjectsWithinGroupQueryResponse}
`('$description', ({ propsData, query, queryResponse, emptyResponse }) => {
- it('renders projects with issues enabled', async () => {
+ it('renders projects options', async () => {
wrapper = mountComponent({ mountFn: mount, query, queryResponse, propsData });
await showDropdown();
const listItems = wrapper.findAll('li');
expect(listItems.at(0).text()).toBe(project1.nameWithNamespace);
- expect(listItems.at(1).text()).toBe(project3.nameWithNamespace);
+ expect(listItems.at(1).text()).toBe(project2.nameWithNamespace);
+ expect(listItems.at(2).text()).toBe(project3.nameWithNamespace);
});
it('renders `No matches found` when there are no matches', async () => {
@@ -115,41 +120,60 @@ describe('NewIssueDropdown component', () => {
await showDropdown();
- expect(wrapper.find('li').text()).toBe(NewIssueDropdown.i18n.noMatchesFound);
+ expect(wrapper.find('li').text()).toBe(NewResourceDropdown.i18n.noMatchesFound);
});
- describe('when no project is selected', () => {
- beforeEach(() => {
- wrapper = mountComponent({ query, queryResponse, propsData });
- });
-
- it('dropdown button is not a link', () => {
- expect(findDropdown().attributes('split-href')).toBeUndefined();
- });
-
- it('displays default text on the dropdown button', () => {
- expect(findDropdown().props('text')).toBe(NewIssueDropdown.i18n.defaultDropdownText);
- });
- });
-
- describe('when a project is selected', () => {
- beforeEach(async () => {
- wrapper = mountComponent({ mountFn: mount, query, queryResponse, propsData });
- await showDropdown();
-
- wrapper.findComponent(GlDropdownItem).vm.$emit('click', project1);
- });
-
- it('dropdown button is a link', () => {
- const href = joinPaths(project1.webUrl, DASH_SCOPE, 'issues/new');
-
- expect(findDropdown().attributes('split-href')).toBe(href);
- });
-
- it('displays project name on the dropdown button', () => {
- expect(findDropdown().props('text')).toBe(`New issue in ${project1.name}`);
- });
- });
+ describe.each`
+ resourceType | expectedDefaultLabel | expectedPath | expectedLabel
+ ${'issue'} | ${'Select project to create issue'} | ${'issues/new'} | ${'New issue in'}
+ ${'merge-request'} | ${'Select project to create merge request'} | ${'merge_requests/new'} | ${'New merge request in'}
+ ${'milestone'} | ${'Select project to create milestone'} | ${'milestones/new'} | ${'New milestone in'}
+ `(
+ 'with resource type $resourceType',
+ ({ resourceType, expectedDefaultLabel, expectedPath, expectedLabel }) => {
+ describe('when no project is selected', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ query,
+ queryResponse,
+ propsData: { ...propsData, resourceType },
+ });
+ });
+
+ it('dropdown button is not a link', () => {
+ expect(findDropdown().attributes('split-href')).toBeUndefined();
+ });
+
+ it('displays default text on the dropdown button', () => {
+ expect(findDropdown().props('text')).toBe(expectedDefaultLabel);
+ });
+ });
+
+ describe('when a project is selected', () => {
+ beforeEach(async () => {
+ wrapper = mountComponent({
+ mountFn: mount,
+ query,
+ queryResponse,
+ propsData: { ...propsData, resourceType },
+ });
+ await showDropdown();
+
+ wrapper.findComponent(GlDropdownItem).vm.$emit('click', project1);
+ });
+
+ it('dropdown button is a link', () => {
+ const href = joinPaths(project1.webUrl, DASH_SCOPE, expectedPath);
+
+ expect(findDropdown().attributes('split-href')).toBe(href);
+ });
+
+ it('displays project name on the dropdown button', () => {
+ expect(findDropdown().props('text')).toBe(`${expectedLabel} ${project1.name}`);
+ });
+ });
+ },
+ );
});
describe('without localStorage', () => {
@@ -201,5 +225,38 @@ describe('NewIssueDropdown component', () => {
);
expect(dropdown.props('text')).toBe(`New issue in ${project1.name}`);
});
+
+ describe.each(RESOURCE_TYPES)('with resource type %s', (resourceType) => {
+ it('computes the local storage key without a group', async () => {
+ wrapper = mountComponent({
+ mountFn: mount,
+ propsData: { resourceType, withLocalStorage: true },
+ });
+ await showDropdown();
+ wrapper.findComponent(GlDropdownItem).vm.$emit('click', project1);
+ await nextTick();
+
+ expect(localStorage.setItem).toHaveBeenLastCalledWith(
+ `group--new-${resourceType}-recent-project`,
+ expect.any(String),
+ );
+ });
+
+ it('computes the local storage key with a group', async () => {
+ const groupId = '22';
+ wrapper = mountComponent({
+ mountFn: mount,
+ propsData: { groupId, resourceType, withLocalStorage: true },
+ });
+ await showDropdown();
+ wrapper.findComponent(GlDropdownItem).vm.$emit('click', project1);
+ await nextTick();
+
+ expect(localStorage.setItem).toHaveBeenLastCalledWith(
+ `group-${groupId}-new-${resourceType}-recent-project`,
+ expect.any(String),
+ );
+ });
+ });
});
});
diff --git a/spec/graphql/types/permission_types/work_item_spec.rb b/spec/graphql/types/permission_types/work_item_spec.rb
index e604ce5d6e0..db6d78b1538 100644
--- a/spec/graphql/types/permission_types/work_item_spec.rb
+++ b/spec/graphql/types/permission_types/work_item_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Types::PermissionTypes::WorkItem do
it do
expected_permissions = [
- :read_work_item, :update_work_item, :delete_work_item
+ :read_work_item, :update_work_item, :delete_work_item, :admin_work_item
]
expected_permissions.each do |permission|
diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
index 0ba92d1e92d..400ca10e645 100644
--- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::External::File::Project do
+RSpec.describe Gitlab::Ci::Config::External::File::Project, feature_category: :pipeline_authoring do
+ include RepoHelpers
+
let_it_be(:context_project) { create(:project) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -12,11 +14,12 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:project_file) { described_class.new(params, context) }
let(:variables) { project.predefined_variables.to_runner_variables }
+ let(:project_sha) { project.commit.sha }
let(:context_params) do
{
project: context_project,
- sha: '12345',
+ sha: project_sha,
user: context_user,
parent_pipeline: parent_pipeline,
variables: variables
@@ -76,10 +79,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
{ project: project.full_path, file: '/file.yml' }
end
- let(:root_ref_sha) { project.repository.root_ref_sha }
-
- before do
- stub_project_blob(root_ref_sha, '/file.yml') { 'image: image:1.0' }
+ around(:all) do |example|
+ create_and_delete_files(project, { '/file.yml' => 'image: image:1.0' }) do
+ example.run
+ end
end
it { is_expected.to be_truthy }
@@ -99,10 +102,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
{ project: project.full_path, ref: 'master', file: '/file.yml' }
end
- let(:ref_sha) { project.commit('master').sha }
-
- before do
- stub_project_blob(ref_sha, '/file.yml') { 'image: image:1.0' }
+ around(:all) do |example|
+ create_and_delete_files(project, { '/file.yml' => 'image: image:1.0' }) do
+ example.run
+ end
end
it { is_expected.to be_truthy }
@@ -114,15 +117,16 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
end
let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file', 'masked' => true }]) }
- let(:root_ref_sha) { project.repository.root_ref_sha }
- before do
- stub_project_blob(root_ref_sha, '/secret_file.yml') { '' }
+ around(:all) do |example|
+ create_and_delete_files(project, { '/secret_file.yml' => '' }) do
+ example.run
+ end
end
it 'returns false' do
expect(valid?).to be_falsy
- expect(project_file.error_message).to include("Project `#{project.full_path}` file `/xxxxxxxxxxx.yml` is empty!")
+ expect(project_file.error_message).to include("Project `#{project.full_path}` file `xxxxxxxxxxx.yml` is empty!")
end
end
@@ -146,7 +150,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
it 'returns false' do
expect(valid?).to be_falsy
- expect(project_file.error_message).to include("Project `#{project.full_path}` file `/xxxxxxxxxxxxxxxxxxx.yml` does not exist!")
+ expect(project_file.error_message).to include("Project `#{project.full_path}` file `xxxxxxxxxxxxxxxxxxx.yml` does not exist!")
end
end
@@ -157,7 +161,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
it 'returns false' do
expect(valid?).to be_falsy
- expect(project_file.error_message).to include('Included file `/invalid-file` does not have YAML extension!')
+ expect(project_file.error_message).to include('Included file `invalid-file` does not have YAML extension!')
end
end
@@ -200,7 +204,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
is_expected.to include(
user: user,
project: project,
- sha: project.commit('master').id,
+ sha: project_sha,
parent_pipeline: parent_pipeline,
variables: project.predefined_variables.to_runner_variables)
end
@@ -216,11 +220,11 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
it {
is_expected.to eq(
context_project: context_project.full_path,
- context_sha: '12345',
+ context_sha: project_sha,
type: :file,
- location: '/file.yml',
- blob: "http://localhost/#{project.full_path}/-/blob/#{project.commit('master').id}/file.yml",
- raw: "http://localhost/#{project.full_path}/-/raw/#{project.commit('master').id}/file.yml",
+ location: 'file.yml',
+ blob: "http://localhost/#{project.full_path}/-/blob/#{project_sha}/file.yml",
+ raw: "http://localhost/#{project.full_path}/-/raw/#{project_sha}/file.yml",
extra: { project: project.full_path, ref: 'HEAD' }
)
}
@@ -239,9 +243,9 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
it {
is_expected.to eq(
context_project: context_project.full_path,
- context_sha: '12345',
+ context_sha: project_sha,
type: :file,
- location: '/file.yml',
+ location: 'file.yml',
blob: nil,
raw: nil,
extra: { project: 'xxxxxxxxxxxxxxxxxxxxxxxx', ref: 'xxxxxxxxxxxxxxxxxxxxxxxx' }
@@ -249,12 +253,4 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
}
end
end
-
- private
-
- def stub_project_blob(ref, path)
- allow_next_instance_of(Repository) do |instance|
- allow(instance).to receive(:blob_data_at).with(ref, path) { yield }
- end
- end
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
index fa67b134406..4303f279cac 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
@@ -94,6 +94,56 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category:
end
end
+ context 'when files are project files' do
+ let_it_be(:included_project) { create(:project, :repository, namespace: project.namespace, creator: user) }
+
+ let(:files) do
+ [
+ Gitlab::Ci::Config::External::File::Project.new(
+ { file: 'myfolder/file1.yml', project: included_project.full_path }, context
+ ),
+ Gitlab::Ci::Config::External::File::Project.new(
+ { file: 'myfolder/file2.yml', project: included_project.full_path }, context
+ ),
+ Gitlab::Ci::Config::External::File::Project.new(
+ { file: 'myfolder/file3.yml', project: included_project.full_path }, context
+ )
+ ]
+ end
+
+ around(:all) do |example|
+ create_and_delete_files(included_project, project_files) do
+ example.run
+ end
+ end
+
+ it 'returns an array of file objects' do
+ expect(process.map(&:location)).to contain_exactly(
+ 'myfolder/file1.yml', 'myfolder/file2.yml', 'myfolder/file3.yml'
+ )
+ end
+
+ it 'adds files to the expandset' do
+ expect { process }.to change { context.expandset.count }.by(3)
+ end
+
+ it 'calls Gitaly only once for all files', :request_store do
+ # 1 for project.commit.id, 3 for the sha check, 1 for the files
+ expect { process }.to change { Gitlab::GitalyClient.get_request_count }.by(5)
+ end
+
+ context 'when the FF ci_batch_request_for_local_and_project_includes is disabled' do
+ before do
+ stub_feature_flags(ci_batch_request_for_local_and_project_includes: false)
+ end
+
+ it 'calls Gitaly for each file', :request_store do
+ # 1 for project.commit.id, 3 for the sha check, 3 for the files
+ expect { process }.to change { Gitlab::GitalyClient.get_request_count }.by(7)
+ end
+ end
+ end
+
context 'when a file includes other files' do
let(:files) do
[
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index b397bfa38ee..344e9095fab 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
# This will be moved from a `shared_context` to a `describe` once every feature flag is removed.
-RSpec.shared_context 'gitlab_ci_config_external_mapper' do
+# - ci_batch_request_for_local_and_project_includes_enabled is also removed with the FF.
+RSpec.shared_context 'gitlab_ci_config_external_mapper' do |ci_batch_request_for_local_and_project_includes_enabled|
include StubRequests
include RepoHelpers
@@ -167,7 +168,11 @@ RSpec.shared_context 'gitlab_ci_config_external_mapper' do
an_instance_of(Gitlab::Ci::Config::External::File::Project))
end
- it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 2
+ if ci_batch_request_for_local_and_project_includes_enabled
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 1
+ else
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 2
+ end
end
end
@@ -465,13 +470,13 @@ RSpec.shared_context 'gitlab_ci_config_external_mapper' do
end
RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline_authoring do
- it_behaves_like 'gitlab_ci_config_external_mapper'
+ it_behaves_like 'gitlab_ci_config_external_mapper', true
context 'when the FF ci_batch_request_for_local_and_project_includes is disabled' do
before do
stub_feature_flags(ci_batch_request_for_local_and_project_includes: false)
end
- it_behaves_like 'gitlab_ci_config_external_mapper'
+ it_behaves_like 'gitlab_ci_config_external_mapper', false
end
end
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index e0080b252c6..311b433b7d2 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -334,7 +334,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel
context_project: project.full_path,
context_sha: sha },
{ type: :file,
- location: '/templates/my-workflow.yml',
+ location: 'templates/my-workflow.yml',
blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-workflow.yml",
raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-workflow.yml",
extra: { project: another_project.full_path, ref: 'HEAD' },
@@ -465,7 +465,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel
expect(context.includes).to contain_exactly(
{ type: :file,
- location: '/templates/my-build.yml',
+ location: 'templates/my-build.yml',
blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-build.yml",
raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-build.yml",
extra: { project: another_project.full_path, ref: 'HEAD' },
@@ -474,7 +474,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel
{ type: :file,
blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-test.yml",
raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-test.yml",
- location: '/templates/my-test.yml',
+ location: 'templates/my-test.yml',
extra: { project: another_project.full_path, ref: 'HEAD' },
context_project: project.full_path,
context_sha: sha }
diff --git a/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb b/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb
index 30bcce21be2..6772c62ab93 100644
--- a/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb
@@ -8,14 +8,14 @@ RSpec.describe Gitlab::Ci::Parsers::Instrumentation do
Class.new do
prepend Gitlab::Ci::Parsers::Instrumentation
- def parse!(arg1, arg2)
+ def parse!(arg1, arg2:)
"parse #{arg1} #{arg2}"
end
end
end
it 'sets metrics for duration of parsing' do
- result = parser_class.new.parse!('hello', 'world')
+ result = parser_class.new.parse!('hello', arg2: 'world')
expect(result).to eq('parse hello world')
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
deleted file mode 100644
index bec80a43a76..00000000000
--- a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments, feature_category: :continuous_integration do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
-
- let(:stage) { build(:ci_stage, project: project, statuses: [job]) }
- let(:pipeline) { create(:ci_pipeline, project: project, stages: [stage]) }
-
- let(:command) do
- Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
- end
-
- let(:step) { described_class.new(pipeline, command) }
-
- describe '#perform!' do
- subject { step.perform! }
-
- before do
- stub_feature_flags(move_create_deployments_to_worker: false)
- job.pipeline = pipeline
- end
-
- context 'when a pipeline contains a deployment job' do
- let!(:job) { build(:ci_build, :start_review_app, project: project) }
- let!(:environment) { create(:environment, project: project, name: job.expanded_environment_name) }
-
- it 'creates a deployment record' do
- expect { subject }.to change { Deployment.count }.by(1)
-
- job.reset
- expect(job.deployment.project).to eq(job.project)
- expect(job.deployment.ref).to eq(job.ref)
- expect(job.deployment.sha).to eq(job.sha)
- expect(job.deployment.deployable).to eq(job)
- expect(job.deployment.deployable_type).to eq('CommitStatus')
- expect(job.deployment.environment).to eq(job.persisted_environment)
- end
-
- context 'when the corresponding environment does not exist' do
- let!(:environment) {}
-
- it 'does not create a deployment record' do
- expect { subject }.not_to change { Deployment.count }
-
- expect(job.deployment).to be_nil
- end
- end
- end
-
- context 'when a pipeline contains a teardown job' do
- let!(:job) { build(:ci_build, :stop_review_app, project: project) }
- let!(:environment) { create(:environment, name: job.expanded_environment_name) }
-
- it 'does not create a deployment record' do
- expect { subject }.not_to change { Deployment.count }
-
- expect(job.deployment).to be_nil
- end
- end
-
- context 'when a pipeline does not contain a deployment job' do
- let!(:job) { build(:ci_build, project: project) }
-
- it 'does not create any deployments' do
- expect { subject }.not_to change { Deployment.count }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index a554e0b26a8..c44bb64a5c0 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -81,6 +81,10 @@ RSpec.describe Gitlab do
describe '.com?' do
context 'when not simulating SaaS' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '0')
+ end
+
it "is true when on #{Gitlab::Saas.com_url}" do
stub_config_setting(url: Gitlab::Saas.com_url)
@@ -114,31 +118,17 @@ RSpec.describe Gitlab do
end
end
- context 'when simulating SaaS' do
- before do
- stub_const('Gitlab::GITLAB_SIMULATE_SAAS', '1')
- end
-
- it 'is false in tests' do
- expect(described_class.com?).to eq false
- end
-
- it 'is true in development' do
- stub_rails_env('development')
+ it 'is true when GITLAB_SIMULATE_SAAS is true and in development' do
+ stub_rails_env('development')
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
- expect(described_class.com?).to eq true
- end
+ expect(described_class.com?).to eq true
+ end
- # See ee/spec/lib/gitlab_spec.rb for EE-specific changes to this behavior.
- context 'in a production environment' do
- before do
- stub_rails_env('production')
- end
+ it 'is false when GITLAB_SIMULATE_SAAS is true and in test' do
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
- it 'is false' do
- expect(described_class.com?).to eq false
- end
- end
+ expect(described_class.com?).to eq false
end
end
@@ -246,6 +236,54 @@ RSpec.describe Gitlab do
end
end
+ describe '.simulate_com?' do
+ subject { described_class.simulate_com? }
+
+ context 'when GITLAB_SIMULATE_SAAS is true' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
+ end
+
+ it 'is false when test env' do
+ expect(subject).to eq false
+ end
+
+ it 'is true when dev env' do
+ stub_rails_env('development')
+
+ expect(subject).to eq true
+ end
+
+ it 'is false when env is not dev' do
+ stub_rails_env('production')
+
+ expect(subject).to eq false
+ end
+ end
+
+ context 'when GITLAB_SIMULATE_SAAS is false' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '0')
+ end
+
+ it 'is false when test env' do
+ expect(subject).to eq false
+ end
+
+ it 'is false when dev env' do
+ stub_rails_env('development')
+
+ expect(subject).to eq false
+ end
+
+ it 'is false when env is not dev or test' do
+ stub_rails_env('production')
+
+ expect(subject).to eq false
+ end
+ end
+ end
+
describe '.dev_or_test_env?' do
subject { described_class.dev_or_test_env? }
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 3cea65ce699..47430dc6651 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -23,17 +23,19 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
it { is_expected.to belong_to(:trigger_request) }
it { is_expected.to belong_to(:erased_by) }
- it { is_expected.to have_many(:needs) }
- it { is_expected.to have_many(:sourced_pipelines) }
- it { is_expected.to have_one(:sourced_pipeline) }
- it { is_expected.to have_many(:job_variables) }
- it { is_expected.to have_many(:report_results) }
- it { is_expected.to have_many(:pages_deployments) }
+ it { is_expected.to have_many(:needs).with_foreign_key(:build_id) }
+ it { is_expected.to have_many(:sourced_pipelines).with_foreign_key(:source_job_id) }
+ it { is_expected.to have_one(:sourced_pipeline).with_foreign_key(:source_job_id) }
+ it { is_expected.to have_many(:job_variables).with_foreign_key(:job_id) }
+ it { is_expected.to have_many(:report_results).with_foreign_key(:build_id) }
+ it { is_expected.to have_many(:pages_deployments).with_foreign_key(:ci_build_id) }
it { is_expected.to have_one(:deployment) }
- it { is_expected.to have_one(:runner_session) }
- it { is_expected.to have_one(:trace_metadata) }
- it { is_expected.to have_many(:terraform_state_versions).inverse_of(:build) }
+ it { is_expected.to have_one(:runner_session).with_foreign_key(:build_id) }
+ it { is_expected.to have_one(:trace_metadata).with_foreign_key(:build_id) }
+ it { is_expected.to have_one(:runtime_metadata).with_foreign_key(:build_id) }
+ it { is_expected.to have_one(:pending_state).with_foreign_key(:build_id) }
+ it { is_expected.to have_many(:terraform_state_versions).inverse_of(:build).with_foreign_key(:ci_build_id) }
it { is_expected.to validate_presence_of(:ref) }
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 2ea6c396117..f277dd054ee 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -7260,6 +7260,54 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
+ describe '#group_protected_branches' do
+ subject { project.group_protected_branches }
+
+ let(:project) { create(:project, group: group) }
+ let(:group) { create(:group) }
+ let(:protected_branch) { create(:protected_branch, group: group, project: nil) }
+
+ it 'returns protected branches of the group' do
+ is_expected.to match_array([protected_branch])
+ end
+
+ context 'when project belongs to namespace' do
+ let(:project) { create(:project) }
+
+ it 'returns empty relation' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ describe '#all_protected_branches' do
+ let(:group) { create(:group) }
+ let!(:group_protected_branch) { create(:protected_branch, group: group, project: nil) }
+ let!(:project_protected_branch) { create(:protected_branch, project: subject) }
+
+ subject { create(:project, group: group) }
+
+ context 'when feature flag `group_protected_branches` enabled' do
+ before do
+ stub_feature_flags(group_protected_branches: true)
+ end
+
+ it 'return all protected branches' do
+ expect(subject.all_protected_branches).to match_array([group_protected_branch, project_protected_branch])
+ end
+ end
+
+ context 'when feature flag `group_protected_branches` disabled' do
+ before do
+ stub_feature_flags(group_protected_branches: false)
+ end
+
+ it 'return only project-level protected branches' do
+ expect(subject.all_protected_branches).to match_array([project_protected_branch])
+ end
+ end
+ end
+
describe '#lfs_objects_oids' do
let(:project) { create(:project) }
let(:lfs_object) { create(:lfs_object) }
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 94c463dfe43..d5e9b04f03e 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -341,23 +341,61 @@ RSpec.describe ProtectedBranch do
end
describe "#allow_force_push?" do
- context "when the attr allow_force_push is true" do
- let(:subject_branch) { create(:protected_branch, allow_force_push: true, name: "foo") }
+ context "when feature flag disabled" do
+ before do
+ stub_feature_flags(group_protected_branches: false)
+ end
+
+ let(:subject_branch) { create(:protected_branch, allow_force_push: allow_force_push, name: "foo") }
+ let(:project) { subject_branch.project }
- it "returns true" do
- project = subject_branch.project
+ context "when the attr allow_force_push is true" do
+ let(:allow_force_push) { true }
- expect(described_class.allow_force_push?(project, "foo")).to eq(true)
+ it "returns true" do
+ expect(described_class.allow_force_push?(project, "foo")).to eq(true)
+ end
+ end
+
+ context "when the attr allow_force_push is false" do
+ let(:allow_force_push) { false }
+
+ it "returns false" do
+ expect(described_class.allow_force_push?(project, "foo")).to eq(false)
+ end
end
end
- context "when the attr allow_force_push is false" do
- let(:subject_branch) { create(:protected_branch, allow_force_push: false, name: "foo") }
+ context "when feature flag enabled" do
+ using RSpec::Parameterized::TableSyntax
- it "returns false" do
- project = subject_branch.project
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
- expect(described_class.allow_force_push?(project, "foo")).to eq(false)
+ where(:group_level_value, :project_level_value, :result) do
+ true | false | true
+ false | true | false
+ true | nil | true
+ false | nil | false
+ nil | nil | false
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(group_protected_branches: true)
+
+ unless group_level_value.nil?
+ create(:protected_branch, allow_force_push: group_level_value, name: "foo", project: nil, group: group)
+ end
+
+ unless project_level_value.nil?
+ create(:protected_branch, allow_force_push: project_level_value, name: "foo", project: project)
+ end
+ end
+
+ it "returns result" do
+ expect(described_class.allow_force_push?(project, "foo")).to eq(result)
+ end
end
end
end
@@ -390,6 +428,36 @@ RSpec.describe ProtectedBranch do
end
end
+ describe '.protected_refs' do
+ let_it_be(:project) { create(:project) }
+
+ subject { described_class.protected_refs(project) }
+
+ context 'when feature flag enabled' do
+ before do
+ stub_feature_flags(group_protected_branches: true)
+ end
+
+ it 'call `all_protected_branches`' do
+ expect(project).to receive(:all_protected_branches)
+
+ subject
+ end
+ end
+
+ context 'when feature flag disabled' do
+ before do
+ stub_feature_flags(group_protected_branches: false)
+ end
+
+ it 'call `protected_branches`' do
+ expect(project).to receive(:protected_branches)
+
+ subject
+ end
+ end
+ end
+
describe '.by_name' do
let!(:protected_branch) { create(:protected_branch, name: 'master') }
let!(:another_protected_branch) { create(:protected_branch, name: 'stable') }
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index eba1a06b5e4..058ddaebd79 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -279,7 +279,7 @@ RSpec.describe API::Branches, feature_category: :source_code_management do
expect do
get api(route, current_user), params: { per_page: 100 }
- end.not_to exceed_query_limit(control)
+ end.not_to exceed_query_limit(control).with_threshold(1)
end
end
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index 6b5d437df83..1f321d1dec3 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -55,7 +55,12 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
'title' => work_item.title,
'confidential' => work_item.confidential,
'workItemType' => hash_including('id' => work_item.work_item_type.to_gid.to_s),
- 'userPermissions' => { 'readWorkItem' => true, 'updateWorkItem' => true, 'deleteWorkItem' => false },
+ 'userPermissions' => {
+ 'readWorkItem' => true,
+ 'updateWorkItem' => true,
+ 'deleteWorkItem' => false,
+ 'adminWorkItem' => true
+ },
'project' => hash_including('id' => project.to_gid.to_s, 'fullPath' => project.full_path)
)
end
diff --git a/spec/services/projects/protect_default_branch_service_spec.rb b/spec/services/projects/protect_default_branch_service_spec.rb
index c8aa421cdd4..9f9e89ff8f8 100644
--- a/spec/services/projects/protect_default_branch_service_spec.rb
+++ b/spec/services/projects/protect_default_branch_service_spec.rb
@@ -233,6 +233,38 @@ RSpec.describe Projects::ProtectDefaultBranchService do
end
end
+ describe '#protected_branch_exists?' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ let(:default_branch) { "default-branch" }
+
+ before do
+ allow(project).to receive(:default_branch).and_return(default_branch)
+ create(:protected_branch, project: nil, group: group, name: default_branch)
+ end
+
+ context 'when feature flag `group_protected_branches` disabled' do
+ before do
+ stub_feature_flags(group_protected_branches: false)
+ end
+
+ it 'return false' do
+ expect(service.protected_branch_exists?).to eq(false)
+ end
+ end
+
+ context 'when feature flag `group_protected_branches` enabled' do
+ before do
+ stub_feature_flags(group_protected_branches: true)
+ end
+
+ it 'return true' do
+ expect(service.protected_branch_exists?).to eq(true)
+ end
+ end
+ end
+
describe '#default_branch' do
it 'returns the default branch of the project' do
allow(project)
diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
index 6f4072ba762..532ef99f25f 100644
--- a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
+++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
@@ -1003,7 +1003,7 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
let(:params) { { issue_types: ['nonsense'] } }
it 'returns no items' do
- expect(items).to eq(items_model.none)
+ expect(items.none?).to eq(true)
end
end
end
diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
index 974fc8f402a..0ef9ab25505 100644
--- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
@@ -276,7 +276,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
describe 'Push events' do
let_it_be(:user) { create(:user) }
- let_it_be_with_reload(:project) { create(:project, :repository, creator: user) }
+ let_it_be_with_refind(:project) { create(:project, :repository, creator: user) }
before do
allow(chat_integration).to receive_messages(
@@ -520,7 +520,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
describe 'Pipeline events' do
let_it_be(:user) { create(:user) }
- let_it_be_with_reload(:project) { create(:project, :repository, creator: user) }
+ let_it_be_with_refind(:project) { create(:project, :repository, creator: user) }
let(:pipeline) do
create(:ci_pipeline,
project: project, status: status,
@@ -671,7 +671,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
describe 'Deployment events' do
let_it_be(:user) { create(:user) }
- let_it_be_with_reload(:project) { create(:project, :repository, creator: user) }
+ let_it_be_with_refind(:project) { create(:project, :repository, creator: user) }
let_it_be(:deployment) do
create(:deployment, :success, project: project, sha: project.commit.sha, ref: project.default_branch)
diff --git a/tooling/danger/config_files.rb b/tooling/danger/config_files.rb
index 1b09da6c8c9..e165792471f 100644
--- a/tooling/danger/config_files.rb
+++ b/tooling/danger/config_files.rb
@@ -8,7 +8,7 @@ module Tooling
module ConfigFiles
include ::Tooling::Danger::Suggestor
- MISSING_INTRODUCED_BY_REGEX = /^\+?(?<attr_name>\s*introduced_by_url):\s*$/
+ MISSING_INTRODUCED_BY_REGEX = /^\+?(?<attr_name>\s*introduced_by_url):\s*$/.freeze
CONFIG_DIRS = %w[
config/feature_flags
diff --git a/vendor/gems/gitlab_active_record/.gitignore b/vendor/gems/gitlab_active_record/.gitignore
new file mode 100644
index 00000000000..b04a8c840df
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/.gitignore
@@ -0,0 +1,11 @@
+/.bundle/
+/.yardoc
+/_yardoc/
+/coverage/
+/doc/
+/pkg/
+/spec/reports/
+/tmp/
+
+# rspec failure tracking
+.rspec_status
diff --git a/vendor/gems/gitlab_active_record/.gitlab-ci.yml b/vendor/gems/gitlab_active_record/.gitlab-ci.yml
new file mode 100644
index 00000000000..a1e883119e8
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/.gitlab-ci.yml
@@ -0,0 +1,28 @@
+workflow:
+ rules:
+ - if: $CI_MERGE_REQUEST_ID
+
+.rspec:
+ cache:
+ key: gitlab_active_record-ruby
+ paths:
+ - vendor/gems/gitlab_active_record/vendor/ruby
+ before_script:
+ - cd vendor/gems/gitlab_active_record
+ - ruby -v # Print out ruby version for debugging
+ - gem install bundler --no-document # Bundler is not installed with the image
+ - bundle config set --local path 'vendor' # Install dependencies into ./vendor/ruby
+ - bundle config set with 'development'
+ - bundle config set --local frozen 'true' # Disallow Gemfile.lock changes on CI
+ - bundle config # Show bundler configuration
+ - bundle install -j $(nproc)
+ script:
+ - bundle exec rspec
+
+rspec-2.7:
+ image: "ruby:2.7"
+ extends: .rspec
+
+rspec-3.0:
+ image: "ruby:3.0"
+ extends: .rspec
diff --git a/vendor/gems/gitlab_active_record/.rspec b/vendor/gems/gitlab_active_record/.rspec
new file mode 100644
index 00000000000..34c5164d9b5
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/.rspec
@@ -0,0 +1,3 @@
+--format documentation
+--color
+--require spec_helper
diff --git a/vendor/gems/gitlab_active_record/Gemfile b/vendor/gems/gitlab_active_record/Gemfile
new file mode 100644
index 00000000000..e694fe26c66
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/Gemfile
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+
+source "https://rubygems.org"
+
+# Specify your gem's dependencies in gitlab_active_record.gemspec
+gemspec
diff --git a/vendor/gems/gitlab_active_record/Gemfile.lock b/vendor/gems/gitlab_active_record/Gemfile.lock
new file mode 100644
index 00000000000..93aecbc7276
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/Gemfile.lock
@@ -0,0 +1,54 @@
+PATH
+ remote: .
+ specs:
+ gitlab_active_record (0.1.0)
+ activerecord (~> 6.1)
+ activesupport (~> 6.1)
+
+GEM
+ remote: https://rubygems.org/
+ specs:
+ activemodel (6.1.7)
+ activesupport (= 6.1.7)
+ activerecord (6.1.7)
+ activemodel (= 6.1.7)
+ activesupport (= 6.1.7)
+ activesupport (6.1.7)
+ concurrent-ruby (~> 1.0, >= 1.0.2)
+ i18n (>= 1.6, < 2)
+ minitest (>= 5.1)
+ tzinfo (~> 2.0)
+ zeitwerk (~> 2.3)
+ concurrent-ruby (1.1.10)
+ diff-lcs (1.5.0)
+ i18n (1.12.0)
+ concurrent-ruby (~> 1.0)
+ minitest (5.16.3)
+ rake (13.0.6)
+ rspec (3.11.0)
+ rspec-core (~> 3.11.0)
+ rspec-expectations (~> 3.11.0)
+ rspec-mocks (~> 3.11.0)
+ rspec-core (3.11.0)
+ rspec-support (~> 3.11.0)
+ rspec-expectations (3.11.0)
+ diff-lcs (>= 1.2.0, < 2.0)
+ rspec-support (~> 3.11.0)
+ rspec-mocks (3.11.0)
+ diff-lcs (>= 1.2.0, < 2.0)
+ rspec-support (~> 3.11.0)
+ rspec-support (3.11.0)
+ tzinfo (2.0.5)
+ concurrent-ruby (~> 1.0)
+ zeitwerk (2.6.6)
+
+PLATFORMS
+ ruby
+
+DEPENDENCIES
+ gitlab_active_record!
+ rake (~> 13.0)
+ rspec (~> 3.0)
+
+BUNDLED WITH
+ 2.3.26
diff --git a/vendor/gems/gitlab_active_record/LICENSE b/vendor/gems/gitlab_active_record/LICENSE
new file mode 100644
index 00000000000..aafb7f79450
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/LICENSE
@@ -0,0 +1,7 @@
+Copyright 2022 GitLab B.V.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/gems/gitlab_active_record/Rakefile b/vendor/gems/gitlab_active_record/Rakefile
new file mode 100644
index 00000000000..b6ae734104e
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/Rakefile
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require "bundler/gem_tasks"
+require "rspec/core/rake_task"
+
+RSpec::Core::RakeTask.new(:spec)
+
+task default: :spec
diff --git a/vendor/gems/gitlab_active_record/bin/console b/vendor/gems/gitlab_active_record/bin/console
new file mode 100755
index 00000000000..a436c04dd66
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/bin/console
@@ -0,0 +1,15 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require "bundler/setup"
+require "gitlab_active_record"
+
+# You can add fixtures and/or initialization code here to make experimenting
+# with your gem easier. You can also use a different console, if you like.
+
+# (If you use this, don't forget to add pry to your Gemfile!)
+# require "pry"
+# Pry.start
+
+require "irb"
+IRB.start(__FILE__)
diff --git a/vendor/gems/gitlab_active_record/bin/setup b/vendor/gems/gitlab_active_record/bin/setup
new file mode 100755
index 00000000000..dce67d860af
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/bin/setup
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+set -euo pipefail
+IFS=$'\n\t'
+set -vx
+
+bundle install
+
+# Do any other automated setup that you need to do here
diff --git a/vendor/gems/gitlab_active_record/gitlab_active_record.gemspec b/vendor/gems/gitlab_active_record/gitlab_active_record.gemspec
new file mode 100644
index 00000000000..17e7d8f40d6
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/gitlab_active_record.gemspec
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require_relative "lib/gitlab_active_record/version"
+
+Gem::Specification.new do |spec|
+ spec.name = "gitlab_active_record"
+ spec.version = GitlabActiveRecord::VERSION
+ spec.authors = ["GitLab"]
+ spec.email = [""]
+
+ spec.summary = "ActiveRecord patches for CI partitioning"
+ spec.description = "ActiveRecord patches for CI partitioning"
+ spec.homepage = "https://gitlab.com/gitlab-org/gitlab"
+ spec.required_ruby_version = ">= 2.6.0"
+
+ spec.metadata["homepage_uri"] = spec.homepage
+ spec.metadata["source_code_uri"] = "https://gitlab.com/gitlab-org/gitlab"
+
+ spec.files = Dir.glob("lib/**/*")
+ spec.bindir = "exe"
+ spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+ spec.require_paths = ["lib"]
+
+ spec.add_dependency 'activerecord', '~> 6.1'
+ spec.add_dependency 'activesupport', '~> 6.1'
+
+ spec.add_development_dependency 'rake', '~> 13.0'
+ spec.add_development_dependency 'rspec', '~> 3.0'
+end
diff --git a/vendor/gems/gitlab_active_record/lib/gitlab_active_record.rb b/vendor/gems/gitlab_active_record/lib/gitlab_active_record.rb
new file mode 100644
index 00000000000..2ac8c71939f
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/lib/gitlab_active_record.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require_relative "gitlab_active_record/version"
+
+module GitlabActiveRecord
+ class Error < StandardError; end
+end
diff --git a/vendor/gems/gitlab_active_record/lib/gitlab_active_record/version.rb b/vendor/gems/gitlab_active_record/lib/gitlab_active_record/version.rb
new file mode 100644
index 00000000000..d274361efd7
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/lib/gitlab_active_record/version.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+module GitlabActiveRecord
+ VERSION = "0.1.0"
+end
diff --git a/vendor/gems/gitlab_active_record/spec/gitlab_active_record_spec.rb b/vendor/gems/gitlab_active_record/spec/gitlab_active_record_spec.rb
new file mode 100644
index 00000000000..d9263a08dfd
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/spec/gitlab_active_record_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+RSpec.describe GitlabActiveRecord do
+ it "has a version number" do
+ expect(GitlabActiveRecord::VERSION).not_to be nil
+ end
+end
diff --git a/vendor/gems/gitlab_active_record/spec/spec_helper.rb b/vendor/gems/gitlab_active_record/spec/spec_helper.rb
new file mode 100644
index 00000000000..3cfabb45b1a
--- /dev/null
+++ b/vendor/gems/gitlab_active_record/spec/spec_helper.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require "gitlab_active_record"
+
+RSpec.configure do |config|
+ # Enable flags like --only-failures and --next-failure
+ config.example_status_persistence_file_path = ".rspec_status"
+
+ # Disable RSpec exposing methods globally on `Module` and `main`
+ config.disable_monkey_patching!
+
+ config.expect_with :rspec do |c|
+ c.syntax = :expect
+ end
+end