
gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com> 2021-09-27 18:10:13 +0300
committer GitLab Bot <gitlab-bot@gitlab.com> 2021-09-27 18:10:13 +0300
commit    28b15b6b1c591fd7796bf3a7a20508d991216b51 (patch)
tree      8f049840cc174d995ec38184ecd1fa4d801472ae
parent    cddf2db96b2280ad995b589b70ff23ff77cceb7b (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .rubocop_manual_todo.yml  1
-rw-r--r--  CHANGELOG.md  6
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue  10
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/constants.js  40
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue  14
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js  50
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/stores/state_maps.js  10
-rw-r--r--  app/controllers/concerns/group_tree.rb  9
-rw-r--r--  app/models/concerns/checksummable.rb  6
-rw-r--r--  app/models/group.rb  14
-rw-r--r--  app/models/lfs_object.rb  2
-rw-r--r--  app/models/upload.rb  2
-rw-r--r--  app/presenters/clusters/cluster_presenter.rb  39
-rw-r--r--  app/views/admin/application_settings/_spam.html.haml  49
-rw-r--r--  app/views/admin/application_settings/reporting.html.haml  4
-rw-r--r--  config/feature_flags/development/linear_group_ancestor_scopes.yml  8
-rw-r--r--  config/feature_flags/development/linear_group_tree_ancestor_scopes.yml  8
-rw-r--r--  db/post_migrate/20210923133143_remove_redundant_taggings_index.rb  15
-rw-r--r--  db/schema_migrations/20210923133143  1
-rw-r--r--  db/structure.sql  2
-rw-r--r--  doc/administration/auth/ldap/ldap-troubleshooting.md  2
-rw-r--r--  doc/administration/clusters/kas.md  2
-rw-r--r--  doc/administration/gitaly/troubleshooting.md  2
-rw-r--r--  doc/administration/instance_review.md  2
-rw-r--r--  doc/administration/reference_architectures/10k_users.md  2
-rw-r--r--  doc/administration/reference_architectures/25k_users.md  2
-rw-r--r--  doc/development/pipelines.md  573
-rw-r--r--  lib/gitlab/ci/trace.rb  2
-rw-r--r--  lib/gitlab/verify/uploads.rb  2
-rw-r--r--  locale/gitlab.pot  72
-rw-r--r--  qa/qa/resource/personal_access_token.rb  8
-rw-r--r--  qa/qa/runtime/api/client.rb  73
-rw-r--r--  spec/controllers/concerns/group_tree_spec.rb  112
-rw-r--r--  spec/features/admin/admin_settings_spec.rb  8
-rw-r--r--  spec/features/merge_request/user_merges_immediately_spec.rb  2
-rw-r--r--  spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js  2
-rw-r--r--  spec/models/concerns/checksummable_spec.rb  12
-rw-r--r--  spec/models/group_spec.rb  26
-rw-r--r--  spec/presenters/clusters/cluster_presenter_spec.rb  143
-rw-r--r--  spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb  6
40 files changed, 710 insertions, 633 deletions
diff --git a/.rubocop_manual_todo.yml b/.rubocop_manual_todo.yml
index 2cbfeec1048..8022eb7f605 100644
--- a/.rubocop_manual_todo.yml
+++ b/.rubocop_manual_todo.yml
@@ -2589,7 +2589,6 @@ Rails/IncludeUrlHelper:
- 'app/models/integrations/youtrack.rb'
- 'app/presenters/alert_management/alert_presenter.rb'
- 'app/presenters/ci/pipeline_presenter.rb'
- - 'app/presenters/clusters/cluster_presenter.rb'
- 'app/presenters/environment_presenter.rb'
- 'app/presenters/gitlab/blame_presenter.rb'
- 'app/presenters/group_clusterable_presenter.rb'
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5dabc7735f7..0645b2a668b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1089,6 +1089,12 @@ entry.
- [Add helpful text to URL group validation and limit text](gitlab-org/gitlab@59a5a6266cb0d5434596170ffa36e4e74b8d2c2c) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65369)) **GitLab Enterprise Edition**
- [Refactor external storage admin area configuration UI and docs](gitlab-org/gitlab@497ba4fc8f4ec1d234c9f5f1ec5c69712b8c7cb3) ([merge request](gitlab-org/gitlab!66219))
+## 14.1.6 (2021-09-27)
+
+### Fixed (1 change)
+
+- [Fix Elastic::MigrationWorker current_migration (2nd attempt)](gitlab-org/gitlab@f07c7a5f173a2fc053247664f21c03d29df543a4) ([merge request](gitlab-org/gitlab!71187)) **GitLab Enterprise Edition**
+
## 14.1.5 (2021-09-02)
### Fixed (1 change)
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
index 7df65e995a5..eaa1d6f4cdd 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
@@ -29,6 +29,7 @@ import {
WARNING,
MT_MERGE_STRATEGY,
PIPELINE_FAILED_STATE,
+ STATE_MACHINE,
} from '../../constants';
import eventHub from '../../event_hub';
import mergeRequestQueryVariablesMixin from '../../mixins/merge_request_query_variables';
@@ -47,6 +48,9 @@ const MERGE_FAILED_STATUS = 'failed';
const MERGE_SUCCESS_STATUS = 'success';
const MERGE_HOOK_VALIDATION_ERROR_STATUS = 'hook_validation_error';
+const { transitions } = STATE_MACHINE;
+const { MERGE, MERGED, MERGE_FAILURE } = transitions;
+
export default {
name: 'ReadyToMerge',
apollo: {
@@ -361,6 +365,7 @@ export default {
}
this.isMakingRequest = true;
+ this.mr.transitionStateMachine({ transition: MERGE });
this.service
.merge(options)
.then((res) => res.data)
@@ -375,6 +380,7 @@ export default {
this.initiateMergePolling();
} else if (hasError) {
eventHub.$emit('FailedToMerge', data.merge_error);
+ this.mr.transitionStateMachine({ transition: MERGE_FAILURE });
}
if (this.glFeatures.mergeRequestWidgetGraphql) {
@@ -383,6 +389,7 @@ export default {
})
.catch(() => {
this.isMakingRequest = false;
+ this.mr.transitionStateMachine({ transition: MERGE_FAILURE });
createFlash({
message: __('Something went wrong. Please try again.'),
});
@@ -417,6 +424,7 @@ export default {
eventHub.$emit('FetchActionsContent');
MergeRequest.hideCloseButton();
MergeRequest.decreaseCounter();
+ this.mr.transitionStateMachine({ transition: MERGED });
stopPolling();
refreshUserMergeRequestCounts();
@@ -428,6 +436,7 @@ export default {
}
} else if (data.merge_error) {
eventHub.$emit('FailedToMerge', data.merge_error);
+ this.mr.transitionStateMachine({ transition: MERGE_FAILURE });
stopPolling();
} else {
// MR is not merged yet, continue polling until the state becomes 'merged'
@@ -438,6 +447,7 @@ export default {
createFlash({
message: __('Something went wrong while merging this merge request. Please try again.'),
});
+ this.mr.transitionStateMachine({ transition: MERGE_FAILURE });
stopPolling();
});
},
diff --git a/app/assets/javascripts/vue_merge_request_widget/constants.js b/app/assets/javascripts/vue_merge_request_widget/constants.js
index f5710f46b7e..5edd4684529 100644
--- a/app/assets/javascripts/vue_merge_request_widget/constants.js
+++ b/app/assets/javascripts/vue_merge_request_widget/constants.js
@@ -1,4 +1,5 @@
import { s__ } from '~/locale';
+import { stateToComponentMap as classStateMap, stateKey } from './stores/state_maps';
export const SUCCESS = 'success';
export const WARNING = 'warning';
@@ -52,3 +53,42 @@ export const MERGE_ACTIVE_STATUS_PHRASES = [
emoji: 'heart_eyes',
},
];
+
+const STATE_MACHINE = {
+ states: {
+ IDLE: 'IDLE',
+ MERGING: 'MERGING',
+ },
+ transitions: {
+ MERGE: 'start-merge',
+ MERGE_FAILURE: 'merge-failed',
+ MERGED: 'merge-done',
+ },
+};
+const { states, transitions } = STATE_MACHINE;
+
+STATE_MACHINE.definition = {
+ initial: states.IDLE,
+ states: {
+ [states.IDLE]: {
+ on: {
+ [transitions.MERGE]: states.MERGING,
+ },
+ },
+ [states.MERGING]: {
+ on: {
+ [transitions.MERGED]: states.IDLE,
+ [transitions.MERGE_FAILURE]: states.IDLE,
+ },
+ },
+ },
+};
+
+export const stateToTransitionMap = {
+ [stateKey.merging]: transitions.MERGE,
+ [stateKey.merged]: transitions.MERGED,
+};
+export const stateToComponentMap = {
+ [states.MERGING]: classStateMap[stateKey.merging],
+};
+export { STATE_MACHINE };
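The `STATE_MACHINE` constant above only declares state names, transition names, and a definition object; running transitions is delegated to the `machine()` factory from `~/lib/utils/finite_state_machine`, which `mr_widget_store.js` (below) uses via `value` and `send()`. The following is a minimal sketch of that interaction, using an illustrative stand-in for `machine()` that mirrors only the usage visible in this diff, not the real utility's full behavior:

```javascript
// Stand-in for the `machine()` factory from ~/lib/utils/finite_state_machine,
// reduced to the `value`/`send()` surface used by mr_widget_store.js below.
const machine = (definition) => ({
  value: definition.initial,
  send(transition) {
    // Look up the target state for this transition from the current state.
    const next = definition.states[this.value]?.on?.[transition];
    if (next) this.value = next; // unknown transitions leave the value unchanged (in this stand-in)
    return this.value;
  },
});

// Shape of the definition added to constants.js above.
const STATE_MACHINE = {
  states: { IDLE: 'IDLE', MERGING: 'MERGING' },
  transitions: { MERGE: 'start-merge', MERGE_FAILURE: 'merge-failed', MERGED: 'merge-done' },
};
STATE_MACHINE.definition = {
  initial: STATE_MACHINE.states.IDLE,
  states: {
    IDLE: { on: { [STATE_MACHINE.transitions.MERGE]: STATE_MACHINE.states.MERGING } },
    MERGING: {
      on: {
        [STATE_MACHINE.transitions.MERGED]: STATE_MACHINE.states.IDLE,
        [STATE_MACHINE.transitions.MERGE_FAILURE]: STATE_MACHINE.states.IDLE,
      },
    },
  },
};

const fsm = machine(STATE_MACHINE.definition);
console.log(fsm.value);                                  // 'IDLE'
console.log(fsm.send(STATE_MACHINE.transitions.MERGE));  // 'MERGING'
console.log(fsm.send(STATE_MACHINE.transitions.MERGED)); // 'IDLE'
```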
diff --git a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
index feca05a9329..59a3c9185b9 100644
--- a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
@@ -4,7 +4,7 @@ import { isEmpty } from 'lodash';
import MrWidgetApprovals from 'ee_else_ce/vue_merge_request_widget/components/approvals/approvals.vue';
import MRWidgetService from 'ee_else_ce/vue_merge_request_widget/services/mr_widget_service';
import MRWidgetStore from 'ee_else_ce/vue_merge_request_widget/stores/mr_widget_store';
-import stateMaps from 'ee_else_ce/vue_merge_request_widget/stores/state_maps';
+import { stateToComponentMap as classState } from 'ee_else_ce/vue_merge_request_widget/stores/state_maps';
import createFlash from '~/flash';
import { secondsToMilliseconds } from '~/lib/utils/datetime_utility';
import notify from '~/lib/utils/notify';
@@ -39,6 +39,7 @@ import ShaMismatch from './components/states/sha_mismatch.vue';
import UnresolvedDiscussionsState from './components/states/unresolved_discussions.vue';
import WorkInProgressState from './components/states/work_in_progress.vue';
import ExtensionsContainer from './components/extensions/container';
+import { STATE_MACHINE, stateToComponentMap } from './constants';
import eventHub from './event_hub';
import mergeRequestQueryVariablesMixin from './mixins/merge_request_query_variables';
import getStateQuery from './queries/get_state.query.graphql';
@@ -124,7 +125,9 @@ export default {
mr: store,
state: store && store.state,
service: store && this.createService(store),
+ machineState: store?.machineValue || STATE_MACHINE.definition.initial,
loading: true,
+ recomputeComponentName: 0,
};
},
computed: {
@@ -139,7 +142,7 @@ export default {
return this.mr.state !== 'nothingToMerge';
},
componentName() {
- return stateMaps.stateToComponentMap[this.mr.state];
+ return stateToComponentMap[this.machineState] || classState[this.mr.state];
},
hasPipelineMustSucceedConflict() {
return !this.mr.hasCI && this.mr.onlyAllowMergeIfPipelineSucceeds;
@@ -206,6 +209,11 @@ export default {
},
},
watch: {
+ 'mr.machineValue': {
+ handler(newValue) {
+ this.machineState = newValue;
+ },
+ },
state(newVal, oldVal) {
if (newVal !== oldVal && this.shouldRenderMergedPipeline) {
// init polling
@@ -247,6 +255,8 @@ export default {
this.mr = new MRWidgetStore({ ...window.gl.mrWidgetData, ...data });
}
+ this.machineState = this.mr.machineValue;
+
if (!this.state) {
this.state = this.mr.state;
}
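The key wiring in `mr_widget_options.vue` is the `componentName` computed property: the machine-state map is consulted first, and the legacy `stateToComponentMap` remains the fallback, so only states the machine currently models (just `MERGING`) change which component renders. A reduced sketch of that lookup, with the maps trimmed to the entries relevant here:

```javascript
// Sketch of the component-resolution fallback added to mr_widget_options.vue.
const classState = { merging: 'mr-widget-merging', merged: 'mr-widget-merged' }; // legacy stateToComponentMap (trimmed)
const stateToComponentMap = { MERGING: classState.merging };                     // machine-state map from constants.js

const componentName = (machineState, legacyState) =>
  stateToComponentMap[machineState] || classState[legacyState];

console.log(componentName('MERGING', 'readyToMerge')); // 'mr-widget-merging' (machine state wins)
console.log(componentName('IDLE', 'merged'));          // 'mr-widget-merged'  (falls back to the legacy map)
```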
diff --git a/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js b/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
index 29e0c867f6b..116d3be2265 100644
--- a/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
+++ b/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
@@ -1,11 +1,21 @@
import getStateKey from 'ee_else_ce/vue_merge_request_widget/stores/get_state_key';
import { statusBoxState } from '~/issuable/components/status_box.vue';
import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
-import { MTWPS_MERGE_STRATEGY, MT_MERGE_STRATEGY, MWPS_MERGE_STRATEGY } from '../constants';
+import { machine } from '~/lib/utils/finite_state_machine';
+import {
+ MTWPS_MERGE_STRATEGY,
+ MT_MERGE_STRATEGY,
+ MWPS_MERGE_STRATEGY,
+ STATE_MACHINE,
+ stateToTransitionMap,
+} from '../constants';
import { stateKey } from './state_maps';
const { format } = getTimeago();
+const { states } = STATE_MACHINE;
+const { IDLE } = states;
+
export default class MergeRequestStore {
constructor(data) {
this.sha = data.diff_head_sha;
@@ -16,6 +26,9 @@ export default class MergeRequestStore {
this.apiUnapprovePath = data.api_unapprove_path;
this.hasApprovalsAvailable = data.has_approvals_available;
+ this.stateMachine = machine(STATE_MACHINE.definition);
+ this.machineValue = this.stateMachine.value;
+
this.setPaths(data);
this.setData(data);
@@ -215,10 +228,7 @@ export default class MergeRequestStore {
setState() {
if (this.mergeOngoing) {
this.state = 'merging';
- return;
- }
-
- if (this.isOpen) {
+ } else if (this.isOpen) {
this.state = getStateKey.call(this);
} else {
switch (this.mergeRequestState) {
@@ -232,6 +242,8 @@ export default class MergeRequestStore {
this.state = null;
}
}
+
+ this.translateStateToMachine();
}
setPaths(data) {
@@ -356,4 +368,32 @@ export default class MergeRequestStore {
(this.onlyAllowMergeIfPipelineSucceeds && this.isPipelineFailed)
);
}
+
+ // Because the state machine doesn't yet handle every state and transition,
+ // some use-cases will need to force a state that can't be reached by
+ // a known transition. This is undesirable long-term (as it subverts
+ // the intent of a state machine), but is necessary until the machine
+ // can handle all possible combinations. (unsafeForce)
+ transitionStateMachine({ transition, state, unsafeForce = false } = {}) {
+ if (unsafeForce && state) {
+ this.stateMachine.value = state;
+ } else {
+ this.stateMachine.send(transition);
+ }
+
+ this.machineValue = this.stateMachine.value;
+ }
+ translateStateToMachine() {
+ const transition = stateToTransitionMap[this.state];
+ let transitionOptions = {
+ state: IDLE,
+ unsafeForce: true,
+ };
+
+ if (transition) {
+ transitionOptions = { transition };
+ }
+
+ this.transitionStateMachine(transitionOptions);
+ }
}
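`translateStateToMachine()` bridges the legacy string states and the new machine: states present in `stateToTransitionMap` are sent as transitions, while any other state falls back to force-setting the machine to `IDLE` via `unsafeForce`. A small sketch of that option-picking logic, assuming the map values shown in `constants.js` above:

```javascript
// Sketch of how translateStateToMachine() chooses its transition options.
const stateToTransitionMap = { merging: 'start-merge', merged: 'merge-done' }; // from constants.js above
const IDLE = 'IDLE';

const optionsFor = (state) => {
  const transition = stateToTransitionMap[state];
  // States the machine knows about become transitions; anything else forces IDLE.
  return transition ? { transition } : { state: IDLE, unsafeForce: true };
};

console.log(optionsFor('merging'));   // { transition: 'start-merge' }
console.log(optionsFor('conflicts')); // { state: 'IDLE', unsafeForce: true }
```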
diff --git a/app/assets/javascripts/vue_merge_request_widget/stores/state_maps.js b/app/assets/javascripts/vue_merge_request_widget/stores/state_maps.js
index 04454882666..4cb23407a74 100644
--- a/app/assets/javascripts/vue_merge_request_widget/stores/state_maps.js
+++ b/app/assets/javascripts/vue_merge_request_widget/stores/state_maps.js
@@ -1,4 +1,4 @@
-const stateToComponentMap = {
+export const stateToComponentMap = {
merged: 'mr-widget-merged',
closed: 'mr-widget-closed',
merging: 'mr-widget-merging',
@@ -21,7 +21,7 @@ const stateToComponentMap = {
mergeChecksFailed: 'mergeChecksFailed',
};
-const statesToShowHelpWidget = [
+export const statesToShowHelpWidget = [
'merging',
'conflicts',
'workInProgress',
@@ -50,11 +50,7 @@ export const stateKey = {
notAllowedToMerge: 'notAllowedToMerge',
readyToMerge: 'readyToMerge',
rebase: 'rebase',
+ merging: 'merging',
merged: 'merged',
mergeChecksFailed: 'mergeChecksFailed',
};
-
-export default {
- stateToComponentMap,
- statesToShowHelpWidget,
-};
diff --git a/app/controllers/concerns/group_tree.rb b/app/controllers/concerns/group_tree.rb
index d076c62c707..35c1f358a77 100644
--- a/app/controllers/concerns/group_tree.rb
+++ b/app/controllers/concerns/group_tree.rb
@@ -38,8 +38,13 @@ module GroupTree
#
# Pagination needs to be applied before loading the ancestors to
# make sure ancestors are not cut off by pagination.
- Gitlab::ObjectHierarchy.new(Group.where(id: filtered_groups.select(:id)))
- .base_and_ancestors
+ filtered_groups_relation = Group.where(id: filtered_groups.select(:id))
+
+ if Feature.enabled?(:linear_group_tree_ancestor_scopes, current_user, default_enabled: :yaml)
+ filtered_groups_relation.self_and_ancestors
+ else
+ Gitlab::ObjectHierarchy.new(filtered_groups_relation).base_and_ancestors
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
end
diff --git a/app/models/concerns/checksummable.rb b/app/models/concerns/checksummable.rb
index 056abafd0ce..9812c62fcc4 100644
--- a/app/models/concerns/checksummable.rb
+++ b/app/models/concerns/checksummable.rb
@@ -8,8 +8,12 @@ module Checksummable
Zlib.crc32(data)
end
- def hexdigest(path)
+ def sha256_hexdigest(path)
::Digest::SHA256.file(path).hexdigest
end
+
+ def md5_hexdigest(path)
+ ::Digest::MD5.file(path).hexdigest
+ end
end
end
diff --git a/app/models/group.rb b/app/models/group.rb
index 437c750afa6..23b0d7e2197 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -192,9 +192,15 @@ class Group < Namespace
# Returns the ids of the passed group models where the `emails_disabled`
# column is set to true anywhere in the ancestor hierarchy.
def ids_with_disabled_email(groups)
- innner_query = Gitlab::ObjectHierarchy
- .new(Group.where('id = namespaces_with_emails_disabled.id'))
- .base_and_ancestors
+ inner_groups = Group.where('id = namespaces_with_emails_disabled.id')
+
+ inner_ancestors = if Feature.enabled?(:linear_group_ancestor_scopes, default_enabled: :yaml)
+ inner_groups.self_and_ancestors
+ else
+ Gitlab::ObjectHierarchy.new(inner_groups).base_and_ancestors
+ end
+
+ inner_query = inner_ancestors
.where(emails_disabled: true)
.select('1')
.limit(1)
@@ -202,7 +208,7 @@ class Group < Namespace
group_ids = Namespace
.from('(SELECT * FROM namespaces) as namespaces_with_emails_disabled')
.where(namespaces_with_emails_disabled: { id: groups })
- .where('EXISTS (?)', innner_query)
+ .where('EXISTS (?)', inner_query)
.pluck(:id)
Set.new(group_ids)
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index 53e7d52c558..9765ac6f2e9 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -49,7 +49,7 @@ class LfsObject < ApplicationRecord
end
def self.calculate_oid(path)
- self.hexdigest(path)
+ self.sha256_hexdigest(path)
end
end
diff --git a/app/models/upload.rb b/app/models/upload.rb
index 0a4acdfc7e3..d16a2d1bd84 100644
--- a/app/models/upload.rb
+++ b/app/models/upload.rb
@@ -67,7 +67,7 @@ class Upload < ApplicationRecord
self.checksum = nil
return unless needs_checksum?
- self.checksum = self.class.hexdigest(absolute_path)
+ self.checksum = self.class.sha256_hexdigest(absolute_path)
end
# Initialize the associated Uploader class with current model
diff --git a/app/presenters/clusters/cluster_presenter.rb b/app/presenters/clusters/cluster_presenter.rb
index 892ea14267b..ce060476cfd 100644
--- a/app/presenters/clusters/cluster_presenter.rb
+++ b/app/presenters/clusters/cluster_presenter.rb
@@ -3,24 +3,11 @@
module Clusters
class ClusterPresenter < Gitlab::View::Presenter::Delegated
include ::Gitlab::Utils::StrongMemoize
- include ActionView::Helpers::SanitizeHelper
- include ActionView::Helpers::UrlHelper
- include IconsHelper
delegator_override_with ::Gitlab::Utils::StrongMemoize # TODO: Remove `::Gitlab::Utils::StrongMemoize` inclusion as it's duplicate
presents ::Clusters::Cluster, as: :cluster
- # We do not want to show the group path for clusters belonging to the
- # clusterable, only for the ancestor clusters.
- def item_link(clusterable_presenter, *html_options)
- if cluster.group_type? && clusterable != clusterable_presenter.subject
- contracted_group_name(cluster.group) + ' / ' + link_to_cluster
- else
- link_to_cluster(*html_options)
- end
- end
-
def provider_label
if aws?
s_('ClusterIntegration|Elastic Kubernetes Service')
@@ -41,16 +28,6 @@ module Clusters
can?(current_user, :read_cluster, cluster)
end
- def cluster_type_description
- if cluster.project_type?
- s_("ClusterIntegration|Project cluster")
- elsif cluster.group_type?
- s_("ClusterIntegration|Group cluster")
- elsif cluster.instance_type?
- s_("ClusterIntegration|Instance cluster")
- end
- end
-
def show_path(params: {})
if cluster.project_type?
project_cluster_path(project, cluster, params)
@@ -109,7 +86,7 @@ module Clusters
private
def image_path(path)
- ActionController::Base.helpers.image_path(path)
+ ApplicationController.helpers.image_path(path)
end
# currently log explorer is only available in the scope of the project
@@ -129,20 +106,6 @@ module Clusters
cluster.project
end
end
-
- def contracted_group_name(group)
- sanitize(group.full_name)
- .sub(%r{\/.*\/}, "/ #{contracted_icon} /")
- .html_safe
- end
-
- def contracted_icon
- sprite_icon('ellipsis_h', size: 12, css_class: 'vertical-align-middle')
- end
-
- def link_to_cluster(html_options: {})
- link_to_if(can_read_cluster?, cluster.name, show_path, html_options)
- end
end
end
diff --git a/app/views/admin/application_settings/_spam.html.haml b/app/views/admin/application_settings/_spam.html.haml
index 011bce3ca99..53ca4d4aa79 100644
--- a/app/views/admin/application_settings/_spam.html.haml
+++ b/app/views/admin/application_settings/_spam.html.haml
@@ -2,6 +2,11 @@
= form_errors(@application_setting)
%fieldset
+ %h5
+ = _('reCAPTCHA')
+ %p
+ = _('reCAPTCHA helps prevent credential stuffing.')
+ = link_to _('Only reCAPTCHA v2 is supported:'), 'https://developers.google.com/recaptcha/docs/versions', target: '_blank', rel: 'noopener noreferrer'
.form-group
.form-check
= f.check_box :recaptcha_enabled, class: 'form-check-input'
@@ -9,25 +14,31 @@
= _("Enable reCAPTCHA")
%span.form-text.text-muted#recaptcha_help_block
= _('Helps prevent bots from creating accounts.')
+ = link_to _('How do I configure it?'), help_page_path('integration/recaptcha.md'), target: '_blank', rel: 'noopener noreferrer'
.form-group
.form-check
= f.check_box :login_recaptcha_protection_enabled, class: 'form-check-input'
= f.label :login_recaptcha_protection_enabled, class: 'form-check-label' do
- = _("Enable reCAPTCHA for login")
+ = _('Enable reCAPTCHA for login.')
%span.form-text.text-muted#recaptcha_help_block
= _('Helps prevent bots from brute-force attacks.')
.form-group
- = f.label :recaptcha_site_key, _('reCAPTCHA Site Key'), class: 'label-bold'
+ = f.label :recaptcha_site_key, _('reCAPTCHA site key'), class: 'label-bold'
= f.text_field :recaptcha_site_key, class: 'form-control gl-form-input'
.form-text.text-muted
= _("Generate site and private keys at")
%a{ href: 'http://www.google.com/recaptcha', target: 'blank' } http://www.google.com/recaptcha
.form-group
- = f.label :recaptcha_private_key, _('reCAPTCHA Private Key'), class: 'label-bold'
- .form-group
+ = f.label :recaptcha_private_key, _('reCAPTCHA private key'), class: 'label-bold'
= f.text_field :recaptcha_private_key, class: 'form-control gl-form-input'
+ %h5
+ = _('Invisible Captcha')
+ %p
+ = _('Invisible Captcha helps prevent the creation of spam accounts. It adds a honeypot field and time-sensitive form submission to the account signup form.')
+ = link_to _('Read their documentation.'), 'https://github.com/markets/invisible_captcha', target: '_blank', rel: 'noopener noreferrer'
+
.form-group
.form-check
= f.check_box :invisible_captcha_enabled, class: 'form-check-input'
@@ -36,12 +47,18 @@
%span.form-text.text-muted
= _('Helps prevent bots from creating accounts.')
+ %h5
+ = _('Akismet')
+ %p
+ = _('Akismet helps prevent the creation of spam issues in public projects.')
+ = link_to _('How do I configure Akismet?'), help_page_path('integration/akismet.md'), target: '_blank', rel: 'noopener noreferrer'
+
.form-group
.form-check
= f.check_box :akismet_enabled, class: 'form-check-input'
= f.label :akismet_enabled, class: 'form-check-label' do
Enable Akismet
- %span.form-text.text-muted#akismet_help_block= _("Helps prevent bots from creating issues")
+ %span.form-text.text-muted#akismet_help_block= _("Helps prevent bots from creating issues.")
.form-group
= f.label :akismet_api_key, _('Akismet API Key'), class: 'label-bold'
@@ -50,25 +67,31 @@
Generate API key at
%a{ href: 'http://www.akismet.com', target: 'blank' } http://www.akismet.com
+ %h5
+ = _('IP address restrictions')
+
.form-group
.form-check
= f.check_box :unique_ips_limit_enabled, class: 'form-check-input'
= f.label :unique_ips_limit_enabled, class: 'form-check-label' do
- = _("Limit sign in from multiple ips")
+ = _("Limit sign in from multiple IP addresses")
%span.form-text.text-muted#unique_ip_help_block
- = _("Helps prevent malicious users hide their activity")
+ = _("Helps prevent malicious users hide their activity.")
.form-group
- = f.label :unique_ips_limit_per_user, _('IPs per user'), class: 'label-bold'
+ = f.label :unique_ips_limit_per_user, _('IP addresses per user'), class: 'label-bold'
= f.number_field :unique_ips_limit_per_user, class: 'form-control gl-form-input'
.form-text.text-muted
- = _("Maximum number of unique IPs per user")
+ = _("Maximum number of unique IP addresses per user.")
.form-group
- = f.label :unique_ips_limit_time_window, _('IP expiration time'), class: 'label-bold'
+ = f.label :unique_ips_limit_time_window, _('IP address expiration time'), class: 'label-bold'
= f.number_field :unique_ips_limit_time_window, class: 'form-control gl-form-input'
.form-text.text-muted
- = _("How many seconds an IP will be counted towards the limit")
+ = _("How many seconds an IP counts toward the IP address limit.")
+
+ %h5
+ = _('Spam Check')
.form-group
.form-check
@@ -79,8 +102,8 @@
= f.label :spam_check_endpoint_url, _('URL of the external Spam Check endpoint'), class: 'label-bold'
= f.text_field :spam_check_endpoint_url, class: 'form-control gl-form-input'
.form-group
- = f.label :spam_check_api_key, _('Spam Check API Key'), class: 'gl-font-weight-bold'
+ = f.label :spam_check_api_key, _('Spam Check API key'), class: 'gl-font-weight-bold'
= f.text_field :spam_check_api_key, class: 'form-control gl-form-input'
- .form-text.text-muted= _('The API key used by GitLab for accessing the Spam Check service endpoint')
+ .form-text.text-muted= _('The API key used by GitLab for accessing the Spam Check service endpoint.')
= f.submit _('Save changes'), class: "gl-button btn btn-confirm"
diff --git a/app/views/admin/application_settings/reporting.html.haml b/app/views/admin/application_settings/reporting.html.haml
index 914a09ff5db..f05231a3cd2 100644
--- a/app/views/admin/application_settings/reporting.html.haml
+++ b/app/views/admin/application_settings/reporting.html.haml
@@ -9,9 +9,7 @@
%button.btn.gl-button.btn-default.js-settings-toggle{ type: 'button' }
= expanded_by_default? ? _('Collapse') : _('Expand')
%p
- - recaptcha_v2_link_url = 'https://developers.google.com/recaptcha/docs/versions'
- - recaptcha_v2_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: recaptcha_v2_link_url }
- = _('Enable reCAPTCHA, Invisible Captcha, Akismet and set IP limits. For reCAPTCHA, we currently only support %{recaptcha_v2_link_start}v2%{recaptcha_v2_link_end}').html_safe % { recaptcha_v2_link_start: recaptcha_v2_link_start, recaptcha_v2_link_end: '</a>'.html_safe }
+ = _('Configure CAPTCHAs, IP address limits, and other anti-spam measures.')
.settings-content
= render 'spam'
diff --git a/config/feature_flags/development/linear_group_ancestor_scopes.yml b/config/feature_flags/development/linear_group_ancestor_scopes.yml
new file mode 100644
index 00000000000..f23399c1e6f
--- /dev/null
+++ b/config/feature_flags/development/linear_group_ancestor_scopes.yml
@@ -0,0 +1,8 @@
+---
+name: linear_group_ancestor_scopes
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70495
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341115
+milestone: '14.4'
+type: development
+group: group::access
+default_enabled: false
diff --git a/config/feature_flags/development/linear_group_tree_ancestor_scopes.yml b/config/feature_flags/development/linear_group_tree_ancestor_scopes.yml
new file mode 100644
index 00000000000..3a195242fa1
--- /dev/null
+++ b/config/feature_flags/development/linear_group_tree_ancestor_scopes.yml
@@ -0,0 +1,8 @@
+---
+name: linear_group_tree_ancestor_scopes
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70503
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341117
+milestone: '14.4'
+type: development
+group: group::access
+default_enabled: false
diff --git a/db/post_migrate/20210923133143_remove_redundant_taggings_index.rb b/db/post_migrate/20210923133143_remove_redundant_taggings_index.rb
new file mode 100644
index 00000000000..a33885cc87b
--- /dev/null
+++ b/db/post_migrate/20210923133143_remove_redundant_taggings_index.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class RemoveRedundantTaggingsIndex < Gitlab::Database::Migration[1.0]
+ disable_ddl_transaction!
+
+ INDEX_NAME = :index_taggings_on_taggable_id_and_taggable_type
+
+ def up
+ remove_concurrent_index_by_name :taggings, INDEX_NAME
+ end
+
+ def down
+ add_concurrent_index :taggings, [:taggable_id, :taggable_type], name: INDEX_NAME
+ end
+end
diff --git a/db/schema_migrations/20210923133143 b/db/schema_migrations/20210923133143
new file mode 100644
index 00000000000..c0e7bb485fb
--- /dev/null
+++ b/db/schema_migrations/20210923133143
@@ -0,0 +1 @@
+d2736a06009d6232d832a03d6842a81b1de2ce79b901331a0e09ac40fc51a463
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 0d8014e1e0b..364ca21de47 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -26508,8 +26508,6 @@ CREATE UNIQUE INDEX index_system_note_metadata_on_note_id ON system_note_metadat
CREATE INDEX index_taggings_on_tag_id ON taggings USING btree (tag_id);
-CREATE INDEX index_taggings_on_taggable_id_and_taggable_type ON taggings USING btree (taggable_id, taggable_type);
-
CREATE INDEX index_taggings_on_taggable_id_and_taggable_type_and_context ON taggings USING btree (taggable_id, taggable_type, context);
CREATE UNIQUE INDEX index_tags_on_name ON tags USING btree (name);
diff --git a/doc/administration/auth/ldap/ldap-troubleshooting.md b/doc/administration/auth/ldap/ldap-troubleshooting.md
index 223e48a5f4e..784a18d2b29 100644
--- a/doc/administration/auth/ldap/ldap-troubleshooting.md
+++ b/doc/administration/auth/ldap/ldap-troubleshooting.md
@@ -387,7 +387,7 @@ the following are true:
- The configured `admin_group` in the `gitlab.rb` is a CN, rather than a DN or an array.
- This CN falls under the scope of the configured `group_base`.
- The members of the `admin_group` have already signed into GitLab with their LDAP
- credentials. GitLab only grants this administrator access to the users whose
+ credentials. GitLab only grants the Administrator role to the users whose
accounts are already connected to LDAP.
If all the above are true and the users are still not getting access, [run a manual
diff --git a/doc/administration/clusters/kas.md b/doc/administration/clusters/kas.md
index 6afaff73396..226710a8911 100644
--- a/doc/administration/clusters/kas.md
+++ b/doc/administration/clusters/kas.md
@@ -104,7 +104,7 @@ In Omnibus GitLab, find the logs in `/var/log/gitlab/gitlab-kas/`.
See also the [user documentation](../../user/clusters/agent/index.md#troubleshooting)
for troubleshooting problems with individual agents.
-### KAS logs - GitOps: failed to get project info
+### KAS logs - GitOps: failed to get project information
If you get the following error message:
diff --git a/doc/administration/gitaly/troubleshooting.md b/doc/administration/gitaly/troubleshooting.md
index a2b34db5058..4b68edfca17 100644
--- a/doc/administration/gitaly/troubleshooting.md
+++ b/doc/administration/gitaly/troubleshooting.md
@@ -393,7 +393,7 @@ $ sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config
praefect sql-migrate: OK (applied 21 migrations)
```
-### Requests fail with 'repo scoped: invalid Repository' errors
+### Requests fail with 'repository scoped: invalid Repository' errors
This indicates that the virtual storage name used in the
[Praefect configuration](praefect.md#praefect) does not match the storage name used in
diff --git a/doc/administration/instance_review.md b/doc/administration/instance_review.md
index b166bb32aa1..62897651166 100644
--- a/doc/administration/instance_review.md
+++ b/doc/administration/instance_review.md
@@ -12,7 +12,7 @@ If you run a medium-sized self-managed instance (50+ users) of a free version of
[either Community Edition or unlicensed Enterprise Edition](https://about.gitlab.com/install/ce-or-ee/),
you qualify for a free Instance Review.
-1. Sign in as a user with administrator [permissions](../user/permissions.md).
+1. Sign in as a user with the Administrator [role](../user/permissions.md).
1. In the top menu, click your user icon, and select
**Get a free instance review**:
diff --git a/doc/administration/reference_architectures/10k_users.md b/doc/administration/reference_architectures/10k_users.md
index 0f29fe4a5f2..dead537a94e 100644
--- a/doc/administration/reference_architectures/10k_users.md
+++ b/doc/administration/reference_architectures/10k_users.md
@@ -1271,7 +1271,7 @@ the details of each Gitaly node that makes up the cluster. Each storage is also
and this name is used in several areas of the configuration. In this guide, the name of the storage will be
`default`. Also, this guide is geared towards new installs, if upgrading an existing environment
to use Gitaly Cluster, you may need to use a different name.
-Refer to the [Praefect documentation](../gitaly/praefect.md#praefect) for more info.
+Refer to the [Praefect documentation](../gitaly/praefect.md#praefect) for more information.
The following IPs will be used as an example:
diff --git a/doc/administration/reference_architectures/25k_users.md b/doc/administration/reference_architectures/25k_users.md
index 4d51aaa3030..68f86afdcd3 100644
--- a/doc/administration/reference_architectures/25k_users.md
+++ b/doc/administration/reference_architectures/25k_users.md
@@ -1277,7 +1277,7 @@ the details of each Gitaly node that makes up the cluster. Each storage is also
and this name is used in several areas of the configuration. In this guide, the name of the storage will be
`default`. Also, this guide is geared towards new installs, if upgrading an existing environment
to use Gitaly Cluster, you may need to use a different name.
-Refer to the [Praefect documentation](../gitaly/praefect.md#praefect) for more info.
+Refer to the [Praefect documentation](../gitaly/praefect.md#praefect) for more information.
The following IPs will be used as an example:
diff --git a/doc/development/pipelines.md b/doc/development/pipelines.md
index dd45091a31b..f3946131914 100644
--- a/doc/development/pipelines.md
+++ b/doc/development/pipelines.md
@@ -6,8 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Pipelines for the GitLab project
-Pipelines for [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) and [`gitlab-org/gitlab-foss`](https://gitlab.com/gitlab-org/gitlab-foss) (as well as the
-`dev` instance's mirrors) are configured in the usual
+Pipelines for [`gitlab-org/gitlab`](https://gitlab.com/gitlab-org/gitlab) (as well as the `dev` instance's mirror) are configured in the usual
[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml)
which itself includes files under
[`.gitlab/ci/`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/.gitlab/ci)
@@ -17,29 +16,159 @@ We're striving to [dogfood](https://about.gitlab.com/handbook/engineering/#dogfo
GitLab [CI/CD features and best-practices](../ci/yaml/index.md)
as much as possible.
-## Overview
+## Minimal test jobs before a merge request is approved
-Pipelines for the GitLab project are created using the [`workflow:rules` keyword](../ci/yaml/index.md#workflow)
-feature of the GitLab CI/CD.
+**To reduce the pipeline cost and shorten the job duration, before a merge request is approved, the pipeline will run a minimal set of RSpec & Jest tests that are related to the merge request changes.**
-Pipelines are always created for the following scenarios:
+After a merge request has been approved, the pipeline would contain the full RSpec & Jest tests. This will ensure that all tests
+have been run before a merge request is merged.
-- `main` branch, including on schedules, pushes, merges, and so on.
-- Merge requests.
-- Tags.
-- Stable, `auto-deploy`, and security branches.
+### RSpec minimal jobs
-Pipeline creation is also affected by the following CI/CD variables:
+#### Determining related RSpec test files in a merge request
-- If `$FORCE_GITLAB_CI` is set, pipelines are created.
-- If `$GITLAB_INTERNAL` is not set, pipelines are not created.
+To identify the minimal set of tests needed, we use the [`test_file_finder` gem](https://gitlab.com/gitlab-org/ci-cd/test_file_finder), with two strategies:
-No pipeline is created in any other cases (for example, when pushing a branch with no
-MR for it).
+- dynamic mapping from test coverage tracing (generated via the [Crystalball gem](https://github.com/toptal/crystalball))
+ ([see where it's used](https://gitlab.com/gitlab-org/gitlab/-/blob/47d507c93779675d73a05002e2ec9c3c467cd698/tooling/bin/find_tests#L15))
+- static mapping maintained in the [`tests.yml` file](https://gitlab.com/gitlab-org/gitlab/-/blob/master/tests.yml) for special cases that cannot
+ be mapped via coverage tracing ([see where it's used](https://gitlab.com/gitlab-org/gitlab/-/blob/47d507c93779675d73a05002e2ec9c3c467cd698/tooling/bin/find_tests#L12))
-The source of truth for these workflow rules is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
+The test mappings contain a map of each source file to a list of test files that depend on that source file.
+
+In the `detect-tests` job, we use this mapping to identify the minimal tests needed for the current merge request.
+
+#### Exceptional cases
+
+In addition, there are a few circumstances where we would always run the full RSpec tests:
+
+- when the `pipeline:run-all-rspec` label is set on the merge request
+- when the merge request is created by an automation (e.g. Gitaly update or MR targeting a stable branch)
+- when any CI config file is changed (i.e. `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
+
+### Jest minimal jobs
+
+#### Determining related Jest test files in a merge request
+
+To identify the minimal set of tests needed, we pass a list of all the changed files into `jest` using the [`--findRelatedTests`](https://jestjs.io/docs/cli#--findrelatedtests-spaceseparatedlistofsourcefiles) option.
+In this mode, `jest` resolves all the dependencies related to the changed files, which include the test files that have these files in their dependency chain.
+
+#### Exceptional cases
+
+In addition, there are a few circumstances where we would always run the full Jest tests:
+
+- when the `pipeline:run-all-rspec` label is set on the merge request
+- when the merge request is created by an automation (e.g. Gitaly update or MR targeting a stable branch)
+- when any CI config file is changed (i.e. `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
+- when any frontend "core" file is changed (i.e. `package.json`, `yarn.lock`, `babel.config.js`, `jest.config.*.js`, `config/helpers/**/*.js`)
+- when any vendored JavaScript file is changed (i.e. `vendor/assets/javascripts/**/*`)
+- when any backend file is changed ([see the patterns list for details](https://gitlab.com/gitlab-org/gitlab/-/blob/3616946936c1adbd9e754c1bd06f86ba670796d8/.gitlab/ci/rules.gitlab-ci.yml#L205-216))
+
+## Fail-fast job in merge request pipelines
+
+To provide faster feedback when a merge request breaks existing tests, we are experimenting with a
+fail-fast mechanism.
+
+An `rspec fail-fast` job is added in parallel to all other `rspec` jobs in a merge
+request pipeline. This job runs the tests that are directly related to the changes
+in the merge request.
+
+If any of these tests fail, the `rspec fail-fast` job fails, triggering a
+`fail-pipeline-early` job to run. The `fail-pipeline-early` job:
+
+- Cancels the currently running pipeline and all in-progress jobs.
+- Sets pipeline to have status `failed`.
+
+For example:
+
+```mermaid
+graph LR
+ subgraph "prepare stage";
+ A["detect-tests"]
+ end
+
+ subgraph "test stage";
+ B["jest"];
+ C["rspec migration"];
+ D["rspec unit"];
+ E["rspec integration"];
+ F["rspec system"];
+ G["rspec fail-fast"];
+ end
+
+ subgraph "post-test stage";
+ Z["fail-pipeline-early"];
+ end
+
+ A --"artifact: list of test files"--> G
+ G --"on failure"--> Z
+```
+
+The `rspec fail-fast` job is a no-op if there are more than 10 test files related to the
+merge request. This prevents `rspec fail-fast` duration from exceeding the average
+`rspec` job duration and defeating its purpose.
+
+This number can be overridden by setting a CI/CD variable named `RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD`.
+
+## Test jobs
+
+Consult [GitLab tests in the Continuous Integration (CI) context](testing_guide/ci.md)
+for more information.
+
+We have dedicated jobs for each [testing level](testing_guide/testing_levels.md) and each job runs depending on the
+changes made in your merge request.
+If you want to force all the RSpec jobs to run regardless of your changes, you can add the `pipeline:run-all-rspec` label to the merge request.
+
+> Forcing all jobs on docs-only MRs would run them without their prerequisite jobs and would lead to errors.
+
+## Review app jobs
+
+Consult the [Review Apps](testing_guide/review_apps.md) dedicated page for more information.
+
+## As-if-FOSS jobs
+
+The `* as-if-foss` jobs run the GitLab test suite "as-if-FOSS", meaning as if the jobs would run in the context
+of the `gitlab-org/gitlab-foss` project. These jobs are only created in the following cases:
+
+- when the `pipeline:run-as-if-foss` label is set on the merge request
+- when the merge request is created in the `gitlab-org/security/gitlab` project
+- when any CI config file is changed (i.e. `.gitlab-ci.yml` or `.gitlab/ci/**/*`)
+
+The `* as-if-foss` jobs are run in addition to the regular EE-context jobs. They have the `FOSS_ONLY='1'` variable
+set and get their EE-specific folders removed before the tests start running.
+
+The intent is to ensure that a change doesn't introduce a failure after the `gitlab-org/gitlab` project is synced to
+the `gitlab-org/gitlab-foss` project.
+
+## PostgreSQL versions testing
+
+Our test suite runs against PG12 as GitLab.com runs on PG12 and
+[Omnibus defaults to PG12 for new installs and upgrades](../administration/package_information/postgresql_versions.md).
-### Pipelines for Merge Requests
+We do run our test suite against PG11 and PG13 on nightly scheduled pipelines.
+
+We also run our test suite against PG11 upon specific database library changes in MRs and `main` pipelines (with the `rspec db-library-code pg11` job).
+
+### Current versions testing
+
+| Where? | PostgreSQL version |
+| ------ | ------------------ |
+| MRs | 12, 11 for DB library changes |
+| `main` (non-scheduled pipelines) | 12, 11 for DB library changes |
+| 2-hourly scheduled pipelines | 12, 11 for DB library changes |
+| `nightly` scheduled pipelines | 12, 11, 13 |
+
+### Long-term plan
+
+We follow the [PostgreSQL versions shipped with Omnibus GitLab](../administration/package_information/postgresql_versions.md):
+
+| PostgreSQL version | 14.1 (July 2021) | 14.2 (August 2021) | 14.3 (September 2021) | 14.4 (October 2021) | 14.5 (November 2021) | 14.6 (December 2021) |
+| -------------------| ---------------------- | ---------------------- | ---------------------- | ---------------------- | ---------------------- | ---------------------- |
+| PG12 | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` |
+| PG11 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
+| PG13 | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` | `nightly` |
+
+## Pipelines types for merge requests
In general, pipelines for an MR fall into one or more of the following types,
depending on the changes made in the MR:
@@ -53,7 +182,7 @@ We use the [`rules:`](../ci/yaml/index.md#rules) and [`needs:`](../ci/yaml/index
to determine the jobs that need to be run in a pipeline. Note that an MR that includes multiple types of changes would
have pipelines that include jobs from multiple types (for example, a combination of docs-only and code-only pipelines).
-#### Documentation only MR pipeline
+### Documentation only MR pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/-/pipelines/250546928):
@@ -71,7 +200,7 @@ graph LR
end
```
-#### Code-only MR pipeline
+### Code-only MR pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/136295694)
@@ -173,7 +302,7 @@ graph RL;
end
```
-#### Frontend-only MR pipeline
+### Frontend-only MR pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/134661039):
@@ -299,7 +428,7 @@ graph RL;
end
```
-#### QA-only MR pipeline
+### QA-only MR pipeline
[Reference pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/134645109):
@@ -358,261 +487,53 @@ graph RL;
end
```
-### Fail-fast pipeline in Merge Requests
-
-To provide faster feedback when a Merge Request breaks existing tests, we are experimenting with a
-fail-fast mechanism.
-
-An `rspec fail-fast` job is added in parallel to all other `rspec` jobs in a Merge
-Request pipeline. This job runs the tests that are directly related to the changes
-in the Merge Request.
-
-If any of these tests fail, the `rspec fail-fast` job fails, triggering a
-`fail-pipeline-early` job to run. The `fail-pipeline-early` job:
-
-- Cancels the currently running pipeline and all in-progress jobs.
-- Sets pipeline to have status `failed`.
-
-For example:
-
-```mermaid
-graph LR
- subgraph "prepare stage";
- A["detect-tests"]
- end
-
- subgraph "test stage";
- B["jest"];
- C["rspec migration"];
- D["rspec unit"];
- E["rspec integration"];
- F["rspec system"];
- G["rspec fail-fast"];
- end
-
- subgraph "post-test stage";
- Z["fail-pipeline-early"];
- end
-
- A --"artifact: list of test files"--> G
- G --"on failure"--> Z
-```
-
-A Merge Request author may choose to opt-out of the fail fast mechanism by doing one of the following:
-
-- Adding the `pipeline:skip-rspec-fail-fast` label to the merge request
-- Starting the `dont-interrupt-me` job found in the `sync` stage of a Merge Request pipeline.
-
-The `rspec fail-fast` is a no-op if there are more than 10 test files related to the
-Merge Request. This prevents `rspec fail-fast` duration from exceeding the average
-`rspec` job duration and defeating its purpose.
-
-This number can be overridden by setting a CI/CD variable named `RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD`.
-
-NOTE:
-This experiment is only enabled when the CI/CD variable `RSPEC_FAIL_FAST_ENABLED=true` is set.
-
-#### Determining related test files in a Merge Request
-
-The test files related to the Merge Request are determined using the [`test_file_finder`](https://gitlab.com/gitlab-org/ci-cd/test_file_finder) gem.
-We are using a custom mapping between source file to test files, maintained in the `tests.yml` file.
-
-### RSpec minimal jobs
-
-Before a merge request is approved, the pipeline will run a minimal set of RSpec tests that are related to the merge request changes.
-This is to reduce the pipeline cost and shorten the job duration.
-
-To identify the minimal set of tests needed, we use [Crystalball gem](https://github.com/toptal/crystalball) to create a test mapping.
-The test mapping contains a map of each source files to a list of test files which is dependent of the source file.
-This mapping is currently generated using a combination of test coverage tracing and a static mapping.
-In the `detect-tests` job, we use this mapping to identify the minimal tests needed for the current Merge Request.
-
-After a merge request has been approved, the pipeline would contain the full RSpec tests. This will ensure that all tests
-have been run before a merge request is merged.
-
-### Jest minimal jobs
-
-Before a merge request is approved, the pipeline will run a minimal set of Jest tests that are related to the merge request changes.
-This is to reduce the pipeline cost and shorten the job duration.
-
-To identify the minimal set of tests needed, we pass a list of all the changed files into `jest` using the [`--findRelatedTests`](https://jestjs.io/docs/cli#--findrelatedtests-spaceseparatedlistofsourcefiles) option.
-In this mode, `jest` would resolve all the dependencies of related to the changed files, which include test files that have these files in the dependency chain.
-
-After a merge request has been approved, the pipeline would contain the full Jest tests. This will ensure that all tests
-have been run before a merge request is merged.
-
-In addition, there are a few circumstances where we would always run the full Jest tests:
-
-- when `package.json`, `yarn.lock`, `jest` config changes
-- when vendored JavaScript is changed
-- when `.graphql` files are changed
-
-### PostgreSQL versions testing
-
-Our test suite runs against PG12 as GitLab.com runs on PG12 and
-[Omnibus defaults to PG12 for new installs and upgrades](../administration/package_information/postgresql_versions.md),
-Our test suite is currently running against PG11, since GitLab.com still runs on PG11.
-
-We do run our test suite against PG11 on nightly scheduled pipelines as well as upon specific
-database library changes in MRs and `main` pipelines (with the `rspec db-library-code pg11` job).
-
-#### Current versions testing
-
-| Where? | PostgreSQL version |
-| ------ | ------------------ |
-| MRs | 12, 11 for DB library changes |
-| `main` (non-scheduled pipelines) | 12, 11 for DB library changes |
-| 2-hourly scheduled pipelines | 12, 11 for DB library changes |
-| `nightly` scheduled pipelines | 12, 11 |
-
-#### Long-term plan
-
-We follow the [PostgreSQL versions shipped with Omnibus GitLab](../administration/package_information/postgresql_versions.md):
-
-| PostgreSQL version | 13.11 (April 2021) | 13.12 (May 2021) | 14.0 (June 2021?) |
-| -------------------| ---------------------- | ---------------------- | ---------------------- |
-| PG12 | `nightly` | MRs/`2-hour`/`nightly` | MRs/`2-hour`/`nightly` |
-| PG11 | MRs/`2-hour`/`nightly` | `nightly` | `nightly` |
-
-### Test jobs
-
-Consult [GitLab tests in the Continuous Integration (CI) context](testing_guide/ci.md)
-for more information.
-
-We have dedicated jobs for each [testing level](testing_guide/testing_levels.md) and each job runs depending on the
-changes made in your merge request.
-If you want to force all the RSpec jobs to run regardless of your changes, you can add the `pipeline:run-all-rspec` label to the merge request.
-
-> Forcing all jobs on docs only related MRs would not have the prerequisite jobs and would lead to errors
-
-### Review app jobs
-
-Consult the [Review Apps](testing_guide/review_apps.md) dedicated page for more information.
-
-### As-if-FOSS jobs
-
-The `* as-if-foss` jobs allows the GitLab test suite "as-if-FOSS", meaning as if the jobs would run in the context
-of the `gitlab-org/gitlab-foss` project. These jobs are only created in the following cases:
-
-- `gitlab-org/security/gitlab` merge requests.
-- Merge requests with the `pipeline:run-as-if-foss` label
-- Merge requests that changes the CI configuration.
-
-The `* as-if-foss` jobs are run in addition to the regular EE-context jobs. They have the `FOSS_ONLY='1'` variable
-set and get their EE-specific folders removed before the tests start running.
-
-The intent is to ensure that a change doesn't introduce a failure after the `gitlab-org/gitlab` project is synced to
-the `gitlab-org/gitlab-foss` project.
-
-## Performance
-
-### Interruptible pipelines
-
-By default, all jobs are [interruptible](../ci/yaml/index.md#interruptible), except the
-`dont-interrupt-me` job which runs automatically on `main`, and is `manual`
-otherwise.
-
-If you want a running pipeline to finish even if you push new commits to a merge
-request, be sure to start the `dont-interrupt-me` job before pushing.
-
-### Caching strategy
-
-1. All jobs must only pull caches by default.
-1. All jobs must be able to pass with an empty cache. In other words, caches are only there to speed up jobs.
-1. We currently have several different cache definitions defined in
- [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml),
- with fixed keys:
- - `.setup-test-env-cache`
- - `.rails-cache`
- - `.static-analysis-cache`
- - `.coverage-cache`
- - `.danger-review-cache`
- - `.qa-cache`
- - `.yarn-cache`
- - `.assets-compile-cache` (the key includes `${NODE_ENV}` so it's actually two different caches).
-1. These cache definitions are composed of [multiple atomic caches](../ci/caching/index.md#use-multiple-caches).
-1. Only the following jobs, running in 2-hourly scheduled pipelines, are pushing (that is, updating) to the caches:
- - `update-setup-test-env-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- - `update-gitaly-binaries-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- - `update-static-analysis-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- - `update-qa-cache`, defined in [`.gitlab/ci/qa.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/qa.gitlab-ci.yml).
- - `update-assets-compile-production-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- - `update-assets-compile-test-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- - `update-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- - `update-storybook-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
-1. These jobs can also be forced to run in merge requests with the `pipeline:update-cache` label (this can be useful to warm the caches in a MR that updates the cache keys).
-
-### Artifacts strategy
+## CI configuration internals
-We limit the artifacts that are saved and retrieved by jobs to the minimum in order to reduce the upload/download time and costs, as well as the artifacts storage.
+### Workflow rules
-### Pre-clone step
+Pipelines for the GitLab project are created using the [`workflow:rules` keyword](../ci/yaml/index.md#workflow)
+feature of the GitLab CI/CD.
-The `gitlab-org/gitlab` project on GitLab.com uses a [pre-clone step](https://gitlab.com/gitlab-org/gitlab/-/issues/39134)
-to seed the project with a recent archive of the repository. This is done for
-several reasons:
+Pipelines are always created for the following scenarios:
-- It speeds up builds because a 800 MB download only takes seconds, as opposed to a full Git clone.
-- It significantly reduces load on the file server, as smaller deltas mean less time spent in `git pack-objects`.
+- `main` branch, including on schedules, pushes, merges, and so on.
+- Merge requests.
+- Tags.
+- Stable, `auto-deploy`, and security branches.
-The pre-clone step works by using the `CI_PRE_CLONE_SCRIPT` variable
-[defined by GitLab.com shared runners](../ci/runners/build_cloud/linux_build_cloud.md#pre-clone-script).
+Pipeline creation is also affected by the following CI/CD variables:
-The `CI_PRE_CLONE_SCRIPT` is currently defined as a project CI/CD variable:
+- If `$FORCE_GITLAB_CI` is set, pipelines are created.
+- If `$GITLAB_INTERNAL` is not set, pipelines are not created.
-```shell
-(
- echo "Downloading archived master..."
- wget -O /tmp/gitlab.tar.gz https://storage.googleapis.com/gitlab-ci-git-repo-cache/project-278964/gitlab-master-shallow.tar.gz
+No pipeline is created in any other cases (for example, when pushing a branch with no
+MR for it).
- if [ ! -f /tmp/gitlab.tar.gz ]; then
- echo "Repository cache not available, cloning a new directory..."
- exit
- fi
+The source of truth for these workflow rules is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
- rm -rf $CI_PROJECT_DIR
- echo "Extracting tarball into $CI_PROJECT_DIR..."
- mkdir -p $CI_PROJECT_DIR
- cd $CI_PROJECT_DIR
- tar xzf /tmp/gitlab.tar.gz
- rm -f /tmp/gitlab.tar.gz
- chmod a+w $CI_PROJECT_DIR
-)
-```
+### Default image
-The first step of the script downloads `gitlab-master.tar.gz` from
-Google Cloud Storage. There is a [GitLab CI job named `cache-repo`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/cache-repo.gitlab-ci.yml#L5)
-that is responsible for keeping that archive up-to-date. Every two hours
-on a scheduled pipeline, it does the following:
+The default image is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
-1. Creates a fresh clone of the `gitlab-org/gitlab` repository on GitLab.com.
-1. Saves the data as a `.tar.gz`.
-1. Uploads it into the Google Cloud Storage bucket.
+<!-- vale gitlab.Spelling = NO -->
-When a CI job runs with this configuration, the output looks something like this:
+It includes Ruby, Go, Git, Git LFS, Chrome, Node, Yarn, PostgreSQL, and Graphics Magick.
-```shell
-$ eval "$CI_PRE_CLONE_SCRIPT"
-Downloading archived master...
-Extracting tarball into /builds/group/project...
-Fetching changes...
-Reinitialized existing Git repository in /builds/group/project/.git/
-```
+<!-- vale gitlab.Spelling = YES -->
-Note that the `Reinitialized existing Git repository` message shows that
-the pre-clone step worked. The runner runs `git init`, which
-overwrites the Git configuration with the appropriate settings to fetch
-from the GitLab repository.
+The images used in our pipelines are configured in the
+[`gitlab-org/gitlab-build-images`](https://gitlab.com/gitlab-org/gitlab-build-images)
+project, which is push-mirrored to [`gitlab/gitlab-build-images`](https://dev.gitlab.org/gitlab/gitlab-build-images)
+for redundancy.
-`CI_REPO_CACHE_CREDENTIALS` contains the Google Cloud service account
-JSON for uploading to the `gitlab-ci-git-repo-cache` bucket. (If you're a
-GitLab Team Member, find credentials in the
-[GitLab shared 1Password account](https://about.gitlab.com/handbook/security/#1password-for-teams).
+The current version of the build images can be found in the
+["Used by GitLab section"](https://gitlab.com/gitlab-org/gitlab-build-images/blob/master/.gitlab-ci.yml).
-Note that this bucket should be located in the same continent as the
-runner, or [you can incur network egress charges](https://cloud.google.com/storage/pricing).
+### Default variables
-## CI configuration internals
+In addition to the [predefined CI/CD variables](../ci/variables/predefined_variables.md),
+each pipeline includes default variables defined in
+[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
### Stages
@@ -644,24 +565,6 @@ that is deployed in stage `review`.
[an issue with the deployment](https://gitlab.com/gitlab-org/gitlab/-/issues/233458)).
- `notify`: This stage includes jobs that notify Slack of various failures.
-### Default image
-
-The default image is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
-
-<!-- vale gitlab.Spelling = NO -->
-
-It includes Ruby, Go, Git, Git LFS, Chrome, Node, Yarn, PostgreSQL, and Graphics Magick.
-
-<!-- vale gitlab.Spelling = YES -->
-
-The images used in our pipelines are configured in the
-[`gitlab-org/gitlab-build-images`](https://gitlab.com/gitlab-org/gitlab-build-images)
-project, which is push-mirrored to [`gitlab/gitlab-build-images`](https://dev.gitlab.org/gitlab/gitlab-build-images)
-for redundancy.
-
-The current version of the build images can be found in the
-["Used by GitLab section"](https://gitlab.com/gitlab-org/gitlab-build-images/blob/master/.gitlab-ci.yml).
-
### Dependency Proxy
Some of the jobs are using images from Docker Hub, where we also use
@@ -681,12 +584,6 @@ Projects in the `gitlab-org` group pull from the Dependency Proxy, while
forks that reside on any other personal namespaces or groups fall back to
Docker Hub unless `${GITLAB_DEPENDENCY_PROXY}` is also defined there.
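For example, a job can prefix its image with the variable so that the image resolves to
the Dependency Proxy when the prefix is defined, and to Docker Hub when it is empty. The
job below is hypothetical and only illustrates the pattern:

```yaml
# Hypothetical job: ${GITLAB_DEPENDENCY_PROXY} is empty on forks that don't define it.
example-alpine-job:
  image: ${GITLAB_DEPENDENCY_PROXY}alpine:3.14
  script:
    - echo "Pulled through the Dependency Proxy when the prefix is set."
```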
-### Default variables
-
-In addition to the [predefined CI/CD variables](../ci/variables/predefined_variables.md),
-each pipeline includes default variables defined in
-[`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
-
### Common job definitions
Most of the jobs [extend from a few CI definitions](../ci/yaml/index.md#extends)
@@ -756,8 +653,6 @@ and included in `rules` definitions via [YAML anchors](../ci/yaml/index.md#ancho
| `if-dot-com-gitlab-org-and-security-tag` | Limits job creation to tags for the `gitlab-org` and `gitlab-org/security` groups on GitLab.com. | |
| `if-dot-com-ee-schedule` | Limits jobs to scheduled pipelines for the `gitlab-org/gitlab` project on GitLab.com. | |
| `if-cache-credentials-schedule` | Limits jobs to scheduled pipelines with the `$CI_REPO_CACHE_CREDENTIALS` variable set. | |
-| `if-rspec-fail-fast-disabled` | Limits jobs to pipelines with `$RSPEC_FAIL_FAST_ENABLED` CI/CD variable not set to `"true"`. | |
-| `if-rspec-fail-fast-skipped` | Matches if the pipeline is for a merge request and the MR has label ~"pipeline:skip-rspec-fail-fast". | |
| `if-security-pipeline-merge-result` | Matches if the pipeline is for a security merge request triggered by `@gitlab-release-tools-bot`. | |
<!-- vale gitlab.Substitutions = YES -->
@@ -783,6 +678,114 @@ and included in `rules` definitions via [YAML anchors](../ci/yaml/index.md#ancho
| `code-qa-patterns` | Combination of `code-patterns` and `qa-patterns`. |
| `code-backstage-qa-patterns` | Combination of `code-patterns`, `backstage-patterns`, and `qa-patterns`. |
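These `if-` conditions and `-patterns` lists are combined in job `rules` through YAML
anchors. The example below is hypothetical and only sketches the mechanism; the names and
paths are assumptions, not the real definitions:

```yaml
.if-merge-request: &if-merge-request
  if: '$CI_MERGE_REQUEST_IID'

.code-patterns: &code-patterns
  - "{package.json,yarn.lock}"
  - "app/**/*"
  - "lib/**/*"

# Hypothetical job: runs only for merge requests that touch code paths.
example-code-job:
  rules:
    - <<: *if-merge-request
      changes: *code-patterns
  script:
    - echo "Runs only when the rule above matches."
```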
+## Performance
+
+### Interruptible pipelines
+
+By default, all jobs are [interruptible](../ci/yaml/index.md#interruptible), except the
+`dont-interrupt-me` job, which runs automatically on `main` and is `manual`
+otherwise.
+
+If you want a running pipeline to finish even if you push new commits to a merge
+request, be sure to start the `dont-interrupt-me` job before pushing.
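+
+A minimal sketch of this setup is shown below; the `dont-interrupt-me` job details are
+assumptions for illustration, not the actual definition:
+
+```yaml
+# All jobs are interruptible unless they opt out.
+default:
+  interruptible: true
+
+# Illustrative approximation of the dont-interrupt-me job.
+dont-interrupt-me:
+  interruptible: false
+  script:
+    - echo "Once I run, this pipeline can't be canceled by newer pipelines."
+  rules:
+    - if: '$CI_COMMIT_REF_NAME == "main"'
+      when: on_success
+    - when: manual
+      allow_failure: true
+```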
+
+### Caching strategy
+
+1. All jobs must only pull caches by default.
+1. All jobs must be able to pass with an empty cache. In other words, caches are only there to speed up jobs.
+1. We currently have several different cache definitions defined in
+ [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/global.gitlab-ci.yml),
+ with fixed keys:
+ - `.setup-test-env-cache`
+ - `.rails-cache`
+ - `.static-analysis-cache`
+ - `.coverage-cache`
+ - `.danger-review-cache`
+ - `.qa-cache`
+ - `.yarn-cache`
+ - `.assets-compile-cache` (the key includes `${NODE_ENV}` so it's actually two different caches).
+1. These cache definitions are composed of [multiple atomic caches](../ci/caching/index.md#use-multiple-caches).
+1. Only the following jobs, which run in 2-hourly scheduled pipelines, push to (that is, update) the caches (see the sketch after this list):
+ - `update-setup-test-env-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
+ - `update-gitaly-binaries-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
+ - `update-static-analysis-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
+ - `update-qa-cache`, defined in [`.gitlab/ci/qa.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/qa.gitlab-ci.yml).
+ - `update-assets-compile-production-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
+ - `update-assets-compile-test-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
+ - `update-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
+ - `update-storybook-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
+1. These jobs can also be forced to run in merge requests with the `pipeline:update-cache` label (this can be useful to warm the caches in an MR that updates the cache keys).
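+
+The sketch below shows the pull/push split using `.yarn-cache` and `update-yarn-cache` as
+an example. It is a simplified approximation: the key, the paths, and the
+`example-frontend-job` name are assumptions, not the real definitions.
+
+```yaml
+# Hidden definition with a fixed cache key; regular jobs only download it.
+.yarn-cache:
+  cache:
+    key: "yarn-cache-fixed-key"   # assumed key
+    paths:
+      - node_modules/
+    policy: pull
+
+# Hypothetical consumer job: must also pass when the cache is empty.
+example-frontend-job:
+  extends: .yarn-cache
+  script:
+    - yarn install --frozen-lockfile
+
+# Only the scheduled job pushes a fresh cache (policy is deep-merged over .yarn-cache).
+update-yarn-cache:
+  extends: .yarn-cache
+  cache:
+    policy: push
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "schedule"'
+  script:
+    - yarn install --frozen-lockfile
+```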
+
+### Artifacts strategy
+
+We limit the artifacts that jobs save and retrieve to the minimum, in order to reduce upload and download time and costs, as well as artifact storage.
+
+### Pre-clone step
+
+The `gitlab-org/gitlab` project on GitLab.com uses a [pre-clone step](https://gitlab.com/gitlab-org/gitlab/-/issues/39134)
+to seed the project with a recent archive of the repository. This is done for
+several reasons:
+
+- It speeds up builds because an 800 MB download only takes seconds, as opposed to a full Git clone.
+- It significantly reduces load on the file server, as smaller deltas mean less time spent in `git pack-objects`.
+
+The pre-clone step works by using the `CI_PRE_CLONE_SCRIPT` variable
+[defined by GitLab.com shared runners](../ci/runners/build_cloud/linux_build_cloud.md#pre-clone-script).
+
+The `CI_PRE_CLONE_SCRIPT` is currently defined as a project CI/CD variable:
+
+```shell
+(
+ echo "Downloading archived master..."
+ wget -O /tmp/gitlab.tar.gz https://storage.googleapis.com/gitlab-ci-git-repo-cache/project-278964/gitlab-master-shallow.tar.gz
+
+ if [ ! -f /tmp/gitlab.tar.gz ]; then
+ echo "Repository cache not available, cloning a new directory..."
+ exit
+ fi
+
+ rm -rf $CI_PROJECT_DIR
+ echo "Extracting tarball into $CI_PROJECT_DIR..."
+ mkdir -p $CI_PROJECT_DIR
+ cd $CI_PROJECT_DIR
+ tar xzf /tmp/gitlab.tar.gz
+ rm -f /tmp/gitlab.tar.gz
+ chmod a+w $CI_PROJECT_DIR
+)
+```
+
+The first step of the script downloads `gitlab-master-shallow.tar.gz` from
+Google Cloud Storage. There is a [GitLab CI job named `cache-repo`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/cache-repo.gitlab-ci.yml#L5)
+that is responsible for keeping that archive up-to-date. Every two hours
+on a scheduled pipeline, it does the following:
+
+1. Creates a fresh clone of the `gitlab-org/gitlab` repository on GitLab.com.
+1. Saves the data as a `.tar.gz`.
+1. Uploads it into the Google Cloud Storage bucket.
+
+When a CI job runs with this configuration, the output looks something like this:
+
+```shell
+$ eval "$CI_PRE_CLONE_SCRIPT"
+Downloading archived master...
+Extracting tarball into /builds/group/project...
+Fetching changes...
+Reinitialized existing Git repository in /builds/group/project/.git/
+```
+
+Note that the `Reinitialized existing Git repository` message shows that
+the pre-clone step worked. The runner runs `git init`, which
+overwrites the Git configuration with the appropriate settings to fetch
+from the GitLab repository.
+
+`CI_REPO_CACHE_CREDENTIALS` contains the Google Cloud service account
+JSON for uploading to the `gitlab-ci-git-repo-cache` bucket. If you're a
+GitLab Team Member, you can find the credentials in the
+[GitLab shared 1Password account](https://about.gitlab.com/handbook/security/#1password-for-teams).
+
+This bucket should be located in the same continent as the
+runner, or you can incur [network egress charges](https://cloud.google.com/storage/pricing).
+
---
[Return to Development documentation](index.md)
diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb
index 72a94dcd412..efdbe7deae3 100644
--- a/lib/gitlab/ci/trace.rb
+++ b/lib/gitlab/ci/trace.rb
@@ -261,7 +261,7 @@ module Gitlab
project: job.project,
file_type: :trace,
file: stream,
- file_sha256: self.class.hexdigest(path))
+ file_sha256: self.class.sha256_hexdigest(path))
trace_metadata.track_archival!(trace_artifact.id)
end
diff --git a/lib/gitlab/verify/uploads.rb b/lib/gitlab/verify/uploads.rb
index afcdbd087d2..0faf794e14d 100644
--- a/lib/gitlab/verify/uploads.rb
+++ b/lib/gitlab/verify/uploads.rb
@@ -28,7 +28,7 @@ module Gitlab
end
def actual_checksum(upload)
- Upload.hexdigest(upload.absolute_path)
+ Upload.sha256_hexdigest(upload.absolute_path)
end
def remote_object_exists?(upload)
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index a355063d5ea..f6c91b6df33 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -2915,9 +2915,15 @@ msgstr ""
msgid "After you've reviewed these contribution guidelines, you'll be all set to"
msgstr ""
+msgid "Akismet"
+msgstr ""
+
msgid "Akismet API Key"
msgstr ""
+msgid "Akismet helps prevent the creation of spam issues in public projects."
+msgstr ""
+
msgid "AlertManagement|Acknowledged"
msgstr ""
@@ -8533,6 +8539,9 @@ msgstr ""
msgid "Configure %{repository_checks_link_start}repository checks%{link_end} and %{housekeeping_link_start}housekeeping%{link_end} on repositories."
msgstr ""
+msgid "Configure CAPTCHAs, IP address limits, and other anti-spam measures."
+msgstr ""
+
msgid "Configure Dependency Scanning in `.gitlab-ci.yml` using the GitLab managed template. You can [add variable overrides](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings) to customize Dependency Scanning settings."
msgstr ""
@@ -12629,10 +12638,7 @@ msgstr ""
msgid "Enable reCAPTCHA"
msgstr ""
-msgid "Enable reCAPTCHA for login"
-msgstr ""
-
-msgid "Enable reCAPTCHA, Invisible Captcha, Akismet and set IP limits. For reCAPTCHA, we currently only support %{recaptcha_v2_link_start}v2%{recaptcha_v2_link_end}"
+msgid "Enable reCAPTCHA for login."
msgstr ""
msgid "Enable repository checks"
@@ -16706,10 +16712,10 @@ msgstr ""
msgid "Helps prevent bots from creating accounts."
msgstr ""
-msgid "Helps prevent bots from creating issues"
+msgid "Helps prevent bots from creating issues."
msgstr ""
-msgid "Helps prevent malicious users hide their activity"
+msgid "Helps prevent malicious users hide their activity."
msgstr ""
msgid "Helps reduce request volume (for example, from crawlers or abusive bots)"
@@ -16833,6 +16839,12 @@ msgstr ""
msgid "Housekeeping, export, path, transfer, remove, archive."
msgstr ""
+msgid "How do I configure Akismet?"
+msgstr ""
+
+msgid "How do I configure it?"
+msgstr ""
+
msgid "How do I configure runners?"
msgstr ""
@@ -16866,7 +16878,7 @@ msgstr ""
msgid "How many days need to pass between marking entity for deletion and actual removing it."
msgstr ""
-msgid "How many seconds an IP will be counted towards the limit"
+msgid "How many seconds an IP counts toward the IP address limit."
msgstr ""
msgid "How the job limiter handles jobs exceeding the thresholds specified below. The 'track' mode only logs the jobs. The 'compress' mode compresses the jobs and raises an exception if the compressed size exceeds the limit."
@@ -16956,13 +16968,16 @@ msgstr ""
msgid "IP Address"
msgstr ""
-msgid "IP expiration time"
+msgid "IP address expiration time"
msgstr ""
-msgid "IP subnet restriction only allowed for top-level groups"
+msgid "IP address restrictions"
+msgstr ""
+
+msgid "IP addresses per user"
msgstr ""
-msgid "IPs per user"
+msgid "IP subnet restriction only allowed for top-level groups"
msgstr ""
msgid "Identifier"
@@ -18455,6 +18470,12 @@ msgstr ""
msgid "Investigate vulnerability: %{title}"
msgstr ""
+msgid "Invisible Captcha"
+msgstr ""
+
+msgid "Invisible Captcha helps prevent the creation of spam accounts. It adds a honeypot field and time-sensitive form submission to the account signup form."
+msgstr ""
+
msgid "Invitation"
msgstr ""
@@ -20155,10 +20176,10 @@ msgstr ""
msgid "LicenseCompliance|Acceptable license to be used in the project"
msgstr ""
-msgid "LicenseCompliance|Add a license"
+msgid "LicenseCompliance|Add license and related policy"
msgstr ""
-msgid "LicenseCompliance|Add license and related policy"
+msgid "LicenseCompliance|Add license policy"
msgstr ""
msgid "LicenseCompliance|Allow"
@@ -20314,7 +20335,7 @@ msgstr ""
msgid "Limit namespaces and projects that can be indexed"
msgstr ""
-msgid "Limit sign in from multiple ips"
+msgid "Limit sign in from multiple IP addresses"
msgstr ""
msgid "Limit the number of concurrent operations this secondary site can run in the background."
@@ -20958,7 +20979,7 @@ msgstr ""
msgid "Maximum number of projects."
msgstr ""
-msgid "Maximum number of unique IPs per user"
+msgid "Maximum number of unique IP addresses per user."
msgstr ""
msgid "Maximum page reached"
@@ -23738,6 +23759,9 @@ msgstr ""
msgid "Only projects created under a Ultimate license are available in Security Dashboards."
msgstr ""
+msgid "Only reCAPTCHA v2 is supported:"
+msgstr ""
+
msgid "Only verified users with an email address in any of these domains can be added to the group."
msgstr ""
@@ -27782,6 +27806,9 @@ msgstr ""
msgid "Read more about related issues"
msgstr ""
+msgid "Read their documentation."
+msgstr ""
+
msgid "Ready to get started with GitLab? Follow these steps to set up your workspace, plan and commit changes, and deploy your project."
msgstr ""
@@ -31965,7 +31992,10 @@ msgstr ""
msgid "SourcegraphPreferences|Uses a custom %{linkStart}Sourcegraph instance%{linkEnd}."
msgstr ""
-msgid "Spam Check API Key"
+msgid "Spam Check"
+msgstr ""
+
+msgid "Spam Check API key"
msgstr ""
msgid "Spam Logs"
@@ -33508,7 +33538,7 @@ msgid_plural "The %{type} contains the following errors:"
msgstr[0] ""
msgstr[1] ""
-msgid "The API key used by GitLab for accessing the Spam Check service endpoint"
+msgid "The API key used by GitLab for accessing the Spam Check service endpoint."
msgstr ""
msgid "The GitLab subscription service (customers.gitlab.com) is currently experiencing an outage. You can monitor the status and get updates at %{linkStart}status.gitlab.com%{linkEnd}."
@@ -40764,10 +40794,16 @@ msgstr ""
msgid "quick actions"
msgstr ""
-msgid "reCAPTCHA Private Key"
+msgid "reCAPTCHA"
+msgstr ""
+
+msgid "reCAPTCHA helps prevent credential stuffing."
+msgstr ""
+
+msgid "reCAPTCHA private key"
msgstr ""
-msgid "reCAPTCHA Site Key"
+msgid "reCAPTCHA site key"
msgstr ""
msgid "recent activity"
diff --git a/qa/qa/resource/personal_access_token.rb b/qa/qa/resource/personal_access_token.rb
index 924e4206166..d992d7987b4 100644
--- a/qa/qa/resource/personal_access_token.rb
+++ b/qa/qa/resource/personal_access_token.rb
@@ -8,7 +8,7 @@ module QA
attr_accessor :name
# The user for which the personal access token is to be created
- # This *could* be different than the api_client.user or the api_user provided by the QA::Resource::ApiFabricator module
+ # This *could* be different than the api_client.user or the api_user provided by the QA::Resource::ApiFabricator
attr_writer :user
attribute :token
@@ -17,7 +17,9 @@ module QA
# If Runtime::Env.admin_personal_access_token is provided, fabricate via the API,
# else, fabricate via the browser.
def fabricate_via_api!
- @token = QA::Resource::PersonalAccessTokenCache.get_token_for_username(user.username)
+ QA::Resource::PersonalAccessTokenCache.get_token_for_username(user.username).tap do |cached_token|
+ @token = cached_token if cached_token
+ end
return if @token
resource = if Runtime::Env.admin_personal_access_token && !@user.nil?
@@ -28,7 +30,7 @@ module QA
fabricate!
end
- QA::Resource::PersonalAccessTokenCache.set_token_for_username(user.username, self.token)
+ QA::Resource::PersonalAccessTokenCache.set_token_for_username(user.username, token)
resource
end
diff --git a/qa/qa/runtime/api/client.rb b/qa/qa/runtime/api/client.rb
index 8a5e22fbc37..b5b572890c1 100644
--- a/qa/qa/runtime/api/client.rb
+++ b/qa/qa/runtime/api/client.rb
@@ -16,17 +16,21 @@ module QA
enable_ip_limits if ip_limits
end
+ # Personal access token
+ #
+ # It is possible to set the environment variable GITLAB_QA_ACCESS_TOKEN
+ # to use a specific access token rather than create one from the UI
+ # unless a specific user has been passed
+ #
+ # @return [String]
def personal_access_token
- @personal_access_token ||= begin
- # you can set the environment variable GITLAB_QA_ACCESS_TOKEN
- # to use a specific access token rather than create one from the UI
- # unless a specific user has been passed
- @user.nil? ? Runtime::Env.personal_access_token ||= create_personal_access_token : create_personal_access_token
- end
+ @personal_access_token ||= if user.nil?
+ Runtime::Env.personal_access_token ||= create_personal_access_token
+ else
+ create_personal_access_token
+ end
- if @user&.admin?
- Runtime::Env.admin_personal_access_token = @personal_access_token
- end
+ Runtime::Env.admin_personal_access_token = @personal_access_token if user&.admin? # rubocop:disable Cop/UserAdmin
@personal_access_token
end
@@ -82,27 +86,38 @@ module QA
Page::Main::Menu.perform(&:sign_out)
end
+ # Create PAT
+ #
+      # Use the API if an admin personal access token is present and skip any UI actions; otherwise, perform creation via the UI
+ #
+ # @return [String]
def create_personal_access_token
- signed_in_initially = Page::Main::Menu.perform(&:signed_in?)
-
- Page::Main::Menu.perform(&:sign_out) if @is_new_session && signed_in_initially
-
- token = Resource::PersonalAccessToken.fabricate! do |pat|
- pat.user = user
- end.token
-
- # If this is a new session, that tests that follow could fail if they
- # try to sign in without starting a new session.
- # Also, if the browser wasn't already signed in, leaving it
- # signed in could cause tests to fail when they try to sign
- # in again. For example, that would happen if a test has a
- # before(:context) block that fabricates via the API, and
- # it's the first test to run so it creates an access token
- #
- # Sign out so the tests can successfully sign in
- Page::Main::Menu.perform(&:sign_out) if @is_new_session || !signed_in_initially
-
- token
+ if Runtime::Env.admin_personal_access_token
+ Resource::PersonalAccessToken.fabricate_via_api! do |pat|
+ pat.user = user
+ end.token
+ else
+ signed_in_initially = Page::Main::Menu.perform(&:signed_in?)
+
+ Page::Main::Menu.perform(&:sign_out) if @is_new_session && signed_in_initially
+
+ token = Resource::PersonalAccessToken.fabricate! do |pat|
+ pat.user = user
+ end.token
+
+        # If this is a new session, tests that follow could fail if they
+ # try to sign in without starting a new session.
+ # Also, if the browser wasn't already signed in, leaving it
+ # signed in could cause tests to fail when they try to sign
+ # in again. For example, that would happen if a test has a
+ # before(:context) block that fabricates via the API, and
+ # it's the first test to run so it creates an access token
+ #
+ # Sign out so the tests can successfully sign in
+ Page::Main::Menu.perform(&:sign_out) if @is_new_session || !signed_in_initially
+
+ token
+ end
end
end
end
diff --git a/spec/controllers/concerns/group_tree_spec.rb b/spec/controllers/concerns/group_tree_spec.rb
index a0707688e54..e808f1caa6e 100644
--- a/spec/controllers/concerns/group_tree_spec.rb
+++ b/spec/controllers/concerns/group_tree_spec.rb
@@ -21,82 +21,94 @@ RSpec.describe GroupTree do
end
describe 'GET #index' do
- it 'filters groups' do
- other_group = create(:group, name: 'filter')
- other_group.add_owner(user)
+ shared_examples 'returns filtered groups' do
+ it 'filters groups' do
+ other_group = create(:group, name: 'filter')
+ other_group.add_owner(user)
- get :index, params: { filter: 'filt' }, format: :json
+ get :index, params: { filter: 'filt' }, format: :json
- expect(assigns(:groups)).to contain_exactly(other_group)
- end
+ expect(assigns(:groups)).to contain_exactly(other_group)
+ end
- context 'for subgroups' do
- it 'only renders root groups when no parent was given' do
- create(:group, :public, parent: group)
+ context 'for subgroups' do
+ it 'only renders root groups when no parent was given' do
+ create(:group, :public, parent: group)
- get :index, format: :json
+ get :index, format: :json
- expect(assigns(:groups)).to contain_exactly(group)
- end
+ expect(assigns(:groups)).to contain_exactly(group)
+ end
- it 'contains only the subgroup when a parent was given' do
- subgroup = create(:group, :public, parent: group)
+ it 'contains only the subgroup when a parent was given' do
+ subgroup = create(:group, :public, parent: group)
- get :index, params: { parent_id: group.id }, format: :json
+ get :index, params: { parent_id: group.id }, format: :json
- expect(assigns(:groups)).to contain_exactly(subgroup)
- end
+ expect(assigns(:groups)).to contain_exactly(subgroup)
+ end
- it 'allows filtering for subgroups and includes the parents for rendering' do
- subgroup = create(:group, :public, parent: group, name: 'filter')
+ it 'allows filtering for subgroups and includes the parents for rendering' do
+ subgroup = create(:group, :public, parent: group, name: 'filter')
- get :index, params: { filter: 'filt' }, format: :json
+ get :index, params: { filter: 'filt' }, format: :json
- expect(assigns(:groups)).to contain_exactly(group, subgroup)
- end
+ expect(assigns(:groups)).to contain_exactly(group, subgroup)
+ end
- it 'does not include groups the user does not have access to' do
- parent = create(:group, :private)
- subgroup = create(:group, :private, parent: parent, name: 'filter')
- subgroup.add_developer(user)
- _other_subgroup = create(:group, :private, parent: parent, name: 'filte')
+ it 'does not include groups the user does not have access to' do
+ parent = create(:group, :private)
+ subgroup = create(:group, :private, parent: parent, name: 'filter')
+ subgroup.add_developer(user)
+ _other_subgroup = create(:group, :private, parent: parent, name: 'filte')
- get :index, params: { filter: 'filt' }, format: :json
+ get :index, params: { filter: 'filt' }, format: :json
- expect(assigns(:groups)).to contain_exactly(parent, subgroup)
- end
+ expect(assigns(:groups)).to contain_exactly(parent, subgroup)
+ end
- it 'preloads parents regardless of pagination' do
- allow(Kaminari.config).to receive(:default_per_page).and_return(1)
- group = create(:group, :public)
- subgroup = create(:group, :public, parent: group)
- search_result = create(:group, :public, name: 'result', parent: subgroup)
+ it 'preloads parents regardless of pagination' do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+ group = create(:group, :public)
+ subgroup = create(:group, :public, parent: group)
+ search_result = create(:group, :public, name: 'result', parent: subgroup)
- get :index, params: { filter: 'resu' }, format: :json
+ get :index, params: { filter: 'resu' }, format: :json
- expect(assigns(:groups)).to contain_exactly(group, subgroup, search_result)
+ expect(assigns(:groups)).to contain_exactly(group, subgroup, search_result)
+ end
end
- end
- context 'json content' do
- it 'shows groups as json' do
- get :index, format: :json
+ context 'json content' do
+ it 'shows groups as json' do
+ get :index, format: :json
- expect(json_response.first['id']).to eq(group.id)
- end
+ expect(json_response.first['id']).to eq(group.id)
+ end
- context 'nested groups' do
- it 'expands the tree when filtering' do
- subgroup = create(:group, :public, parent: group, name: 'filter')
+ context 'nested groups' do
+ it 'expands the tree when filtering' do
+ subgroup = create(:group, :public, parent: group, name: 'filter')
- get :index, params: { filter: 'filt' }, format: :json
+ get :index, params: { filter: 'filt' }, format: :json
- children_response = json_response.first['children']
+ children_response = json_response.first['children']
- expect(json_response.first['id']).to eq(group.id)
- expect(children_response.first['id']).to eq(subgroup.id)
+ expect(json_response.first['id']).to eq(group.id)
+ expect(children_response.first['id']).to eq(subgroup.id)
+ end
end
end
end
+
+ it_behaves_like 'returns filtered groups'
+
+ context 'when feature flag :linear_group_tree_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_group_tree_ancestor_scopes: false)
+ end
+
+ it_behaves_like 'returns filtered groups'
+ end
end
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 5f55983beb8..6d2cb22e0a1 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -450,14 +450,14 @@ RSpec.describe 'Admin updates settings' do
visit reporting_admin_application_settings_path
page.within('.as-spam') do
- fill_in 'reCAPTCHA Site Key', with: 'key'
- fill_in 'reCAPTCHA Private Key', with: 'key'
+ fill_in 'reCAPTCHA site key', with: 'key'
+ fill_in 'reCAPTCHA private key', with: 'key'
check 'Enable reCAPTCHA'
check 'Enable reCAPTCHA for login'
- fill_in 'IPs per user', with: 15
+ fill_in 'IP addresses per user', with: 15
check 'Enable Spam Check via external API endpoint'
fill_in 'URL of the external Spam Check endpoint', with: 'grpc://www.example.com/spamcheck'
- fill_in 'Spam Check API Key', with: 'SPAM_CHECK_API_KEY'
+ fill_in 'Spam Check API key', with: 'SPAM_CHECK_API_KEY'
click_button 'Save changes'
end
diff --git a/spec/features/merge_request/user_merges_immediately_spec.rb b/spec/features/merge_request/user_merges_immediately_spec.rb
index bca6e6ceba5..3a05f35a671 100644
--- a/spec/features/merge_request/user_merges_immediately_spec.rb
+++ b/spec/features/merge_request/user_merges_immediately_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Merge requests > User merges immediately', :js do
Sidekiq::Testing.fake! do
click_button 'Merge immediately'
- expect(find('.accept-merge-request.btn-confirm')).to have_content('Merge in progress')
+ expect(find('.media-body h4')).to have_content('Merging!')
wait_for_requests
end
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index e41fb815c8d..0bf44fd2177 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -45,6 +45,8 @@ const createTestMr = (customConfig) => {
preferredAutoMergeStrategy: MWPS_MERGE_STRATEGY,
availableAutoMergeStrategies: [MWPS_MERGE_STRATEGY],
mergeImmediatelyDocsPath: 'path/to/merge/immediately/docs',
+ transitionStateMachine: () => eventHub.$emit('StateMachineValueChanged', { value: 'value' }),
+ translateStateToMachine: () => this.transitionStateMachine(),
};
Object.assign(mr, customConfig.mr);
diff --git a/spec/models/concerns/checksummable_spec.rb b/spec/models/concerns/checksummable_spec.rb
index 3a0387333e8..93a65605b50 100644
--- a/spec/models/concerns/checksummable_spec.rb
+++ b/spec/models/concerns/checksummable_spec.rb
@@ -13,11 +13,19 @@ RSpec.describe Checksummable do
end
end
- describe ".hexdigest" do
+ describe ".sha256_hexdigest" do
it 'returns the SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest
- expect(subject.hexdigest(__FILE__)).to eq(expected)
+ expect(subject.sha256_hexdigest(__FILE__)).to eq(expected)
+ end
+ end
+
+ describe ".md5_hexdigest" do
+ it 'returns the MD5 sum of the file' do
+ expected = Digest::MD5.file(__FILE__).hexdigest
+
+ expect(subject.md5_hexdigest(__FILE__)).to eq(expected)
end
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 74563f77cbe..fca99ebb856 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -2608,17 +2608,29 @@ RSpec.describe Group do
end
describe '.ids_with_disabled_email' do
- let!(:parent_1) { create(:group, emails_disabled: true) }
- let!(:child_1) { create(:group, parent: parent_1) }
+ let_it_be(:parent_1) { create(:group, emails_disabled: true) }
+ let_it_be(:child_1) { create(:group, parent: parent_1) }
- let!(:parent_2) { create(:group, emails_disabled: false) }
- let!(:child_2) { create(:group, parent: parent_2) }
+ let_it_be(:parent_2) { create(:group, emails_disabled: false) }
+ let_it_be(:child_2) { create(:group, parent: parent_2) }
- let!(:other_group) { create(:group, emails_disabled: false) }
+ let_it_be(:other_group) { create(:group, emails_disabled: false) }
- subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
+ shared_examples 'returns namespaces with disabled email' do
+ subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
- it { is_expected.to eq(Set.new([child_1.id])) }
+ it { is_expected.to eq(Set.new([child_1.id])) }
+ end
+
+ it_behaves_like 'returns namespaces with disabled email'
+
+ context 'when feature flag :linear_group_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_group_ancestor_scopes: false)
+ end
+
+ it_behaves_like 'returns namespaces with disabled email'
+ end
end
describe '.timelogs' do
diff --git a/spec/presenters/clusters/cluster_presenter_spec.rb b/spec/presenters/clusters/cluster_presenter_spec.rb
index c8da8a54f16..49126ed8e5f 100644
--- a/spec/presenters/clusters/cluster_presenter_spec.rb
+++ b/spec/presenters/clusters/cluster_presenter_spec.rb
@@ -30,129 +30,6 @@ RSpec.describe Clusters::ClusterPresenter do
end
end
- describe '#item_link' do
- let(:clusterable_presenter) { double('ClusterablePresenter', subject: clusterable) }
-
- subject { presenter.item_link(clusterable_presenter) }
-
- context 'for a group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [group]) }
- let(:group) { create(:group, name: 'Foo') }
- let(:cluster_link) { "<a href=\"#{group_cluster_path(cluster.group, cluster)}\">#{cluster.name}</a>" }
-
- before do
- group.add_maintainer(user)
- end
-
- shared_examples 'ancestor clusters' do
- context 'ancestor clusters' do
- let(:root_group) { create(:group, name: 'Root Group') }
- let(:parent) { create(:group, name: 'parent', parent: root_group) }
- let(:child) { create(:group, name: 'child', parent: parent) }
- let(:group) { create(:group, name: 'group', parent: child) }
-
- before do
- root_group.add_maintainer(user)
- end
-
- context 'top level group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [root_group]) }
-
- it 'returns full group names and link for cluster' do
- expect(subject).to eq("Root Group / #{cluster_link}")
- end
-
- it 'is html safe' do
- expect(presenter).to receive(:sanitize).with('Root Group').and_call_original
-
- expect(subject).to be_html_safe
- end
- end
-
- context 'first level group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [parent]) }
-
- it 'returns full group names and link for cluster' do
- expect(subject).to eq("Root Group / parent / #{cluster_link}")
- end
-
- it 'is html safe' do
- expect(presenter).to receive(:sanitize).with('Root Group / parent').and_call_original
-
- expect(subject).to be_html_safe
- end
- end
-
- context 'second level group cluster' do
- let(:cluster) { create(:cluster, cluster_type: :group_type, groups: [child]) }
-
- let(:ellipsis_h) do
- /.*ellipsis_h.*/
- end
-
- it 'returns clipped group names and link for cluster' do
- expect(subject).to match("Root Group / #{ellipsis_h} / child / #{cluster_link}")
- end
-
- it 'is html safe' do
- expect(presenter).to receive(:sanitize).with('Root Group / parent / child').and_call_original
-
- expect(subject).to be_html_safe
- end
- end
- end
- end
-
- context 'for a project clusterable' do
- let(:clusterable) { project }
- let(:project) { create(:project, group: group) }
-
- it 'returns the group name and the link for cluster' do
- expect(subject).to eq("Foo / #{cluster_link}")
- end
-
- it 'is html safe' do
- expect(presenter).to receive(:sanitize).with('Foo').and_call_original
-
- expect(subject).to be_html_safe
- end
-
- include_examples 'ancestor clusters'
- end
-
- context 'for the group clusterable for the cluster' do
- let(:clusterable) { group }
-
- it 'returns link for cluster' do
- expect(subject).to eq(cluster_link)
- end
-
- include_examples 'ancestor clusters'
-
- it 'is html safe' do
- expect(subject).to be_html_safe
- end
- end
- end
-
- context 'for a project cluster' do
- let(:cluster) { create(:cluster, :project) }
- let(:cluster_link) { "<a href=\"#{project_cluster_path(cluster.project, cluster)}\">#{cluster.name}</a>" }
-
- before do
- cluster.project.add_maintainer(user)
- end
-
- context 'for the project clusterable' do
- let(:clusterable) { cluster.project }
-
- it 'returns link for cluster' do
- expect(subject).to eq(cluster_link)
- end
- end
- end
- end
-
describe '#provider_label' do
let(:cluster) { create(:cluster, provider_type: provider_type) }
@@ -191,26 +68,6 @@ RSpec.describe Clusters::ClusterPresenter do
end
end
- describe '#cluster_type_description' do
- subject { described_class.new(cluster).cluster_type_description }
-
- context 'project_type cluster' do
- it { is_expected.to eq('Project cluster') }
- end
-
- context 'group_type cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :group) }
-
- it { is_expected.to eq('Group cluster') }
- end
-
- context 'instance_type cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
-
- it { is_expected.to eq('Instance cluster') }
- end
- end
-
describe '#show_path' do
subject { described_class.new(cluster).show_path }
diff --git a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index 3760325675a..c768ba912cc 100644
--- a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -497,7 +497,7 @@ RSpec.shared_examples 'trace with disabled live trace feature' do
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(File.exist?(src_path)).to be_falsy
expect(src_checksum)
- .to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
+ .to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
@@ -523,7 +523,7 @@ RSpec.shared_examples 'trace with disabled live trace feature' do
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(build.old_trace).to be_nil
expect(src_checksum)
- .to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
+ .to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
@@ -861,7 +861,7 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(src_checksum)
- .to eq(described_class.hexdigest(build.job_artifacts_trace.file.path))
+ .to eq(described_class.sha256_hexdigest(build.job_artifacts_trace.file.path))
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end