gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2021-03-10 18:09:11 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-03-10 18:09:11 +0300
commit    d2091d1e924e2887eb9db4fad761965a24d024f1 (patch)
tree      482fe37d97a4169895ae3ddf6d6f42e6f632cbb9
parent    152b3268d701b54cac9b615a0e29e0e5726bfd99 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .eslintignore | 6
-rw-r--r--  .eslintrc.yml | 12
-rw-r--r--  .rubocop_manual_todo.yml | 5
-rw-r--r--  app/assets/javascripts/access_tokens/components/projects_token_selector.vue | 14
-rw-r--r--  app/assets/javascripts/experimentation/constants.js | 1
-rw-r--r--  app/assets/javascripts/experimentation/experiment_tracking.js (renamed from app/assets/javascripts/experiment_tracking.js) | 11
-rw-r--r--  app/assets/javascripts/experimentation/utils.js | 10
-rw-r--r--  app/assets/javascripts/graphql_shared/utils.js | 34
-rw-r--r--  app/assets/javascripts/lib/utils/experimentation.js | 3
-rw-r--r--  app/assets/javascripts/projects/upload_file_experiment.js | 2
-rw-r--r--  app/assets/javascripts/tracking.js | 8
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/components/states/nothing_to_merge.vue | 36
-rw-r--r--  app/experiments/application_experiment.rb | 10
-rw-r--r--  app/models/merge_request.rb | 30
-rw-r--r--  app/models/project.rb | 18
-rw-r--r--  app/models/project_services/discord_service.rb | 3
-rw-r--r--  app/workers/all_queues.yml | 12
-rw-r--r--  app/workers/archive_trace_worker.rb | 2
-rw-r--r--  app/workers/build_finished_worker.rb | 1
-rw-r--r--  app/workers/chat_notification_worker.rb | 1
-rw-r--r--  app/workers/expire_build_instance_artifacts_worker.rb | 1
-rw-r--r--  babel.config.js | 2
-rw-r--r--  changelogs/unreleased/321659-handle-discord-errors.yml | 5
-rw-r--r--  changelogs/unreleased/323163-restore-gitlab_database_transaction_seconds_sum-for-sidekiq.yml | 5
-rw-r--r--  config/helpers/incremental_webpack_compiler.js | 3
-rw-r--r--  config/helpers/is_eslint.js | 2
-rw-r--r--  config/helpers/vendor_dll_hash.js | 2
-rw-r--r--  config/initializers/1_settings.rb | 3
-rw-r--r--  config/karma.config.js | 19
-rw-r--r--  config/metrics/counts_28d/20210222041219_i_quickactions_invite_email_single_monthly.yml | 1
-rw-r--r--  config/metrics/counts_28d/20210222041235_i_quickactions_invite_email_multiple_monthly.yml | 1
-rw-r--r--  config/webpack.config.js | 62
-rw-r--r--  config/webpack.vendor.config.js | 2
-rw-r--r--  doc/development/experiment_guide/experimentation.md | 399
-rw-r--r--  doc/development/experiment_guide/gitlab_experiment.md | 556
-rw-r--r--  doc/development/experiment_guide/index.md | 413
-rw-r--r--  doc/development/usage_ping/index.md | 4
-rw-r--r--  doc/user/admin_area/credentials_inventory.md | 42
-rw-r--r--  doc/user/admin_area/img/credentials_inventory_gpg_keys_v13_10.png | bin 0 -> 62501 bytes
-rw-r--r--  doc/user/admin_area/img/credentials_inventory_v13_10.png | bin 0 -> 100241 bytes
-rw-r--r--  doc/user/admin_area/img/credentials_inventory_v13_4.png | bin 28945 -> 0 bytes
-rw-r--r--  doc/user/project/repository/gpg_signed_commits/index.md | 1
-rw-r--r--  jest.config.base.js | 2
-rw-r--r--  lib/gitlab/database.rb | 30
-rw-r--r--  lib/gitlab/database/similarity_score.rb | 7
-rw-r--r--  lib/gitlab/graphql/pagination/keyset/connection.rb | 1
-rw-r--r--  lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb | 47
-rw-r--r--  lib/gitlab/graphql/pagination/keyset/last_items.rb | 38
-rw-r--r--  lib/gitlab/graphql/pagination/keyset/order_info.rb | 7
-rw-r--r--  lib/gitlab/metrics/background_transaction.rb | 49
-rw-r--r--  lib/gitlab/metrics/subscribers/active_record.rb | 32
-rw-r--r--  lib/gitlab/pagination/keyset/column_order_definition.rb | 224
-rw-r--r--  lib/gitlab/pagination/keyset/order.rb | 248
-rw-r--r--  lib/gitlab/sidekiq_middleware/server_metrics.rb | 3
-rw-r--r--  lib/gitlab/usage/metrics/aggregates/aggregate.rb | 2
-rw-r--r--  lib/gitlab/usage_data_counters/aggregated_metrics/common.yml | 2
-rw-r--r--  locale/gitlab.pot | 7
-rw-r--r--  scripts/frontend/block_dependencies.js | 8
-rwxr-xr-x  scripts/frontend/check_page_bundle_mixins_css_for_sideeffects.js | 2
-rw-r--r--  scripts/frontend/extract_gettext_all.js | 39
-rwxr-xr-x  scripts/frontend/file_test_coverage.js | 44
-rw-r--r--  scripts/frontend/merge_coverage_frontend.js | 13
-rw-r--r--  scripts/frontend/parallel_ci_sequencer.js | 24
-rw-r--r--  scripts/frontend/stylelint/stylelint-duplicate-selectors.js | 6
-rw-r--r--  scripts/frontend/stylelint/stylelint-utility-classes.js | 5
-rw-r--r--  scripts/frontend/stylelint/stylelint-utility-map.js | 30
-rw-r--r--  scripts/frontend/stylelint/stylelint-utils.js | 11
-rwxr-xr-x  scripts/frontend/webpack_dev_server.js | 12
-rw-r--r--  spec/experiments/application_experiment_spec.rb | 40
-rw-r--r--  spec/frontend/experimentation/experiment_tracking_spec.js (renamed from spec/frontend/experiment_tracking_spec.js) | 26
-rw-r--r--  spec/frontend/experimentation/utils_spec.js | 38
-rw-r--r--  spec/frontend/graphql_shared/utils_spec.js | 34
-rw-r--r--  spec/frontend/lib/utils/experimentation_spec.js | 20
-rw-r--r--  spec/frontend/projects/upload_file_experiment_spec.js | 18
-rw-r--r--  spec/frontend/tracking_spec.js | 26
-rw-r--r--  spec/frontend/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js | 12
-rw-r--r--  spec/lib/gitlab/database/similarity_score_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 108
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/metrics/background_transaction_spec.rb | 67
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/active_record_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb | 188
-rw-r--r--  spec/lib/gitlab/pagination/keyset/order_spec.rb | 420
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb | 8
-rw-r--r--  spec/models/project_services/discord_service_spec.rb | 11
-rw-r--r--  spec/requests/api/graphql/project/merge_requests_spec.rb | 48
-rw-r--r--  spec/support/gitlab_experiment.rb | 11
-rw-r--r--  spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb | 57
-rw-r--r--  spec/support/snowplow.rb | 19
-rw-r--r--  spec/support/stub_snowplow.rb | 23
92 files changed, 3026 insertions, 870 deletions
diff --git a/.eslintignore b/.eslintignore
index c41556f6aae..73e11dfd974 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -1,15 +1,9 @@
/app/assets/javascripts/locale/**/app.js
-/config/
/builds/
/coverage/
/coverage-frontend/
/coverage-javascript/
/node_modules/
/public/
-/scripts/
/tmp/
/vendor/
-jest.config.js
-jest.config.*.js
-karma.config.js
-webpack.config.js
diff --git a/.eslintrc.yml b/.eslintrc.yml
index 75c52ac1319..678750c085b 100644
--- a/.eslintrc.yml
+++ b/.eslintrc.yml
@@ -89,3 +89,15 @@ overrides:
rules:
'@gitlab/require-i18n-strings': off
'@gitlab/no-runtime-template-compiler': off
+ - files:
+ - 'config/**/*'
+ - 'scripts/**/*'
+ - '*.config.js'
+ - 'jest.*.js'
+ rules:
+ '@gitlab/require-i18n-strings': off
+ import/no-extraneous-dependencies: off
+ import/no-commonjs: off
+ import/no-nodejs-modules: off
+ filenames/match-regex: off
+ no-console: off
diff --git a/.rubocop_manual_todo.yml b/.rubocop_manual_todo.yml
index 14ccfa6723e..730b4ad607c 100644
--- a/.rubocop_manual_todo.yml
+++ b/.rubocop_manual_todo.yml
@@ -2355,7 +2355,6 @@ Gitlab/NamespacedClass:
- 'ee/app/serializers/vulnerability_note_serializer.rb'
- 'ee/app/serializers/vulnerability_serializer.rb'
- 'ee/app/services/clear_namespace_shared_runners_minutes_service.rb'
- - 'ee/app/services/fetch_subscription_plans_service.rb'
- 'ee/app/services/ldap_group_reset_service.rb'
- 'ee/app/services/start_pull_mirroring_service.rb'
- 'ee/app/services/timebox_report_service.rb'
@@ -2518,7 +2517,3 @@ Style/ClassEqualityComparison:
Exclude:
- spec/lib/peek/views/active_record_spec.rb
- ee/spec/lib/peek/views/active_record_spec.rb
-
-Lint/HashCompareByIdentity:
- Exclude:
- - ee/lib/gitlab/database/load_balancing/load_balancer.rb
diff --git a/app/assets/javascripts/access_tokens/components/projects_token_selector.vue b/app/assets/javascripts/access_tokens/components/projects_token_selector.vue
index 37ef3f28236..cc5532696c7 100644
--- a/app/assets/javascripts/access_tokens/components/projects_token_selector.vue
+++ b/app/assets/javascripts/access_tokens/components/projects_token_selector.vue
@@ -8,7 +8,7 @@ import {
} from '@gitlab/ui';
import produce from 'immer';
-import { getIdFromGraphQLId, convertToGraphQLId } from '~/graphql_shared/utils';
+import { convertToGraphQLIds, convertNodeIdsFromGraphQLIds } from '~/graphql_shared/utils';
import getProjectsQuery from '../graphql/queries/get_projects.query.graphql';
@@ -51,7 +51,7 @@ export default {
},
update({ projects }) {
return {
- list: this.formatProjectNodes(projects),
+ list: convertNodeIdsFromGraphQLIds(projects.nodes),
pageInfo: projects.pageInfo,
};
},
@@ -64,7 +64,7 @@ export default {
query: getProjectsQuery,
variables() {
return {
- ids: this.initialProjectIds.map((id) => convertToGraphQLId(GRAPHQL_ENTITY_TYPE, id)),
+ ids: convertToGraphQLIds(GRAPHQL_ENTITY_TYPE, this.initialProjectIds),
};
},
manual: true,
@@ -72,7 +72,7 @@ export default {
return !this.initialProjectIds.length;
},
result({ data: { projects } }) {
- this.$emit('input', this.formatProjectNodes(projects));
+ this.$emit('input', convertNodeIdsFromGraphQLIds(projects.nodes));
},
},
},
@@ -88,12 +88,6 @@ export default {
};
},
methods: {
- formatProjectNodes(projects) {
- return projects.nodes.map((project) => ({
- ...project,
- id: getIdFromGraphQLId(project.id),
- }));
- },
handleSearch(query) {
this.isSearching = true;
this.searchQuery = query;
diff --git a/app/assets/javascripts/experimentation/constants.js b/app/assets/javascripts/experimentation/constants.js
new file mode 100644
index 00000000000..b7e61d43b11
--- /dev/null
+++ b/app/assets/javascripts/experimentation/constants.js
@@ -0,0 +1 @@
+export const TRACKING_CONTEXT_SCHEMA = 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0';
diff --git a/app/assets/javascripts/experiment_tracking.js b/app/assets/javascripts/experimentation/experiment_tracking.js
index 2a725886679..c721828036e 100644
--- a/app/assets/javascripts/experiment_tracking.js
+++ b/app/assets/javascripts/experimentation/experiment_tracking.js
@@ -1,16 +1,15 @@
-import { get } from 'lodash';
import Tracking from '~/tracking';
-
-const TRACKING_CONTEXT_SCHEMA = 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0';
+import { TRACKING_CONTEXT_SCHEMA } from './constants';
+import { getExperimentData } from './utils';
export default class ExperimentTracking {
constructor(experimentName, trackingArgs = {}) {
this.trackingArgs = trackingArgs;
- this.experimentData = get(window, ['gon', 'global', 'experiment', experimentName]);
+ this.data = getExperimentData(experimentName);
}
event(action) {
- if (!this.experimentData) {
+ if (!this.data) {
return false;
}
@@ -18,7 +17,7 @@ export default class ExperimentTracking {
...this.trackingArgs,
context: {
schema: TRACKING_CONTEXT_SCHEMA,
- data: this.experimentData,
+ data: this.data,
},
});
}
diff --git a/app/assets/javascripts/experimentation/utils.js b/app/assets/javascripts/experimentation/utils.js
new file mode 100644
index 00000000000..d3e7800f643
--- /dev/null
+++ b/app/assets/javascripts/experimentation/utils.js
@@ -0,0 +1,10 @@
+// This file only applies to use of experiments through https://gitlab.com/gitlab-org/gitlab-experiment
+import { get } from 'lodash';
+
+export function getExperimentData(experimentName) {
+ return get(window, ['gon', 'experiment', experimentName]);
+}
+
+export function isExperimentVariant(experimentName, variantName) {
+ return getExperimentData(experimentName)?.variant === variantName;
+}
diff --git a/app/assets/javascripts/graphql_shared/utils.js b/app/assets/javascripts/graphql_shared/utils.js
index 4715bbc94f6..e64e8009a5f 100644
--- a/app/assets/javascripts/graphql_shared/utils.js
+++ b/app/assets/javascripts/graphql_shared/utils.js
@@ -1,3 +1,5 @@
+import { isArray } from 'lodash';
+
/**
* Ids generated by GraphQL endpoints are usually in the format
* gid://gitlab/Environments/123. This method extracts Id number
@@ -52,3 +54,35 @@ export const convertToGraphQLId = (type, id) => {
* @returns {Array}
*/
export const convertToGraphQLIds = (type, ids) => ids.map((id) => convertToGraphQLId(type, id));
+
+/**
+ * Ids generated by GraphQL endpoints are usually in the format
+ * gid://gitlab/Groups/123. This method takes an array of
+ * GraphQL Ids and converts them to a number.
+ *
+ * @param {Array} ids An array of GraphQL IDs
+ * @returns {Array}
+ */
+export const convertFromGraphQLIds = (ids) => {
+ if (!isArray(ids)) {
+ throw new TypeError(`ids must be an array; got ${typeof ids}`);
+ }
+
+ return ids.map((id) => getIdFromGraphQLId(id));
+};
+
+/**
+ * Ids generated by GraphQL endpoints are usually in the format
+ * gid://gitlab/Groups/123. This method takes an array of nodes
+ * and converts the `id` properties from a GraphQL Id to a number.
+ *
+ * @param {Array} nodes An array of nodes with an `id` property
+ * @returns {Array}
+ */
+export const convertNodeIdsFromGraphQLIds = (nodes) => {
+ if (!isArray(nodes)) {
+ throw new TypeError(`nodes must be an array; got ${typeof nodes}`);
+ }
+
+ return nodes.map((node) => (node.id ? { ...node, id: getIdFromGraphQLId(node.id) } : node));
+};
diff --git a/app/assets/javascripts/lib/utils/experimentation.js b/app/assets/javascripts/lib/utils/experimentation.js
deleted file mode 100644
index 555e76055e0..00000000000
--- a/app/assets/javascripts/lib/utils/experimentation.js
+++ /dev/null
@@ -1,3 +0,0 @@
-export function isExperimentEnabled(experimentKey) {
- return Boolean(window.gon?.experiments?.[experimentKey]);
-}
diff --git a/app/assets/javascripts/projects/upload_file_experiment.js b/app/assets/javascripts/projects/upload_file_experiment.js
index e4e4c609f0a..7d61df36a75 100644
--- a/app/assets/javascripts/projects/upload_file_experiment.js
+++ b/app/assets/javascripts/projects/upload_file_experiment.js
@@ -1,4 +1,4 @@
-import ExperimentTracking from '~/experiment_tracking';
+import ExperimentTracking from '~/experimentation/experiment_tracking';
function trackEvent(eventName) {
const isEmpty = Boolean(document.querySelector('.project-home-panel.empty-project'));
diff --git a/app/assets/javascripts/tracking.js b/app/assets/javascripts/tracking.js
index 008eda5e505..01de034417e 100644
--- a/app/assets/javascripts/tracking.js
+++ b/app/assets/javascripts/tracking.js
@@ -1,4 +1,6 @@
-import { omitBy, isUndefined, get } from 'lodash';
+import { omitBy, isUndefined } from 'lodash';
+import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
+import { getExperimentData } from '~/experimentation/utils';
const standardContext = { ...window.gl?.snowplowStandardContext };
@@ -32,8 +34,8 @@ const createEventPayload = (el, { suffix = '' } = {}) => {
let context = el.dataset.trackContext;
if (el.dataset.trackExperiment) {
- const data = get(window, ['gon', 'global', 'experiment', el.dataset.trackExperiment]);
- if (data) context = { schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0', data };
+ const data = getExperimentData(el.dataset.trackExperiment);
+ if (data) context = { schema: TRACKING_CONTEXT_SCHEMA, data };
}
const data = {
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/nothing_to_merge.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/nothing_to_merge.vue
index f0259a975db..01e0b91bd4a 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/nothing_to_merge.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/nothing_to_merge.vue
@@ -1,12 +1,15 @@
<script>
/* eslint-disable vue/no-v-html */
-import { GlButton } from '@gitlab/ui';
+import { GlButton, GlSprintf, GlLink } from '@gitlab/ui';
import emptyStateSVG from 'icons/_mr_widget_empty_state.svg';
+import { helpPagePath } from '~/helpers/help_page_helper';
export default {
name: 'MRWidgetNothingToMerge',
components: {
GlButton,
+ GlSprintf,
+ GlLink,
},
props: {
mr: {
@@ -17,6 +20,7 @@ export default {
data() {
return { emptyStateSVG };
},
+ ciHelpPage: helpPagePath('/ci/quick_start/index.html'),
};
</script>
@@ -30,25 +34,20 @@ export default {
</div>
<div class="text col-md-7 order-md-first col-12">
<p class="highlight">
- {{
- s__(
- 'mrWidgetNothingToMerge|Merge requests are a place to propose changes you have made to a project and discuss those changes with others.',
- )
- }}
+ {{ s__('mrWidgetNothingToMerge|This merge request contains no changes.') }}
</p>
<p>
- {{
- s__(
- 'mrWidgetNothingToMerge|Interested parties can even contribute by pushing commits if they want to.',
- )
- }}
- </p>
- <p>
- {{
- s__(
- "mrWidgetNothingToMerge|Currently there are no changes in this merge request's source branch. Please push new commits or use a different branch.",
- )
- }}
+ <gl-sprintf
+ :message="
+ s__(
+ 'mrWidgetNothingToMerge|Use merge requests to propose changes to your project and discuss them with your team. To make changes, push a commit or edit this merge request to use a different branch. With %{linkStart}CI/CD%{linkEnd}, automatically test your changes before merging.',
+ )
+ "
+ >
+ <template #link="{ content }">
+ <gl-link :href="$options.ciHelpPage" target="_blank">{{ content }}</gl-link>
+ </template>
+ </gl-sprintf>
</p>
<div>
<gl-button
@@ -56,6 +55,7 @@ export default {
:href="mr.newBlobPath"
category="secondary"
variant="success"
+ data-testid="createFileButton"
>
{{ __('Create file') }}
</gl-button>
diff --git a/app/experiments/application_experiment.rb b/app/experiments/application_experiment.rb
index ec73382ed3b..6ba851fbc8b 100644
--- a/app/experiments/application_experiment.rb
+++ b/app/experiments/application_experiment.rb
@@ -11,7 +11,9 @@ class ApplicationExperiment < Gitlab::Experiment # rubocop:disable Gitlab/Namesp
def publish(_result)
track(:assignment) # track that we've assigned a variant for this context
- Gon.global.push({ experiment: { name => signature } }, true) # push the experiment data to the client
+
+ # push the experiment data to the client
+ Gon.push({ experiment: { name => signature } }, true) if in_request_cycle?
end
def track(action, **event_args)
@@ -47,6 +49,12 @@ class ApplicationExperiment < Gitlab::Experiment # rubocop:disable Gitlab/Namesp
name.tr('/', '_')
end
+ def in_request_cycle?
+ # Gon is only accessible when having a request. This will be fixed with
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/323352
+ context.instance_variable_defined?(:@request)
+ end
+
def resolve_variant_name
case rollout_strategy
when :round_robin
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index ba09c0a9dd8..3fe31a64984 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -310,10 +310,28 @@ class MergeRequest < ApplicationRecord
end
scope :by_target_branch, ->(branch_name) { where(target_branch: branch_name) }
scope :order_merged_at, ->(direction) do
- query = join_metrics.order(Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', direction))
-
- # Add `merge_request_metrics.merged_at` to the `SELECT` in order to make the keyset pagination work.
- query.select(*query.arel.projections, MergeRequest::Metrics.arel_table[:merged_at].as('"merge_request_metrics.merged_at"'))
+ reverse_direction = { 'ASC' => 'DESC', 'DESC' => 'ASC' }
+ reversed_direction = reverse_direction[direction] || raise("Unknown sort direction was given: #{direction}")
+
+ order = Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'merge_request_metrics_merged_at',
+ column_expression: MergeRequest::Metrics.arel_table[:merged_at],
+ order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', direction),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', reversed_direction),
+ order_direction: direction,
+ nullable: :nulls_last,
+ distinct: false,
+ add_to_projections: true
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'merge_request_metrics_id',
+ order_expression: MergeRequest::Metrics.arel_table[:id].desc,
+ add_to_projections: true
+ )
+ ])
+
+ order.apply_cursor_conditions(join_metrics).order(order)
end
scope :order_merged_at_asc, -> { order_merged_at('ASC') }
scope :order_merged_at_desc, -> { order_merged_at('DESC') }
@@ -411,8 +429,8 @@ class MergeRequest < ApplicationRecord
def self.sort_by_attribute(method, excluded_labels: [])
case method.to_s
- when 'merged_at', 'merged_at_asc' then order_merged_at_asc.with_order_id_desc
- when 'merged_at_desc' then order_merged_at_desc.with_order_id_desc
+ when 'merged_at', 'merged_at_asc' then order_merged_at_asc
+ when 'merged_at_desc' then order_merged_at_desc
else
super
end
diff --git a/app/models/project.rb b/app/models/project.rb
index d960732b16f..ef92dda443a 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -493,10 +493,22 @@ class Project < ApplicationRecord
{ column: arel_table["description"], multiplier: 0.2 }
])
- query = reorder(order_expression.desc, arel_table['id'].desc)
+ order = Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'similarity',
+ column_expression: order_expression,
+ order_expression: order_expression.desc,
+ order_direction: :desc,
+ distinct: false,
+ add_to_projections: true
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table[:id].desc
+ )
+ ])
- query = query.select(*query.arel.projections, order_expression.as('similarity')) if include_in_select
- query
+ order.apply_cursor_conditions(reorder(order))
end
scope :with_packages, -> { joins(:packages) }
diff --git a/app/models/project_services/discord_service.rb b/app/models/project_services/discord_service.rb
index 941b7f64263..37bbb9b8752 100644
--- a/app/models/project_services/discord_service.rb
+++ b/app/models/project_services/discord_service.rb
@@ -59,6 +59,9 @@ class DiscordService < ChatNotificationService
embed.description = (message.pretext + "\n" + Array.wrap(message.attachments).join("\n")).gsub(ATTACHMENT_REGEX, " \\k<entry> - \\k<name>\n")
end
end
+ rescue RestClient::Exception => error
+ log_error(error.message)
+ false
end
def custom_data(data)
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 20e1636b2fa..f6ea1828ab6 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -1106,8 +1106,7 @@
:resource_boundary: :unknown
:weight: 1
:idempotent:
- :tags:
- - :requires_disk_io
+ :tags: []
- :name: pipeline_background:ci_build_trace_chunk_flush
:feature_category: :continuous_integration
:has_external_dependencies:
@@ -1251,8 +1250,7 @@
:resource_boundary: :cpu
:weight: 5
:idempotent:
- :tags:
- - :requires_disk_io
+ :tags: []
- :name: pipeline_processing:build_queue
:feature_category: :continuous_integration
:has_external_dependencies:
@@ -1484,8 +1482,7 @@
:resource_boundary: :unknown
:weight: 2
:idempotent:
- :tags:
- - :requires_disk_io
+ :tags: []
- :name: ci_delete_objects
:feature_category: :continuous_integration
:has_external_dependencies:
@@ -1645,8 +1642,7 @@
:resource_boundary: :unknown
:weight: 1
:idempotent:
- :tags:
- - :requires_disk_io
+ :tags: []
- :name: export_csv
:feature_category: :issue_tracking
:has_external_dependencies:
diff --git a/app/workers/archive_trace_worker.rb b/app/workers/archive_trace_worker.rb
index b0c5bef336a..3ddb5686bf2 100644
--- a/app/workers/archive_trace_worker.rb
+++ b/app/workers/archive_trace_worker.rb
@@ -4,8 +4,6 @@ class ArchiveTraceWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include PipelineBackgroundQueue
- tags :requires_disk_io
-
# rubocop: disable CodeReuse/ActiveRecord
def perform(job_id)
Ci::Build.without_archived_trace.find_by(id: job_id).try do |job|
diff --git a/app/workers/build_finished_worker.rb b/app/workers/build_finished_worker.rb
index 1f4ebe58638..3f99b30fdf7 100644
--- a/app/workers/build_finished_worker.rb
+++ b/app/workers/build_finished_worker.rb
@@ -7,7 +7,6 @@ class BuildFinishedWorker # rubocop:disable Scalability/IdempotentWorker
queue_namespace :pipeline_processing
urgency :high
worker_resource_boundary :cpu
- tags :requires_disk_io
ARCHIVE_TRACES_IN = 2.minutes.freeze
diff --git a/app/workers/chat_notification_worker.rb b/app/workers/chat_notification_worker.rb
index 94a0197b862..5fab437f49f 100644
--- a/app/workers/chat_notification_worker.rb
+++ b/app/workers/chat_notification_worker.rb
@@ -7,7 +7,6 @@ class ChatNotificationWorker # rubocop:disable Scalability/IdempotentWorker
sidekiq_options retry: false
feature_category :chatops
- tags :requires_disk_io
urgency :low # Can't be high as it has external dependencies
weight 2
worker_has_external_dependencies!
diff --git a/app/workers/expire_build_instance_artifacts_worker.rb b/app/workers/expire_build_instance_artifacts_worker.rb
index a5571473b43..e6cd60a3e47 100644
--- a/app/workers/expire_build_instance_artifacts_worker.rb
+++ b/app/workers/expire_build_instance_artifacts_worker.rb
@@ -4,7 +4,6 @@ class ExpireBuildInstanceArtifactsWorker # rubocop:disable Scalability/Idempoten
include ApplicationWorker
feature_category :continuous_integration
- tags :requires_disk_io
# rubocop: disable CodeReuse/ActiveRecord
def perform(build_id)
diff --git a/babel.config.js b/babel.config.js
index 6c9d4640535..4dfca8f6144 100644
--- a/babel.config.js
+++ b/babel.config.js
@@ -1,5 +1,3 @@
-/* eslint-disable import/no-commonjs, filenames/match-regex */
-
const BABEL_ENV = process.env.BABEL_ENV || process.env.NODE_ENV || null;
let presets = [
diff --git a/changelogs/unreleased/321659-handle-discord-errors.yml b/changelogs/unreleased/321659-handle-discord-errors.yml
new file mode 100644
index 00000000000..2bf969dd8b0
--- /dev/null
+++ b/changelogs/unreleased/321659-handle-discord-errors.yml
@@ -0,0 +1,5 @@
+---
+title: Handle RestClient errors in Discord integration
+merge_request: 56112
+author:
+type: fixed
diff --git a/changelogs/unreleased/323163-restore-gitlab_database_transaction_seconds_sum-for-sidekiq.yml b/changelogs/unreleased/323163-restore-gitlab_database_transaction_seconds_sum-for-sidekiq.yml
new file mode 100644
index 00000000000..3a942971ee2
--- /dev/null
+++ b/changelogs/unreleased/323163-restore-gitlab_database_transaction_seconds_sum-for-sidekiq.yml
@@ -0,0 +1,5 @@
+---
+title: Port essential database metrics to Sidekiq
+merge_request: 56005
+author:
+type: changed
diff --git a/config/helpers/incremental_webpack_compiler.js b/config/helpers/incremental_webpack_compiler.js
index 786bb6071fa..5d4f9bd040d 100644
--- a/config/helpers/incremental_webpack_compiler.js
+++ b/config/helpers/incremental_webpack_compiler.js
@@ -1,3 +1,4 @@
+/* eslint-disable max-classes-per-file, no-underscore-dangle */
const fs = require('fs');
const path = require('path');
@@ -7,6 +8,7 @@ const log = (msg, ...rest) => console.log(`IncrementalWebpackCompiler: ${msg}`,
// Five seconds seem to work fine and the user can read the message
const TIMEOUT = 5000;
+/* eslint-disable class-methods-use-this */
class NoopCompiler {
constructor() {
this.enabled = false;
@@ -20,6 +22,7 @@ class NoopCompiler {
setupMiddleware() {}
}
+/* eslint-enable class-methods-use-this */
class IncrementalWebpackCompiler {
constructor(historyFilePath) {
diff --git a/config/helpers/is_eslint.js b/config/helpers/is_eslint.js
index 60528796962..9a3a9bfca12 100644
--- a/config/helpers/is_eslint.js
+++ b/config/helpers/is_eslint.js
@@ -2,7 +2,7 @@
* Returns true if the given module is required from eslint
*/
const isESLint = (mod) => {
- let parent = mod.parent;
+ let { parent } = mod;
while (parent) {
if (parent.filename && parent.filename.includes('/eslint')) {
diff --git a/config/helpers/vendor_dll_hash.js b/config/helpers/vendor_dll_hash.js
index 2df97e7b95d..cdbaebc9789 100644
--- a/config/helpers/vendor_dll_hash.js
+++ b/config/helpers/vendor_dll_hash.js
@@ -1,6 +1,6 @@
const fs = require('fs');
-const path = require('path');
const crypto = require('crypto');
+const path = require('path');
const CACHE_PATHS = [
'./config/webpack.config.js',
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 8de8584b748..b8dc464deed 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -586,6 +586,9 @@ Gitlab.ee do
Settings.cron_jobs['geo_verification_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_verification_cron_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['geo_verification_cron_worker']['job_class'] ||= 'Geo::VerificationCronWorker'
+ Settings.cron_jobs['geo_secondary_usage_data_cron_worker'] ||= Settingslogic.new({})
+ Settings.cron_jobs['geo_secondary_usage_data_cron_worker']['cron'] ||= '0 0 * * 0'
+ Settings.cron_jobs['geo_secondary_usage_data_cron_worker']['job_class'] ||= 'Geo::SecondaryUsageDataCronWorker'
Settings.cron_jobs['geo_file_download_dispatch_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_file_download_dispatch_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_file_download_dispatch_worker']['job_class'] ||= 'Geo::FileDownloadDispatchWorker'
diff --git a/config/karma.config.js b/config/karma.config.js
index 1d65e65ce2a..1c2dd21c189 100644
--- a/config/karma.config.js
+++ b/config/karma.config.js
@@ -1,13 +1,14 @@
-const path = require('path');
-const glob = require('glob');
+/* eslint-disable no-inner-declarations, no-param-reassign */
const chalk = require('chalk');
-const webpack = require('webpack');
const argumentsParser = require('commander');
-const webpackConfig = require('./webpack.config.js');
+const glob = require('glob');
+const path = require('path');
+const webpack = require('webpack');
const IS_EE = require('./helpers/is_ee_env');
+const webpackConfig = require('./webpack.config.js');
const ROOT_PATH = path.resolve(__dirname, '..');
-const SPECS_PATH = /^(?:\.[\\\/])?(ee[\\\/])?spec[\\\/]javascripts[\\\/]/;
+const SPECS_PATH = /^(?:\.[\\/])?(ee[\\/])?spec[\\/]javascripts[\\/]/;
function exitError(message) {
console.error(chalk.red(`\nError: ${message}\n`));
@@ -77,7 +78,7 @@ if (specFilters.length) {
root: ROOT_PATH,
matchBase: true,
})
- .filter((path) => path.endsWith('spec.js')),
+ .filter((filePath) => filePath.endsWith('spec.js')),
);
// flatten
@@ -97,14 +98,14 @@ if (specFilters.length) {
}
const CE_FILES = filteredSpecFiles.filter((file) => !file.startsWith('ee'));
- createContext(CE_FILES, /[^e]{2}[\\\/]spec[\\\/]javascripts$/, 'spec/javascripts');
+ createContext(CE_FILES, /[^e]{2}[\\/]spec[\\/]javascripts$/, 'spec/javascripts');
const EE_FILES = filteredSpecFiles.filter((file) => file.startsWith('ee'));
- createContext(EE_FILES, /ee[\\\/]spec[\\\/]javascripts$/, 'ee/spec/javascripts');
+ createContext(EE_FILES, /ee[\\/]spec[\\/]javascripts$/, 'ee/spec/javascripts');
}
// Karma configuration
-module.exports = function (config) {
+module.exports = (config) => {
process.env.TZ = 'Etc/UTC';
const fixturesPath = `tmp/tests/frontend/fixtures${IS_EE ? '-ee' : ''}`;
diff --git a/config/metrics/counts_28d/20210222041219_i_quickactions_invite_email_single_monthly.yml b/config/metrics/counts_28d/20210222041219_i_quickactions_invite_email_single_monthly.yml
index a1f184872e4..d4761b84344 100644
--- a/config/metrics/counts_28d/20210222041219_i_quickactions_invite_email_single_monthly.yml
+++ b/config/metrics/counts_28d/20210222041219_i_quickactions_invite_email_single_monthly.yml
@@ -1,5 +1,4 @@
---
-# See Usage Ping metrics dictionary docs https://docs.gitlab.com/ee/development/usage_ping/metrics_dictionary.html
key_path: redis_hll_counters.quickactions.i_quickactions_invite_email_single_monthly
description:
product_section: dev
diff --git a/config/metrics/counts_28d/20210222041235_i_quickactions_invite_email_multiple_monthly.yml b/config/metrics/counts_28d/20210222041235_i_quickactions_invite_email_multiple_monthly.yml
index 9c044596c88..f0a5c3c29bd 100644
--- a/config/metrics/counts_28d/20210222041235_i_quickactions_invite_email_multiple_monthly.yml
+++ b/config/metrics/counts_28d/20210222041235_i_quickactions_invite_email_multiple_monthly.yml
@@ -1,5 +1,4 @@
---
-# See Usage Ping metrics dictionary docs https://docs.gitlab.com/ee/development/usage_ping/metrics_dictionary.html
key_path: redis_hll_counters.quickactions.i_quickactions_invite_email_multiple_monthly
description:
product_section: dev
diff --git a/config/webpack.config.js b/config/webpack.config.js
index db4d2c3f37e..39add7def22 100644
--- a/config/webpack.config.js
+++ b/config/webpack.config.js
@@ -1,25 +1,34 @@
const fs = require('fs');
-const path = require('path');
-const glob = require('glob');
-const webpack = require('webpack');
-const VueLoaderPlugin = require('vue-loader/lib/plugin');
-const StatsWriterPlugin = require('webpack-stats-plugin').StatsWriterPlugin;
+
+const SOURCEGRAPH_VERSION = require('@sourcegraph/code-host-integration/package.json').version;
+
const CompressionPlugin = require('compression-webpack-plugin');
-const MonacoWebpackPlugin = require('./plugins/monaco_webpack');
-const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;
const CopyWebpackPlugin = require('copy-webpack-plugin');
-const vendorDllHash = require('./helpers/vendor_dll_hash');
+const glob = require('glob');
+const path = require('path');
+const VueLoaderPlugin = require('vue-loader/lib/plugin');
+const VUE_LOADER_VERSION = require('vue-loader/package.json').version;
+const VUE_VERSION = require('vue/package.json').version;
+const webpack = require('webpack');
+const { BundleAnalyzerPlugin } = require('webpack-bundle-analyzer');
+const { StatsWriterPlugin } = require('webpack-stats-plugin');
+const WEBPACK_VERSION = require('webpack/package.json').version;
+
const createIncrementalWebpackCompiler = require('./helpers/incremental_webpack_compiler');
+const IS_EE = require('./helpers/is_ee_env');
+const vendorDllHash = require('./helpers/vendor_dll_hash');
+
+const MonacoWebpackPlugin = require('./plugins/monaco_webpack');
const ROOT_PATH = path.resolve(__dirname, '..');
const VENDOR_DLL = process.env.WEBPACK_VENDOR_DLL && process.env.WEBPACK_VENDOR_DLL !== 'false';
const CACHE_PATH = process.env.WEBPACK_CACHE_PATH || path.join(ROOT_PATH, 'tmp/cache');
const IS_PRODUCTION = process.env.NODE_ENV === 'production';
const IS_DEV_SERVER = process.env.WEBPACK_DEV_SERVER === 'true';
-const IS_EE = require('./helpers/is_ee_env');
+
const DEV_SERVER_HOST = process.env.DEV_SERVER_HOST || 'localhost';
const DEV_SERVER_PORT = parseInt(process.env.DEV_SERVER_PORT, 10) || 3808;
-const DEV_SERVER_PUBLIC_ADDR = process.env.DEV_SERVER_PUBLIC_ADDR;
+const { DEV_SERVER_PUBLIC_ADDR } = process.env;
const DEV_SERVER_ALLOWED_HOSTS =
process.env.DEV_SERVER_ALLOWED_HOSTS && process.env.DEV_SERVER_ALLOWED_HOSTS.split(',');
const DEV_SERVER_HTTPS = process.env.DEV_SERVER_HTTPS && process.env.DEV_SERVER_HTTPS !== 'false';
@@ -38,11 +47,6 @@ const WEBPACK_OUTPUT_PATH = path.join(ROOT_PATH, 'public/assets/webpack');
const WEBPACK_PUBLIC_PATH = '/assets/webpack/';
const SOURCEGRAPH_PACKAGE = '@sourcegraph/code-host-integration';
-const VUE_VERSION = require('vue/package.json').version;
-const VUE_LOADER_VERSION = require('vue-loader/package.json').version;
-const WEBPACK_VERSION = require('webpack/package.json').version;
-const SOURCEGRAPH_VERSION = require(path.join(SOURCEGRAPH_PACKAGE, 'package.json')).version;
-
const SOURCEGRAPH_PATH = path.join('sourcegraph', SOURCEGRAPH_VERSION, '/');
const SOURCEGRAPH_OUTPUT_PATH = path.join(WEBPACK_OUTPUT_PATH, SOURCEGRAPH_PATH);
const SOURCEGRAPH_PUBLIC_PATH = path.join(WEBPACK_PUBLIC_PATH, SOURCEGRAPH_PATH);
@@ -67,19 +71,19 @@ function generateEntries() {
});
watchAutoEntries = [path.join(ROOT_PATH, 'app/assets/javascripts/pages/')];
- function generateAutoEntries(path, prefix = '.') {
- const chunkPath = path.replace(/\/index\.js$/, '');
+ function generateAutoEntries(entryPath, prefix = '.') {
+ const chunkPath = entryPath.replace(/\/index\.js$/, '');
const chunkName = chunkPath.replace(/\//g, '.');
- autoEntriesMap[chunkName] = `${prefix}/${path}`;
+ autoEntriesMap[chunkName] = `${prefix}/${entryPath}`;
}
- pageEntries.forEach((path) => generateAutoEntries(path));
+ pageEntries.forEach((entryPath) => generateAutoEntries(entryPath));
if (IS_EE) {
const eePageEntries = glob.sync('pages/**/index.js', {
cwd: path.join(ROOT_PATH, 'ee/app/assets/javascripts'),
});
- eePageEntries.forEach((path) => generateAutoEntries(path, 'ee'));
+ eePageEntries.forEach((entryPath) => generateAutoEntries(entryPath, 'ee'));
watchAutoEntries.push(path.join(ROOT_PATH, 'ee/app/assets/javascripts/pages/'));
}
@@ -197,9 +201,9 @@ module.exports = {
},
{
test: /\.js$/,
- exclude: (path) =>
- /node_modules\/(?!tributejs)|node_modules|vendor[\\/]assets/.test(path) &&
- !/\.vue\.js/.test(path),
+ exclude: (modulePath) =>
+ /node_modules\/(?!tributejs)|node_modules|vendor[\\/]assets/.test(modulePath) &&
+ !/\.vue\.js/.test(modulePath),
loader: 'babel-loader',
options: {
cacheDirectory: path.join(CACHE_PATH, 'babel-loader'),
@@ -353,7 +357,7 @@ module.exports = {
// webpack-rails only needs assetsByChunkName to function properly
new StatsWriterPlugin({
filename: 'manifest.json',
- transform: function (data, opts) {
+ transform(data, opts) {
const stats = opts.compiler.getStats().toJson({
chunkModules: false,
source: false,
@@ -411,6 +415,7 @@ module.exports = {
`Warning: No vendor DLL found at: ${dll.cacheFrom}. Compiling DLL automatically.`,
);
+ // eslint-disable-next-line global-require
const dllConfig = require('./webpack.vendor.config.js');
const dllCompiler = webpack(dllConfig);
@@ -434,7 +439,7 @@ module.exports = {
}
dll.exists = true;
- callback();
+ return callback();
});
}
});
@@ -458,6 +463,7 @@ module.exports = {
!IS_EE &&
new webpack.NormalModuleReplacementPlugin(/^ee_component\/(.*)\.vue/, (resource) => {
+ // eslint-disable-next-line no-param-reassign
resource.request = path.join(
ROOT_PATH,
'app/assets/javascripts/vue_shared/components/empty_component.js',
@@ -520,7 +526,7 @@ module.exports = {
// output the in-memory heap size upon compilation and exit
WEBPACK_MEMORY_TEST && {
apply(compiler) {
- compiler.hooks.emit.tapAsync('ReportMemoryConsumptionPlugin', (compilation, callback) => {
+ compiler.hooks.emit.tapAsync('ReportMemoryConsumptionPlugin', () => {
console.log('Assets compiled...');
if (global.gc) {
console.log('Running garbage collection...');
@@ -551,7 +557,9 @@ module.exports = {
);
// exit in case we're running webpack-dev-server
- IS_DEV_SERVER && process.exit();
+ if (IS_DEV_SERVER) {
+ process.exit();
+ }
});
},
},
diff --git a/config/webpack.vendor.config.js b/config/webpack.vendor.config.js
index 29c4c33314e..7e5365987ee 100644
--- a/config/webpack.vendor.config.js
+++ b/config/webpack.vendor.config.js
@@ -1,7 +1,7 @@
const path = require('path');
const webpack = require('webpack');
-const vendorDllHash = require('./helpers/vendor_dll_hash');
const { YarnCheck } = require('yarn-check-webpack-plugin');
+const vendorDllHash = require('./helpers/vendor_dll_hash');
const ROOT_PATH = path.resolve(__dirname, '..');
diff --git a/doc/development/experiment_guide/experimentation.md b/doc/development/experiment_guide/experimentation.md
new file mode 100644
index 00000000000..7135f8acd9b
--- /dev/null
+++ b/doc/development/experiment_guide/experimentation.md
@@ -0,0 +1,399 @@
+---
+stage: Growth
+group: Activation
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
+---
+
+# Create an A/B test with `Experimentation Module`
+
+## Implement the experiment
+
+1. Add the experiment to the `Gitlab::Experimentation::EXPERIMENTS` hash in
+ [`experimentation.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib%2Fgitlab%2Fexperimentation.rb):
+
+ ```ruby
+ EXPERIMENTS = {
+ other_experiment: {
+ #...
+ },
+ # Add your experiment here:
+ signup_flow: {
+ tracking_category: 'Growth::Activation::Experiment::SignUpFlow' # Used for providing the category when setting up tracking data
+ }
+ }.freeze
+ ```
+
+1. Use the experiment in the code.
+
+ Experiments can be performed on a `subject`. The provided `subject` should
+ respond to `to_global_id` or `to_s`.
+ The resulting string is bucketed and assigned to either the control or the
+ experimental group, so you must always provide the same `subject`
+ for an experiment to have the same experience.
+
+ 1. Use this standard for the experiment in a controller:
+
+ - Experiment run for a user:
+
+ ```ruby
+ class ProjectController < ApplicationController
+ def show
+ # experiment_enabled?(:experiment_key) is also available in views and helpers
+ if experiment_enabled?(:signup_flow, subject: current_user)
+ # render the experiment
+ else
+ # render the original version
+ end
+ end
+ end
+ ```
+
+ - Experiment run for a namespace:
+
+ ```ruby
+ if experiment_enabled?(:signup_flow, subject: namespace)
+ # experiment code
+ else
+ # control code
+ end
+ ```
+
+ When no subject is given, it falls back to a cookie that gets set and is consistent until
+ the cookie gets deleted.
+
+ ```ruby
+ class RegistrationController < ApplicationController
+ def show
+ # falls back to a cookie
+ if experiment_enabled?(:signup_flow)
+ # render the experiment
+ else
+ # render the original version
+ end
+ end
+ end
+ ```
+
+ 1. Make the experiment available to the frontend in a controller. This example
+ checks whether the experiment is enabled and pushes the result to the frontend:
+
+ ```ruby
+ before_action do
+ push_frontend_experiment(:signup_flow, subject: current_user)
+ end
+ ```
+
+ You can check the state of the feature flag in JavaScript:
+
+ ```javascript
+ import { isExperimentEnabled } from '~/experimentation';
+
+ if ( isExperimentEnabled('signupFlow') ) {
+ // ...
+ }
+ ```
+
+You can also run an experiment outside of the controller scope, such as in a worker:
+
+```ruby
+class SomeWorker
+ def perform
+ # Check if the experiment is active at all (the percentage_of_time_value > 0)
+ return unless Gitlab::Experimentation.active?(:experiment_key)
+
+ # Since we cannot access cookies in a worker, we need to bucket models
+ # based on a unique, unchanging attribute instead.
+ # It is therefore necessary to always provide the same subject.
+ if Gitlab::Experimentation.in_experiment_group?(:experiment_key, subject: user)
+ # execute experimental code
+ else
+ # execute control code
+ end
+ end
+end
+```
+
+## Implement tracking events
+
+To determine whether the experiment is a success, we must implement tracking events
+to acquire data for analysis. We can send events to Snowplow via either the backend or frontend.
+Read the [product intelligence guide](https://about.gitlab.com/handbook/product/product-intelligence-guide/) for more details.
+
+### Track backend events
+
+The framework provides a helper method that is available in controllers:
+
+```ruby
+before_action do
+ track_experiment_event(:signup_flow, 'action', 'value', subject: current_user)
+end
+```
+
+To test it:
+
+```ruby
+context 'when the experiment is active and the user is in the experimental group' do
+ before do
+ stub_experiment(signup_flow: true)
+ stub_experiment_for_subject(signup_flow: true)
+ end
+
+ it 'tracks an event', :snowplow do
+ subject
+
+ expect_snowplow_event(
+ category: 'Growth::Activation::Experiment::SignUpFlow',
+ action: 'action',
+ value: 'value',
+ label: 'experimentation_subject_id',
+ property: 'experimental_group'
+ )
+ end
+end
+```
+
+### Track frontend events
+
+The framework provides a helper method that is available in controllers:
+
+```ruby
+before_action do
+ push_frontend_experiment(:signup_flow, subject: current_user)
+ frontend_experimentation_tracking_data(:signup_flow, 'action', 'value', subject: current_user)
+end
+```
+
+This pushes tracking data to `gon.experiments` and `gon.tracking_data`.
+
+```ruby
+expect(Gon.experiments['signupFlow']).to eq(true)
+
+expect(Gon.tracking_data).to eq(
+ {
+ category: 'Growth::Activation::Experiment::SignUpFlow',
+ action: 'action',
+ value: 'value',
+ label: 'experimentation_subject_id',
+ property: 'experimental_group'
+ }
+)
+```
+
+To track it:
+
+```javascript
+import { isExperimentEnabled } from '~/lib/utils/experimentation';
+import Tracking from '~/tracking';
+
+document.addEventListener('DOMContentLoaded', () => {
+ const signupFlowExperimentEnabled = isExperimentEnabled('signupFlow');
+
+ if (signupFlowExperimentEnabled && gon.tracking_data) {
+ const { category, action, ...data } = gon.tracking_data;
+
+ Tracking.event(category, action, data);
+ }
+});
+```
+
+To test it in Jest:
+
+```javascript
+import { withGonExperiment } from 'helpers/experimentation_helper';
+import Tracking from '~/tracking';
+
+describe('event tracking', () => {
+ describe('with tracking data', () => {
+ withGonExperiment('signupFlow');
+
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event').mockImplementation(() => {});
+
+ gon.tracking_data = {
+ category: 'Growth::Activation::Experiment::SignUpFlow',
+ action: 'action',
+ value: 'value',
+ label: 'experimentation_subject_id',
+ property: 'experimental_group'
+ };
+ });
+
+ it('should track data', () => {
+ performAction()
+
+ expect(Tracking.event).toHaveBeenCalledWith(
+ 'Growth::Activation::Experiment::SignUpFlow',
+ 'action',
+ {
+ value: 'value',
+ label: 'experimentation_subject_id',
+ property: 'experimental_group'
+ },
+ );
+ });
+ });
+});
+```
+
+## Record experiment user
+
+In addition to the anonymous tracking of events, we can also record which users
+have participated in which experiments, and whether they were given the control
+experience or the experimental experience.
+
+The `record_experiment_user` helper method is available to all controllers, and it
+enables you to record these experiment participants (the current user) and which
+experience they were given:
+
+```ruby
+before_action do
+ record_experiment_user(:signup_flow)
+end
+```
+
+Subsequent calls to this method for the same experiment and the same user have no
+effect unless the user is then enrolled into a different experience. This happens
+when we roll out the experimental experience to a greater percentage of users.
+
+This data is completely separate from the [events tracking data](#implement-tracking-events).
+They are not linked together in any way.
+
+### Add context
+
+You can add arbitrary context data in a hash which gets stored as part of the experiment
+user record. New calls to the `record_experiment_user` with newer contexts are merged
+deeply into the existing context.
+
+This data can then be used by data analytics dashboards.
+
+```ruby
+before_action do
+ record_experiment_user(:signup_flow, foo: 42, bar: { a: 22})
+ # context is { "foo" => 42, "bar" => { "a" => 22 }}
+end
+
+# Additional contexts for newer record calls are merged deeply
+record_experiment_user(:signup_flow, foo: 40, bar: { b: 2 }, thor: 3)
+# context becomes { "foo" => 40, "bar" => { "a" => 22, "b" => 2 }, "thor" => 3}
+```
+
+## Record experiment conversion event
+
+Along with the tracking of backend and frontend events and the
+[recording of experiment participants](#record-experiment-user), we can also record
+when a user performs the desired conversion event action. For example:
+
+- **Experimental experience:** Show an in-product nudge to test if the change causes more
+ people to sign up for trials.
+- **Conversion event:** The user starts a trial.
+
+The `record_experiment_conversion_event` helper method is available to all controllers.
+Use it to record the conversion event for the current user, regardless of whether
+the user is in the control or experimental group:
+
+```ruby
+before_action do
+ record_experiment_conversion_event(:signup_flow)
+end
+```
+
+Note that the use of this method requires that we have first
+[recorded the user](#record-experiment-user) as being part of the experiment.
+
+## Enable the experiment
+
+After all merge requests have been merged, use [ChatOps](../../ci/chatops/index.md) in the
+[appropriate channel](../feature_flags/controls.md#communicate-the-change) to start the experiment for 10% of the users.
+The feature flag should have the name of the experiment with the `_experiment_percentage` suffix appended.
+For visibility, share any commands run against production in the `#s_growth` channel:
+
+ ```shell
+ /chatops run feature set signup_flow_experiment_percentage 10
+ ```
+
+ If you notice issues with the experiment, you can disable it by removing the feature flag:
+
+ ```shell
+ /chatops run feature delete signup_flow_experiment_percentage
+ ```
+
+## Add user to experiment group manually
+
+To force the application to add your current user into the experiment group,
+add a query string parameter to the path where the experiment runs. If you add the
+query string parameter, the experiment works only for this request, and doesn't work
+after following links or submitting forms.
+
+For example, to forcibly enable the `EXPERIMENT_KEY` experiment, add `force_experiment=EXPERIMENT_KEY`
+to the URL:
+
+```shell
+https://gitlab.com/<EXPERIMENT_ENTRY_URL>?force_experiment=<EXPERIMENT_KEY>
+```
+
+## Add user to experiment group with a cookie
+
+You can force the current user into the experiment group for `<EXPERIMENT_KEY>`
+during the browser session by using your browser's developer tools:
+
+```javascript
+document.cookie = "force_experiment=<EXPERIMENT_KEY>; path=/";
+```
+
+Use a comma to list more than one experiment to be forced:
+
+```javascript
+document.cookie = "force_experiment=<EXPERIMENT_KEY>,<ANOTHER_EXPERIMENT_KEY>; path=/";
+```
+
+To clear the experiments, unset the `force_experiment` cookie:
+
+```javascript
+document.cookie = "force_experiment=; path=/";
+```
+
+## Testing and test helpers
+
+### RSpec
+
+Use the following in RSpec to mock the experiment:
+
+```ruby
+context 'when the experiment is active' do
+ before do
+ stub_experiment(signup_flow: true)
+ end
+
+ context 'when the user is in the experimental group' do
+ before do
+ stub_experiment_for_subject(signup_flow: true)
+ end
+
+ it { is_expected.to do_experimental_thing }
+ end
+
+ context 'when the user is in the control group' do
+ before do
+ stub_experiment_for_subject(signup_flow: false)
+ end
+
+ it { is_expected.to do_control_thing }
+ end
+end
+```
+
+### Jest
+
+Use the following in Jest to mock the experiment:
+
+```javascript
+import { withGonExperiment } from 'helpers/experimentation_helper';
+
+describe('given experiment is enabled', () => {
+ withGonExperiment('signupFlow');
+
+ it('should do the experimental thing', () => {
+ expect(wrapper.find('.js-some-experiment-triggered-element')).toEqual(expect.any(Element));
+ });
+});
+```
diff --git a/doc/development/experiment_guide/gitlab_experiment.md b/doc/development/experiment_guide/gitlab_experiment.md
new file mode 100644
index 00000000000..2e58e1ed25e
--- /dev/null
+++ b/doc/development/experiment_guide/gitlab_experiment.md
@@ -0,0 +1,556 @@
+---
+stage: Growth
+group: Adoption
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
+---
+
+# Implementing an A/B/n experiment using GLEX
+
+## Introduction
+
+`Gitlab::Experiment` (GLEX) is tightly coupled with the concepts provided by
+[Feature flags in development of GitLab](../feature_flags/index.md). Here, we refer
+to this layer as feature flags, and may also use the term Flipper, because we
+built our development and experiment feature flags atop it.
+
+You're strongly encouraged to read and understand the
+[Feature flags in development of GitLab](../feature_flags/index.md) portion of the
+documentation before considering running experiments. Experiments add additional
+concepts which may seem confusing or advanced without understanding the underpinnings
+of how GitLab uses feature flags in development. One concept: GLEX supports multivariate
+experiments, which are sometimes referred to as A/B/n tests.
+
+The [`gitlab-experiment` project](https://gitlab.com/gitlab-org/gitlab-experiment)
+exists in a separate repository, so it can be shared across any GitLab property that uses
+Ruby. You should feel comfortable reading the documentation on that project as well
+if you want to dig into more advanced topics.
+
+## Glossary of terms
+
+To ensure a shared language, you should understand these fundamental terms we use
+when communicating about experiments:
+
+- `experiment`: Any deviation of code paths we want to run at some times, but not others.
+- `context`: A consistent experience we provide in an experiment.
+- `control`: The default, or "original" code path.
+- `candidate`: Defines an experiment with only one code path.
+- `variant(s)`: Defines an experiment with multiple code paths.
+
+### How it works
+
+Use this decision tree diagram to understand how GLEX works. When an experiment runs,
+the following logic is executed to determine what variant should be provided,
+given how the experiment has been defined and using the provided context:
+
+```mermaid
+graph TD
+ GP[General Pool/Population] --> Running?
+ Running? -->|Yes| Cached?[Cached? / Pre-segmented?]
+ Running? -->|No| Excluded[Control / No Tracking]
+ Cached? -->|No| Excluded?
+ Cached? -->|Yes| Cached[Cached Value]
+ Excluded? -->|Yes / Cached| Excluded
+ Excluded? -->|No| Segmented?
+ Segmented? -->|Yes / Cached| VariantA
+ Segmented? -->|No| Included?[Experiment Group?]
+ Included? -->|Yes| Rollout
+ Included? -->|No| Control
+ Rollout -->|Cached| VariantA
+ Rollout -->|Cached| VariantB
+ Rollout -->|Cached| VariantC
+
+classDef included fill:#380d75,color:#ffffff,stroke:none
+classDef excluded fill:#fca121,stroke:none
+classDef cached fill:#2e2e2e,color:#ffffff,stroke:none
+classDef default fill:#fff,stroke:#6e49cb
+
+class VariantA,VariantB,VariantC included
+class Control,Excluded excluded
+class Cached cached
+```
+
+## Implement an experiment
+
+Start by generating a feature flag using the `bin/feature-flag` command as you
+normally would for a development feature flag, making sure to use `experiment` for
+the type. For the sake of documentation, let's name our feature flag (and experiment)
+"pill_color".
+
+```shell
+bin/feature-flag pill_color -t experiment
+```
+
+After you generate the desired feature flag, you can immediately implement an
+experiment in code. An experiment implementation can be as simple as:
+
+```ruby
+experiment(:pill_color, actor: current_user) do |e|
+ e.use { 'control' }
+ e.try(:red) { 'red' }
+ e.try(:blue) { 'blue' }
+end
+```
+
+When this code executes, the experiment is run, a variant is assigned, and (if within a
+controller or view) a `window.gon.experiment.pillColor` object will be available in the
+client layer, with details like:
+
+- The assigned variant.
+- The context key for client tracking events.
+
+In addition, when an experiment runs, an event is tracked for
+the experiment `:assignment`. We cover more about events, tracking, and
+the client layer later.
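+
+As an illustration only, a client-side check for the assigned variant could use the
+`isExperimentVariant` helper from `~/experimentation/utils`. The camel-cased name and the
+`'red'` variant string follow the example above and may need adjusting for your experiment:
+
+```javascript
+import { isExperimentVariant } from '~/experimentation/utils';
+
+// Runs the experimental code path only when this client was assigned the `red` variant.
+if (isExperimentVariant('pillColor', 'red')) {
+  // ... render the red experience
+}
+```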
+
+In local development, you can make the experiment active by using the feature flag
+interface. You can also target specific cases by providing the relevant experiment
+to the call to enable the feature flag:
+
+```ruby
+# Enable for everyone
+Feature.enable(:pill_color)
+
+# Get the `experiment` method -- already available in controllers, views, and mailers.
+include Gitlab::Experiment::Dsl
+# Enable for only the first user
+Feature.enable(:pill_color, experiment(:pill_color, actor: User.first))
+```
+
+To roll out your experiment feature flag on an environment, run
+the following command using ChatOps (which is covered in more depth in the
+[Feature flags in development of GitLab](../feature_flags/index.md) documentation).
+This command creates a scenario where half of everyone who encounters
+the experiment would be assigned the _control_, 25% would be assigned the _red_
+variant, and 25% would be assigned the _blue_ variant:
+
+```slack_slash_commands
+/chatops run feature set pill_color 50 --actors
+```
+
+For an even distribution in this example, change the command to set it to 66% instead
+of 50: because the enrolled percentage is split evenly between the red and blue variants,
+enrolling 66% leaves roughly a third of users in each of control, red, and blue.
+
+NOTE:
+To immediately stop running an experiment, use the
+`/chatops run feature set pill_color false` command.
+
+WARNING:
+We strongly recommend using the `--actors` flag when using the ChatOps commands,
+because anything else may give odd behaviors due to:
+
+- How the caching of variant assignment is handled.
+- How the default `percentage_of_time` rollout is unpredictable and pseudo-random.
+
+We can also implement this experiment in a HAML file with HTML wrappings:
+
+```haml
+#cta-interface
+ - experiment(:pill_color, actor: current_user) do |e|
+ - e.use do
+ .pill-button control
+ - e.try(:red) do
+ .pill-button.red red
+ - e.try(:blue) do
+ .pill-button.blue blue
+```
+
+### The importance of context
+
+In our previous example experiment, our context (this is an important term) is a hash
+that's set to `{ actor: current_user }`. Context must be unique based on how you
+want to run your experiment, and is worth understanding in more detail.
+
+It's expected, and recommended, that you use some of these
+contexts to simplify reporting:
+
+- `{ actor: current_user }`: Assigns a variant and is "sticky" to each user
+ (or "client" if `current_user` is nil) who enters the experiment.
+- `{ project: project }`: Assigns a variant and is "sticky" to the project currently
+ being viewed. If running your experiment is more useful when viewing a project,
+ rather than when a specific user is viewing any project, consider this approach.
+- `{ group: group }`: Similar to the project example, but applies to a wider
+ scope of projects and users.
+- `{ actor: current_user, project: project }`: Assigns a variant and is "sticky"
+  to the user who is viewing the given project. This creates a different variant
+  assignment possibility for every project that `current_user` views. Understand that
+  this can create a large cache size if an experiment like this runs in a highly
+  trafficked part of the application.
+- `{ wday: Time.current.wday }`: Assigns a variant based on the current day of the
+ week. In this example, it would consistently assign one variant on Friday, and a
+ potentially different variant on Saturday.
+
+Context is critical to how you define and report on your experiment. It's usually
+the most important aspect of how you choose to implement your experiment, so consider
+it carefully, and discuss it with the wider team if needed. Also, take into account
+that the context you choose affects our cache size.
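+
+For illustration, a minimal sketch of a project-scoped call, reusing the behaviors
+from the earlier `pill_color` example, could look like this:
+
+```ruby
+# Sticky per project: every user viewing this project gets the same variant.
+experiment(:pill_color, project: project) do |e|
+  e.use { 'control' }
+  e.try(:red) { 'red' }
+  e.try(:blue) { 'blue' }
+end
+```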
+
+After the above examples, we can state the general case: *given a specific
+and consistent context, we can provide a consistent experience and track events for
+that experience.* To dive a bit deeper into the implementation details: a context key
+is generated from the context that's provided. Use this context key to:
+
+- Determine the assigned variant.
+- Identify events tracked against that context key.
+
+We can think about this as the experience that we've rendered, which is both dictated
+and tracked by the context key. The context key is used to track the interaction and
+results of the experience we've rendered to that context key. These concepts are
+somewhat abstract and hard to understand initially, but this approach enables us to
+communicate about experiments as something that's wider than just user behavior.
+
+NOTE:
+Using `actor:` utilizes cookies if the `current_user` is nil. If you don't need
+cookies though, meaning that the exposed functionality is only visible to
+signed-in users, `{ user: current_user }` would be just as effective.
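+
+A minimal sketch of that signed-in-only form, using the experiment from the earlier
+examples:
+
+```ruby
+# No cookie fallback is involved here because a signed-in user is always present.
+experiment(:pill_color, user: current_user).run
+```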
+
+WARNING:
+Variant assignment is cached using this context, so consider the impact on cache
+size when defining your experiment. If you use `{ time: Time.current }`, you inflate
+the cache size every time the experiment runs. Not only that, your experiment
+wouldn't be "sticky" and events wouldn't be resolvable.
+
+### Advanced experimentation
+
+GLEX allows for two general implementation styles:
+
+1. The simple experiment style described previously.
+1. A more advanced style where an experiment class can be provided.
+
+The advanced style is handled by naming convention, and works similarly to what you
+would expect in Rails.
+
+To generate a custom experiment class that can override the defaults in
+`ApplicationExperiment` (our base GLEX implementation), use the rails generator:
+
+```shell
+rails generate gitlab:experiment pill_color control red blue
+```
+
+This generates an experiment class in `app/experiments/pill_color_experiment.rb`
+with the variants (or _behaviors_) we've provided to the generator. Here's an example
+of how that class would look after migrating the previous example into it:
+
+```ruby
+class PillColorExperiment < ApplicationExperiment
+ def control_behavior
+ 'control'
+ end
+
+ def red_behavior
+ 'red'
+ end
+
+ def blue_behavior
+ 'blue'
+ end
+end
+```
+
+We can now simplify the code where we run our experiment: instead of providing the
+block we provided initially, we explicitly call `run`:
+
+```ruby
+experiment(:pill_color, actor: current_user).run
+```
+
+The _behavior_ methods we defined in our experiment class represent the default
+implementation. You can still use the block syntax to override these _behavior_
+methods, however, so the following would also be valid:
+
+```ruby
+experiment(:pill_color, actor: current_user) do |e|
+ e.use { '<strong>control</strong>' }
+end
+```
+
+NOTE:
+When passing a block to the `experiment` method, it is implicitly invoked as
+if `run` has been called.
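+
+The return value of `run` is the value returned by the selected behavior, so, as a
+usage sketch (assuming the behaviors defined above), you could use it directly:
+
+```ruby
+# Returns 'control', 'red', or 'blue' depending on the assigned variant.
+pill_class = experiment(:pill_color, actor: current_user).run
+```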
+
+#### Segmentation rules
+
+You can use runtime segmentation rules to, for instance, segment contexts into a specific
+variant. The `segment` method is a callback (like `before_action`) and so allows providing
+a block or method name.
+
+In this example, any user named `'Richard'` would always be assigned the _red_
+variant, and any account older than two weeks would be assigned the _blue_ variant:
+
+```ruby
+class PillColorExperiment < ApplicationExperiment
+ segment(variant: :red) { context.actor.first_name == 'Richard' }
+ segment :old_account?, variant: :blue
+
+ # ...behaviors
+
+ private
+
+ def old_account?
+ context.actor.created_at < 2.weeks.ago
+ end
+end
+```
+
+When an experiment runs, the segmentation rules are executed in the order they're
+defined. The first segmentation rule to produce a truthy result assigns the variant.
+
+In our example, any user named `'Richard'`, regardless of account age, will always
+be assigned the _red_ variant. If you want the opposite logic, flip the order.
+
+NOTE:
+Keep in mind when defining segmentation rules: after a truthy result, the remaining
+segmentation rules are skipped to achieve optimal performance.
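+
+As a sketch, the flipped ordering mentioned above, where the account-age rule takes
+precedence over the name rule, simply swaps the two `segment` calls:
+
+```ruby
+class PillColorExperiment < ApplicationExperiment
+  # Evaluated first: old accounts get the blue variant even if the user is named Richard.
+  segment :old_account?, variant: :blue
+  segment(variant: :red) { context.actor.first_name == 'Richard' }
+
+  # ...behaviors and the private old_account? method as before
+end
+```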
+
+#### Exclusion rules
+
+Exclusion rules are similar to segmentation rules, but are intended to determine
+if a context should even be considered as something we should include in the experiment
+and track events toward. Exclusion means we don't care about the events in relation
+to the given context.
+
+This example excludes all users named `'Richard'`, *and* any account
+older than two weeks. Not only are they given the control behavior - which could
+be nothing - but no events are tracked in these cases either.
+
+```ruby
+class PillColorExperiment < ApplicationExperiment
+ exclude :old_account?, ->{ context.actor.first_name == 'Richard' }
+
+ # ...behaviors
+
+ private
+
+ def old_account?
+ context.actor.created_at < 2.weeks.ago
+ end
+end
+```
+
+We can also exclude at the time we run the experiment. For instance,
+to prevent non-administrators from being included in an experiment, consider
+the following. This type of logic lets us perform complex exclusion checks at the
+call site while keeping the amount of data we pass into our experiments to a
+minimum:
+
+```ruby
+experiment(:pill_color, actor: current_user) do |e|
+ e.exclude! unless can?(current_user, :admin_project, project)
+end
+```
+
+You may also need to check exclusion in custom tracking logic by calling `should_track?`:
+
+```ruby
+class PillColorExperiment < ApplicationExperiment
+ # ...behaviors
+
+ def expensive_tracking_logic
+ return unless should_track?
+
+ track(:my_event, value: expensive_method_call)
+ end
+end
+```
+
+Exclusion rules aren't the best way to determine if an experiment is active. Override
+the `enabled?` method for a high-level way of determining if an experiment should
+run and track. Make the `enabled?` check as efficient as possible because it's the
+first early opt-out path an experiment can implement.
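+
+A sketch of such an override, where the specific condition is only illustrative and
+calling `super` to keep the default feature flag check is an assumption about the
+base class:
+
+```ruby
+class PillColorExperiment < ApplicationExperiment
+  # ...behaviors
+
+  # Keep this check cheap: it's the first early opt-out path for the experiment.
+  def enabled?
+    super && Feature.enabled?(:pill_color_prerequisite) # hypothetical extra condition
+  end
+end
+```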
+
+### Tracking events
+
+One of the most important aspects of experiments is gathering data and reporting on
+it. GLEX provides an interface that allows tracking events across an experiment.
+Tracking is consistent as long as you provide the same context between
+calls to your experiment. If you do not yet understand context, read
+[the section on context](#the-importance-of-context) first.
+
+We can assume we run the experiment in one or a few places, but
+track events potentially in many places. The tracking call remains the same, with
+the arguments you would normally use when
+[tracking events using snowplow](../snowplow.md). The easiest example
+of tracking an event in Ruby would be:
+
+```ruby
+experiment(:pill_color, actor: current_user).track(:created)
+```
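+
+You can also pass the usual Snowplow-style arguments; for example (the event name
+and values here are only illustrative):
+
+```ruby
+experiment(:pill_color, actor: current_user)
+  .track(:clicked, label: 'pill_cta', property: 'header', value: 1)
+```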
+
+When you run an experiment with any of these examples, an `:assignment` event
+is tracked automatically by default. All events that are tracked from an
+experiment have a special
+[experiment context](https://gitlab.com/gitlab-org/iglu/-/blob/master/public/schemas/com.gitlab/gitlab_experiment/jsonschema/1-0-0)
+added to the event. This can be used - typically by the data team - to create a connection
+between the events on a given experiment.
+
+If our current user hasn't encountered the experiment yet (meaning the code where the
+experiment is run), and we track an event for them, they are assigned a variant. If
+they encounter the experiment later, they see that same variant, and an `:assignment`
+event is tracked for them at that time.
+
+NOTE:
+GitLab tries to be sensitive and respectful of our customers regarding tracking,
+so GLEX allows us to implement an experiment without ever tracking identifying
+IDs. It's not always possible, though, based on experiment reporting requirements.
+You may be asked from time to time to track a specific record ID in experiments.
+The approach is largely up to the PM and engineer creating the implementation.
+No recommendations are provided here at this time.
+
+## Test with RSpec
+
+This gem provides some RSpec helpers and custom matchers. These are in flux as of GitLab 13.10.
+
+First, require the RSpec support file to mix in some of the basics:
+
+```ruby
+require 'gitlab/experiment/rspec'
+```
+
+The matchers and other aspects still need to be included. This happens
+automatically for files in `spec/experiments`. For other files and specs
+you want to include them in, specify the `:experiment` type:
+
+```ruby
+it "tests", :experiment do
+end
+```
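+
+The type can also be applied to a whole example group; a sketch for a spec outside
+of `spec/experiments`:
+
+```ruby
+RSpec.describe ProjectsController, :experiment do
+  # experiment stubs and matchers are available to all examples in this group
+end
+```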
+
+### Stub helpers
+
+You can stub experiments using `stub_experiments`. Pass it a hash using experiment
+names as the keys, and the variants you want each to resolve to, as the values:
+
+```ruby
+# Ensures the experiments named `:example` & `:example2` are both
+# "enabled" and that each will resolve to the given variant
+# (`:my_variant` & `:control` respectively).
+stub_experiments(example: :my_variant, example2: :control)
+
+experiment(:example) do |e|
+ e.enabled? # => true
+ e.variant.name # => 'my_variant'
+end
+
+experiment(:example2) do |e|
+ e.enabled? # => true
+ e.variant.name # => 'control'
+end
+```
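+
+A sketch of combining the stub with the `PillColorExperiment` behaviors defined
+earlier (the `user` here is assumed to be set up elsewhere in the spec, for example
+with a factory):
+
+```ruby
+it 'returns the red behavior result', :experiment do
+  stub_experiments(pill_color: :red)
+
+  # With the variant stubbed, `run` returns the result of the red behavior ('red').
+  expect(experiment(:pill_color, actor: user).run).to eq('red')
+end
+```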
+
+### Exclusion and segmentation matchers
+
+You can also test the exclusion and segmentation matchers.
+
+```ruby
+class ExampleExperiment < ApplicationExperiment
+ exclude { context.actor.first_name == 'Richard' }
+ segment(variant: :candidate) { context.actor.username == 'jejacks0n' }
+end
+
+excluded = double(username: 'rdiggitty', first_name: 'Richard')
+segmented = double(username: 'jejacks0n', first_name: 'Jeremy')
+
+# exclude matcher
+expect(experiment(:example)).to exclude(actor: excluded)
+expect(experiment(:example)).not_to exclude(actor: segmented)
+
+# segment matcher
+expect(experiment(:example)).to segment(actor: segmented).into(:candidate)
+expect(experiment(:example)).not_to segment(actor: excluded)
+```
+
+### Tracking matcher
+
+Tracking events is a major aspect of experimentation. We try
+to provide a flexible way to ensure your tracking calls are covered.
+
+You can do this at the instance level or at an "any instance" level:
+
+```ruby
+subject = experiment(:example)
+
+expect(subject).to track(:my_event)
+
+subject.track(:my_event)
+```
+
+You can use the `on_any_instance` chain method to specify that it could happen on
+any instance of the experiment. This helps you if you're calling
+`experiment(:example).track` downstream:
+
+```ruby
+expect(experiment(:example)).to track(:my_event).on_any_instance
+
+experiment(:example).track(:my_event)
+```
+
+A full example of the methods you can chain onto the `track` matcher:
+
+```ruby
+expect(experiment(:example)).to track(:my_event, value: 1, property: '_property_')
+ .on_any_instance
+ .with_context(foo: :bar)
+ .for(:variant_name)
+
+experiment(:example, :variant_name, foo: :bar).track(:my_event, value: 1, property: '_property_')
+```
+
+## Experiments in the client layer
+
+This is in flux as of GitLab 13.10, and can't be documented just yet.
+
+Any experiment that's been run in the request lifecycle surfaces in `window.gon.experiment`,
+and matches [this schema](https://gitlab.com/gitlab-org/iglu/-/blob/master/public/schemas/com.gitlab/gitlab_experiment/jsonschema/1-0-0)
+so you can use it when resolving some concepts around experimentation in the client layer.
+
+## Notes on feature flags
+
+NOTE:
+We use the terms "enabled" and "disabled" here, even though it's against our
+[documentation style guide recommendations](../documentation/styleguide/index.md#avoid-ableist-language)
+because these are the terms that the feature flag documentation uses.
+
+You may already be familiar with the concept of feature flags in GitLab, but using
+feature flags in experiments is a bit different. While in general terms, a feature flag
+is viewed as being either `on` or `off`, this isn't accurate for experiments.
+
+Generally, `off` means that when we ask if a feature flag is enabled, it will always
+return `false`, and `on` means that it will always return `true`. An interim state,
+considered `conditional`, also exists. GLEX takes advantage of this trinary state of
+feature flags. To understand this `conditional` aspect: consider that either of these
+settings puts a feature flag into this state:
+
+- Setting a `percentage_of_actors` of any percent greater than 0%.
+- Enabling it for a single user or group.
+
+Conditional means that it returns `true` in some situations, but not all situations.
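+
+As a sketch, either of these calls (using the flag from the earlier examples and the
+development-mode `Feature` interface) results in the `conditional` state:
+
+```ruby
+# Roll out to a percentage of actors greater than 0%.
+Feature.enable_percentage_of_actors(:pill_color, 25)
+
+# Or enable it for a single actor.
+Feature.enable(:pill_color, User.first)
+```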
+
+When a feature flag is disabled (meaning the state is `off`), the experiment is
+considered _inactive_. You can visualize this in the [decision tree diagram](#how-it-works)
+as reaching the first [Running?] node, and traversing the negative path.
+
+When a feature flag is rolled out to a `percentage_of_actors` or similar (meaning the
+state is `conditional`), the experiment is considered to be _running_,
+where sometimes the control is assigned and sometimes the candidate is assigned.
+We don't refer to this as being enabled, because that's a confusing and overloaded
+term here. In the experiment terms, our experiment is _running_, and the feature flag is
+`conditional`.
+
+When a feature flag is enabled (meaning the state is `on`), the candidate will always be
+assigned.
+
+We should try to be consistent with our terms, and so for experiments, we have an
+_inactive_ experiment until we set the feature flag to `conditional`, after which
+our experiment is considered _running_. If you choose to "enable" your feature flag,
+you should consider the experiment to be _resolved_, because everyone is assigned
+the candidate unless they've opted out of experimentation.
+
+As of GitLab 13.10, work is being done to improve this process and how we communicate
+about it.
diff --git a/doc/development/experiment_guide/index.md b/doc/development/experiment_guide/index.md
index 4e1fa65108e..652b4f8f80b 100644
--- a/doc/development/experiment_guide/index.md
+++ b/doc/development/experiment_guide/index.md
@@ -36,404 +36,27 @@ and link to the issue that resolves the experiment. If the experiment is
successful and becomes part of the product, any follow up issues should be
addressed.
-## Implement an experiment
+## Implementing an experiment
-There are two options to conduct experiments:
+There are currently two options when implementing an experiment.
-1. [GitLab Experiment](https://gitlab.com/gitlab-org/gitlab-experiment/) is a gem included in GitLab.
-1. [`Experimentation Module`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib%2Fgitlab%2Fexperimentation.rb) is built in the GitLab codebase.
+One is built into GitLab directly and has been around for a while (this is called
+`Experimentation Module`), and the other is provided by
+[`gitlab-experiment`](https://gitlab.com/gitlab-org/gitlab-experiment) and is referred
+to as `Gitlab::Experiment` -- GLEX for short.
-Both methods use [experiment](../feature_flags/development.md#experiment-type) feature flags.
+Both approaches use [experiment](../feature_flags/development.md#experiment-type)
+feature flags, and there is currently no strong suggestion to use one over the other.
-Historical Context: `Experimentation Module` was built iteratively with the needs that appeared while implementing Growth sub-department experiments. The `gitlab-experiment` gem was built with the learnings of the `Experimentation Module` and an easier to use API.
+| Feature | `Experimentation Module` | GLEX |
+| -------------------- |------------------------- | ---- |
+| Record user grouping | Yes | No |
+| Uses feature flags | Yes | Yes |
+| Multivariate (A/B/n) | No | Yes |
-Currently both methods for running experiments are included in the codebase. The features are slightly different:
+- [Implementing an A/B experiment using `Experimentation Module`](experimentation.md)
+- [Implementing an A/B/n experiment using GLEX](gitlab_experiment.md)
-| Feature | `Experiment Module` | `gitlab-experiment` |
-| ------ | ------ | ------ |
-| Record user grouping | Yes | No (not natively) |
-| Uses feature flags | Yes | Yes |
-| Multivariate | No | Yes |
-
-However, there is currently no strong suggestion to use one over the other.
-
-### Experiments using `gitlab-experiment` **(FREE SAAS)**
-
-> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/300383) in GitLab 13.7.
-> - It's [deployed behind a feature flag](../../user/feature_flags.md), disabled by default.
-> - It's enabled on GitLab.com.
-> - It is not yet intended for use in GitLab self-managed instances.
-
-You find out how to conduct experiments using `gitlab-experiment` in the [README](https://gitlab.com/gitlab-org/gitlab-experiment/-/blob/master/README.md).
-
-### Experiments using the `Experimentation Module`
-
-1. Add the experiment to the `Gitlab::Experimentation::EXPERIMENTS` hash in the [`Experimentation Module`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib%2Fgitlab%2Fexperimentation.rb):
-
- ```ruby
- EXPERIMENTS = {
- other_experiment: {
- #...
- },
- # Add your experiment here:
- signup_flow: {
- tracking_category: 'Growth::Activation::Experiment::SignUpFlow' # Used for providing the category when setting up tracking data
- }
- }.freeze
- ```
-
-1. Use the experiment in the code.
-
- Experiments can be performed on a `subject`. The `subject` that gets provided needs to respond to `to_global_id` or `to_s`.
- The resulting string is bucketed and assigned to either the control or the experimental group. It's therefore necessary to always provide the same `subject` for an experiment to have the same experience.
-
- - Use this standard for the experiment in a controller:
-
- Experiment run for a user:
-
- ```ruby
- class ProjectController < ApplicationController
- def show
- # experiment_enabled?(:experiment_key) is also available in views and helpers
- if experiment_enabled?(:signup_flow, subject: current_user)
- # render the experiment
- else
- # render the original version
- end
- end
- end
- ```
-
- or experiment run for a namespace:
-
- ```ruby
- if experiment_enabled?(:signup_flow, subject: namespace)
- # experiment code
- else
- # control code
- end
- ```
-
- When no subject is given, it falls back to a cookie that gets set and is consistent until
- the cookie gets deleted.
-
- ```ruby
- class RegistrationController < ApplicationController
- def show
- # falls back to a cookie
- if experiment_enabled?(:signup_flow)
- # render the experiment
- else
- # render the original version
- end
- end
- end
- ```
-
- - Make the experiment available to the frontend in a controller:
-
- ```ruby
- before_action do
- push_frontend_experiment(:signup_flow, subject: current_user)
- end
- ```
-
- The above checks whether the experiment is enabled and pushes the result to the frontend.
-
- You can check the state of the feature flag in JavaScript:
-
- ```javascript
- import { isExperimentEnabled } from '~/experimentation';
-
- if ( isExperimentEnabled('signupFlow') ) {
- // ...
- }
- ```
-
- - It is also possible to run an experiment outside of the controller scope, for example in a worker:
-
- ```ruby
- class SomeWorker
- def perform
- # Check if the experiment is active at all (the percentage_of_time_value > 0)
- return unless Gitlab::Experimentation.active?(:experiment_key)
-
- # Since we cannot access cookies in a worker, we need to bucket models based on a unique, unchanging attribute instead.
- # It is therefore necessery to always provide the same subject.
- if Gitlab::Experimentation.in_experiment_group?(:experiment_key, subject: user)
- # execute experimental code
- else
- # execute control code
- end
- end
- end
- ```
-
-#### Implement the tracking events
-
-To determine whether the experiment is a success or not, we must implement tracking events
-to acquire data for analyzing. We can send events to Snowplow via either the backend or frontend.
-Read the [product intelligence guide](https://about.gitlab.com/handbook/product/product-intelligence-guide/) for more details.
-
-##### Track backend events
-
-The framework provides the following helper method that is available in controllers:
-
-```ruby
-before_action do
- track_experiment_event(:signup_flow, 'action', 'value', subject: current_user)
-end
-```
-
-Which can be tested as follows:
-
-```ruby
-context 'when the experiment is active and the user is in the experimental group' do
- before do
- stub_experiment(signup_flow: true)
- stub_experiment_for_subject(signup_flow: true)
- end
-
- it 'tracks an event', :snowplow do
- subject
-
- expect_snowplow_event(
- category: 'Growth::Activation::Experiment::SignUpFlow',
- action: 'action',
- value: 'value',
- label: 'experimentation_subject_id',
- property: 'experimental_group'
- )
- end
-end
-```
-
-##### Track frontend events
-
-The framework provides the following helper method that is available in controllers:
-
-```ruby
-before_action do
- push_frontend_experiment(:signup_flow, subject: current_user)
- frontend_experimentation_tracking_data(:signup_flow, 'action', 'value', subject: current_user)
-end
-```
-
-This pushes tracking data to `gon.experiments` and `gon.tracking_data`.
-
-```ruby
-expect(Gon.experiments['signupFlow']).to eq(true)
-
-expect(Gon.tracking_data).to eq(
- {
- category: 'Growth::Activation::Experiment::SignUpFlow',
- action: 'action',
- value: 'value',
- label: 'experimentation_subject_id',
- property: 'experimental_group'
- }
-)
-```
-
-Which can then be used for tracking as follows:
-
-```javascript
-import { isExperimentEnabled } from '~/lib/utils/experimentation';
-import Tracking from '~/tracking';
-
-document.addEventListener('DOMContentLoaded', () => {
- const signupFlowExperimentEnabled = isExperimentEnabled('signupFlow');
-
- if (signupFlowExperimentEnabled && gon.tracking_data) {
- const { category, action, ...data } = gon.tracking_data;
-
- Tracking.event(category, action, data);
- }
-}
-```
-
-Which can be tested in Jest as follows:
-
-```javascript
-import { withGonExperiment } from 'helpers/experimentation_helper';
-import Tracking from '~/tracking';
-
-describe('event tracking', () => {
- describe('with tracking data', () => {
- withGonExperiment('signupFlow');
-
- beforeEach(() => {
- jest.spyOn(Tracking, 'event').mockImplementation(() => {});
-
- gon.tracking_data = {
- category: 'Growth::Activation::Experiment::SignUpFlow',
- action: 'action',
- value: 'value',
- label: 'experimentation_subject_id',
- property: 'experimental_group'
- };
- });
-
- it('should track data', () => {
- performAction()
-
- expect(Tracking.event).toHaveBeenCalledWith(
- 'Growth::Activation::Experiment::SignUpFlow',
- 'action',
- {
- value: 'value',
- label: 'experimentation_subject_id',
- property: 'experimental_group'
- },
- );
- });
- });
-});
-```
-
-#### Record experiment user
-
-In addition to the anonymous tracking of events, we can also record which users have participated in which experiments and whether they were given the control experience or the experimental experience.
-
-The `record_experiment_user` helper method is available to all controllers, and it enables you to record these experiment participants (the current user) and which experience they were given:
-
-```ruby
-before_action do
- record_experiment_user(:signup_flow)
-end
-```
-
-Subsequent calls to this method for the same experiment and the same user have no effect unless the user has gets enrolled into a different experience. This happens when we roll out the experimental experience to a greater percentage of users.
-
-Note that this data is completely separate from the [events tracking data](#implement-the-tracking-events). They are not linked together in any way.
-
-##### Add context
-
-You can add arbitrary context data in a hash which gets stored as part of the experiment user record. New calls to the `record_experiment_user` with newer contexts get merged deeply into the existing context.
-
-This data can then be used by data analytics dashboards.
-
-```ruby
-before_action do
- record_experiment_user(:signup_flow, foo: 42, bar: { a: 22})
- # context is { "foo" => 42, "bar" => { "a" => 22 }}
-end
-
-# Additional contexts for newer record calls are merged deeply
-record_experiment_user(:signup_flow, foo: 40, bar: { b: 2 }, thor: 3)
-# context becomes { "foo" => 40, "bar" => { "a" => 22, "b" => 2 }, "thor" => 3}
-```
-
-#### Record experiment conversion event
-
-Along with the tracking of backend and frontend events and the [recording of experiment participants](#record-experiment-user), we can also record when a user performs the desired conversion event action. For example:
-
-- **Experimental experience:** Show an in-product nudge to see if it causes more people to sign up for trials.
-- **Conversion event:** The user starts a trial.
-
-The `record_experiment_conversion_event` helper method is available to all controllers. It enables us to record the conversion event for the current user, regardless of whether they are in the control or experimental group:
-
-```ruby
-before_action do
- record_experiment_conversion_event(:signup_flow)
-end
-```
-
-Note that the use of this method requires that we have first [recorded the user as being part of the experiment](#record-experiment-user).
-
-#### Enable the experiment
-
-After all merge requests have been merged, use [`chatops`](../../ci/chatops/index.md) in the
-[appropriate channel](../feature_flags/controls.md#communicate-the-change) to start the experiment for 10% of the users.
-The feature flag should have the name of the experiment with the `_experiment_percentage` suffix appended.
-For visibility, please also share any commands run against production in the `#s_growth` channel:
-
- ```shell
- /chatops run feature set signup_flow_experiment_percentage 10
- ```
-
- If you notice issues with the experiment, you can disable the experiment by removing the feature flag:
-
- ```shell
- /chatops run feature delete signup_flow_experiment_percentage
- ```
-
-#### Manually force the current user to be in the experiment group
-
-You may force the application to put your current user in the experiment group. To do so
-add a query string parameter to the path where the experiment runs. If you do so,
-the experiment will work only for this request and won't work after following links or submitting forms.
-
-For example, to forcibly enable the `EXPERIMENT_KEY` experiment, add `force_experiment=EXPERIMENT_KEY`
-to the URL:
-
-```shell
-https://gitlab.com/<EXPERIMENT_ENTRY_URL>?force_experiment=<EXPERIMENT_KEY>
-```
-
-#### A cookie-based approach to force an experiment
-
-It's possible to force the current user to be in the experiment group for `<EXPERIMENT_KEY>`
-during the browser session by using your browser's developer tools:
-
-```javascript
-document.cookie = "force_experiment=<EXPERIMENT_KEY>; path=/";
-```
-
-Use a comma to list more than one experiment to be forced:
-
-```javascript
-document.cookie = "force_experiment=<EXPERIMENT_KEY>,<ANOTHER_EXPERIMENT_KEY>; path=/";
-```
-
-To clear the experiments, unset the `force_experiment` cookie:
-
-```javascript
-document.cookie = "force_experiment=; path=/";
-```
-
-#### Testing and test helpers
-
-##### RSpec
-
-Use the following in RSpec to mock the experiment:
-
-```ruby
-context 'when the experiment is active' do
- before do
- stub_experiment(signup_flow: true)
- end
-
- context 'when the user is in the experimental group' do
- before do
- stub_experiment_for_subject(signup_flow: true)
- end
-
- it { is_expected.to do_experimental_thing }
- end
-
- context 'when the user is in the control group' do
- before do
- stub_experiment_for_subject(signup_flow: false)
- end
-
- it { is_expected.to do_control_thing }
- end
-end
-```
-
-##### Jest
-
-Use the following in Jest to mock the experiment:
-
-```javascript
-import { withGonExperiment } from 'helpers/experimentation_helper';
-
-describe('given experiment is enabled', () => {
- withGonExperiment('signupFlow');
-
- it('should do the experimental thing', () => {
- expect(wrapper.find('.js-some-experiment-triggered-element')).toEqual(expect.any(Element));
- });
-});
-```
+Historical context: the `Experimentation Module` was built iteratively to meet the needs
+that appeared while implementing Growth sub-department experiments, while GLEX was built
+with the learnings of the team and an easier-to-use API.
diff --git a/doc/development/usage_ping/index.md b/doc/development/usage_ping/index.md
index e339c5898bd..40635e20495 100644
--- a/doc/development/usage_ping/index.md
+++ b/doc/development/usage_ping/index.md
@@ -928,7 +928,9 @@ appear to be associated to any of the services running, because they all appear
WARNING:
This feature is intended solely for internal GitLab use.
-To add data for aggregated metrics into Usage Ping payload you should add corresponding definition in [`aggregated_metrics`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/usage_data_counters/aggregated_metrics/). Each aggregate definition includes following parts:
+To add data for aggregated metrics to the Usage Ping payload, add a corresponding definition in [`lib/gitlab/usage_data_counters/aggregated_metrics/*.yaml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/usage_data_counters/aggregated_metrics/) for metrics available in the Community Edition, and in [`ee/lib/gitlab/usage_data_counters/aggregated_metrics/*.yaml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/gitlab/usage_data_counters/aggregated_metrics/) for Enterprise Edition ones.
+
+Each aggregate definition includes the following parts:
- `name`: Unique name under which the aggregate metric is added to the Usage Ping payload.
- `operator`: Operator that defines how the aggregated metric data is counted. Available operators are:
diff --git a/doc/user/admin_area/credentials_inventory.md b/doc/user/admin_area/credentials_inventory.md
index 3d8a3a7c8c7..053cee82634 100644
--- a/doc/user/admin_area/credentials_inventory.md
+++ b/doc/user/admin_area/credentials_inventory.md
@@ -11,7 +11,9 @@ type: howto
GitLab administrators are responsible for the overall security of their instance. To assist, GitLab provides a Credentials inventory to keep track of all the credentials that can be used to access their self-managed instance.
-Using Credentials inventory, you can see all the personal access tokens (PAT) and SSH keys that exist in your GitLab instance. In addition, you can [revoke](#revoke-a-users-personal-access-token) and [delete](#delete-a-users-ssh-key) and see:
+Using Credentials inventory, you can see all the personal access tokens (PAT), SSH keys, and GPG keys
+that exist in your GitLab instance. In addition, you can [revoke](#revoke-a-users-personal-access-token)
+and [delete](#delete-a-users-ssh-key) them, and see:
- Who they belong to.
- Their access scope.
@@ -23,7 +25,7 @@ To access the Credentials inventory, navigate to **Admin Area > Credentials**.
The following is an example of the Credentials inventory page:
-![Credentials inventory page](img/credentials_inventory_v13_4.png)
+![Credentials inventory page](img/credentials_inventory_v13_10.png)
## Revoke a user's personal access token
@@ -50,3 +52,39 @@ You can **Delete** a user's SSH key by navigating to the credentials inventory's
The instance then notifies the user.
![Credentials inventory page - SSH keys](img/credentials_inventory_ssh_keys_v13_5.png)
+
+## Review existing GPG keys
+
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/282429) in GitLab 13.10.
+> - It's [deployed behind a feature flag](../feature_flags.md), disabled by default.
+> - It's disabled on GitLab.com.
+> - It's not recommended for production use.
+> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-the-gpg-keys-view).
+
+You can view all existing GPG keys in your GitLab instance by navigating to the
+credentials inventory GPG Keys tab, which also shows the following properties:
+
+- Who the GPG key belongs to.
+- The ID of the GPG key.
+- Whether the GPG key is [verified or unverified](../project/repository/gpg_signed_commits/index.md).
+
+![Credentials inventory page - GPG keys](img/credentials_inventory_gpg_keys_v13_10.png)
+
+### Enable or disable the GPG keys view
+
+The GPG keys view is under development and not ready for production use. It is
+deployed behind a feature flag that is **disabled by default**.
+[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
+can enable it.
+
+To enable it:
+
+```ruby
+Feature.enable(:credential_inventory_gpg_keys)
+```
+
+To disable it:
+
+```ruby
+Feature.disable(:credential_inventory_gpg_keys)
+```
diff --git a/doc/user/admin_area/img/credentials_inventory_gpg_keys_v13_10.png b/doc/user/admin_area/img/credentials_inventory_gpg_keys_v13_10.png
new file mode 100644
index 00000000000..2486332c477
--- /dev/null
+++ b/doc/user/admin_area/img/credentials_inventory_gpg_keys_v13_10.png
Binary files differ
diff --git a/doc/user/admin_area/img/credentials_inventory_v13_10.png b/doc/user/admin_area/img/credentials_inventory_v13_10.png
new file mode 100644
index 00000000000..e41bbf35a8e
--- /dev/null
+++ b/doc/user/admin_area/img/credentials_inventory_v13_10.png
Binary files differ
diff --git a/doc/user/admin_area/img/credentials_inventory_v13_4.png b/doc/user/admin_area/img/credentials_inventory_v13_4.png
deleted file mode 100644
index 06925ea2f6f..00000000000
--- a/doc/user/admin_area/img/credentials_inventory_v13_4.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/repository/gpg_signed_commits/index.md b/doc/user/project/repository/gpg_signed_commits/index.md
index bf877bfee68..c41b3ed8615 100644
--- a/doc/user/project/repository/gpg_signed_commits/index.md
+++ b/doc/user/project/repository/gpg_signed_commits/index.md
@@ -282,6 +282,7 @@ For more details about GPG, see:
- [Managing OpenPGP Keys](https://riseup.net/en/security/message-security/openpgp/gpg-keys)
- [OpenPGP Best Practices](https://riseup.net/en/security/message-security/openpgp/best-practices)
- [Creating a new GPG key with subkeys](https://www.void.gr/kargig/blog/2013/12/02/creating-a-new-gpg-key-with-subkeys/) (advanced)
+- [Review existing GPG keys in your instance](../../../admin_area/credentials_inventory.md#review-existing-gpg-keys)
<!-- ## Troubleshooting
diff --git a/jest.config.base.js b/jest.config.base.js
index 98c70735d92..4e9b84d1d34 100644
--- a/jest.config.base.js
+++ b/jest.config.base.js
@@ -27,7 +27,7 @@ module.exports = (path) => {
// workaround for eslint-import-resolver-jest only resolving in test files
// see https://github.com/JoinColony/eslint-import-resolver-jest#note
if (isESLint(module)) {
- testMatch = testMatch.map((path) => path.replace('_spec.js', ''));
+ testMatch = testMatch.map((modulePath) => modulePath.replace('_spec.js', ''));
}
const TEST_FIXTURES_PATTERN = 'test_fixtures(/.*)$';
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index 45d271a2fd4..2b3c98ffa14 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -313,28 +313,18 @@ module Gitlab
ActiveRecord::Base.prepend(ActiveRecordBaseTransactionMetrics)
end
- # observe_transaction_duration is called from ActiveRecordBaseTransactionMetrics.transaction and used to
- # record transaction durations.
- def self.observe_transaction_duration(duration_seconds)
- if current_transaction = ::Gitlab::Metrics::Transaction.current
- current_transaction.observe(:gitlab_database_transaction_seconds, duration_seconds) do
- docstring "Time spent in database transactions, in seconds"
- end
- end
- rescue Prometheus::Client::LabelSetValidator::LabelSetError => err
- # Ensure that errors in recording these metrics don't affect the operation of the application
- Gitlab::AppLogger.error("Unable to observe database transaction duration: #{err}")
- end
-
# MonkeyPatch for ActiveRecord::Base for adding observability
module ActiveRecordBaseTransactionMetrics
- # A monkeypatch over ActiveRecord::Base.transaction.
- # It provides observability into transactional methods.
- def transaction(options = {}, &block)
- start_time = Gitlab::Metrics::System.monotonic_time
- super(options, &block)
- ensure
- Gitlab::Database.observe_transaction_duration(Gitlab::Metrics::System.monotonic_time - start_time)
+ extend ActiveSupport::Concern
+
+ class_methods do
+ # A monkeypatch over ActiveRecord::Base.transaction.
+ # It provides observability into transactional methods.
+ def transaction(**options, &block)
+ ActiveSupport::Notifications.instrument('transaction.active_record', { connection: connection }) do
+ super(**options, &block)
+ end
+ end
end
end
end
diff --git a/lib/gitlab/database/similarity_score.rb b/lib/gitlab/database/similarity_score.rb
index ff78fd0218c..40845c0d5e0 100644
--- a/lib/gitlab/database/similarity_score.rb
+++ b/lib/gitlab/database/similarity_score.rb
@@ -74,9 +74,14 @@ module Gitlab
end
# (SIMILARITY ...) + (SIMILARITY ...)
- expressions.inject(first_expression) do |expression1, expression2|
+ additions = expressions.inject(first_expression) do |expression1, expression2|
Arel::Nodes::Addition.new(expression1, expression2)
end
+
+ score_as_numeric = Arel::Nodes::NamedFunction.new('CAST', [Arel::Nodes::Grouping.new(additions).as('numeric')])
+
+ # Rounding the score to two decimals
+ Arel::Nodes::NamedFunction.new('ROUND', [score_as_numeric, 2])
end
def self.order_by_similarity?(arel_query)
diff --git a/lib/gitlab/graphql/pagination/keyset/connection.rb b/lib/gitlab/graphql/pagination/keyset/connection.rb
index f95c91c5706..e525996ec10 100644
--- a/lib/gitlab/graphql/pagination/keyset/connection.rb
+++ b/lib/gitlab/graphql/pagination/keyset/connection.rb
@@ -33,6 +33,7 @@ module Gitlab
include Gitlab::Utils::StrongMemoize
include ::Gitlab::Graphql::ConnectionCollectionMethods
prepend ::Gitlab::Graphql::ConnectionRedaction
+ prepend GenericKeysetPagination
# rubocop: disable Naming/PredicateName
# https://relay.dev/graphql/connections.htm#sec-undefined.PageInfo.Fields
diff --git a/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb b/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb
new file mode 100644
index 00000000000..318c6e1734f
--- /dev/null
+++ b/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Pagination
+ module Keyset
+ # Use the generic keyset implementation if the given ActiveRecord scope supports it.
+ # Note: this module is temporary, at some point it will be merged with Keyset::Connection
+ module GenericKeysetPagination
+ extend ActiveSupport::Concern
+
+ def ordered_items
+ return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(items)
+
+ items
+ end
+
+ def cursor_for(node)
+ return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(items)
+
+ order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(items)
+ encode(order.cursor_attributes_for_node(node).to_json)
+ end
+
+ def slice_nodes(sliced, encoded_cursor, before_or_after)
+ return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(sliced)
+
+ order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(sliced)
+ order = order.reversed_order if before_or_after == :before
+
+ decoded_cursor = ordering_from_encoded_json(encoded_cursor)
+ order.apply_cursor_conditions(sliced, decoded_cursor)
+ end
+
+ def sliced_nodes
+ return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(items)
+
+ sliced = ordered_items
+ sliced = slice_nodes(sliced, before, :before) if before.present?
+ sliced = slice_nodes(sliced, after, :after) if after.present?
+ sliced
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/graphql/pagination/keyset/last_items.rb b/lib/gitlab/graphql/pagination/keyset/last_items.rb
index 45bf15236c1..960567a6fbc 100644
--- a/lib/gitlab/graphql/pagination/keyset/last_items.rb
+++ b/lib/gitlab/graphql/pagination/keyset/last_items.rb
@@ -10,46 +10,14 @@ module Gitlab
class LastItems
# rubocop: disable CodeReuse/ActiveRecord
def self.take_items(scope, count)
- if custom_order = lookup_custom_reverse_order(scope.order_values)
- items = scope.reorder(*custom_order).first(count) # returns a single record when count is nil
+ if Gitlab::Pagination::Keyset::Order.keyset_aware?(scope)
+ order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(scope)
+ items = scope.reorder(order.reversed_order).first(count)
items.is_a?(Array) ? items.reverse : items
else
scope.last(count)
end
end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # Detect special ordering and provide the reversed order
- def self.lookup_custom_reverse_order(order_values)
- if ordering_by_merged_at_and_mr_id_desc?(order_values)
- [
- Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', 'ASC'), # reversing the order
- MergeRequest.arel_table[:id].asc
- ]
- elsif ordering_by_merged_at_and_mr_id_asc?(order_values)
- [
- Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', 'DESC'),
- MergeRequest.arel_table[:id].asc
- ]
- end
- end
-
- def self.ordering_by_merged_at_and_mr_id_desc?(order_values)
- order_values.size == 2 &&
- order_values.first.to_s == Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', 'DESC') &&
- order_values.last.is_a?(Arel::Nodes::Descending) &&
- order_values.last.to_sql == MergeRequest.arel_table[:id].desc.to_sql
- end
-
- def self.ordering_by_merged_at_and_mr_id_asc?(order_values)
- order_values.size == 2 &&
- order_values.first.to_s == Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', 'ASC') &&
- order_values.last.is_a?(Arel::Nodes::Descending) &&
- order_values.last.to_sql == MergeRequest.arel_table[:id].desc.to_sql
- end
-
- private_class_method :ordering_by_merged_at_and_mr_id_desc?
- private_class_method :ordering_by_merged_at_and_mr_id_asc?
end
end
end
diff --git a/lib/gitlab/graphql/pagination/keyset/order_info.rb b/lib/gitlab/graphql/pagination/keyset/order_info.rb
index d37264c1343..0494329bfd9 100644
--- a/lib/gitlab/graphql/pagination/keyset/order_info.rb
+++ b/lib/gitlab/graphql/pagination/keyset/order_info.rb
@@ -92,8 +92,6 @@ module Gitlab
def extract_attribute_values(order_value)
if ordering_by_lower?(order_value)
[order_value.expr.expressions[0].name.to_s, order_value.direction, order_value.expr]
- elsif ordering_by_similarity?(order_value)
- ['similarity', order_value.direction, order_value.expr]
elsif ordering_by_case?(order_value)
['case_order_value', order_value.direction, order_value.expr]
elsif ordering_by_array_position?(order_value)
@@ -113,11 +111,6 @@ module Gitlab
order_value.expr.is_a?(Arel::Nodes::NamedFunction) && order_value.expr&.name&.downcase == 'array_position'
end
- # determine if ordering using SIMILARITY scoring based on Gitlab::Database::SimilarityScore
- def ordering_by_similarity?(order_value)
- Gitlab::Database::SimilarityScore.order_by_similarity?(order_value)
- end
-
# determine if ordering using CASE
def ordering_by_case?(order_value)
order_value.expr.is_a?(Arel::Nodes::Case)
diff --git a/lib/gitlab/metrics/background_transaction.rb b/lib/gitlab/metrics/background_transaction.rb
new file mode 100644
index 00000000000..3dda68bf93f
--- /dev/null
+++ b/lib/gitlab/metrics/background_transaction.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Metrics
+ class BackgroundTransaction < Transaction
+ # Separate web transaction instance and background transaction instance
+ BACKGROUND_THREAD_KEY = :_gitlab_metrics_background_transaction
+ BACKGROUND_BASE_LABEL_KEYS = %i(endpoint_id feature_category).freeze
+
+ class << self
+ def current
+ Thread.current[BACKGROUND_THREAD_KEY]
+ end
+
+ def prometheus_metric(name, type, &block)
+ fetch_metric(type, name) do
+ # set default metric options
+ docstring "#{name.to_s.humanize} #{type}"
+
+ evaluate(&block)
+ # always filter sensitive labels and merge with base ones
+ label_keys BACKGROUND_BASE_LABEL_KEYS | (label_keys - ::Gitlab::Metrics::Transaction::FILTERED_LABEL_KEYS)
+ end
+ end
+ end
+
+ def run
+ Thread.current[BACKGROUND_THREAD_KEY] = self
+
+ yield
+ ensure
+ Thread.current[BACKGROUND_THREAD_KEY] = nil
+ end
+
+ def labels
+ @labels ||= {
+ endpoint_id: current_context&.get_attribute(:caller_id),
+ feature_category: current_context&.get_attribute(:feature_category)
+ }
+ end
+
+ private
+
+ def current_context
+ Labkit::Context.current
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/subscribers/active_record.rb b/lib/gitlab/metrics/subscribers/active_record.rb
index fb20f2c9f0b..5eefef02507 100644
--- a/lib/gitlab/metrics/subscribers/active_record.rb
+++ b/lib/gitlab/metrics/subscribers/active_record.rb
@@ -11,6 +11,15 @@ module Gitlab
DB_COUNTERS = %i{db_count db_write_count db_cached_count}.freeze
SQL_COMMANDS_WITH_COMMENTS_REGEX = /\A(\/\*.*\*\/\s)?((?!(.*[^\w'"](DELETE|UPDATE|INSERT INTO)[^\w'"])))(WITH.*)?(SELECT)((?!(FOR UPDATE|FOR SHARE)).)*$/i.freeze
+ DURATION_BUCKET = [0.05, 0.1, 0.25].freeze
+
+ # This event is published from ActiveRecordBaseTransactionMetrics and
+ # used to record a database transaction duration when calling
+ # ActiveRecord::Base.transaction {} block.
+ def transaction(event)
+ observe(:gitlab_database_transaction_seconds, event)
+ end
+
def sql(event)
# Mark this thread as requiring a database connection. This is used
# by the Gitlab::Metrics::Samplers::ThreadsSampler to count threads
@@ -20,10 +29,11 @@ module Gitlab
payload = event.payload
return if ignored_query?(payload)
- increment_db_counters(payload)
- current_transaction&.observe(:gitlab_sql_duration_seconds, event.duration / 1000.0) do
- buckets [0.05, 0.1, 0.25]
- end
+ increment(:db_count)
+ increment(:db_cached_count) if cached_query?(payload)
+ increment(:db_write_count) unless select_sql_command?(payload)
+
+ observe(:gitlab_sql_duration_seconds, event)
end
def self.db_counter_payload
@@ -50,20 +60,20 @@ module Gitlab
payload[:sql].match(SQL_COMMANDS_WITH_COMMENTS_REGEX)
end
- def increment_db_counters(payload)
- increment(:db_count)
- increment(:db_cached_count) if cached_query?(payload)
- increment(:db_write_count) unless select_sql_command?(payload)
- end
-
def increment(counter)
current_transaction&.increment("gitlab_transaction_#{counter}_total".to_sym, 1)
Gitlab::SafeRequestStore[counter] = Gitlab::SafeRequestStore[counter].to_i + 1
end
+ def observe(histogram, event)
+ current_transaction&.observe(histogram, event.duration / 1000.0) do
+ buckets DURATION_BUCKET
+ end
+ end
+
def current_transaction
- ::Gitlab::Metrics::Transaction.current
+ ::Gitlab::Metrics::WebTransaction.current || ::Gitlab::Metrics::BackgroundTransaction.current
end
end
end
diff --git a/lib/gitlab/pagination/keyset/column_order_definition.rb b/lib/gitlab/pagination/keyset/column_order_definition.rb
new file mode 100644
index 00000000000..0c8ec02a56b
--- /dev/null
+++ b/lib/gitlab/pagination/keyset/column_order_definition.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Pagination
+ module Keyset
+ # This class stores information for one column (or SQL expression) which can be used in an
+ # ORDER BY SQL clause.
+ # The goal of this class is to encapsulate in one place all the metadata needed to
+ # make keyset pagination work in a generalized way.
+ #
+ # == Arguments
+ #
+ # **order expression** (Arel::Nodes::Node | String)
+ #
+ # The actual SQL expression for the ORDER BY clause.
+ #
+ # Examples:
+ # # Arel column order definition
+ # Project.arel_table[:id].asc # ORDER BY projects.id ASC
+ #
+ # # Arel expression, calculated order definition
+ # Arel::Nodes::NamedFunction.new("COALESCE", [Project.arel_table[:issue_count], 0]).asc # ORDER BY COALESCE(projects.issue_count, 0)
+ #
+ # # Another Arel expression
+ # Arel::Nodes::Multiplication(Issue.arel_table[:weight], Issue.arel_table[:time_spent]).desc
+ #
+ # # Raw string order definition
+ # 'issues.type DESC NULLS LAST'
+ #
+ # **column_expression** (Arel::Nodes::Node | String)
+ #
+ # Expression for the database column or an expression. This value will be used with logical operations (>, <, =, !=)
+ # when building the database query for the next page.
+ #
+ # Examples:
+ # # Arel column reference
+ # Issue.arel_table[:title]
+ #
+ # # Calculated value
+ # Arel::Nodes::Multiplication(Issue.arel_table[:weight], Issue.arel_table[:time_spent])
+ #
+ # **attribute_name** (String | Symbol)
+ #
+ # An attribute on the loaded ActiveRecord model where the value can be obtained.
+ #
+ # Examples:
+ # # Simple attribute definition
+ # attribute_name = :title
+ #
+ # # Later on this attribute will be used like this:
+ # my_record = Issue.find(x)
+ # value = my_record[attribute_name] # reads data from the title column
+ #
+ # # Calculated value based on an Arel or raw SQL expression
+ #
+ # attribute_name = :lowercase_title
+ #
+ # `lowercase_title` is not a table column, therefore we need to make sure it's available in the `SELECT` clause
+ #
+ # my_record = Issue.select(:id, 'LOWER(title) as lowercase_title').last
+ # value = my_record[:lowercase_title]
+ #
+ # **distinct**
+ #
+ # Boolean value.
+ #
+ # Tells us whether the database column contains only distinct values. If the column is covered by
+ # a unique index then set to true.
+ #
+ # **nullable** (:not_nullable | :nulls_last | :nulls_first)
+ #
+ # Tells us whether the database column is nullable or not. This information can be
+ # obtained from the DB schema.
+ #
+ # If the column is not nullable, set this attribute to :not_nullable.
+ #
+ # If the column is nullable, then additional information is needed. Based on the ordering, the null values
+ # will show up at the top or at the bottom of the resultset.
+ #
+ # Examples:
+ # # Nulls are showing up at the top (for example: ORDER BY column ASC):
+ # nullable = :nulls_first
+ #
+ # # Nulls are showing up at the bottom (for example: ORDER BY column DESC):
+ # nullable = :nulls_last
+ #
+ # **order_direction**
+ #
+ # :asc or :desc
+ #
+ # Note: this is an optional attribute, the value will be inferred from the order_expression.
+ # Sometimes it's not possible to infer the order automatically. In this case an exception will be
+ # raised (when the query is executed). If the reverse order cannot be computed, it must be provided explicitly.
+ #
+ # **reversed_order_expression**
+ #
+ # The reversed version of the order_expression.
+ #
+ # A ColumnOrderDefinition object is able to reverse itself which is used when paginating backwards.
+ # When a complex order_expression is provided (raw string), then reversing the order automatically
+ # is not possible. In this case an exception will be raised.
+ #
+ # Example:
+ #
+ # order_expression = Project.arel_table[:id].asc
+ # reversed_order_expression = Project.arel_table[:id].desc
+ #
+ # **add_to_projections**
+ #
+ # Set to true if the column is not part of the queried table. (Not part of SELECT *)
+ #
+ # Example:
+ #
+ # - When the order is a calculated expression or the column is in another table (JOIN-ed)
+ #
+ # If the add_to_projections is true, the query builder will automatically add the column to the SELECT values
+ class ColumnOrderDefinition
+ REVERSED_ORDER_DIRECTIONS = { asc: :desc, desc: :asc }.freeze
+ REVERSED_NULL_POSITIONS = { nulls_first: :nulls_last, nulls_last: :nulls_first }.freeze
+ AREL_ORDER_CLASSES = { Arel::Nodes::Ascending => :asc, Arel::Nodes::Descending => :desc }.freeze
+ ALLOWED_NULLABLE_VALUES = [:not_nullable, :nulls_first, :nulls_last].freeze
+
+ attr_reader :attribute_name, :column_expression, :order_expression, :add_to_projections
+
+ def initialize(attribute_name:, order_expression:, column_expression: nil, reversed_order_expression: nil, nullable: :not_nullable, distinct: true, order_direction: nil, add_to_projections: false)
+ @attribute_name = attribute_name
+ @order_expression = order_expression
+ @column_expression = column_expression || calculate_column_expression(order_expression)
+ @distinct = distinct
+ @reversed_order_expression = reversed_order_expression || calculate_reversed_order(order_expression)
+ @nullable = parse_nullable(nullable, distinct)
+ @order_direction = parse_order_direction(order_expression, order_direction)
+ @add_to_projections = add_to_projections
+ end
+
+ def reverse
+ self.class.new(
+ attribute_name: attribute_name,
+ column_expression: column_expression,
+ order_expression: reversed_order_expression,
+ reversed_order_expression: order_expression,
+ nullable: not_nullable? ? :not_nullable : REVERSED_NULL_POSITIONS[nullable],
+ distinct: distinct,
+ order_direction: REVERSED_ORDER_DIRECTIONS[order_direction]
+ )
+ end
+
+ def ascending_order?
+ order_direction == :asc
+ end
+
+ def descending_order?
+ order_direction == :desc
+ end
+
+ def nulls_first?
+ nullable == :nulls_first
+ end
+
+ def nulls_last?
+ nullable == :nulls_last
+ end
+
+ def not_nullable?
+ nullable == :not_nullable
+ end
+
+ def nullable?
+ !not_nullable?
+ end
+
+ def distinct?
+ distinct
+ end
+
+ private
+
+ attr_reader :reversed_order_expression, :nullable, :distinct, :order_direction
+
+ def calculate_reversed_order(order_expression)
+ unless AREL_ORDER_CLASSES.has_key?(order_expression.class) # Arel can reverse simple orders
+ raise "Couldn't determine reversed order for `#{order_expression}`, please provide the `reversed_order_expression` parameter."
+ end
+
+ order_expression.reverse
+ end
+
+ def calculate_column_expression(order_expression)
+ if order_expression.respond_to?(:expr)
+ order_expression.expr
+ else
+ raise("Couldn't calculate the column expression. Please pass an ARel node as the order_expression, not a string.")
+ end
+ end
+
+ def parse_order_direction(order_expression, order_direction)
+ transformed_order_direction = if order_direction.nil? && AREL_ORDER_CLASSES[order_expression.class]
+ AREL_ORDER_CLASSES[order_expression.class]
+ elsif order_direction.present?
+ order_direction.to_s.downcase.to_sym
+ end
+
+ unless REVERSED_ORDER_DIRECTIONS.has_key?(transformed_order_direction)
+ raise "Invalid or missing `order_direction` (value: #{order_direction}) was given, the allowed values are: :asc or :desc"
+ end
+
+ transformed_order_direction
+ end
+
+ def parse_nullable(nullable, distinct)
+ if ALLOWED_NULLABLE_VALUES.exclude?(nullable)
+ raise "Invalid `nullable` is given (value: #{nullable}), the allowed values are: #{ALLOWED_NULLABLE_VALUES.join(', ')}"
+ end
+
+ if nullable != :not_nullable && distinct
+ raise 'Invalid column definition, `distinct` and `nullable` columns are not allowed at the same time'
+ end
+
+ nullable
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/pagination/keyset/order.rb b/lib/gitlab/pagination/keyset/order.rb
new file mode 100644
index 00000000000..e8e68a5c4a5
--- /dev/null
+++ b/lib/gitlab/pagination/keyset/order.rb
@@ -0,0 +1,248 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Pagination
+ module Keyset
+ # This class is a special ORDER BY clause which is compatible with ActiveRecord. It helps
+ # build keyset-paginated queries.
+ #
+ # In ActiveRecord we use the `order()` method which will generate the `ORDER BY X` SQL clause
+ #
+ # Project.where(active: true).order(id: :asc)
+ #
+ # # Or
+ #
+ # Project.where(active: true).order(created_at: :asc, id: :desc)
+ #
+ # The Gitlab::Pagination::Keyset::Order class encapsulates additional information about the order columns
+ # in order to implement keyset pagination in a generic way:
+ #
+ # - Extract values from a record (usually the last item of the previous query)
+ # - Build query conditions based on the column configuration
+ #
+ # Example 1: Order by primary key
+ #
+ # # Simple order definition for the primary key as an ActiveRecord scope
+ # scope :id_asc_ordered, -> {
+ # keyset_order = Gitlab::Pagination::Keyset::Order.build([
+ # Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ # attribute_name: :id,
+ # order_expression: Project.arel_table[:id].asc
+ # )
+ # ])
+ #
+ # reorder(keyset_order)
+ # }
+ #
+ # # ... Later in the application code:
+ #
+ # # Compatible with ActiveRecord's `order()` method
+ # page1 = Project.where(active: true).id_asc_ordered.limit(5)
+ # keyset_order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(page1)
+ #
+ # last_record = page1.last
+ # cursor_values = keyset_order.cursor_attributes_for_node(last_record) # { id: x }
+ #
+ # page2 = keyset_order.apply_cursor_conditions(Project.where(active: true).id_asc_ordered, cursor_values).limit(5)
+ #
+ # last_record = page2.last
+ # cursor_values = keyset_order.cursor_attributes_for_node(last_record)
+ #
+ # page3 = keyset_order.apply_cursor_conditions(Project.where(active: true).id_asc_ordered, cursor_values).limit(5)
+ #
+ # Example 2: Order by creation time and primary key (primary key is the tie breaker)
+ #
+ # scope :created_at_ordered, -> {
+ # keyset_order = Gitlab::Pagination::Keyset::Order.build([
+ # Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ # attribute_name: :created_at,
+ # column_expression: Project.arel_table[:created_at],
+ # order_expression: Project.arel_table[:created_at].asc,
+ # distinct: false, # values in the column are not unique
+ # nullable: :nulls_last # we might see NULL values (bottom)
+ # ),
+ # Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ # attribute_name: :id,
+ # order_expression: Project.arel_table[:id].asc
+ # )
+ # ])
+ #
+ # reorder(keyset_order)
+ # }
+ #
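+ # Example 3: Paginating backwards (an illustrative sketch that only reuses the scope from
+ # Example 1 and the methods defined below)
+ #
+ # # Load the current page and remember the cursor of its first record
+ # current_page = Project.where(active: true).id_asc_ordered.limit(5)
+ # keyset_order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(current_page)
+ # cursor_values = keyset_order.cursor_attributes_for_node(current_page.first)
+ #
+ # # Reverse the order, apply the cursor, then flip the loaded records back for display
+ # reversed_order = keyset_order.reversed_order
+ # scope = Project.where(active: true).reorder(reversed_order)
+ # previous_page = reversed_order.apply_cursor_conditions(scope, cursor_values).limit(5).to_a.reverse
+ #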
+ class Order < Arel::Nodes::SqlLiteral
+ attr_reader :column_definitions
+
+ def initialize(column_definitions:)
+ @column_definitions = column_definitions
+
+ super(to_sql_literal(@column_definitions))
+ end
+
+ # Tells whether the given ActiveRecord::Relation has keyset ordering
+ def self.keyset_aware?(scope)
+ scope.order_values.first.is_a?(self) && scope.order_values.one?
+ end
+
+ def self.extract_keyset_order_object(scope)
+ scope.order_values.first
+ end
+
+ def self.build(column_definitions)
+ new(column_definitions: column_definitions)
+ end
+
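+ # Extracts the cursor values from a record (typically the last item of the previous page).
+ # Illustrative return value for the ordering in Example 2 above:
+ #
+ # { created_at: '2021-03-01 10:00:00.000000000 UTC', id: '1' }
+ #
+ # Time values keep sub-second precision; other non-nil values are serialized with #to_s and nil is kept as nil.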
+ def cursor_attributes_for_node(node)
+ column_definitions.each_with_object({}) do |column_definition, hash|
+ field_value = node[column_definition.attribute_name]
+ hash[column_definition.attribute_name] = if field_value.is_a?(Time)
+ field_value.strftime('%Y-%m-%d %H:%M:%S.%N %Z')
+ elsif field_value.nil?
+ nil
+ else
+ field_value.to_s
+ end
+ end
+ end
+
+ # This method builds the conditions for keyset pagination
+ #
+ # Example:
+ #
+ # |created_at|id|
+ # |----------|--|
+ # |2020-01-01| 1|
+ # | null| 2|
+ # | null| 3|
+ # |2020-02-01| 4|
+ #
+ # Note: created_at is not distinct and it is nullable
+ # Ordering: `ORDER BY created_at DESC, id DESC`
+ #
+ # We get the following cursor values from the previous page:
+ # { id: 4, created_at: '2020-02-01' }
+ #
+ # To get the next rows, we need to build the following conditions:
+ #
+ # (created_at = '2020-02-01' AND id < 4) OR (created_at < '2020-02-01')
+ #
+ # DESC ordering ensures that NULL values are on top so we don't need conditions for NULL values
+ #
+ # Another cursor example:
+ # { id: 3, created_at: nil }
+ #
+ # To get the next rows, we need to build the following conditions:
+ #
+ # (id < 3 AND created_at IS NULL) OR (created_at IS NOT NULL)
+ def build_where_values(values)
+ return if values.blank?
+
+ verify_incoming_values!(values)
+
+ where_values = []
+
+ reversed_column_definitions = column_definitions.reverse
+ reversed_column_definitions.each_with_index do |column_definition, i|
+ value = values[column_definition.attribute_name]
+
+ conditions_for_column(column_definition, value).each do |condition|
+ column_definitions_after_index = reversed_column_definitions.last(column_definitions.reverse.size - i - 1)
+
+ equal_condition_for_rest = column_definitions_after_index.map do |definition|
+ definition.column_expression.eq(values[definition.attribute_name])
+ end
+
+ where_values << Arel::Nodes::Grouping.new(Arel::Nodes::And.new([condition, *equal_condition_for_rest].compact))
+ end
+ end
+
+ build_or_query(where_values)
+ end
+
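+ # Applies the cursor conditions built above to a scope. Roughly, for Example 1 (illustrative):
+ #
+ # keyset_order.apply_cursor_conditions(Project.id_asc_ordered, { id: 9 })
+ # # => appends WHERE (("projects"."id" > 9)) to the relation
+ #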
+ # rubocop: disable CodeReuse/ActiveRecord
+ def apply_cursor_conditions(scope, values = {})
+ scope = apply_custom_projections(scope)
+ scope.where(build_where_values(values))
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def reversed_order
+ self.class.build(column_definitions.map(&:reverse))
+ end
+
+ private
+
+ # Adds extra columns to the SELECT clause
+ def apply_custom_projections(scope)
+ additional_projections = column_definitions.select(&:add_to_projections).map do |column_definition|
+ # avoid mutating the original column_expression
+ column_definition.column_expression.dup.as(column_definition.attribute_name).to_sql
+ end
+
+ scope = scope.select(*scope.arel.projections, *additional_projections) if additional_projections.any?
+ scope
+ end
+
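+ # Builds the per-column fragments used above. For a descending, `nulls_last` created_at column
+ # with a present cursor value this yields roughly (illustrative):
+ #
+ # [created_at < value, created_at IS NULL]
+ #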
+ def conditions_for_column(column_definition, value)
+ conditions = []
+ # Depending on the order, build a query condition fragment for taking the next rows
+ if column_definition.distinct? || (!column_definition.distinct? && value.present?)
+ conditions << compare_column_with_value(column_definition, value)
+ end
+
+ # When the column is nullable, additional conditions for NULL and NOT NULL values are necessary.
+ # This depends on the position of the nulls (top or bottom of the resultset).
+ if column_definition.nulls_first? && value.blank?
+ conditions << column_definition.column_expression.not_eq(nil)
+ elsif column_definition.nulls_last? && value.present?
+ conditions << column_definition.column_expression.eq(nil)
+ end
+
+ conditions
+ end
+
+ def compare_column_with_value(column_definition, value)
+ if column_definition.descending_order?
+ column_definition.column_expression.lt(value)
+ else
+ column_definition.column_expression.gt(value)
+ end
+ end
+
+ def build_or_query(expressions)
+ or_expression = expressions.reduce { |or_expression, expression| Arel::Nodes::Or.new(or_expression, expression) }
+
+ Arel::Nodes::Grouping.new(or_expression)
+ end
+
+ def to_sql_literal(column_definitions)
+ column_definitions.map do |column_definition|
+ if column_definition.order_expression.respond_to?(:to_sql)
+ column_definition.order_expression.to_sql
+ else
+ column_definition.order_expression.to_s
+ end
+ end.join(', ')
+ end
+
+ def verify_incoming_values!(values)
+ value_keys = values.keys.map(&:to_s)
+ order_attribute_names = column_definitions.map(&:attribute_name).map(&:to_s)
+ missing_items = order_attribute_names - value_keys
+ extra_items = value_keys - order_attribute_names
+
+ if missing_items.any? || extra_items.any?
+ error_text = ['Incorrect cursor values were given']
+
+ error_text << "Extra items: #{extra_items.join(', ')}" if extra_items.any?
+ error_text << "Missing items: #{missing_items.join(', ')}" if missing_items.any?
+
+ raise error_text.join('. ')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware/server_metrics.rb b/lib/gitlab/sidekiq_middleware/server_metrics.rb
index 4ab8d313ad8..cf768811ffd 100644
--- a/lib/gitlab/sidekiq_middleware/server_metrics.rb
+++ b/lib/gitlab/sidekiq_middleware/server_metrics.rb
@@ -34,7 +34,8 @@ module Gitlab
monotonic_time_start = Gitlab::Metrics::System.monotonic_time
job_thread_cputime_start = get_thread_cputime
begin
- yield
+ transaction = Gitlab::Metrics::BackgroundTransaction.new
+ transaction.run { yield }
job_succeeded = true
ensure
monotonic_time_end = Gitlab::Metrics::System.monotonic_time
diff --git a/lib/gitlab/usage/metrics/aggregates/aggregate.rb b/lib/gitlab/usage/metrics/aggregates/aggregate.rb
index 7b1ad58ed1b..1aeca87d849 100644
--- a/lib/gitlab/usage/metrics/aggregates/aggregate.rb
+++ b/lib/gitlab/usage/metrics/aggregates/aggregate.rb
@@ -169,3 +169,5 @@ module Gitlab
end
end
end
+
+Gitlab::Usage::Metrics::Aggregates::Aggregate.prepend_if_ee('EE::Gitlab::Usage::Metrics::Aggregates::Aggregate')
diff --git a/lib/gitlab/usage_data_counters/aggregated_metrics/common.yml b/lib/gitlab/usage_data_counters/aggregated_metrics/common.yml
index 482c2f728d7..73a55b5d5fa 100644
--- a/lib/gitlab/usage_data_counters/aggregated_metrics/common.yml
+++ b/lib/gitlab/usage_data_counters/aggregated_metrics/common.yml
@@ -1,3 +1,5 @@
+# Aggregated metrics that include EE-only event names in the `events:` attribute have to be defined in ee/lib/gitlab/usage_data_counters/aggregated_metrics/common.yml
+# instead of this file.
#- name: unique name of aggregated metric
# operator: aggregation operator. Valid values are:
# - "OR": counts unique elements that were observed triggering any of following events
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 7c430ea2ab6..3350b35020e 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -35736,13 +35736,10 @@ msgstr ""
msgid "mrWidgetCommitsAdded|1 merge commit"
msgstr ""
-msgid "mrWidgetNothingToMerge|Currently there are no changes in this merge request's source branch. Please push new commits or use a different branch."
+msgid "mrWidgetNothingToMerge|This merge request contains no changes."
msgstr ""
-msgid "mrWidgetNothingToMerge|Interested parties can even contribute by pushing commits if they want to."
-msgstr ""
-
-msgid "mrWidgetNothingToMerge|Merge requests are a place to propose changes you have made to a project and discuss those changes with others."
+msgid "mrWidgetNothingToMerge|Use merge requests to propose changes to your project and discuss them with your team. To make changes, push a commit or edit this merge request to use a different branch. With %{linkStart}CI/CD%{linkEnd}, automatically test your changes before merging."
msgstr ""
msgid "mrWidget| Please restore it or use a different %{missingBranchName} branch"
diff --git a/scripts/frontend/block_dependencies.js b/scripts/frontend/block_dependencies.js
index a1ff8d5ee36..f229f317cbb 100644
--- a/scripts/frontend/block_dependencies.js
+++ b/scripts/frontend/block_dependencies.js
@@ -1,8 +1,8 @@
-const path = require('path');
-const packageJson = require(path.join(process.cwd(), 'package.json'));
+const packageJson = require('../../package.json');
+
const blockedDependencies = packageJson.blockedDependencies || {};
-const dependencies = packageJson.dependencies;
-const devDependencies = packageJson.devDependencies;
+const { dependencies } = packageJson;
+const { devDependencies } = packageJson;
const blockedDependenciesNames = Object.keys(blockedDependencies);
const blockedDependenciesFound = blockedDependenciesNames.filter(
(blockedDependency) => dependencies[blockedDependency] || devDependencies[blockedDependency],
diff --git a/scripts/frontend/check_page_bundle_mixins_css_for_sideeffects.js b/scripts/frontend/check_page_bundle_mixins_css_for_sideeffects.js
index 34e939e3ceb..22a4aac762b 100755
--- a/scripts/frontend/check_page_bundle_mixins_css_for_sideeffects.js
+++ b/scripts/frontend/check_page_bundle_mixins_css_for_sideeffects.js
@@ -7,9 +7,9 @@ if (process.env.RAILS_ENV !== 'production') {
process.exit(0);
}
-const path = require('path');
const fs = require('fs');
const glob = require('glob');
+const path = require('path');
const pjs = require('postcss');
const paths = glob.sync('public/assets/page_bundles/_mixins_and_variables_and_functions*.css', {
diff --git a/scripts/frontend/extract_gettext_all.js b/scripts/frontend/extract_gettext_all.js
index 67163a601bc..896790a73bb 100644
--- a/scripts/frontend/extract_gettext_all.js
+++ b/scripts/frontend/extract_gettext_all.js
@@ -5,6 +5,7 @@ const {
decorateJSParserWithVueSupport,
decorateExtractorWithHelpers,
} = require('gettext-extractor-vue');
+const vue2TemplateCompiler = require('vue-template-compiler');
const ensureSingleLine = require('../../app/assets/javascripts/locale/ensure_single_line.js');
const args = argumentsParser
@@ -37,12 +38,12 @@ const jsParser = extractor.createJsParser([
]);
const vueParser = decorateJSParserWithVueSupport(jsParser, {
- vue2TemplateCompiler: require('vue-template-compiler'),
+ vue2TemplateCompiler,
});
function printJson() {
- const messages = extractor.getMessages().reduce((result, message) => {
- let text = message.text;
+ const messages = extractor.getMessages().reduce((acc, message) => {
+ let { text } = message;
if (message.textPlural) {
text += `\u0000${message.textPlural}`;
}
@@ -50,25 +51,35 @@ function printJson() {
message.references.forEach((reference) => {
const filename = reference.replace(/:\d+$/, '');
- if (!Array.isArray(result[filename])) {
- result[filename] = [];
+ if (!Array.isArray(acc[filename])) {
+ acc[filename] = [];
}
- result[filename].push([text, reference]);
+ acc[filename].push([text, reference]);
});
- return result;
+ return acc;
}, {});
console.log(JSON.stringify(messages));
}
-if (args.file) {
- vueParser.parseFile(args.file).then(() => printJson());
-} else if (args.all) {
- vueParser.parseFilesGlob('{ee/app,app}/assets/javascripts/**/*.{js,vue}').then(() => printJson());
-} else {
- console.warn('ERROR: Please use the script correctly:');
+async function main() {
+ if (args.file) {
+ return vueParser.parseFile(args.file).then(() => printJson());
+ }
+
+ if (args.all) {
+ return vueParser
+ .parseFilesGlob('{ee/app,app}/assets/javascripts/**/*.{js,vue}')
+ .then(() => printJson());
+ }
+
+ throw new Error('ERROR: Please use the script correctly:');
+}
+
+main().catch((error) => {
+ console.warn(error.message);
args.outputHelp();
process.exit(1);
-}
+});
diff --git a/scripts/frontend/file_test_coverage.js b/scripts/frontend/file_test_coverage.js
index ec6ec4a1e9d..04a9035fce2 100755
--- a/scripts/frontend/file_test_coverage.js
+++ b/scripts/frontend/file_test_coverage.js
@@ -31,28 +31,6 @@ let numTestFiles = 0;
const isVerbose = process.argv.some((arg) => arg === '-v');
-const countSourceFiles = (path) =>
- forEachFileIn(path, (fileName) => {
- if (fileName.endsWith('.vue') || fileName.endsWith('.js')) {
- if (isVerbose) {
- console.log(`source file: ${fileName}`);
- }
-
- numSourceFiles += 1;
- }
- });
-
-const countTestFiles = (path) =>
- forEachFileIn(path, (fileName) => {
- if (fileName.endsWith('_spec.js')) {
- if (isVerbose) {
- console.log(`test file: ${fileName}`);
- }
-
- numTestFiles += 1;
- }
- });
-
function forEachFileIn(dirPath, callback) {
fs.readdir(dirPath, (err, files) => {
if (err) {
@@ -75,6 +53,28 @@ function forEachFileIn(dirPath, callback) {
});
}
+const countSourceFiles = (currentPath) =>
+ forEachFileIn(currentPath, (fileName) => {
+ if (fileName.endsWith('.vue') || fileName.endsWith('.js')) {
+ if (isVerbose) {
+ console.log(`source file: ${fileName}`);
+ }
+
+ numSourceFiles += 1;
+ }
+ });
+
+const countTestFiles = (currentPath) =>
+ forEachFileIn(currentPath, (fileName) => {
+ if (fileName.endsWith('_spec.js')) {
+ if (isVerbose) {
+ console.log(`test file: ${fileName}`);
+ }
+
+ numTestFiles += 1;
+ }
+ });
+
console.log(`Source directories: ${sourceDirectories.join(', ')}`);
console.log(`Test directories: ${testDirectories.join(', ')}`);
diff --git a/scripts/frontend/merge_coverage_frontend.js b/scripts/frontend/merge_coverage_frontend.js
index 0c45a38b9b5..6b3826ddac7 100644
--- a/scripts/frontend/merge_coverage_frontend.js
+++ b/scripts/frontend/merge_coverage_frontend.js
@@ -1,8 +1,8 @@
-const { create } = require('istanbul-reports');
+const { sync } = require('glob');
const { createCoverageMap } = require('istanbul-lib-coverage');
const { createContext } = require('istanbul-lib-report');
+const { create } = require('istanbul-reports');
const { resolve } = require('path');
-const { sync } = require('glob');
const coverageMap = createCoverageMap();
@@ -12,7 +12,7 @@ const reportFiles = sync(`${coverageDir}/*/coverage-final.json`);
// Normalize coverage report generated by jest that has additional "data" key
// https://github.com/facebook/jest/issues/2418#issuecomment-423806659
const normalizeReport = (report) => {
- const normalizedReport = Object.assign({}, report);
+ const normalizedReport = { ...report };
Object.entries(normalizedReport).forEach(([k, v]) => {
if (v.data) normalizedReport[k] = v.data;
});
@@ -20,11 +20,14 @@ const normalizeReport = (report) => {
};
reportFiles
- .map((reportFile) => require(reportFile))
+ .map((reportFile) => {
+ // eslint-disable-next-line global-require, import/no-dynamic-require
+ return require(reportFile);
+ })
.map(normalizeReport)
.forEach((report) => coverageMap.merge(report));
-const context = createContext({ coverageMap: coverageMap, dir: 'coverage-frontend' });
+const context = createContext({ coverageMap, dir: 'coverage-frontend' });
['json', 'lcov', 'text-summary', 'clover', 'cobertura'].forEach((reporter) => {
create(reporter, {}).execute(context);
diff --git a/scripts/frontend/parallel_ci_sequencer.js b/scripts/frontend/parallel_ci_sequencer.js
index d7a674535a6..262e9e2256e 100644
--- a/scripts/frontend/parallel_ci_sequencer.js
+++ b/scripts/frontend/parallel_ci_sequencer.js
@@ -1,5 +1,15 @@
const Sequencer = require('@jest/test-sequencer').default;
+const sortByPath = (test1, test2) => {
+ if (test1.path < test2.path) {
+ return -1;
+ }
+ if (test1.path > test2.path) {
+ return 1;
+ }
+ return 0;
+};
+
class ParallelCISequencer extends Sequencer {
constructor() {
super();
@@ -8,7 +18,7 @@ class ParallelCISequencer extends Sequencer {
}
sort(tests) {
- const sortedTests = this.sortByPath(tests);
+ const sortedTests = [...tests].sort(sortByPath);
const testsForThisRunner = this.distributeAcrossCINodes(sortedTests);
console.log(`CI_NODE_INDEX: ${this.ciNodeIndex}`);
@@ -19,18 +29,6 @@ class ParallelCISequencer extends Sequencer {
return testsForThisRunner;
}
- sortByPath(tests) {
- return tests.sort((test1, test2) => {
- if (test1.path < test2.path) {
- return -1;
- }
- if (test1.path > test2.path) {
- return 1;
- }
- return 0;
- });
- }
-
distributeAcrossCINodes(tests) {
return tests.filter((test, index) => {
return index % this.ciNodeTotal === this.ciNodeIndex - 1;
diff --git a/scripts/frontend/stylelint/stylelint-duplicate-selectors.js b/scripts/frontend/stylelint/stylelint-duplicate-selectors.js
index 89242158157..982ddf524a3 100644
--- a/scripts/frontend/stylelint/stylelint-duplicate-selectors.js
+++ b/scripts/frontend/stylelint/stylelint-duplicate-selectors.js
@@ -1,5 +1,6 @@
const stylelint = require('stylelint');
const utils = require('./stylelint-utils');
+
const ruleName = 'stylelint-gitlab/duplicate-selectors';
const messages = stylelint.utils.ruleMessages(ruleName, {
@@ -8,12 +9,13 @@ const messages = stylelint.utils.ruleMessages(ruleName, {
},
});
-module.exports = stylelint.createPlugin(ruleName, function (enabled) {
+module.exports = stylelint.createPlugin(ruleName, (enabled) => {
if (!enabled) {
return;
}
- return function (root, result) {
+ // eslint-disable-next-line consistent-return
+ return (root, result) => {
const selectorGroups = {};
utils.createPropertiesHashmap(root, result, ruleName, messages, selectorGroups, true);
};
diff --git a/scripts/frontend/stylelint/stylelint-utility-classes.js b/scripts/frontend/stylelint/stylelint-utility-classes.js
index 1b266fc31c9..420fe82d826 100644
--- a/scripts/frontend/stylelint/stylelint-utility-classes.js
+++ b/scripts/frontend/stylelint/stylelint-utility-classes.js
@@ -10,12 +10,13 @@ const messages = stylelint.utils.ruleMessages(ruleName, {
},
});
-module.exports = stylelint.createPlugin(ruleName, function (enabled) {
+module.exports = stylelint.createPlugin(ruleName, (enabled) => {
if (!enabled) {
return;
}
- return function (root, result) {
+ // eslint-disable-next-line consistent-return
+ return (root, result) => {
utils.createPropertiesHashmap(root, result, ruleName, messages, utilityClasses, false);
};
});
diff --git a/scripts/frontend/stylelint/stylelint-utility-map.js b/scripts/frontend/stylelint/stylelint-utility-map.js
index bf8ee362740..545aade9ccc 100644
--- a/scripts/frontend/stylelint/stylelint-utility-map.js
+++ b/scripts/frontend/stylelint/stylelint-utility-map.js
@@ -1,10 +1,11 @@
-const sass = require('node-sass');
-const postcss = require('postcss');
const fs = require('fs');
+const sass = require('node-sass');
const path = require('path');
+const postcss = require('postcss');
const prettier = require('prettier');
const utils = require('./stylelint-utils');
+
const ROOT_PATH = path.resolve(__dirname, '../../..');
const hashMapPath = path.resolve(__dirname, './utility-classes-map.js');
@@ -22,19 +23,28 @@ sass.render(
includePaths: [path.resolve(ROOT_PATH, 'node_modules/bootstrap/scss')],
},
(err, result) => {
- if (err) console.error('Error ', err);
+ if (err) {
+ return console.error('Error ', err);
+ }
const cssResult = result.css.toString();
// We just use postcss to create a CSS tree
- postcss([])
+ return postcss([])
.process(cssResult, {
// This suppresses a postcss warning
from: undefined,
})
- .then((result) => {
+ .then((processedResult) => {
const selectorGroups = {};
- utils.createPropertiesHashmap(result.root, result, null, null, selectorGroups, true);
+ utils.createPropertiesHashmap(
+ processedResult.root,
+ processedResult,
+ null,
+ null,
+ selectorGroups,
+ true,
+ );
const prettierOptions = prettier.resolveConfig.sync(hashMapPath);
const prettyHashmap = prettier.format(
@@ -42,12 +52,12 @@ sass.render(
prettierOptions,
);
- fs.writeFile(hashMapPath, prettyHashmap, function (err) {
- if (err) {
- return console.log(err);
+ fs.writeFile(hashMapPath, prettyHashmap, (e) => {
+ if (e) {
+ return console.log(e);
}
- console.log('The file was saved!');
+ return console.log('The file was saved!');
});
});
},
diff --git a/scripts/frontend/stylelint/stylelint-utils.js b/scripts/frontend/stylelint/stylelint-utils.js
index e7452b0cdb2..c9d9c7d9aad 100644
--- a/scripts/frontend/stylelint/stylelint-utils.js
+++ b/scripts/frontend/stylelint/stylelint-utils.js
@@ -1,5 +1,5 @@
-const stylelint = require('stylelint');
const md5 = require('md5');
+const stylelint = require('stylelint');
module.exports.createPropertiesHashmap = (
ruleRoot,
@@ -15,7 +15,7 @@ module.exports.createPropertiesHashmap = (
if (
rule &&
rule.parent &&
- rule.parent.type != 'atrule' &&
+ rule.parent.type !== 'atrule' &&
!(
selector.includes('-webkit-') ||
selector.includes('-moz-') ||
@@ -25,7 +25,7 @@ module.exports.createPropertiesHashmap = (
)
) {
let cssArray = [];
- rule.nodes.forEach(function (property) {
+ rule.nodes.forEach((property) => {
const { prop, value } = property;
if (property && value) {
const propval = `${prop}${value}${property.important ? '!important' : ''}`;
@@ -41,11 +41,11 @@ module.exports.createPropertiesHashmap = (
const selObj = selectorGroups[hashValue];
const selectorLine = `${selector} (${
- rule.source.input.file ? rule.source.input.file + ' -' : ''
+ rule.source.input.file ? `${rule.source.input.file} -` : ''
}${rule.source.start.line}:${rule.source.start.column})`;
if (selObj) {
- if (selectorGroups[hashValue].selectors.indexOf(selector) == -1) {
+ if (selectorGroups[hashValue].selectors.indexOf(selector) === -1) {
let lastSelector =
selectorGroups[hashValue].selectors[selectorGroups[hashValue].selectors.length - 1];
@@ -67,6 +67,7 @@ module.exports.createPropertiesHashmap = (
}
}
} else if (addSelectors) {
+ // eslint-disable-next-line no-param-reassign
selectorGroups[hashValue] = {
selectors: [selectorLine],
};
diff --git a/scripts/frontend/webpack_dev_server.js b/scripts/frontend/webpack_dev_server.js
index fbb80c9617d..a76e6dc024a 100755
--- a/scripts/frontend/webpack_dev_server.js
+++ b/scripts/frontend/webpack_dev_server.js
@@ -2,8 +2,8 @@ const nodemon = require('nodemon');
const DEV_SERVER_HOST = process.env.DEV_SERVER_HOST || 'localhost';
const DEV_SERVER_PORT = process.env.DEV_SERVER_PORT || '3808';
-const STATIC_MODE = process.env.DEV_SERVER_STATIC && process.env.DEV_SERVER_STATIC != 'false';
-const DLL_MODE = process.env.WEBPACK_VENDOR_DLL && process.env.WEBPACK_VENDOR_DLL != 'false';
+const STATIC_MODE = process.env.DEV_SERVER_STATIC && process.env.DEV_SERVER_STATIC !== 'false';
+const DLL_MODE = process.env.WEBPACK_VENDOR_DLL && process.env.WEBPACK_VENDOR_DLL !== 'false';
const baseConfig = {
ignoreRoot: ['.git', 'node_modules/*/'],
@@ -30,7 +30,7 @@ if (STATIC_MODE) {
// run webpack through webpack-dev-server, optionally compiling a DLL to reduce memory
else {
- let watch = ['config/webpack.config.js'];
+ const watch = ['config/webpack.config.js'];
// if utilizing the vendor DLL, we need to restart the process when dependency changes occur
if (DLL_MODE) {
@@ -51,7 +51,7 @@ else {
// print useful messages for nodemon events
nodemon
- .on('start', function () {
+ .on('start', () => {
console.log(`Starting webpack webserver on http://${DEV_SERVER_HOST}:${DEV_SERVER_PORT}`);
if (STATIC_MODE) {
console.log('You are starting webpack in compile-once mode');
@@ -59,10 +59,10 @@ nodemon
console.log('If you change them often, you might want to unset DEV_SERVER_STATIC');
}
})
- .on('quit', function () {
+ .on('quit', () => {
console.log('Shutting down webpack process');
process.exit();
})
- .on('restart', function (files) {
+ .on('restart', (files) => {
console.log('Restarting webpack process due to: ', files);
});
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 2595512eec3..a0fe9f0f310 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -64,21 +64,35 @@ RSpec.describe ApplicationExperiment, :experiment do
subject.publish(nil)
end
- it "pushes the experiment knowledge into the client using Gon.global" do
- expect(Gon.global).to receive(:push).with(
- {
- experiment: {
- 'namespaced/stub' => { # string key because it can be namespaced
- experiment: 'namespaced/stub',
- key: '86208ac54ca798e11f127e8b23ec396a',
- variant: 'control'
+ context "when inside a request cycle" do
+ before do
+ subject.context.instance_variable_set(:@request, double('Request', headers: 'true'))
+ end
+
+ it "pushes the experiment knowledge into the client using Gon" do
+ expect(Gon).to receive(:push).with(
+ {
+ experiment: {
+ 'namespaced/stub' => { # string key because it can be namespaced
+ experiment: 'namespaced/stub',
+ key: '86208ac54ca798e11f127e8b23ec396a',
+ variant: 'control'
+ }
}
- }
- },
- true
- )
+ },
+ true
+ )
- subject.publish(nil)
+ subject.publish(nil)
+ end
+ end
+
+ context "when outside a request cycle" do
+ it "does not push to gon when outside request cycle" do
+ expect(Gon).not_to receive(:push)
+
+ subject.publish(nil)
+ end
end
end
diff --git a/spec/frontend/experiment_tracking_spec.js b/spec/frontend/experimentation/experiment_tracking_spec.js
index db209b783cb..20f45a7015a 100644
--- a/spec/frontend/experiment_tracking_spec.js
+++ b/spec/frontend/experimentation/experiment_tracking_spec.js
@@ -1,17 +1,16 @@
-import ExperimentTracking from '~/experiment_tracking';
+import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
+import ExperimentTracking from '~/experimentation/experiment_tracking';
+import { getExperimentData } from '~/experimentation/utils';
import Tracking from '~/tracking';
-jest.mock('~/tracking');
-
-const oldGon = window.gon;
-
-let newGon = {};
let experimentTracking;
let label;
let property;
+jest.mock('~/tracking');
+jest.mock('~/experimentation/utils', () => ({ getExperimentData: jest.fn() }));
+
const setup = () => {
- window.gon = newGon;
experimentTracking = new ExperimentTracking('sidebar_experiment', { label, property });
};
@@ -20,16 +19,18 @@ beforeEach(() => {
});
afterEach(() => {
- window.gon = oldGon;
- Tracking.mockClear();
label = undefined;
property = undefined;
});
describe('event', () => {
+ beforeEach(() => {
+ getExperimentData.mockReturnValue(undefined);
+ });
+
describe('when experiment data exists for experimentName', () => {
beforeEach(() => {
- newGon = { global: { experiment: { sidebar_experiment: 'experiment-data' } } };
+ getExperimentData.mockReturnValue('experiment-data');
setup();
});
@@ -45,7 +46,7 @@ describe('event', () => {
label: 'sidebar-drawer',
property: 'dark-mode',
context: {
- schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
+ schema: TRACKING_CONTEXT_SCHEMA,
data: 'experiment-data',
},
});
@@ -58,7 +59,7 @@ describe('event', () => {
expect(Tracking.event).toHaveBeenCalledTimes(1);
expect(Tracking.event).toHaveBeenCalledWith('issues-page', 'click_sidebar_trigger', {
context: {
- schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
+ schema: TRACKING_CONTEXT_SCHEMA,
data: 'experiment-data',
},
});
@@ -67,7 +68,6 @@ describe('event', () => {
describe('when experiment data does NOT exist for the experimentName', () => {
beforeEach(() => {
- newGon = { global: { experiment: { unrelated_experiment: 'not happening' } } };
setup();
});
diff --git a/spec/frontend/experimentation/utils_spec.js b/spec/frontend/experimentation/utils_spec.js
new file mode 100644
index 00000000000..87dd2d595ba
--- /dev/null
+++ b/spec/frontend/experimentation/utils_spec.js
@@ -0,0 +1,38 @@
+import * as experimentUtils from '~/experimentation/utils';
+
+const TEST_KEY = 'abc';
+
+describe('experiment Utilities', () => {
+ const oldGon = window.gon;
+
+ afterEach(() => {
+ window.gon = oldGon;
+ });
+
+ describe('getExperimentData', () => {
+ it.each`
+ gon | input | output
+ ${{ experiment: { [TEST_KEY]: '_data_' } }} | ${[TEST_KEY]} | ${'_data_'}
+ ${{}} | ${[TEST_KEY]} | ${undefined}
+ `('with input=$input and gon=$gon, returns $output', ({ gon, input, output }) => {
+ window.gon = gon;
+
+ expect(experimentUtils.getExperimentData(...input)).toEqual(output);
+ });
+ });
+
+ describe('isExperimentVariant', () => {
+ it.each`
+ gon | input | output
+ ${{ experiment: { [TEST_KEY]: { variant: 'control' } } }} | ${[TEST_KEY, 'control']} | ${true}
+ ${{ experiment: { [TEST_KEY]: { variant: '_variant_name' } } }} | ${[TEST_KEY, '_variant_name']} | ${true}
+ ${{ experiment: { [TEST_KEY]: { variant: '_variant_name' } } }} | ${[TEST_KEY, '_bogus_name']} | ${false}
+ ${{ experiment: { [TEST_KEY]: { variant: '_variant_name' } } }} | ${['boguskey', '_variant_name']} | ${false}
+ ${{}} | ${[TEST_KEY, '_variant_name']} | ${false}
+ `('with input=$input and gon=$gon, returns $output', ({ gon, input, output }) => {
+ window.gon = gon;
+
+ expect(experimentUtils.isExperimentVariant(...input)).toEqual(output);
+ });
+ });
+});
diff --git a/spec/frontend/graphql_shared/utils_spec.js b/spec/frontend/graphql_shared/utils_spec.js
index d392b0f0575..56bfb02ea4a 100644
--- a/spec/frontend/graphql_shared/utils_spec.js
+++ b/spec/frontend/graphql_shared/utils_spec.js
@@ -2,6 +2,8 @@ import {
getIdFromGraphQLId,
convertToGraphQLId,
convertToGraphQLIds,
+ convertFromGraphQLIds,
+ convertNodeIdsFromGraphQLIds,
} from '~/graphql_shared/utils';
const mockType = 'Group';
@@ -81,3 +83,35 @@ describe('convertToGraphQLIds', () => {
expect(() => convertToGraphQLIds(type, ids)).toThrow(new TypeError(message));
});
});
+
+describe('convertFromGraphQLIds', () => {
+ it.each`
+ ids | expected
+ ${[mockGid]} | ${[mockId]}
+ ${[mockGid, 'invalid id']} | ${[mockId, null]}
+ `('converts $ids from GraphQL Ids', ({ ids, expected }) => {
+ expect(convertFromGraphQLIds(ids)).toEqual(expected);
+ });
+
+ it("throws TypeError if `ids` parameter isn't an array", () => {
+ expect(() => convertFromGraphQLIds('invalid')).toThrow(
+ new TypeError('ids must be an array; got string'),
+ );
+ });
+});
+
+describe('convertNodeIdsFromGraphQLIds', () => {
+ it.each`
+ nodes | expected
+ ${[{ id: mockGid, name: 'foo bar' }, { id: mockGid, name: 'baz' }]} | ${[{ id: mockId, name: 'foo bar' }, { id: mockId, name: 'baz' }]}
+ ${[{ name: 'foo bar' }]} | ${[{ name: 'foo bar' }]}
+ `('converts `id` properties in $nodes from GraphQL Id', ({ nodes, expected }) => {
+ expect(convertNodeIdsFromGraphQLIds(nodes)).toEqual(expected);
+ });
+
+ it("throws TypeError if `nodes` parameter isn't an array", () => {
+ expect(() => convertNodeIdsFromGraphQLIds('invalid')).toThrow(
+ new TypeError('nodes must be an array; got string'),
+ );
+ });
+});
diff --git a/spec/frontend/lib/utils/experimentation_spec.js b/spec/frontend/lib/utils/experimentation_spec.js
deleted file mode 100644
index 2c5d2f89297..00000000000
--- a/spec/frontend/lib/utils/experimentation_spec.js
+++ /dev/null
@@ -1,20 +0,0 @@
-import * as experimentUtils from '~/lib/utils/experimentation';
-
-const TEST_KEY = 'abc';
-
-describe('experiment Utilities', () => {
- describe('isExperimentEnabled', () => {
- it.each`
- experiments | value
- ${{ [TEST_KEY]: true }} | ${true}
- ${{ [TEST_KEY]: false }} | ${false}
- ${{ def: true }} | ${false}
- ${{}} | ${false}
- ${null} | ${false}
- `('returns correct value of $value for experiments=$experiments', ({ experiments, value }) => {
- window.gon = { experiments };
-
- expect(experimentUtils.isExperimentEnabled(TEST_KEY)).toEqual(value);
- });
- });
-});
diff --git a/spec/frontend/projects/upload_file_experiment_spec.js b/spec/frontend/projects/upload_file_experiment_spec.js
index 8cad49425f4..aa1b1c44a31 100644
--- a/spec/frontend/projects/upload_file_experiment_spec.js
+++ b/spec/frontend/projects/upload_file_experiment_spec.js
@@ -1,21 +1,13 @@
-import ExperimentTracking from '~/experiment_tracking';
+import ExperimentTracking from '~/experimentation/experiment_tracking';
import * as UploadFileExperiment from '~/projects/upload_file_experiment';
-const mockExperimentTrackingEvent = jest.fn();
-jest.mock('~/experiment_tracking', () =>
- jest.fn().mockImplementation(() => ({
- event: mockExperimentTrackingEvent,
- })),
-);
+jest.mock('~/experimentation/experiment_tracking');
const fixture = `<a class='js-upload-file-experiment-trigger' data-toggle='modal' data-target='#modal-upload-blob'></a><div id='modal-upload-blob'></div><div class='project-home-panel empty-project'></div>`;
const findModal = () => document.querySelector('[aria-modal="true"]');
const findTrigger = () => document.querySelector('.js-upload-file-experiment-trigger');
beforeEach(() => {
- ExperimentTracking.mockClear();
- mockExperimentTrackingEvent.mockClear();
-
document.body.innerHTML = fixture;
});
@@ -31,7 +23,9 @@ describe('trackUploadFileFormSubmitted', () => {
label: 'blob-upload-modal',
property: 'empty',
});
- expect(mockExperimentTrackingEvent).toHaveBeenCalledWith('click_upload_modal_form_submit');
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(
+ 'click_upload_modal_form_submit',
+ );
});
it('initializes ExperimentTracking with the correct arguments when the project is not empty', () => {
@@ -53,6 +47,6 @@ describe('initUploadFileTrigger', () => {
expect(findModal()).not.toExist();
findTrigger().click();
expect(findModal()).toExist();
- expect(mockExperimentTrackingEvent).toHaveBeenCalledWith('click_upload_modal_trigger');
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('click_upload_modal_trigger');
});
});
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
index 726dc0edede..6a22de3be5c 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking_spec.js
@@ -1,12 +1,18 @@
import { setHTMLFixture } from 'helpers/fixtures';
+import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
+import { getExperimentData } from '~/experimentation/utils';
import Tracking, { initUserTracking, initDefaultTrackers, STANDARD_CONTEXT } from '~/tracking';
+jest.mock('~/experimentation/utils', () => ({ getExperimentData: jest.fn() }));
+
describe('Tracking', () => {
let snowplowSpy;
let bindDocumentSpy;
let trackLoadEventsSpy;
beforeEach(() => {
+ getExperimentData.mockReturnValue(undefined);
+
window.snowplow = window.snowplow || (() => {});
window.snowplowOptions = {
namespace: '_namespace_',
@@ -245,18 +251,18 @@ describe('Tracking', () => {
});
it('brings in experiment data if linked to an experiment', () => {
- const data = {
+ const mockExperimentData = {
variant: 'candidate',
experiment: 'repo_integrations_link',
key: '2bff73f6bb8cc11156c50a8ba66b9b8b',
};
+ getExperimentData.mockReturnValue(mockExperimentData);
- window.gon.global = { experiment: { example: data } };
document.querySelector('[data-track-event="click_input3"]').click();
expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input3', {
value: '_value_',
- context: { schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0', data },
+ context: { schema: TRACKING_CONTEXT_SCHEMA, data: mockExperimentData },
});
});
});
@@ -301,21 +307,21 @@ describe('Tracking', () => {
describe('tracking mixin', () => {
describe('trackingOptions', () => {
- it('return the options defined on initialisation', () => {
+ it('returns the options defined on initialisation', () => {
const mixin = Tracking.mixin({ foo: 'bar' });
expect(mixin.computed.trackingOptions()).toEqual({ foo: 'bar' });
});
- it('local tracking value override and extend options', () => {
+ it('lets local tracking value override and extend options', () => {
const mixin = Tracking.mixin({ foo: 'bar' });
- // the value of this in the vue lifecyle is different, but this serve the tests purposes
+ // The value of this in the Vue lifecycle is different, but this serves the test's purposes
mixin.computed.tracking = { foo: 'baz', baz: 'bar' };
expect(mixin.computed.trackingOptions()).toEqual({ foo: 'baz', baz: 'bar' });
});
});
describe('trackingCategory', () => {
- it('return the category set in the component properties first', () => {
+ it('returns the category set in the component properties first', () => {
const mixin = Tracking.mixin({ category: 'foo' });
mixin.computed.tracking = {
category: 'bar',
@@ -323,12 +329,12 @@ describe('Tracking', () => {
expect(mixin.computed.trackingCategory()).toBe('bar');
});
- it('return the category set in the options', () => {
+ it('returns the category set in the options', () => {
const mixin = Tracking.mixin({ category: 'foo' });
expect(mixin.computed.trackingCategory()).toBe('foo');
});
- it('if no category is selected returns undefined', () => {
+ it('returns undefined if no category is selected', () => {
const mixin = Tracking.mixin();
expect(mixin.computed.trackingCategory()).toBe(undefined);
});
@@ -363,7 +369,7 @@ describe('Tracking', () => {
expect(eventSpy).toHaveBeenCalledWith(undefined, 'foo', {});
});
- it('give precedence to data for category and options', () => {
+ it('gives precedence to data for category and options', () => {
mixin.trackingCategory = mixin.trackingCategory();
mixin.trackingOptions = mixin.trackingOptions();
const data = { category: 'foo', label: 'baz' };
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js
index bd0bd36ebc2..2c04905d3a9 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js
@@ -14,20 +14,14 @@ describe('NothingToMerge', () => {
it('should have correct elements', () => {
expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
- expect(vm.$el.querySelector('a').href).toContain(newBlobPath);
- expect(vm.$el.innerText).toContain(
- "Currently there are no changes in this merge request's source branch",
- );
-
- expect(vm.$el.innerText.replace(/\s\s+/g, ' ')).toContain(
- 'Please push new commits or use a different branch.',
- );
+ expect(vm.$el.querySelector('[data-testid="createFileButton"]').href).toContain(newBlobPath);
+ expect(vm.$el.innerText).toContain('Use merge requests to propose changes to your project');
});
it('should not show new blob link if there is no link available', () => {
vm.mr.newBlobPath = null;
Vue.nextTick(() => {
- expect(vm.$el.querySelector('a')).toEqual(null);
+ expect(vm.$el.querySelector('[data-testid="createFileButton"]')).toEqual(null);
});
});
});
diff --git a/spec/lib/gitlab/database/similarity_score_spec.rb b/spec/lib/gitlab/database/similarity_score_spec.rb
index cf75e5a72d9..b7b66494390 100644
--- a/spec/lib/gitlab/database/similarity_score_spec.rb
+++ b/spec/lib/gitlab/database/similarity_score_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Database::SimilarityScore do
let(:search) { 'xyz' }
it 'results have 0 similarity score' do
- expect(query_result.map { |row| row['similarity'] }).to all(eq(0))
+ expect(query_result.map { |row| row['similarity'].to_f }).to all(eq(0))
end
end
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 3175040167b..1553a989dba 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -441,4 +441,112 @@ RSpec.describe Gitlab::Database do
end
end
end
+
+ describe 'ActiveRecordBaseTransactionMetrics' do
+ def subscribe_events
+ events = []
+
+ begin
+ subscriber = ActiveSupport::Notifications.subscribe('transaction.active_record') do |e|
+ events << e
+ end
+
+ yield
+ ensure
+ ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
+ end
+
+ events
+ end
+
+ context 'without a transaction block' do
+ it 'does not publish a transaction event' do
+ events = subscribe_events do
+ User.first
+ end
+
+ expect(events).to be_empty
+ end
+ end
+
+ context 'within a transaction block' do
+ it 'publishes a transaction event' do
+ events = subscribe_events do
+ ActiveRecord::Base.transaction do
+ User.first
+ end
+ end
+
+ expect(events.length).to be(1)
+
+ event = events.first
+ expect(event).not_to be_nil
+ expect(event.duration).to be > 0.0
+ expect(event.payload).to a_hash_including(
+ connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ )
+ end
+ end
+
+ context 'within an empty transaction block' do
+ it 'publishes a transaction event' do
+ events = subscribe_events do
+ ActiveRecord::Base.transaction {}
+ end
+
+ expect(events.length).to be(1)
+
+ event = events.first
+ expect(event).not_to be_nil
+ expect(event.duration).to be > 0.0
+ expect(event.payload).to a_hash_including(
+ connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ )
+ end
+ end
+
+ context 'within a nested transaction block' do
+ it 'publishes multiple transaction events' do
+ events = subscribe_events do
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction do
+ User.first
+ end
+ end
+ end
+ end
+
+ expect(events.length).to be(3)
+
+ events.each do |event|
+ expect(event).not_to be_nil
+ expect(event.duration).to be > 0.0
+ expect(event.payload).to a_hash_including(
+ connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ )
+ end
+ end
+ end
+
+ context 'within a cancelled transaction block' do
+ it 'publishes a transaction event' do
+ events = subscribe_events do
+ ActiveRecord::Base.transaction do
+ User.first
+ raise ActiveRecord::Rollback
+ end
+ end
+
+ expect(events.length).to be(1)
+
+ event = events.first
+ expect(event).not_to be_nil
+ expect(event.duration).to be > 0.0
+ expect(event.payload).to a_hash_including(
+ connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ )
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
index b45bb8b79d9..ec2ec4bf50d 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::Pagination::Keyset::LastItems do
let_it_be(:merge_request) { create(:merge_request) }
- let(:scope) { MergeRequest.order_merged_at_asc.with_order_id_desc }
+ let(:scope) { MergeRequest.order_merged_at_asc }
subject { described_class.take_items(*args) }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
index eb28e6c8c0a..40ee47ece49 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
@@ -52,18 +52,6 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::OrderInfo do
end
end
- context 'when ordering by SIMILARITY' do
- let(:relation) { Project.sorted_by_similarity_desc('test', include_in_select: true) }
-
- it 'assigns the right attribute name, named function, and direction' do
- expect(order_list.count).to eq 2
- expect(order_list.first.attribute_name).to eq 'similarity'
- expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::Addition)
- expect(order_list.first.named_function.to_sql).to include 'SIMILARITY('
- expect(order_list.first.sort_direction).to eq :desc
- end
- end
-
context 'when ordering by CASE', :aggregate_failures do
let(:relation) { Project.order(Arel::Nodes::Case.new(Project.arel_table[:pending_delete]).when(true).then(100).else(1000).asc) }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
index fa631aa5666..31c02fd43e8 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
@@ -131,43 +131,5 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::QueryBuilder do
end
end
end
-
- context 'when sorting using SIMILARITY' do
- let(:relation) { Project.sorted_by_similarity_desc('test', include_in_select: true) }
- let(:arel_table) { Project.arel_table }
- let(:decoded_cursor) { { 'similarity' => 0.5, 'id' => 100 } }
- let(:similarity_function_call) { Gitlab::Database::SimilarityScore::SIMILARITY_FUNCTION_CALL_WITH_ANNOTATION }
- let(:similarity_sql) do
- [
- "(#{similarity_function_call}(COALESCE(\"projects\".\"path\", ''), 'test') * CAST('1' AS numeric))",
- "(#{similarity_function_call}(COALESCE(\"projects\".\"name\", ''), 'test') * CAST('0.7' AS numeric))",
- "(#{similarity_function_call}(COALESCE(\"projects\".\"description\", ''), 'test') * CAST('0.2' AS numeric))"
- ].join(' + ')
- end
-
- context 'when no values are nil' do
- context 'when :after' do
- it 'generates the correct condition' do
- conditions = builder.conditions.gsub(/\s+/, ' ')
-
- expect(conditions).to include "(#{similarity_sql} < 0.5)"
- expect(conditions).to include '"projects"."id" < 100'
- expect(conditions).to include "OR (#{similarity_sql} IS NULL)"
- end
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates the correct condition' do
- conditions = builder.conditions.gsub(/\s+/, ' ')
-
- expect(conditions).to include "(#{similarity_sql} > 0.5)"
- expect(conditions).to include '"projects"."id" > 100'
- expect(conditions).to include "OR ( #{similarity_sql} = 0.5"
- end
- end
- end
- end
end
end
diff --git a/spec/lib/gitlab/metrics/background_transaction_spec.rb b/spec/lib/gitlab/metrics/background_transaction_spec.rb
new file mode 100644
index 00000000000..b31a2f7549a
--- /dev/null
+++ b/spec/lib/gitlab/metrics/background_transaction_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::BackgroundTransaction do
+ let(:transaction) { described_class.new }
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) }
+
+ before do
+ allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric)
+ end
+
+ describe '#run' do
+ it 'yields the supplied block' do
+ expect { |b| transaction.run(&b) }.to yield_control
+ end
+
+ it 'stores the transaction in the current thread' do
+ transaction.run do
+ expect(Thread.current[described_class::BACKGROUND_THREAD_KEY]).to eq(transaction)
+ end
+ end
+
+ it 'removes the transaction from the current thread upon completion' do
+ transaction.run { }
+
+ expect(Thread.current[described_class::BACKGROUND_THREAD_KEY]).to be_nil
+ end
+ end
+
+ describe '#labels' do
+ it 'provides labels with endpoint_id and feature_category' do
+ Labkit::Context.with_context(feature_category: 'projects', caller_id: 'TestWorker') do
+ expect(transaction.labels).to eq({ endpoint_id: 'TestWorker', feature_category: 'projects' })
+ end
+ end
+ end
+
+ RSpec.shared_examples 'metric with labels' do |metric_method|
+ it 'measures with correct labels and value' do
+ value = 1
+ expect(prometheus_metric).to receive(metric_method).with({ endpoint_id: 'TestWorker', feature_category: 'projects' }, value)
+
+ Labkit::Context.with_context(feature_category: 'projects', caller_id: 'TestWorker') do
+ transaction.send(metric_method, :test_metric, value)
+ end
+ end
+ end
+
+ describe '#increment' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :increment
+ end
+
+ describe '#set' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, :set, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :set
+ end
+
+ describe '#observe' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, :observe, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :observe
+ end
+end
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index b19b94152a2..dffd37eeb9d 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -6,8 +6,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
using RSpec::Parameterized::TableSyntax
let(:env) { {} }
- let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
- let(:subscriber) { described_class.new }
+ let(:subscriber) { described_class.new }
let(:connection) { double(:connection) }
let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10', connection: connection } }
@@ -47,33 +46,15 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
with_them do
let(:payload) { { name: name, sql: sql(sql_query, comments: comments), connection: connection } }
- describe 'with a current transaction' do
- before do
- allow(subscriber).to receive(:current_transaction)
- .at_least(:once)
- .and_return(transaction)
- end
+ it 'marks the current thread as using the database' do
+ # since it would already have been toggled by other specs
+ Thread.current[:uses_db_connection] = nil
- it 'marks the current thread as using the database' do
- # since it would already have been toggled by other specs
- Thread.current[:uses_db_connection] = nil
-
- expect { subscriber.sql(event) }.to change { Thread.current[:uses_db_connection] }.from(nil).to(true)
- end
-
- it_behaves_like 'record ActiveRecord metrics'
- it_behaves_like 'store ActiveRecord info in RequestStore'
+ expect { subscriber.sql(event) }.to change { Thread.current[:uses_db_connection] }.from(nil).to(true)
end
- describe 'without a current transaction' do
- it 'does not track any metrics' do
- expect_any_instance_of(Gitlab::Metrics::Transaction)
- .not_to receive(:increment)
- subscriber.sql(event)
- end
-
- it_behaves_like 'store ActiveRecord info in RequestStore'
- end
+ it_behaves_like 'record ActiveRecord metrics'
+ it_behaves_like 'store ActiveRecord info in RequestStore'
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
new file mode 100644
index 00000000000..6e9e987f90c
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
@@ -0,0 +1,188 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
+ let_it_be(:project_name_column) do
+ described_class.new(
+ attribute_name: :name,
+ order_expression: Project.arel_table[:name].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end
+
+ let_it_be(:project_name_lower_column) do
+ described_class.new(
+ attribute_name: :name,
+ order_expression: Project.arel_table[:name].lower.desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end
+
+ let_it_be(:project_calculated_column_expression) do
+ # COALESCE("projects"."description", 'No Description')
+ Arel::Nodes::NamedFunction.new('COALESCE', [
+ Project.arel_table[:description],
+ Arel.sql("'No Description'")
+ ])
+ end
+
+ let_it_be(:project_calculated_column) do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: project_calculated_column_expression,
+ order_expression: project_calculated_column_expression.asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end
+
+ describe '#order_direction' do
+ context 'inferring order_direction from order_expression' do
+ it { expect(project_name_column).to be_ascending_order }
+ it { expect(project_name_column).not_to be_descending_order }
+
+ it { expect(project_name_lower_column).to be_descending_order }
+ it { expect(project_name_lower_column).not_to be_ascending_order }
+
+ it { expect(project_calculated_column).to be_ascending_order }
+ it { expect(project_calculated_column).not_to be_descending_order }
+
+ it 'raises error when order direction cannot be inferred' do
+ expect do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: 'name asc',
+ reversed_order_expression: 'name desc',
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end.to raise_error(RuntimeError, /Invalid or missing `order_direction`/)
+ end
+
+ it 'does not raise error when order direction is explicitly given' do
+ column_order_definition = described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: 'name asc',
+ reversed_order_expression: 'name desc',
+ order_direction: :asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+
+ expect(column_order_definition).to be_ascending_order
+ end
+ end
+ end
+
+ describe '#column_expression' do
+ context 'inferring column_expression from order_expression' do
+ it 'infers the correct column expression' do
+ column_order_definition = described_class.new(attribute_name: :name, order_expression: Project.arel_table[:name].asc)
+
+ expect(column_order_definition.column_expression).to eq(Project.arel_table[:name])
+ end
+
+ it 'raises error when raw string is given as order expression' do
+ expect do
+ described_class.new(attribute_name: :name, order_expression: 'name DESC')
+ end.to raise_error(RuntimeError, /Couldn't calculate the column expression. Please pass an ARel node/)
+ end
+ end
+ end
+
+ describe '#reversed_order_expression' do
+ it 'raises error when order cannot be reversed automatically' do
+ expect do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: 'name asc',
+ order_direction: :asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end.to raise_error(RuntimeError, /Couldn't determine reversed order/)
+ end
+ end
+
+ describe '#reverse' do
+ it { expect(project_name_column.reverse.order_expression).to eq(Project.arel_table[:name].desc) }
+ it { expect(project_name_column.reverse).to be_descending_order }
+
+ it { expect(project_calculated_column.reverse.order_expression).to eq(project_calculated_column_expression.desc) }
+ it { expect(project_calculated_column.reverse).to be_descending_order }
+
+ context 'when reversed_order_expression is given' do
+ it 'uses the given expression' do
+ column_order_definition = described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: 'name asc',
+ reversed_order_expression: 'name desc',
+ order_direction: :asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+
+ expect(column_order_definition.reverse.order_expression).to eq('name desc')
+ end
+ end
+ end
+
+ describe '#nullable' do
+ context 'when the column is nullable' do
+ let(:nulls_last_order) do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc),
+ order_direction: :desc,
+ nullable: :nulls_last, # null values are always last
+ distinct: false
+ )
+ end
+
+      it 'requires specifying the position of the null values in the result' do
+ expect(nulls_last_order).to be_nulls_last
+ end
+
+ it 'reverses nullable correctly' do
+ expect(nulls_last_order.reverse).to be_nulls_first
+ end
+
+ it 'raises error when invalid nullable value is given' do
+ expect do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc),
+ order_direction: :desc,
+ nullable: true,
+ distinct: false
+ )
+ end.to raise_error(RuntimeError, /Invalid `nullable` is given/)
+ end
+
+ it 'raises error when the column is nullable and distinct' do
+ expect do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc),
+ order_direction: :desc,
+ nullable: :nulls_last,
+ distinct: true
+ )
+ end.to raise_error(RuntimeError, /Invalid column definition/)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
new file mode 100644
index 00000000000..665f790ee47
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -0,0 +1,420 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::Order do
+ let(:table) { Arel::Table.new(:my_table) }
+ let(:order) { nil }
+
+ def run_query(query)
+ ActiveRecord::Base.connection.execute(query).to_a
+ end
+
+ def build_query(order:, where_conditions: nil, limit: nil)
+ <<-SQL
+ SELECT id, year, month
+ FROM (#{table_data}) my_table (id, year, month)
+ WHERE #{where_conditions || '1=1'}
+ ORDER BY #{order}
+ LIMIT #{limit || 999};
+ SQL
+ end
+
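+  # Simulates keyset pagination: fetch a page, build the cursor from the last
+  # row via the order object, and use the resulting WHERE condition to fetch
+  # the next page until no rows remain.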
+ def iterate_and_collect(order:, page_size:, where_conditions: nil)
+ all_items = []
+
+ loop do
+ paginated_items = run_query(build_query(order: order, where_conditions: where_conditions, limit: page_size))
+ break if paginated_items.empty?
+
+ all_items.concat(paginated_items)
+ last_item = paginated_items.last
+ cursor_attributes = order.cursor_attributes_for_node(last_item)
+ where_conditions = order.build_where_values(cursor_attributes).to_sql
+ end
+
+ all_items
+ end
+
+ subject do
+ run_query(build_query(order: order))
+ end
+
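+  # The result of a plain ORDER BY query must match what forwards and
+  # backwards keyset pagination produce when the pages are stitched together.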
+ shared_examples 'order examples' do
+ it { expect(subject).to eq(expected) }
+
+ context 'when paginating forwards' do
+ subject { iterate_and_collect(order: order, page_size: 2) }
+
+ it { expect(subject).to eq(expected) }
+
+ context 'with different page size' do
+ subject { iterate_and_collect(order: order, page_size: 5) }
+
+ it { expect(subject).to eq(expected) }
+ end
+ end
+
+ context 'when paginating backwards' do
+ subject do
+ last_item = expected.last
+ cursor_attributes = order.cursor_attributes_for_node(last_item)
+ where_conditions = order.reversed_order.build_where_values(cursor_attributes)
+
+ iterate_and_collect(order: order.reversed_order, page_size: 2, where_conditions: where_conditions.to_sql)
+ end
+
+ it do
+ expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject
+ end
+ end
+ end
+
+ context 'when ordering by a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 0, 0),
+ (2, 0, 0),
+ (3, 0, 0),
+ (4, 0, 0),
+ (5, 0, 0),
+ (6, 0, 0),
+ (7, 0, 0),
+ (8, 0, 0),
+ (9, 0, 0)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { "id" => 9, "year" => 0, "month" => 0 },
+ { "id" => 8, "year" => 0, "month" => 0 },
+ { "id" => 7, "year" => 0, "month" => 0 },
+ { "id" => 6, "year" => 0, "month" => 0 },
+ { "id" => 5, "year" => 0, "month" => 0 },
+ { "id" => 4, "year" => 0, "month" => 0 },
+ { "id" => 3, "year" => 0, "month" => 0 },
+ { "id" => 2, "year" => 0, "month" => 0 },
+ { "id" => 1, "year" => 0, "month" => 0 }
+ ]
+ end
+
+ it_behaves_like 'order examples'
+ end
+
+ context 'when ordering by two non-nullable columns and a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 2010, 2),
+ (2, 2011, 1),
+ (3, 2009, 2),
+ (4, 2011, 1),
+ (5, 2011, 1),
+ (6, 2009, 2),
+ (7, 2010, 3),
+ (8, 2012, 4),
+ (9, 2013, 5)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: table['month'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { 'year' => 2009, 'month' => 2, 'id' => 3 },
+ { 'year' => 2009, 'month' => 2, 'id' => 6 },
+ { 'year' => 2010, 'month' => 2, 'id' => 1 },
+ { 'year' => 2010, 'month' => 3, 'id' => 7 },
+ { 'year' => 2011, 'month' => 1, 'id' => 2 },
+ { 'year' => 2011, 'month' => 1, 'id' => 4 },
+ { 'year' => 2011, 'month' => 1, 'id' => 5 },
+ { 'year' => 2012, 'month' => 4, 'id' => 8 },
+ { 'year' => 2013, 'month' => 5, 'id' => 9 }
+ ]
+ end
+
+ it_behaves_like 'order examples'
+ end
+
+ context 'when ordering by nullable columns and a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 2010, null),
+ (2, 2011, 2),
+ (3, null, null),
+ (4, null, 5),
+ (5, 2010, null),
+ (6, 2011, 2),
+ (7, 2010, 2),
+ (8, 2012, 2),
+ (9, null, 2),
+ (10, null, null),
+ (11, 2010, 2)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: Gitlab::Database.nulls_last_order('year', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('year', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: Gitlab::Database.nulls_last_order('month', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('month', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { "id" => 7, "year" => 2010, "month" => 2 },
+ { "id" => 11, "year" => 2010, "month" => 2 },
+ { "id" => 1, "year" => 2010, "month" => nil },
+ { "id" => 5, "year" => 2010, "month" => nil },
+ { "id" => 2, "year" => 2011, "month" => 2 },
+ { "id" => 6, "year" => 2011, "month" => 2 },
+ { "id" => 8, "year" => 2012, "month" => 2 },
+ { "id" => 9, "year" => nil, "month" => 2 },
+ { "id" => 4, "year" => nil, "month" => 5 },
+ { "id" => 3, "year" => nil, "month" => nil },
+ { "id" => 10, "year" => nil, "month" => nil }
+ ]
+ end
+
+ it_behaves_like 'order examples'
+ end
+
+ context 'when ordering by nullable columns with nulls first ordering and a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 2010, null),
+ (2, 2011, 2),
+ (3, null, null),
+ (4, null, 5),
+ (5, 2010, null),
+ (6, 2011, 2),
+ (7, 2010, 2),
+ (8, 2012, 2),
+ (9, null, 2),
+ (10, null, null),
+ (11, 2010, 2)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: Gitlab::Database.nulls_first_order('year', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_last_order('year', :desc),
+ order_direction: :asc,
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: Gitlab::Database.nulls_first_order('month', :asc),
+ order_direction: :asc,
+ reversed_order_expression: Gitlab::Database.nulls_last_order('month', :desc),
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { "id" => 3, "year" => nil, "month" => nil },
+ { "id" => 10, "year" => nil, "month" => nil },
+ { "id" => 9, "year" => nil, "month" => 2 },
+ { "id" => 4, "year" => nil, "month" => 5 },
+ { "id" => 1, "year" => 2010, "month" => nil },
+ { "id" => 5, "year" => 2010, "month" => nil },
+ { "id" => 7, "year" => 2010, "month" => 2 },
+ { "id" => 11, "year" => 2010, "month" => 2 },
+ { "id" => 2, "year" => 2011, "month" => 2 },
+ { "id" => 6, "year" => 2011, "month" => 2 },
+ { "id" => 8, "year" => 2012, "month" => 2 }
+ ]
+ end
+
+ it_behaves_like 'order examples'
+ end
+
+ context 'when ordering by non-nullable columns with mixed directions and a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 2010, 0),
+ (2, 2011, 0),
+ (3, 2010, 0),
+ (4, 2010, 0),
+ (5, 2012, 0),
+ (6, 2012, 0),
+ (7, 2010, 0),
+ (8, 2011, 0),
+ (9, 2013, 0),
+ (10, 2014, 0),
+ (11, 2013, 0)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { "id" => 7, "year" => 2010, "month" => 0 },
+ { "id" => 4, "year" => 2010, "month" => 0 },
+ { "id" => 3, "year" => 2010, "month" => 0 },
+ { "id" => 1, "year" => 2010, "month" => 0 },
+ { "id" => 8, "year" => 2011, "month" => 0 },
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 },
+ { "id" => 5, "year" => 2012, "month" => 0 },
+ { "id" => 11, "year" => 2013, "month" => 0 },
+ { "id" => 9, "year" => 2013, "month" => 0 },
+ { "id" => 10, "year" => 2014, "month" => 0 }
+ ]
+ end
+
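+    # Builds an "after" condition from the forward order and a "before"
+    # condition from the reversed order, then intersects them to select only
+    # the rows between the two cursors.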
+ it 'takes out a slice between two cursors' do
+ after_cursor = { "id" => 8, "year" => 2011 }
+ before_cursor = { "id" => 5, "year" => 2012 }
+
+ after_conditions = order.build_where_values(after_cursor)
+ reversed = order.reversed_order
+ before_conditions = reversed.build_where_values(before_cursor)
+
+ query = build_query(order: order, where_conditions: "(#{after_conditions.to_sql}) AND (#{before_conditions.to_sql})", limit: 100)
+
+ expect(run_query(query)).to eq([
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 }
+ ])
+ end
+ end
+
+ context 'when the passed cursor values do not match with the order definition' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ context 'when values are missing' do
+ it 'raises error' do
+ expect { order.build_where_values(id: 1) }.to raise_error(/Missing items: year/)
+ end
+ end
+
+ context 'when extra values are present' do
+ it 'raises error' do
+ expect { order.build_where_values(id: 1, year: 2, foo: 3) }.to raise_error(/Extra items: foo/)
+ end
+ end
+
+ context 'when values are missing and extra values are present' do
+ it 'raises error' do
+ expect { order.build_where_values(year: 2, foo: 3) }.to raise_error(/Extra items: foo\. Missing items: id/)
+ end
+ end
+
+ context 'when no values are passed' do
+ it 'returns nil' do
+ expect(order.build_where_values({})).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index de37b997d13..71f4f2a3b64 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -113,6 +113,14 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once
end
+ it 'calls BackgroundTransaction' do
+ expect_next_instance_of(Gitlab::Metrics::BackgroundTransaction) do |instance|
+ expect(instance).to receive(:run)
+ end
+
+ subject.call(worker, job, :test) {}
+ end
+
it 'sets queue specific metrics' do
expect(running_jobs_metric).to receive(:increment).with(labels, -1)
expect(running_jobs_metric).to receive(:increment).with(labels, 1)
diff --git a/spec/models/project_services/discord_service_spec.rb b/spec/models/project_services/discord_service_spec.rb
index 67e5ba79f01..ffe0a36dcdc 100644
--- a/spec/models/project_services/discord_service_spec.rb
+++ b/spec/models/project_services/discord_service_spec.rb
@@ -67,5 +67,16 @@ RSpec.describe DiscordService do
expect { subject.execute(sample_data) }.to raise_error(ArgumentError, /is blocked/)
end
end
+
+ context 'when the Discord request fails' do
+ before do
+ WebMock.stub_request(:post, webhook_url).to_return(status: 400)
+ end
+
+ it 'logs an error and returns false' do
+ expect(subject).to receive(:log_error).with('400 Bad Request')
+ expect(subject.execute(sample_data)).to be(false)
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index ba40eec9b69..d97a0ed9399 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -381,29 +381,41 @@ RSpec.describe 'getting merge request listings nested in a project' do
end
context 'when sorting by merged_at DESC' do
- it_behaves_like 'sorted paginated query' do
- let(:sort_param) { :MERGED_AT_DESC }
- let(:first_param) { 2 }
+ let(:sort_param) { :MERGED_AT_DESC }
+ let(:expected_results) do
+ [
+ merge_request_b,
+ merge_request_d,
+ merge_request_c,
+ merge_request_e,
+ merge_request_a
+ ].map { |mr| global_id_of(mr) }
+ end
- let(:expected_results) do
- [
- merge_request_b,
- merge_request_d,
- merge_request_c,
- merge_request_e,
- merge_request_a
- ].map { |mr| global_id_of(mr) }
- end
+ before do
+ five_days_ago = 5.days.ago
+
+ merge_request_d.metrics.update!(merged_at: five_days_ago)
- before do
- five_days_ago = 5.days.ago
+        # identical merged_at values; the tie is broken by the second order column (merge_request.id)
+ merge_request_c.metrics.update!(merged_at: five_days_ago)
+
+ merge_request_b.metrics.update!(merged_at: 1.day.ago)
+ end
+
+ it_behaves_like 'sorted paginated query' do
+ let(:first_param) { 2 }
+ end
- merge_request_d.metrics.update!(merged_at: five_days_ago)
+ context 'when last parameter is given' do
+ let(:params) { graphql_args(sort: sort_param, last: 2) }
+ let(:page_info) { nil }
- # same merged_at, the second order column will decide (merge_request.id)
- merge_request_c.metrics.update!(merged_at: five_days_ago)
+ it 'takes the last 2 records' do
+ query = pagination_query(params)
+ post_graphql(query, current_user: current_user)
- merge_request_b.metrics.update!(merged_at: 1.day.ago)
+ expect(results.map { |item| item["id"] }).to eq(expected_results.last(2))
end
end
end
diff --git a/spec/support/gitlab_experiment.rb b/spec/support/gitlab_experiment.rb
index 4a5b1fe73c9..4015db329fc 100644
--- a/spec/support/gitlab_experiment.rb
+++ b/spec/support/gitlab_experiment.rb
@@ -2,6 +2,7 @@
# Require the provided spec helper and matchers.
require 'gitlab/experiment/rspec'
+require_relative 'stub_snowplow'
# This is a temporary fix until we have a larger discussion around the
# challenges raised in https://gitlab.com/gitlab-org/gitlab/-/issues/300104
@@ -10,11 +11,21 @@ class ApplicationExperiment # rubocop:disable Gitlab/NamespacedClass
super(...)
Feature.persist_used!(feature_flag_name)
end
+
+ def should_track?
+ true
+ end
end
RSpec.configure do |config|
+ config.include StubSnowplow, :experiment
+
# Disable all caching for experiments in tests.
config.before do
allow(Gitlab::Experiment::Configuration).to receive(:cache).and_return(nil)
end
+
+ config.before(:each, :experiment) do
+ stub_snowplow
+ end
end
diff --git a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
index 17f7b765bc5..7bf2456c548 100644
--- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
+++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
@@ -42,7 +42,7 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
end
end
-RSpec.shared_examples 'record ActiveRecord metrics' do |db_role|
+RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do |db_role|
it 'increments only db counters' do
if record_query
expect(transaction).to receive(:increment).with(:gitlab_transaction_db_count_total, 1)
@@ -80,3 +80,58 @@ RSpec.shared_examples 'record ActiveRecord metrics' do |db_role|
subscriber.sql(event)
end
end
+
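+# Runs the metrics examples in three scenarios: web and background
+# transactions both present, web transaction only, and background
+# transaction only.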
+RSpec.shared_examples 'record ActiveRecord metrics' do |db_role|
+ context 'when both web and background transaction are available' do
+ let(:transaction) { double('Gitlab::Metrics::WebTransaction') }
+    let(:background_transaction) { double('Gitlab::Metrics::BackgroundTransaction') }
+
+ before do
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current)
+ .and_return(transaction)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current)
+ .and_return(background_transaction)
+ allow(transaction).to receive(:increment)
+ allow(transaction).to receive(:observe)
+ end
+
+ it_behaves_like 'record ActiveRecord metrics in a metrics transaction', db_role
+
+    it 'captures the metrics for the web transaction only' do
+ expect(background_transaction).not_to receive(:observe)
+ expect(background_transaction).not_to receive(:increment)
+
+ subscriber.sql(event)
+ end
+ end
+
+ context 'when web transaction is available' do
+ let(:transaction) { double('Gitlab::Metrics::WebTransaction') }
+
+ before do
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current)
+ .and_return(transaction)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current)
+ .and_return(nil)
+ allow(transaction).to receive(:increment)
+ allow(transaction).to receive(:observe)
+ end
+
+ it_behaves_like 'record ActiveRecord metrics in a metrics transaction', db_role
+ end
+
+ context 'when background transaction is available' do
+ let(:transaction) { double('Gitlab::Metrics::BackgroundTransaction') }
+
+ before do
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current)
+ .and_return(nil)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current)
+ .and_return(transaction)
+ allow(transaction).to receive(:increment)
+ allow(transaction).to receive(:observe)
+ end
+
+ it_behaves_like 'record ActiveRecord metrics in a metrics transaction', db_role
+ end
+end
diff --git a/spec/support/snowplow.rb b/spec/support/snowplow.rb
index 0d6102f1705..e58be667b37 100644
--- a/spec/support/snowplow.rb
+++ b/spec/support/snowplow.rb
@@ -1,24 +1,13 @@
# frozen_string_literal: true
+require_relative 'stub_snowplow'
+
RSpec.configure do |config|
config.include SnowplowHelpers, :snowplow
+ config.include StubSnowplow, :snowplow
config.before(:each, :snowplow) do
- # Using a high buffer size to not cause early flushes
- buffer_size = 100
- # WebMock is set up to allow requests to `localhost`
- host = 'localhost'
-
- allow_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics).to receive(:event)
-
- allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow)
- .to receive(:emitter)
- .and_return(SnowplowTracker::Emitter.new(host, buffer_size: buffer_size))
-
- stub_application_setting(snowplow_enabled: true)
-
- allow(SnowplowTracker::SelfDescribingJson).to receive(:new).and_call_original
- allow(Gitlab::Tracking).to receive(:event).and_call_original # rubocop:disable RSpec/ExpectGitlabTracking
+ stub_snowplow
end
config.after(:each, :snowplow) do
diff --git a/spec/support/stub_snowplow.rb b/spec/support/stub_snowplow.rb
new file mode 100644
index 00000000000..a21ce2399d7
--- /dev/null
+++ b/spec/support/stub_snowplow.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module StubSnowplow
+ def stub_snowplow
+ # Using a high buffer size to not cause early flushes
+ buffer_size = 100
+ # WebMock is set up to allow requests to `localhost`
+ host = 'localhost'
+
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics).to receive(:event)
+
+ allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow)
+ .to receive(:emitter)
+ .and_return(SnowplowTracker::Emitter.new(host, buffer_size: buffer_size))
+ # rubocop:enable RSpec/AnyInstanceOf
+
+ stub_application_setting(snowplow_enabled: true)
+
+ allow(SnowplowTracker::SelfDescribingJson).to receive(:new).and_call_original
+ allow(Gitlab::Tracking).to receive(:event).and_call_original # rubocop:disable RSpec/ExpectGitlabTracking
+ end
+end