gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2023-08-10 21:09:46 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2023-08-10 21:09:46 +0300
commit    50ecbfaf1bfe883056ab366c8e4a28fd4574c6fa (patch)
tree      dc1034549eff45c7f057971da443aa1570bcebd3
parent    aaabf6eb167d1a0bb2357f331bd411923ede37a6 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .gitlab/ci/package-and-test/main.gitlab-ci.yml | 2
-rw-r--r--  .gitlab/ci/rails.gitlab-ci.yml | 17
-rw-r--r--  app/assets/javascripts/super_sidebar/components/frequent_items_list.vue | 44
-rw-r--r--  app/assets/javascripts/super_sidebar/utils.js | 28
-rw-r--r--  app/finders/autocomplete/users_finder.rb | 2
-rw-r--r--  app/graphql/types/ci/runner_manager_type.rb | 15
-rw-r--r--  app/models/ci/runner_manager.rb | 11
-rw-r--r--  app/models/concerns/from_union.rb | 3
-rw-r--r--  app/models/issue.rb | 2
-rw-r--r--  app/models/loose_foreign_keys/deleted_record.rb | 37
-rw-r--r--  app/services/prometheus/proxy_service.rb | 145
-rw-r--r--  app/services/prometheus/proxy_variable_substitution_service.rb | 155
-rw-r--r--  config/feature_flags/development/loose_foreign_keys_batch_load_using_union.yml | 8
-rw-r--r--  config/webpack.config.js | 6
-rw-r--r--  doc/api/graphql/reference/index.md | 1
-rw-r--r--  doc/development/testing_guide/end_to_end/index.md | 24
-rw-r--r--  doc/development/testing_guide/end_to_end/package_and_test_pipeline.md | 137
-rw-r--r--  doc/development/testing_guide/end_to_end/test_pipelines.md | 190
-rw-r--r--  doc/user/okrs.md | 24
-rw-r--r--  doc/user/project/issues/confidential_issues.md | 27
-rw-r--r--  doc/user/project/issues/img/confidential_issues_index_page.png | bin 30634 -> 0 bytes
-rw-r--r--  doc/user/project/issues/img/confidential_issues_issue_page.png | bin 24484 -> 0 bytes
-rw-r--r--  doc/user/project/issues/img/confidential_issues_system_notes_v15_4.png | bin 4289 -> 0 bytes
-rw-r--r--  doc/user/project/issues/img/sidebar_confidential_issue.png | bin 5404 -> 0 bytes
-rw-r--r--  doc/user/project/issues/img/sidebar_not_confidential_issue.png | bin 4081 -> 0 bytes
-rw-r--r--  doc/user/tasks.md | 69
-rw-r--r--  lib/gitlab/usage_data_counters/hll_redis_counter.rb | 49
-rw-r--r--  locale/gitlab.pot | 9
-rw-r--r--  qa/qa/support/api.rb | 17
-rw-r--r--  qa/spec/support/api_spec.rb | 103
-rw-r--r--  spec/finders/autocomplete/users_finder_spec.rb | 13
-rw-r--r--  spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js | 110
-rw-r--r--  spec/frontend/super_sidebar/utils_spec.js | 130
-rw-r--r--  spec/graphql/types/ci/runner_manager_type_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb | 86
-rw-r--r--  spec/models/ci/runner_manager_spec.rb | 17
-rw-r--r--  spec/models/issue_spec.rb | 8
-rw-r--r--  spec/models/loose_foreign_keys/deleted_record_spec.rb | 30
-rw-r--r--  spec/requests/api/graphql/ci/runner_spec.rb | 39
-rw-r--r--  spec/services/prometheus/proxy_service_spec.rb | 240
-rw-r--r--  spec/services/prometheus/proxy_variable_substitution_service_spec.rb | 204
41 files changed, 824 insertions(+), 1180 deletions(-)
diff --git a/.gitlab/ci/package-and-test/main.gitlab-ci.yml b/.gitlab/ci/package-and-test/main.gitlab-ci.yml
index 9d0958458cc..9e11a6606f7 100644
--- a/.gitlab/ci/package-and-test/main.gitlab-ci.yml
+++ b/.gitlab/ci/package-and-test/main.gitlab-ci.yml
@@ -1,5 +1,5 @@
# E2E tests pipeline loaded dynamically by script: scripts/generate-e2e-pipeline
-# For adding new tests, refer to: doc/development/testing_guide/end_to_end/package_and_test_pipeline.md
+# For adding new tests, refer to: doc/development/testing_guide/end_to_end/test_pipelines.md
include:
- local: .gitlab/ci/qa-common/main.gitlab-ci.yml
- local: .gitlab/ci/qa-common/rules.gitlab-ci.yml
diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml
index ca5c1319735..326d23be5a4 100644
--- a/.gitlab/ci/rails.gitlab-ci.yml
+++ b/.gitlab/ci/rails.gitlab-ci.yml
@@ -465,11 +465,11 @@ rspec:artifact-collector ee:
rspec:coverage:
extends:
- .ruby-gems-coverage-cache
- - .fast-no-clone-job
- .rails:rules:rspec-coverage
stage: post-test
needs:
- - setup-test-env
+ - job: setup-test-env
+ artifacts: false
# FOSS/EE jobs
- job: rspec:artifact-collector unit
optional: true
@@ -496,23 +496,14 @@ rspec:coverage:
# Memory jobs
- job: memory-on-boot
optional: true
+ artifacts: false
variables:
- FILES_TO_DOWNLOAD: >
- config/bundler_setup.rb
- Gemfile
- Gemfile.checksum
- Gemfile.lock
- scripts/merge-simplecov
- spec/simplecov_env_core.rb
- spec/simplecov_env.rb
BUNDLE_WITHOUT: "" # This is to override the variable defined in .gitlab-ci.yml
BUNDLE_ONLY: "coverage"
before_script:
- - !reference [".fast-no-clone-job", before_script]
- - run_timed_command "download_local_gems"
+ - source scripts/utils.sh
- bundle_install_script
script:
- - chmod u+x scripts/merge-simplecov # Not the right permissions when downloading the script via the API.
- run_timed_command "bundle exec scripts/merge-simplecov"
coverage: '/LOC \((\d+\.\d+%)\) covered.$/'
artifacts:
diff --git a/app/assets/javascripts/super_sidebar/components/frequent_items_list.vue b/app/assets/javascripts/super_sidebar/components/frequent_items_list.vue
index 342e1284e86..eb3402f0666 100644
--- a/app/assets/javascripts/super_sidebar/components/frequent_items_list.vue
+++ b/app/assets/javascripts/super_sidebar/components/frequent_items_list.vue
@@ -1,9 +1,11 @@
<script>
import { GlButton, GlTooltipDirective } from '@gitlab/ui';
-import * as Sentry from '@sentry/browser';
-import AccessorUtilities from '~/lib/utils/accessor';
import { __ } from '~/locale';
-import { getTopFrequentItems, formatContextSwitcherItems } from '../utils';
+import {
+ getItemsFromLocalStorage,
+ removeItemFromLocalStorage,
+ formatContextSwitcherItems,
+} from '../utils';
import ItemsList from './items_list.vue';
export default {
@@ -43,35 +45,21 @@ export default {
},
},
created() {
- this.getItemsFromLocalStorage();
+ this.cachedFrequentItems = formatContextSwitcherItems(
+ getItemsFromLocalStorage({
+ storageKey: this.storageKey,
+ maxItems: this.maxItems,
+ }),
+ );
},
methods: {
- getItemsFromLocalStorage() {
- if (!AccessorUtilities.canUseLocalStorage()) {
- return;
- }
- try {
- const parsedCachedFrequentItems = JSON.parse(localStorage.getItem(this.storageKey));
- const topFrequentItems = getTopFrequentItems(parsedCachedFrequentItems, this.maxItems);
- this.cachedFrequentItems = formatContextSwitcherItems(topFrequentItems);
- } catch (e) {
- Sentry.captureException(e);
- }
- },
handleItemRemove(item) {
- try {
- // Remove item from local storage
- const parsedCachedFrequentItems = JSON.parse(localStorage.getItem(this.storageKey));
- localStorage.setItem(
- this.storageKey,
- JSON.stringify(parsedCachedFrequentItems.filter((i) => i.id !== item.id)),
- );
+ removeItemFromLocalStorage({
+ storageKey: this.storageKey,
+ item,
+ });
- // Update the list
- this.cachedFrequentItems = this.cachedFrequentItems.filter((i) => i.id !== item.id);
- } catch (e) {
- Sentry.captureException(e);
- }
+ this.cachedFrequentItems = this.cachedFrequentItems.filter((i) => i.id !== item.id);
},
},
i18n: {
diff --git a/app/assets/javascripts/super_sidebar/utils.js b/app/assets/javascripts/super_sidebar/utils.js
index 3b17a35c5bc..5b46425d223 100644
--- a/app/assets/javascripts/super_sidebar/utils.js
+++ b/app/assets/javascripts/super_sidebar/utils.js
@@ -1,3 +1,4 @@
+import * as Sentry from '@sentry/browser';
import AccessorUtilities from '~/lib/utils/accessor';
import { FREQUENT_ITEMS, FIFTEEN_MINUTES_IN_MS } from '~/frequent_items/constants';
import { truncateNamespace } from '~/lib/utils/text_utility';
@@ -84,4 +85,31 @@ export const formatContextSwitcherItems = (items) =>
link,
}));
+export const getItemsFromLocalStorage = ({ storageKey, maxItems }) => {
+ if (!AccessorUtilities.canUseLocalStorage()) {
+ return [];
+ }
+
+ try {
+ const parsedCachedFrequentItems = JSON.parse(localStorage.getItem(storageKey));
+ return getTopFrequentItems(parsedCachedFrequentItems, maxItems);
+ } catch (e) {
+ Sentry.captureException(e);
+ return [];
+ }
+};
+
+export const removeItemFromLocalStorage = ({ storageKey, item }) => {
+ try {
+ const parsedCachedFrequentItems = JSON.parse(localStorage.getItem(storageKey));
+ const filteredItems = parsedCachedFrequentItems.filter((i) => i.id !== item.id);
+ localStorage.setItem(storageKey, JSON.stringify(filteredItems));
+
+ return filteredItems;
+ } catch (e) {
+ Sentry.captureException(e);
+ return [];
+ }
+};
+
export const ariaCurrent = (isActive) => (isActive ? 'page' : null);
diff --git a/app/finders/autocomplete/users_finder.rb b/app/finders/autocomplete/users_finder.rb
index 8cb8a856be3..e7a24cde2bd 100644
--- a/app/finders/autocomplete/users_finder.rb
+++ b/app/finders/autocomplete/users_finder.rb
@@ -89,7 +89,7 @@ module Autocomplete
if project
project.authorized_users.union_with_user(author_id)
elsif group
- group.users_with_parents
+ ::Autocomplete::GroupUsersFinder.new(group: group).execute # rubocop: disable CodeReuse/Finder
elsif current_user
User.all
else
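
The hunk above swaps the ad-hoc `group.users_with_parents` lookup for a dedicated finder. A minimal console sketch of the call this diff introduces (the class name and `group:` keyword come from the diff; the chained calls afterwards are assumptions for illustration):

```ruby
# Resolve autocomplete candidates for a group via the dedicated finder,
# as the hunk above now does. Assumes `group` is a Group record.
users = ::Autocomplete::GroupUsersFinder.new(group: group).execute

# Assumed to return an ActiveRecord relation, so it composes with further scopes.
users.limit(20)
```
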
diff --git a/app/graphql/types/ci/runner_manager_type.rb b/app/graphql/types/ci/runner_manager_type.rb
index b36c8f42862..9311836cf27 100644
--- a/app/graphql/types/ci/runner_manager_type.rb
+++ b/app/graphql/types/ci/runner_manager_type.rb
@@ -26,6 +26,11 @@ module Types
description: 'ID of the runner manager.'
field :ip_address, GraphQL::Types::String, null: true,
description: 'IP address of the runner manager.'
+ field :job_execution_status,
+ Types::Ci::RunnerJobExecutionStatusEnum,
+ null: true,
+ description: 'Job execution status of the runner manager.',
+ alpha: { milestone: '16.3' }
field :platform_name, GraphQL::Types::String, null: true,
description: 'Platform provided by the runner manager.',
method: :platform
@@ -44,6 +49,16 @@ module Types
def executor_name
::Ci::Runner::EXECUTOR_TYPE_TO_NAMES[runner_manager.executor_type&.to_sym]
end
+
+ def job_execution_status
+ BatchLoader::GraphQL.for(runner_manager.id).batch(key: :running_builds_exist) do |runner_manager_ids, loader|
+ statuses = ::Ci::RunnerManager.id_in(runner_manager_ids).with_running_builds.index_by(&:id)
+
+ runner_manager_ids.each do |runner_manager_id|
+ loader.call(runner_manager_id, statuses[runner_manager_id] ? :running : :idle)
+ end
+ end
+ end
end
end
end
diff --git a/app/models/ci/runner_manager.rb b/app/models/ci/runner_manager.rb
index eebba968f54..4470c14612c 100644
--- a/app/models/ci/runner_manager.rb
+++ b/app/models/ci/runner_manager.rb
@@ -49,6 +49,17 @@ module Ci
where(runner_id: runner_id)
end
+ scope :with_running_builds, -> do
+ where('EXISTS(?)',
+ Ci::Build.select(1)
+ .joins(:runner_manager_build)
+ .running
+ .where("#{::Ci::Build.quoted_table_name}.runner_id = #{quoted_table_name}.runner_id")
+ .where("#{::Ci::RunnerManagerBuild.quoted_table_name}.runner_machine_id = #{quoted_table_name}.id")
+ .limit(1)
+ )
+ end
+
scope :order_id_desc, -> { order(id: :desc) }
def self.online_contact_time_deadline
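
The new `with_running_builds` scope is an `EXISTS`-based semi-join, so it filters runner managers without duplicating rows and composes with other scopes. A small sketch of how it pairs with the GraphQL resolver added above (the scope and `id_in` appear in this commit; the surrounding snippet is illustrative):

```ruby
# Partition runner managers into :running / :idle in a single query,
# mirroring the BatchLoader block in runner_manager_type.rb above.
ids = [1, 2, 3]
running = ::Ci::RunnerManager.id_in(ids).with_running_builds.index_by(&:id)
statuses = ids.to_h { |id| [id, running[id] ? :running : :idle] }
```
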
diff --git a/app/models/concerns/from_union.rb b/app/models/concerns/from_union.rb
index be6744f1b2a..e816608265b 100644
--- a/app/models/concerns/from_union.rb
+++ b/app/models/concerns/from_union.rb
@@ -32,6 +32,9 @@ module FromUnion
# remove_duplicates - A boolean indicating if duplicate entries should be
# removed. Defaults to true.
#
+ # remove_order - A boolean indicating if the order from the relations should be
+ # removed. Defaults to true.
+ #
# alias_as - The alias to use for the sub query. Defaults to the name of the
# table of the current model.
# rubocop: disable Gitlab/Union
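
The `remove_order` option documented above is consumed later in this commit by `LooseForeignKeys::DeletedRecord.load_batch_for_table`, which needs each sub-query's `ORDER BY` preserved inside the `UNION`. A hedged usage sketch (option names from this diff; the model and relations are placeholders for any model that includes `FromUnion`):

```ruby
# UNION two ordered, limited relations while keeping duplicates and the
# per-relation ORDER BY, as load_batch_for_table does further down.
recent = Issue.where(confidential: false).order(id: :desc).limit(10)
mine   = Issue.where(author_id: author_id).order(id: :desc).limit(10)
Issue.from_union([recent, mine], remove_duplicates: false, remove_order: false)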
diff --git a/app/models/issue.rb b/app/models/issue.rb
index 5df1ce89350..d4ae77f4e86 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -784,7 +784,7 @@ class Issue < ApplicationRecord
# TODO: https://gitlab.com/gitlab-org/gitlab/-/work_items/393126
return unless project
- Issues::SearchData.upsert({ project_id: project_id, issue_id: id, search_vector: search_vector }, unique_by: %i(project_id issue_id))
+ Issues::SearchData.upsert({ namespace_id: namespace_id, project_id: project_id, issue_id: id, search_vector: search_vector }, unique_by: %i(project_id issue_id))
end
def ensure_metrics!
diff --git a/app/models/loose_foreign_keys/deleted_record.rb b/app/models/loose_foreign_keys/deleted_record.rb
index 7f64606e97b..1d26c3c11e4 100644
--- a/app/models/loose_foreign_keys/deleted_record.rb
+++ b/app/models/loose_foreign_keys/deleted_record.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class LooseForeignKeys::DeletedRecord < Gitlab::Database::SharedModel
+ include FromUnion
+
PARTITION_DURATION = 1.day
include PartitionedTable
@@ -34,13 +36,34 @@ class LooseForeignKeys::DeletedRecord < Gitlab::Database::SharedModel
enum status: { pending: 1, processed: 2 }, _prefix: :status
def self.load_batch_for_table(table, batch_size)
- # selecting partition as partition_number to workaround the sliding partitioning column ignore
- select(arel_table[Arel.star], arel_table[:partition].as('partition_number'))
- .for_table(table)
- .status_pending
- .consume_order
- .limit(batch_size)
- .to_a
+ if Feature.enabled?("loose_foreign_keys_batch_load_using_union")
+ partition_names = Gitlab::Database::PostgresPartitionedTable.each_partition(table_name).map(&:name)
+
+ unions = partition_names.map do |partition_name|
+ partition_number = partition_name[/\d+/].to_i
+
+ select(arel_table[Arel.star], arel_table[:partition].as('partition_number'))
+ .from("#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{partition_name} AS #{table_name}")
+ .for_table(table)
+ .where(partition: partition_number)
+ .status_pending
+ .consume_order
+ .limit(batch_size)
+ end
+
+ select(arel_table[Arel.star])
+ .from_union(unions, remove_duplicates: false, remove_order: false)
+ .limit(batch_size)
+ .to_a
+ else
+ # selecting partition as partition_number to workaround the sliding partitioning column ignore
+ select(arel_table[Arel.star], arel_table[:partition].as('partition_number'))
+ .for_table(table)
+ .status_pending
+ .consume_order
+ .limit(batch_size)
+ .to_a
+ end
end
def self.mark_records_processed(records)
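
Together with the feature flag added below, this gives two selectable code paths: the legacy single scan over the partitioned table, and a new path that queries each partition directly and combines the per-partition batches with a duplicate- and order-preserving `UNION`. A hedged console sketch for comparing them (method and flag names from this commit; the table argument and the `Feature.enable`/`Feature.disable` calls are assumptions for a development environment):

```ruby
# Legacy path: one query across the whole partitioned table.
Feature.disable(:loose_foreign_keys_batch_load_using_union)
legacy = LooseForeignKeys::DeletedRecord.load_batch_for_table('public.projects', 1000)

# New path: one query per partition, combined via from_union with
# remove_duplicates: false, remove_order: false (see the hunk above).
Feature.enable(:loose_foreign_keys_batch_load_using_union)
union = LooseForeignKeys::DeletedRecord.load_batch_for_table('public.projects', 1000)
```
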
diff --git a/app/services/prometheus/proxy_service.rb b/app/services/prometheus/proxy_service.rb
deleted file mode 100644
index 33635796771..00000000000
--- a/app/services/prometheus/proxy_service.rb
+++ /dev/null
@@ -1,145 +0,0 @@
-# frozen_string_literal: true
-
-module Prometheus
- class ProxyService < BaseService
- include ReactiveCaching
- include Gitlab::Utils::StrongMemoize
-
- self.reactive_cache_key = ->(service) { [] }
- self.reactive_cache_lease_timeout = 30.seconds
-
- # reactive_cache_refresh_interval should be set to a value higher than
- # reactive_cache_lifetime. If the refresh_interval is less than lifetime
- # then the ReactiveCachingWorker will re-query prometheus for this
- # PromQL query even though it's (probably) already been picked up by
- # the frontend
- # refresh_interval should be set less than lifetime only if this data
- # is expected to change *and* be fetched again by the frontend
- self.reactive_cache_refresh_interval = 90.seconds
- self.reactive_cache_lifetime = 1.minute
- self.reactive_cache_work_type = :external_dependency
- self.reactive_cache_worker_finder = ->(_id, *args) { from_cache(*args) }
-
- attr_accessor :proxyable, :method, :path, :params
-
- PROMETHEUS_QUERY_API = 'query'
- PROMETHEUS_QUERY_RANGE_API = 'query_range'
- PROMETHEUS_SERIES_API = 'series'
-
- PROXY_SUPPORT = {
- PROMETHEUS_QUERY_API => {
- method: ['GET'],
- params: %w(query time timeout)
- },
- PROMETHEUS_QUERY_RANGE_API => {
- method: ['GET'],
- params: %w(query start end step timeout)
- },
- PROMETHEUS_SERIES_API => {
- method: %w(GET),
- params: %w(match start end)
- }
- }.freeze
-
- def self.from_cache(proxyable_class_name, proxyable_id, method, path, params)
- proxyable_class = begin
- proxyable_class_name.constantize
- rescue NameError
- nil
- end
- return unless proxyable_class
-
- proxyable = proxyable_class.find(proxyable_id)
-
- new(proxyable, method, path, params)
- end
-
- # proxyable can be any model which responds to .prometheus_adapter
- # like Environment.
- def initialize(proxyable, method, path, params)
- @proxyable = proxyable
- @path = path
-
- # Convert ActionController::Parameters to hash because reactive_cache_worker
- # does not play nice with ActionController::Parameters.
- @params = filter_params(params, path).to_hash
-
- @method = method
- end
-
- def id
- nil
- end
-
- def execute
- return cannot_proxy_response unless can_proxy?
- return no_prometheus_response unless can_query?
-
- with_reactive_cache(*cache_key) do |result|
- result
- end
- end
-
- def calculate_reactive_cache(proxyable_class_name, proxyable_id, method, path, params)
- return no_prometheus_response unless can_query?
-
- response = prometheus_client_wrapper.proxy(path, params)
-
- success(http_status: response.code, body: response.body)
- rescue Gitlab::PrometheusClient::Error => err
- service_unavailable_response(err)
- end
-
- def cache_key
- [@proxyable.class.name, @proxyable.id, @method, @path, @params]
- end
-
- private
-
- def service_unavailable_response(exception)
- error(exception.message, :service_unavailable)
- end
-
- def no_prometheus_response
- error('No prometheus server found', :service_unavailable)
- end
-
- def cannot_proxy_response
- error('Proxy support for this API is not available currently')
- end
-
- def prometheus_adapter
- strong_memoize(:prometheus_adapter) do
- @proxyable.prometheus_adapter
- end
- end
-
- def prometheus_client_wrapper
- prometheus_adapter&.prometheus_client
- end
-
- def can_query?
- prometheus_adapter&.can_query?
- end
-
- def filter_params(params, path)
- params = substitute_params(params)
-
- params.slice(*PROXY_SUPPORT.dig(path, :params))
- end
-
- def can_proxy?
- PROXY_SUPPORT.dig(@path, :method)&.include?(@method)
- end
-
- def substitute_params(params)
- start_time = params[:start_time]
- end_time = params[:end_time]
-
- params['start'] = start_time if start_time
- params['end'] = end_time if end_time
-
- params
- end
- end
-end
diff --git a/app/services/prometheus/proxy_variable_substitution_service.rb b/app/services/prometheus/proxy_variable_substitution_service.rb
deleted file mode 100644
index 846dfeb33ce..00000000000
--- a/app/services/prometheus/proxy_variable_substitution_service.rb
+++ /dev/null
@@ -1,155 +0,0 @@
-# frozen_string_literal: true
-
-module Prometheus
- class ProxyVariableSubstitutionService < BaseService
- include Stepable
-
- VARIABLE_INTERPOLATION_REGEX = /
- {{ # Variable needs to be wrapped in these chars.
- \s* # Allow whitespace before and after the variable name.
- (?<variable> # Named capture.
- \w+ # Match one or more word characters.
- )
- \s*
- }}
- /x.freeze
-
- steps :validate_variables,
- :add_params_to_result,
- :substitute_params,
- :substitute_variables
-
- # @param environment [Environment]
- # @param params [Hash<Symbol,Any>]
- # @param params - query [String] The Prometheus query string.
- # @param params - start [String] (optional) A time string in the rfc3339 format.
- # @param params - start_time [String] (optional) A time string in the rfc3339 format.
- # @param params - end [String] (optional) A time string in the rfc3339 format.
- # @param params - end_time [String] (optional) A time string in the rfc3339 format.
- # @param params - variables [ActionController::Parameters] (optional) Variables with their values.
- # The keys in the Hash should be the name of the variable. The value should be the value of the
- # variable. Ex: `ActionController::Parameters.new(variable1: 'value 1', variable2: 'value 2').permit!`
- # @return [Prometheus::ProxyVariableSubstitutionService]
- #
- # Example:
- # Prometheus::ProxyVariableSubstitutionService.new(environment, {
- # params: {
- # start_time: '2020-07-03T06:08:36Z',
- # end_time: '2020-07-03T14:08:52Z',
- # query: 'up{instance="{{instance}}"}',
- # variables: { instance: 'srv1' }
- # }
- # })
- def initialize(environment, params = {})
- @environment = environment
- @params = params.deep_dup
- end
-
- # @return - params [Hash<Symbol,Any>] Returns a Hash containing a params key which is
- # similar to the `params` that is passed to the initialize method with 2 differences:
- # 1. Variables in the query string are substituted with their values.
- # If a variable present in the query string has no known value (values
- # are obtained from the `variables` Hash in `params` or from
- # `Gitlab::Prometheus::QueryVariables.call`), it will not be substituted.
- # 2. `start` and `end` keys are added, with their values copied from `start_time`
- # and `end_time`.
- #
- # Example output:
- #
- # {
- # params: {
- # start_time: '2020-07-03T06:08:36Z',
- # start: '2020-07-03T06:08:36Z',
- # end_time: '2020-07-03T14:08:52Z',
- # end: '2020-07-03T14:08:52Z',
- # query: 'up{instance="srv1"}',
- # variables: { instance: 'srv1' }
- # }
- # }
- def execute
- execute_steps
- end
-
- private
-
- def validate_variables(_result)
- return success unless variables
-
- unless variables.is_a?(ActionController::Parameters)
- return error(_('Optional parameter "variables" must be a Hash. Ex: variables[key1]=value1'))
- end
-
- success
- end
-
- def add_params_to_result(result)
- result[:params] = params
-
- success(result)
- end
-
- def substitute_params(result)
- start_time = result[:params][:start_time]
- end_time = result[:params][:end_time]
-
- result[:params][:start] = start_time if start_time
- result[:params][:end] = end_time if end_time
-
- success(result)
- end
-
- def substitute_variables(result)
- return success(result) unless query(result)
-
- result[:params][:query] = gsub(query(result), full_context(result))
-
- success(result)
- end
-
- def gsub(string, context)
- # Search for variables of the form `{{variable}}` in the string and replace
- # them with their value.
- string.gsub(VARIABLE_INTERPOLATION_REGEX) do |match|
- # Replace with the value of the variable, or if there is no such variable,
- # replace the invalid variable with itself. So,
- # `up{instance="{{invalid_variable}}"}` will remain
- # `up{instance="{{invalid_variable}}"}` after substitution.
- context.fetch($~[:variable], match)
- end
- end
-
- def predefined_context(result)
- Gitlab::Prometheus::QueryVariables.call(
- @environment,
- start_time: start_timestamp(result),
- end_time: end_timestamp(result)
- ).stringify_keys
- end
-
- def full_context(result)
- @full_context ||= predefined_context(result).reverse_merge(variables_hash)
- end
-
- def variables
- params[:variables]
- end
-
- def variables_hash
- variables.to_h
- end
-
- def start_timestamp(result)
- Time.rfc3339(result[:params][:start])
- rescue ArgumentError
- end
-
- def end_timestamp(result)
- Time.rfc3339(result[:params][:end])
- rescue ArgumentError
- end
-
- def query(result)
- result[:params][:query]
- end
- end
-end
diff --git a/config/feature_flags/development/loose_foreign_keys_batch_load_using_union.yml b/config/feature_flags/development/loose_foreign_keys_batch_load_using_union.yml
new file mode 100644
index 00000000000..0b0ed16c1cd
--- /dev/null
+++ b/config/feature_flags/development/loose_foreign_keys_batch_load_using_union.yml
@@ -0,0 +1,8 @@
+---
+name: loose_foreign_keys_batch_load_using_union
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128759
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/421422
+milestone: '16.3'
+type: development
+group: group::tenant scale
+default_enabled: false
diff --git a/config/webpack.config.js b/config/webpack.config.js
index 3bcdc8245f5..a4d2d188227 100644
--- a/config/webpack.config.js
+++ b/config/webpack.config.js
@@ -193,8 +193,6 @@ const alias = {
),
'~': path.join(ROOT_PATH, 'app/assets/javascripts'),
emojis: path.join(ROOT_PATH, 'fixtures/emojis'),
- empty_states: path.join(ROOT_PATH, 'app/views/shared/empty_states'),
- icons: path.join(ROOT_PATH, 'app/views/shared/icons'),
images: path.join(ROOT_PATH, 'app/assets/images'),
vendor: path.join(ROOT_PATH, 'vendor/assets/javascripts'),
jquery$: 'jquery/dist/jquery.slim.js',
@@ -231,8 +229,6 @@ if (IS_EE) {
Object.assign(alias, {
ee: path.join(ROOT_PATH, 'ee/app/assets/javascripts'),
ee_component: path.join(ROOT_PATH, 'ee/app/assets/javascripts'),
- ee_empty_states: path.join(ROOT_PATH, 'ee/app/views/shared/empty_states'),
- ee_icons: path.join(ROOT_PATH, 'ee/app/views/shared/icons'),
ee_images: path.join(ROOT_PATH, 'ee/app/assets/images'),
ee_else_ce: path.join(ROOT_PATH, 'ee/app/assets/javascripts'),
jh_else_ee: path.join(ROOT_PATH, 'ee/app/assets/javascripts'),
@@ -249,8 +245,6 @@ if (IS_JH) {
Object.assign(alias, {
jh: path.join(ROOT_PATH, 'jh/app/assets/javascripts'),
jh_component: path.join(ROOT_PATH, 'jh/app/assets/javascripts'),
- jh_empty_states: path.join(ROOT_PATH, 'jh/app/views/shared/empty_states'),
- jh_icons: path.join(ROOT_PATH, 'jh/app/views/shared/icons'),
jh_images: path.join(ROOT_PATH, 'jh/app/assets/images'),
// jh path alias https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74305#note_732793956
jh_else_ce: path.join(ROOT_PATH, 'jh/app/assets/javascripts'),
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index ae0dd7ae2c1..b191c9d43cf 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -13887,6 +13887,7 @@ Returns [`CiRunnerStatus!`](#cirunnerstatus).
| <a id="cirunnermanagerexecutorname"></a>`executorName` | [`String`](#string) | Executor last advertised by the runner. |
| <a id="cirunnermanagerid"></a>`id` | [`CiRunnerManagerID!`](#cirunnermanagerid) | ID of the runner manager. |
| <a id="cirunnermanageripaddress"></a>`ipAddress` | [`String`](#string) | IP address of the runner manager. |
+| <a id="cirunnermanagerjobexecutionstatus"></a>`jobExecutionStatus` **{warning-solid}** | [`CiRunnerJobExecutionStatus`](#cirunnerjobexecutionstatus) | **Introduced** in 16.3. This feature is an Experiment. It can be changed or removed at any time. Job execution status of the runner manager. |
| <a id="cirunnermanagerplatformname"></a>`platformName` | [`String`](#string) | Platform provided by the runner manager. |
| <a id="cirunnermanagerrevision"></a>`revision` | [`String`](#string) | Revision of the runner. |
| <a id="cirunnermanagerrunner"></a>`runner` | [`CiRunner`](#cirunner) | Runner configuration for the runner manager. |
diff --git a/doc/development/testing_guide/end_to_end/index.md b/doc/development/testing_guide/end_to_end/index.md
index adf679c44a2..4e7ef6f29a2 100644
--- a/doc/development/testing_guide/end_to_end/index.md
+++ b/doc/development/testing_guide/end_to_end/index.md
@@ -18,6 +18,8 @@ together.
We use [Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab) to build GitLab packages and then we test these packages
using the [GitLab QA orchestrator](https://gitlab.com/gitlab-org/gitlab-qa) tool to run the end-to-end tests located in the `qa` directory.
+Additionally, we use the [GitLab Development Kit](https://gitlab.com/gitlab-org/gitlab-development-kit) (GDK) as a test environment that can be deployed quickly for faster test feedback.
+
### Testing nightly builds
We run scheduled pipelines each night to test nightly builds created by Omnibus.
@@ -43,10 +45,9 @@ Docker image built from your merge request's changes.**
Manual action that starts end-to-end tests is also available
in [`gitlab-org/omnibus-gitlab` merge requests](https://docs.gitlab.com/omnibus/build/team_member_docs.html#i-have-an-mr-in-the-omnibus-gitlab-project-and-want-a-package-or-docker-image-to-test-it).
-#### How does it work?
+##### How does it work?
-Currently, we are using _multi-project pipeline_-like approach to run end-to-end
-pipelines.
+Currently, we are using _multi-project pipeline_-like approach to run end-to-end pipelines against Omnibus GitLab.
```mermaid
graph TB
@@ -98,7 +99,22 @@ work-around was suggested in <https://gitlab.com/gitlab-org/omnibus-gitlab/-/iss
A feature proposal to segregate access control regarding running pipelines from ability to push/merge was also created at <https://gitlab.com/gitlab-org/gitlab/-/issues/24585>.
For more technical details on CI/CD setup and documentation on adding new test jobs to `e2e:package-and-test` pipeline, see
-[`e2e:package_and_test` setup documentation](package_and_test_pipeline.md).
+[`e2e:package_and_test` setup documentation](test_pipelines.md).
+
+#### Using the `test-on-gdk` job
+
+The `e2e:test-on-gdk` job is run automatically in most merge requests, which triggers a [child-pipeline](../../../ci/pipelines/downstream_pipelines.md#parent-child-pipelines)
+that builds and installs a GDK instance from your merge request's changes, and then executes end-to-end tests against that GDK instance.
+
+##### How does it work?
+
+In the [`gitlab-org/gitlab` pipeline](https://gitlab.com/gitlab-org/gitlab):
+
+1. The [`build-gdk-image` job](https://gitlab.com/gitlab-org/gitlab/-/blob/07504c34b28ac656537cd60810992aa15e9e91b8/.gitlab/ci/build-images.gitlab-ci.yml#L32)
+ uses the code from the merge request to build a Docker image for a GDK instance.
+1. The `e2e:test-on-gdk` trigger job creates a child pipeline that executes the end-to-end tests against GDK instances launched from the image built in the previous job.
+
+For more details, see the [documentation for the `e2e:test-on-gdk` pipeline](test_pipelines.md#e2etest-on-gdk).
#### With merged results pipelines
diff --git a/doc/development/testing_guide/end_to_end/package_and_test_pipeline.md b/doc/development/testing_guide/end_to_end/package_and_test_pipeline.md
index b0257e7b02c..240db2cbfe5 100644
--- a/doc/development/testing_guide/end_to_end/package_and_test_pipeline.md
+++ b/doc/development/testing_guide/end_to_end/package_and_test_pipeline.md
@@ -1,134 +1,11 @@
---
-stage: none
-group: unassigned
-info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
+redirect_to: 'test_pipelines.md'
+remove_date: '2023-11-08'
---
-# e2e:package-and-test
+This document was moved to [another location](test_pipelines.md).
-The `e2e:package-and-test` child pipeline is the main executor of E2E testing for the GitLab platform. The pipeline definition has several dynamic
-components to reduce the number of tests being executed in merge request pipelines.
-
-## Setup
-
-Pipeline setup consists of:
-
-- The `e2e-test-pipeline-generate` job in the `prepare` stage of the main GitLab pipeline.
-- The `e2e:package-and-test` job in the `qa` stage, which triggers the child pipeline that is responsible for building the `omnibus` package and
- running E2E tests.
-
-### e2e-test-pipeline-generate
-
-This job consists of two components that implement selective test execution:
-
-- The [`detect_changes`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/qa/tasks/ci.rake) Rake task determines which e2e specs should be executed
- in a particular merge request pipeline. This task analyzes changes in a particular merge request and determines which specs must be executed.
- Based on that, a `dry-run` of every [scenario](https://gitlab.com/gitlab-org/gitlab/-/tree/master/qa/qa/scenario/test) executes and determines if a
- scenario contains any executable tests. Selective test execution uses [these criteria](index.md#selective-test-execution) to determine which specific
- tests to execute.
-- [`generate-e2e-pipeline`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/generate-e2e-pipeline) is executed, which generates a child
- pipeline YAML definition file with appropriate environment variables.
-
-### e2e:package-and-test
-
-E2E test execution pipeline consists of several stages which all support execution of E2E tests.
-
-#### .pre
-
-This stage is responsible for the following tasks:
-
-- Fetching `knapsack` reports that support [parallel test execution](index.md#run-tests-in-parallel).
-- Triggering downstream pipeline which builds the [`omnibus-gitlab`](https://gitlab.com/gitlab-org/omnibus-gitlab) Docker image.
-
-#### test
-
-This stage runs e2e tests against different types of GitLab configurations. The number of jobs executed is determined dynamically by
-[`e2e-test-pipeline-generate`](package_and_test_pipeline.md#e2e-test-pipeline-generate) job.
-
-#### report
-
-This stage is responsible for [allure test report](index.md#allure-report) generation.
-
-## Adding new jobs
-
-Selective test execution depends on a set of rules present in every job definition. A typical job contains the following attributes:
-
-```yaml
-variables:
- QA_SCENARIO: Test::Integration::MyNewJob
-rules:
- - !reference [.rules:test:qa, rules]
- - if: $QA_SUITES =~ /Test::Integration::MyNewJob/
- - !reference [.rules:test:manual, rules]
-```
-
-In this example:
-
-- `QA_SCENARIO: Test::Integration::MyNewJob`: name of the scenario class that is passed to the
- [`gitlab-qa`](https://gitlab.com/gitlab-org/gitlab-qa/-/blob/master/docs/what_tests_can_be_run.md) executor.
-- `!reference [.rules:test:qa, rules]`: main rule definition that is matched for pipelines that should execute all tests. For example, when changes to
- `qa` framework are present.
-- `if: $QA_SUITES =~ /Test::Integration::MyNewJob/`: main rule responsible for selective test execution. `QA_SUITE` is the name of the scenario
- abstraction located in [`qa framework`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/qa/qa/scenario/test).
-
- `QA_SUITE` is not the same as `QA_SCENARIO`, which is passed to the `gitlab-qa` executor. For consistency, it usually has the same name. `QA_SUITE`
- abstraction class usually contains information on what tags to run and optionally some additional setup steps.
-- `!reference [.rules:test:manual, rules]`: final rule that is always matched and sets the job to `manual` so it can still be executed on demand,
- even if not set to execute by selective test execution.
-
-Considering example above, perform the following steps to create a new job:
-
-1. Create new scenario type `my_new_job.rb` in the [`integration`](https://gitlab.com/gitlab-org/gitlab-qa/-/tree/master/lib/gitlab/qa/scenario/test/integration) directory
- of the [`gitlab-qa`](https://gitlab.com/gitlab-org/gitlab-qa) project and release new version so it's generally available.
-1. Create new scenario `my_new_job.rb` in [`integration`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/qa/qa/scenario/test/integration) directory of the
- [`qa`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/qa) framework. In the most simple case, this scenario would define RSpec tags that should be executed:
-
- ```ruby
- module QA
- module Scenario
- module Test
- module Integration
- class MyNewJob < Test::Instance::All
- tags :some_special_tag
- end
- end
- end
- end
- end
- ```
-
-1. Add new job definition in the [`main.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/package-and-test/main.gitlab-ci.yml) pipeline definition:
-
- ```yaml
- ee:my-new-job:
- extends: .qa
- variables:
- QA_SCENARIO: Test::Integration::MyNewJob
- rules:
- - !reference [.rules:test:qa, rules]
- - if: $QA_SUITES =~ /Test::Integration::MyNewJob/
- - !reference [.rules:test:manual, rules]
- ```
-
-### Parallel jobs
-
-For selective execution to work correctly with job types that require running multiple parallel jobs,
-a job definition typically must be split into parallel and selective variants. Splitting is necessary so that when selective execution
-executes only a single spec, multiple unnecessary jobs are not spawned. For example:
-
-```yaml
-ee:my-new-job-selective:
- extends: .qa
- variables:
- QA_SCENARIO: Test::Integration::MyNewJob
- rules:
- - !reference [.rules:test:qa-selective, rules]
- - if: $QA_SUITES =~ /Test::Integration::MyNewJob/
-ee:my-new-job:
- extends:
- - .parallel
- - ee:my-new-job-selective
- rules:
- - !reference [.rules:test:qa-parallel, rules]
- - if: $QA_SUITES =~ /Test::Integration::MyNewJob/
-```
+<!-- This redirect file can be deleted after <2023-11-08>. -->
+<!-- Redirects that point to other docs in the same project expire in three months. -->
+<!-- Redirects that point to docs in a different project or site (link is not relative and starts with `https:`) expire in one year. -->
+<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->
diff --git a/doc/development/testing_guide/end_to_end/test_pipelines.md b/doc/development/testing_guide/end_to_end/test_pipelines.md
new file mode 100644
index 00000000000..b47b75e398a
--- /dev/null
+++ b/doc/development/testing_guide/end_to_end/test_pipelines.md
@@ -0,0 +1,190 @@
+---
+stage: none
+group: unassigned
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
+---
+
+# End-to-end test pipelines
+
+## e2e:package-and-test
+
+The `e2e:package-and-test` child pipeline is the main executor of E2E testing for the GitLab platform. The pipeline definition has several dynamic
+components to reduce the number of tests being executed in merge request pipelines.
+
+### Setup
+
+Pipeline setup consists of:
+
+- The `e2e-test-pipeline-generate` job in the `prepare` stage of the main GitLab pipeline.
+- The `e2e:package-and-test` job in the `qa` stage, which triggers the child pipeline that is responsible for building the `omnibus` package and
+ running E2E tests.
+
+#### e2e-test-pipeline-generate
+
+This job consists of two components that implement selective test execution:
+
+- The [`detect_changes`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/qa/tasks/ci.rake) Rake task determines which e2e specs should be executed
+ in a particular merge request pipeline. This task analyzes changes in a particular merge request and determines which specs must be executed.
+ Based on that, a `dry-run` of every [scenario](https://gitlab.com/gitlab-org/gitlab/-/tree/master/qa/qa/scenario/test) executes and determines if a
+ scenario contains any executable tests. Selective test execution uses [these criteria](index.md#selective-test-execution) to determine which specific
+ tests to execute.
+- [`generate-e2e-pipeline`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/generate-e2e-pipeline) is executed, which generates a child
+ pipeline YAML definition file with appropriate environment variables.
+
+#### e2e:package-and-test
+
+E2E test execution pipeline consists of several stages which all support execution of E2E tests.
+
+##### .pre
+
+This stage is responsible for the following tasks:
+
+- Fetching `knapsack` reports that support [parallel test execution](index.md#run-tests-in-parallel).
+- Triggering downstream pipeline which builds the [`omnibus-gitlab`](https://gitlab.com/gitlab-org/omnibus-gitlab) Docker image.
+
+##### test
+
+This stage runs e2e tests against different types of GitLab configurations. The number of jobs executed is determined dynamically by
+[`e2e-test-pipeline-generate`](test_pipelines.md#e2e-test-pipeline-generate) job.
+
+##### report
+
+This stage is responsible for [allure test report](index.md#allure-report) generation.
+
+### Adding new jobs
+
+Selective test execution depends on a set of rules present in every job definition. A typical job contains the following attributes:
+
+```yaml
+variables:
+ QA_SCENARIO: Test::Integration::MyNewJob
+rules:
+ - !reference [.rules:test:qa, rules]
+ - if: $QA_SUITES =~ /Test::Integration::MyNewJob/
+ - !reference [.rules:test:manual, rules]
+```
+
+In this example:
+
+- `QA_SCENARIO: Test::Integration::MyNewJob`: name of the scenario class that is passed to the
+ [`gitlab-qa`](https://gitlab.com/gitlab-org/gitlab-qa/-/blob/master/docs/what_tests_can_be_run.md) executor.
+- `!reference [.rules:test:qa, rules]`: main rule definition that is matched for pipelines that should execute all tests. For example, when changes to
+ `qa` framework are present.
+- `if: $QA_SUITES =~ /Test::Integration::MyNewJob/`: main rule responsible for selective test execution. `QA_SUITE` is the name of the scenario
+ abstraction located in [`qa framework`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/qa/qa/scenario/test).
+
+ `QA_SUITE` is not the same as `QA_SCENARIO`, which is passed to the `gitlab-qa` executor. For consistency, it usually has the same name. `QA_SUITE`
+ abstraction class usually contains information on what tags to run and optionally some additional setup steps.
+- `!reference [.rules:test:manual, rules]`: final rule that is always matched and sets the job to `manual` so it can still be executed on demand,
+ even if not set to execute by selective test execution.
+
+Considering example above, perform the following steps to create a new job:
+
+1. Create new scenario type `my_new_job.rb` in the [`integration`](https://gitlab.com/gitlab-org/gitlab-qa/-/tree/master/lib/gitlab/qa/scenario/test/integration) directory
+ of the [`gitlab-qa`](https://gitlab.com/gitlab-org/gitlab-qa) project and release new version so it's generally available.
+1. Create new scenario `my_new_job.rb` in [`integration`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/qa/qa/scenario/test/integration) directory of the
+ [`qa`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/qa) framework. In the most simple case, this scenario would define RSpec tags that should be executed:
+
+ ```ruby
+ module QA
+ module Scenario
+ module Test
+ module Integration
+ class MyNewJob < Test::Instance::All
+ tags :some_special_tag
+ end
+ end
+ end
+ end
+ end
+ ```
+
+1. Add new job definition in the [`main.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/package-and-test/main.gitlab-ci.yml) pipeline definition:
+
+ ```yaml
+ ee:my-new-job:
+ extends: .qa
+ variables:
+ QA_SCENARIO: Test::Integration::MyNewJob
+ rules:
+ - !reference [.rules:test:qa, rules]
+ - if: $QA_SUITES =~ /Test::Integration::MyNewJob/
+ - !reference [.rules:test:manual, rules]
+ ```
+
+#### Parallel jobs
+
+For selective execution to work correctly with job types that require running multiple parallel jobs,
+a job definition typically must be split into parallel and selective variants. Splitting is necessary so that when selective execution
+executes only a single spec, multiple unnecessary jobs are not spawned. For example:
+
+```yaml
+ee:my-new-job-selective:
+ extends: .qa
+ variables:
+ QA_SCENARIO: Test::Integration::MyNewJob
+ rules:
+ - !reference [.rules:test:qa-selective, rules]
+ - if: $QA_SUITES =~ /Test::Integration::MyNewJob/
+ee:my-new-job:
+ extends:
+ - .parallel
+ - ee:my-new-job-selective
+ rules:
+ - !reference [.rules:test:qa-parallel, rules]
+ - if: $QA_SUITES =~ /Test::Integration::MyNewJob/
+```
+
+## `e2e:test-on-gdk`
+
+The `e2e:test-on-gdk` child pipeline supports development of the GitLab platform by providing feedback to engineers on
+end-to-end test execution faster than via `e2e:package-and-test` or [Review Apps](../review_apps.md).
+
+This is achieved by running tests against the [GitLab Development Kit](https://gitlab.com/gitlab-org/gitlab-development-kit) (GDK),
+which can be built and installed in less time than when testing against [Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab).
+The trade-off is that Omnibus GitLab can be used to deploy a production installation, whereas the GDK is a development
+environment. Tests that run against the GDK might not catch bugs that depend on part of the process of preparing GitLab
+to run in a production environment, including pre-compiling assets, assigning configuration defaults as part of an official
+installation package, deploying GitLab services to multiple servers, and more. On the other hand, engineers who use the
+GDK day-to-day can benefit from automated tests catching bugs that only appear on the GDK.
+
+### Setup
+
+The pipeline setup consists of several jobs in the main GitLab pipeline:
+
+- The [`e2e-test-pipeline-generate` job](https://gitlab.com/gitlab-org/gitlab/-/blob/9456299b995084bfceb8bc6d082229c0198a0f72/.gitlab/ci/setup.gitlab-ci.yml#L158)
+ in the `prepare` stage. This is the same job as in the [`e2e:package-and-test`](#e2epackage-and-test) pipeline.
+- The [`build-gdk-image` job](https://gitlab.com/gitlab-org/gitlab/-/blob/07504c34b28ac656537cd60810992aa15e9e91b8/.gitlab/ci/build-images.gitlab-ci.yml#L32)
+ in the `build-images` stage.
+- The `e2e:test-on-gdk` trigger job in the `qa` stage, which triggers the child pipeline that runs E2E tests.
+
+#### `build-gdk-image`
+
+[This job](https://gitlab.com/gitlab-org/gitlab/-/blob/07504c34b28ac656537cd60810992aa15e9e91b8/.gitlab/ci/build-images.gitlab-ci.yml#L32)
+uses the code from the merge request to build a Docker image that can be used in test jobs to launch a GDK instance in a container. The image is pushed to the Container Registry.
+
+The job also runs in pipelines on the default branch to build a base image that includes the GDK and GitLab components.
+This avoids building the entire image from scratch in merge requests. However, if the merge request includes changes to
+[certain GitLab components or code](https://gitlab.com/gitlab-org/gitlab/-/blob/24109c1a7ae1f29d4f6f1aeba3a13cbd8ea0e8e6/.gitlab/ci/rules.gitlab-ci.yml#L911)
+the job will rebuild the base image before building the image that will be used in the test jobs.
+
+#### `e2e:test-on-gdk` child pipeline
+
+Like the [`e2e:package-and-test`](#e2epackage-and-test) pipeline, the `e2e:test-on-gdk` pipeline consists of several stages
+that support execution of E2E tests.
+
+##### .pre
+
+This stage is responsible for fetching `knapsack` reports that support [parallel test execution](index.md#run-tests-in-parallel).
+
+##### test
+
+This stage runs e2e tests against different types of GitLab configurations. The number of jobs executed is determined dynamically by the
+[`e2e-test-pipeline-generate`](test_pipelines.md#e2e-test-pipeline-generate) job.
+
+Each job starts a container from the GDK Docker image created in the `build-gdk-image` job, and then executes the end-to-end
+tests against the GDK instance running in the container.
+
+##### report
+
+This stage is responsible for [allure test report](index.md#allure-report) generation.
diff --git a/doc/user/okrs.md b/doc/user/okrs.md
index 9d99d34c359..0cb27864377 100644
--- a/doc/user/okrs.md
+++ b/doc/user/okrs.md
@@ -358,7 +358,7 @@ To reorder them, drag them around.
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/8410) in GitLab 15.3.
-Confidential OKRs are [OKRs](index.md) visible only to members of a project with
+Confidential OKRs are OKRs visible only to members of a project with
[sufficient permissions](#who-can-see-confidential-okrs).
You can use confidential OKRs to keep security vulnerabilities private or prevent surprises from
leaking out.
@@ -405,11 +405,29 @@ the OKR even if they were actively participating before the change.
However, a user with the **Guest role** can create confidential OKRs, but can only view the ones
that they created themselves.
-Users with the Guest role or non-members can read the confidential issue if they are assigned to the issue.
-When a Guest user or non-member is unassigned from a confidential issue, they can no longer view it.
+Users with the Guest role or non-members can read the confidential OKR if they are assigned to the OKR.
+When a Guest user or non-member is unassigned from a confidential OKR, they can no longer view it.
Confidential OKRs are hidden in search results for users without the necessary permissions.
+### Confidential OKR indicators
+
+Confidential OKRs are visually different from regular OKRs in a few ways.
+Wherever OKRs are listed, you can see the confidential (**{eye-slash}**) icon
+next to the OKRs that are marked as confidential.
+
+If you don't have [enough permissions](#who-can-see-confidential-okrs),
+you cannot see confidential OKRs at all.
+
+Likewise, while inside the OKR, you can see the confidential (**{eye-slash}**) icon right next to
+the breadcrumbs.
+
+Every change from regular to confidential and vice versa, is indicated by a
+system note in the OKR's comments, for example:
+
+> - **{eye-slash}** Jo Garcia made the issue confidential 5 minutes ago
+> - **{eye}** Jo Garcia made the issue visible to everyone just now
+
## Two-column layout
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/415077) in GitLab 16.2 [with a flag](../administration/feature_flags.md) named `work_items_mvc_2`. Disabled by default.
diff --git a/doc/user/project/issues/confidential_issues.md b/doc/user/project/issues/confidential_issues.md
index f05b8412deb..823b41fde9e 100644
--- a/doc/user/project/issues/confidential_issues.md
+++ b/doc/user/project/issues/confidential_issues.md
@@ -15,6 +15,13 @@ keep security vulnerabilities private or prevent surprises from leaking out.
You can make an issue confidential when you create or edit an issue.
+Prerequisites:
+
+- You must have at least the Reporter role for the project.
+- If the issue you want to make confidential has any child [tasks](../../tasks.md),
+ you must first make all the child tasks confidential.
+ A confidential issue can have only confidential children.
+
### In a new issue
When you create a new issue, a checkbox right below the text area is available
@@ -62,10 +69,8 @@ Confidential issues are hidden in search results for users without the necessary
## Confidential issue indicators
Confidential issues are visually different from regular issues in a few ways.
-In the issues index page view, you can see the confidential (**{eye-slash}**) icon
-next to the issues that are marked as confidential:
-
-![Confidential issues index page](img/confidential_issues_index_page.png)
+In the issues list and boards, you can see the confidential (**{eye-slash}**) icon
+next to issues marked as confidential.
If you don't have [enough permissions](#who-can-see-confidential-issues),
you cannot see confidential issues at all.
@@ -74,21 +79,13 @@ Likewise, while inside the issue, you can see the confidential (**{eye-slash}**)
the issue number. There is also an indicator in the comment area that the
issue you are commenting on is confidential.
-![Confidential issue page](img/confidential_issues_issue_page.png)
-
There is also an indicator on the sidebar denoting confidentiality.
-| Confidential issue | Not confidential issue |
-| :-----------: | :----------: |
-| ![Sidebar confidential issue](img/sidebar_confidential_issue.png) | ![Sidebar not confidential issue](img/sidebar_not_confidential_issue.png) |
-
Every change from regular to confidential and vice versa, is indicated by a
-system note in the issue's comments:
-
-- **{eye-slash}** The issue is made confidential.
-- **{eye}** The issue is made public.
+system note in the issue's comments, for example:
-![Confidential issues system notes](img/confidential_issues_system_notes_v15_4.png)
+> - **{eye-slash}** Jo Garcia made the issue confidential 5 minutes ago
+> - **{eye}** Jo Garcia made the issue visible to everyone just now
## Merge requests for confidential issues
diff --git a/doc/user/project/issues/img/confidential_issues_index_page.png b/doc/user/project/issues/img/confidential_issues_index_page.png
deleted file mode 100644
index 16979bf9ac2..00000000000
--- a/doc/user/project/issues/img/confidential_issues_index_page.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/issues/img/confidential_issues_issue_page.png b/doc/user/project/issues/img/confidential_issues_issue_page.png
deleted file mode 100644
index b349149aa98..00000000000
--- a/doc/user/project/issues/img/confidential_issues_issue_page.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/issues/img/confidential_issues_system_notes_v15_4.png b/doc/user/project/issues/img/confidential_issues_system_notes_v15_4.png
deleted file mode 100644
index e448f609112..00000000000
--- a/doc/user/project/issues/img/confidential_issues_system_notes_v15_4.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/issues/img/sidebar_confidential_issue.png b/doc/user/project/issues/img/sidebar_confidential_issue.png
deleted file mode 100644
index 0ef61c7f1b0..00000000000
--- a/doc/user/project/issues/img/sidebar_confidential_issue.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/issues/img/sidebar_not_confidential_issue.png b/doc/user/project/issues/img/sidebar_not_confidential_issue.png
deleted file mode 100644
index c09f8204b37..00000000000
--- a/doc/user/project/issues/img/sidebar_not_confidential_issue.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/tasks.md b/doc/user/tasks.md
index ecd5ef0c42f..86d0a0bc905 100644
--- a/doc/user/tasks.md
+++ b/doc/user/tasks.md
@@ -360,6 +360,75 @@ To copy the task's email address:
1. Select **Plan > Issues**, then select your issue to view it.
1. In the top right corner, select the vertical ellipsis (**{ellipsis_v}**), then select **Copy task email address**.
+## Confidential tasks
+
+> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/8410) in GitLab 15.3.
+
+Confidential tasks are tasks visible only to members of a project with
+[sufficient permissions](#who-can-see-confidential-tasks).
+You can use confidential tasks to keep security vulnerabilities private or prevent surprises from
+leaking out.
+
+### Make a task confidential
+
+By default, tasks are public.
+You can make a task confidential when you create or edit it.
+
+Prerequisites:
+
+- You must have at least the Reporter role for the project.
+- If the task has a parent issue which is non-confidential, and you want to make the issue confidential,
+ you must first make all the child tasks confidential.
+ A [confidential issue](project/issues/confidential_issues.md) can have only confidential children.
+
+#### In a new task
+
+When you create a new task, a checkbox right below the text area is available to mark the
+task as confidential.
+
+Check that box and select **Create task**.
+
+#### In an existing task
+
+To change the confidentiality of an existing task:
+
+1. [Open the task](#view-tasks).
+1. In the top right corner, select the vertical ellipsis (**{ellipsis_v}**).
+1. Select **Turn on confidentiality**.
+
+### Who can see confidential tasks
+
+When a task is made confidential, only users with at least the Reporter role for the project have
+access to the task.
+Users with Guest or [Minimal](permissions.md#users-with-minimal-access) roles can't access
+the task even if they were actively participating before the change.
+
+However, a user with the **Guest role** can create confidential tasks, but can only view the ones
+that they created themselves.
+
+Users with the Guest role or non-members can read the confidential task if they are assigned to the task.
+When a Guest user or non-member is unassigned from a confidential task, they can no longer view it.
+
+Confidential tasks are hidden in search results for users without the necessary permissions.
+
+### Confidential task indicators
+
+Confidential tasks are visually different from regular tasks in a few ways.
+Wherever tasks are listed, you can see the confidential (**{eye-slash}**) icon
+next to the tasks that are marked as confidential.
+
+If you don't have [enough permissions](#who-can-see-confidential-tasks),
+you cannot see confidential tasks at all.
+
+Likewise, while inside the task, you can see the confidential (**{eye-slash}**) icon right next to
+the breadcrumbs.
+
+Every change from regular to confidential and vice versa, is indicated by a
+system note in the task's comments, for example:
+
+> - **{eye-slash}** Jo Garcia made the issue confidential 5 minutes ago
+> - **{eye}** Jo Garcia made the issue visible to everyone just now
+
## Two-column layout
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/415077) in GitLab 16.2 [with a flag](../administration/feature_flags.md) named `work_items_mvc_2`. Disabled by default.
diff --git a/lib/gitlab/usage_data_counters/hll_redis_counter.rb b/lib/gitlab/usage_data_counters/hll_redis_counter.rb
index 787e66d95ae..53594a27867 100644
--- a/lib/gitlab/usage_data_counters/hll_redis_counter.rb
+++ b/lib/gitlab/usage_data_counters/hll_redis_counter.rb
@@ -8,7 +8,6 @@ module Gitlab
EventError = Class.new(StandardError)
UnknownEvent = Class.new(EventError)
- InvalidContext = Class.new(EventError)
# Track event on entity_id
# Increment a Redis HLL counter for unique event_name and entity_id
@@ -29,29 +28,13 @@ module Gitlab
track(values, event_name, time: time)
end
- # Track unique events
- #
- # event_name - The event name.
- # values - One or multiple values counted.
- # context - Event context, plan level tracking.
- # time - Time of the action, set to Time.current.
- def track_event_in_context(event_name, values:, context:, time: Time.zone.now)
- return if context.blank?
- return unless context.in?(valid_context_list)
-
- track(values, event_name, context: context, time: time)
- end
-
# Count unique events for a given time range.
#
# event_names - The list of the events to count.
# start_date - The start date of the time range.
# end_date - The end date of the time range.
- # context - Event context, plan level tracking. Available if set when tracking.
- def unique_events(event_names:, start_date:, end_date:, context: '')
- count_unique_events(event_names: event_names, start_date: start_date, end_date: end_date, context: context) do
- raise InvalidContext if context.present? && !context.in?(valid_context_list)
- end
+ def unique_events(event_names:, start_date:, end_date:)
+ count_unique_events(event_names: event_names, start_date: start_date, end_date: end_date)
end
def known_event?(event_name)
@@ -68,7 +51,7 @@ module Gitlab
private
- def track(values, event_name, context: '', time: Time.zone.now)
+ def track(values, event_name, time: Time.zone.now)
return unless ::ServicePing::ServicePingSettings.enabled?
event = event_for(event_name)
@@ -77,7 +60,7 @@ module Gitlab
return if event.blank?
return unless Feature.enabled?(:redis_hll_tracking, type: :ops)
- Gitlab::Redis::HLL.add(key: redis_key(event, time, context), value: values, expiry: KEY_EXPIRY_LENGTH)
+ Gitlab::Redis::HLL.add(key: redis_key(event, time), value: values, expiry: KEY_EXPIRY_LENGTH)
rescue StandardError => e
# Ignore any exceptions unless is dev or test env
@@ -85,27 +68,20 @@ module Gitlab
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
end
- # The array of valid context on which we allow tracking
- def valid_context_list
- Plan.all_plans
- end
-
- def count_unique_events(event_names:, start_date:, end_date:, context: '')
+ def count_unique_events(event_names:, start_date:, end_date:)
events = events_for(Array(event_names).map(&:to_s))
- yield events if block_given?
-
- keys = keys_for_aggregation(events: events, start_date: start_date, end_date: end_date, context: context)
+ keys = keys_for_aggregation(events: events, start_date: start_date, end_date: end_date)
return FALLBACK unless keys.any?
redis_usage_data { Gitlab::Redis::HLL.count(keys: keys) }
end
- def keys_for_aggregation(events:, start_date:, end_date:, context: '')
+ def keys_for_aggregation(events:, start_date:, end_date:)
end_date = end_date.end_of_week - 1.week
(start_date.to_date..end_date.to_date).map do |date|
- events.map { |event| redis_key(event, date, context) }
+ events.map { |event| redis_key(event, date) }
end.flatten.uniq
end
@@ -134,20 +110,15 @@ module Gitlab
end
# Compose the key in order to store events daily or weekly
- def redis_key(event, time, context = '')
+ def redis_key(event, time)
raise UnknownEvent, "Unknown event #{event[:name]}" unless known_events_names.include?(event[:name].to_s)
key = "{#{REDIS_SLOT}}_#{event[:name]}"
year_week = time.strftime('%G-%V')
- key = "#{key}-#{year_week}"
-
- key = "#{context}_#{key}" if context.present?
- key
+ "#{key}-#{year_week}"
end
end
end
end
end
-
-Gitlab::UsageDataCounters::HLLRedisCounter.prepend_mod_with('Gitlab::UsageDataCounters::HLLRedisCounter')
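With the context prefix removed, `redis_key` now composes a bare `{slot}_event-week` key. A
minimal standalone sketch of the resulting key shape (the slot value and event name below are
illustrative stand-ins, not taken from this diff):

    REDIS_SLOT = 'hll_counters' # assumed slot name, for illustration only

    # Mirrors the simplified redis_key above: "{slot}_<event>-<ISO year>-<ISO week>",
    # with no context prefix.
    def redis_key(event_name, time)
      key = "{#{REDIS_SLOT}}_#{event_name}"
      "#{key}-#{time.strftime('%G-%V')}"
    end

    puts redis_key('g_analytics_contribution', Time.utc(2023, 8, 10))
    # => {hll_counters}_g_analytics_contribution-2023-32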
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index f6606bcd427..cd125826060 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -19412,6 +19412,9 @@ msgstr ""
msgid "Failed to create resources"
msgstr ""
+msgid "Failed to create target branch rule"
+msgstr ""
+
msgid "Failed to create wiki"
msgstr ""
@@ -32785,9 +32788,6 @@ msgstr ""
msgid "Optional"
msgstr ""
-msgid "Optional parameter \"variables\" must be a Hash. Ex: variables[key1]=value1"
-msgstr ""
-
msgid "Optionally, you can %{link_to_customize} how FogBugz email addresses and usernames are imported into GitLab."
msgstr ""
@@ -54038,6 +54038,9 @@ msgstr ""
msgid "You have insufficient permissions to create a Todo for this alert"
msgstr ""
+msgid "You have insufficient permissions to create a target branch rule"
+msgstr ""
+
msgid "You have insufficient permissions to create an HTTP integration for this project"
msgstr ""
diff --git a/qa/qa/support/api.rb b/qa/qa/support/api.rb
index 0081b1c1d46..0b122271747 100644
--- a/qa/qa/support/api.rb
+++ b/qa/qa/support/api.rb
@@ -99,6 +99,23 @@ module QA
url.sub(/private_token=[^&]*/, "private_token=[****]")
end
+ # Returns the response body as a hash with secrets masked.
+ #
+      # @param [#body] response an object whose #body is a JSON string
+ # @param [Array<Symbol>] mask_secrets the keys (as symbols) of the response body whose values will be masked
+ # @return [Hash] the response body as a hash, with the specified secrets replaced with `****`
+ def masked_parsed_response(response, mask_secrets:)
+ unless mask_secrets.is_a?(Array)
+ raise(ArgumentError, "Expected `mask_secrets` to be an array, got #{mask_secrets.class}")
+ end
+
+ mask_secrets.all?(Symbol) || raise(ArgumentError, "Expected `mask_secrets` to be an array of symbols")
+
+ body = parse_body(response)
+ body.is_a?(Hash) || raise(ArgumentError, "Expected response body to be a hash, got #{body.class}")
+ body.each { |k, v| body[k] = mask_secrets.include?(k) ? '****' : v }
+ end
+
# Merges the gitlab_canary cookie into existing cookies for mixed environment testing.
#
# @param [Hash] args the existing args passed to method
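A quick usage sketch of the new helper. The response object here is a stand-in for anything that
responds to #body with a JSON string (mirroring the doubles used in the new spec below); the keys
and token value are illustrative:

    # Hypothetical response double.
    response = Struct.new(:body).new('{ "id": 1, "token": "glpat-abc123" }')

    QA::Support::API.masked_parsed_response(response, mask_secrets: [:token])
    # => { id: 1, token: '****' }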
diff --git a/qa/spec/support/api_spec.rb b/qa/spec/support/api_spec.rb
new file mode 100644
index 00000000000..aeb15dcde24
--- /dev/null
+++ b/qa/spec/support/api_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+module QA
+ RSpec.describe QA::Support::API do
+ describe ".masked_parsed_response" do
+ let(:secrets) { [:secret] }
+ let(:response_to_test) do
+ Struct.new(:body).new('{ "id": 1, "token": "foo", "secret": "bar", "name": "gitlab" }')
+ end
+
+ subject { described_class.masked_parsed_response(response_to_test, mask_secrets: secrets) }
+
+ shared_examples 'masks secrets' do
+ subject { described_class.masked_parsed_response(response_to_test, mask_secrets: secrets) }
+
+ it 'masks secrets' do
+ expect(subject).to match(a_hash_including(expected))
+ end
+ end
+
+ shared_examples 'raises an error' do
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /Expected response body to be a hash/)
+ end
+ end
+
+ context 'when the response body is a hash' do
+ context 'with secret strings' do
+ let(:secrets) { [:token, :secret] }
+ let(:expected) do
+ {
+ id: 1,
+ token: '****',
+ secret: '****',
+ name: 'gitlab'
+ }
+ end
+
+ include_examples 'masks secrets'
+ end
+
+ context 'with secrets that are not strings' do
+ let(:secrets) { [:id] }
+ let(:expected) { { id: '****' } }
+
+ include_examples 'masks secrets'
+ end
+ end
+
+ context 'when the response body is a String' do
+ let(:response_to_test) { Struct.new(:body).new('"secret"') }
+
+ include_examples 'raises an error'
+ end
+
+ context 'when the response body is an Array' do
+ let(:response_to_test) { Struct.new(:body).new('["secret", "not-secret"]') }
+
+ include_examples 'raises an error'
+ end
+
+ context 'when the response body is an Integer' do
+ let(:response_to_test) { Struct.new(:body).new('1') }
+
+ include_examples 'raises an error'
+ end
+
+ context 'when the response body is a Float' do
+ let(:response_to_test) { Struct.new(:body).new('1.0') }
+
+ include_examples 'raises an error'
+ end
+
+ context 'when the response body is a Boolean' do
+ let(:response_to_test) { Struct.new(:body).new('true') }
+
+ include_examples 'raises an error'
+ end
+
+ context 'when the response body is null' do
+ let(:response_to_test) { Struct.new(:body).new('null') }
+
+ include_examples 'raises an error'
+ end
+
+ context 'when mask_secrets is not an array' do
+ let(:secrets) { 'secret' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /Expected `mask_secrets` to be an array, got/)
+ end
+ end
+
+ context 'when mask_secrets contents are not all symbols' do
+ let(:secrets) { ['secret', :secret] }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /Expected `mask_secrets` to be an array of symbols/)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/autocomplete/users_finder_spec.rb b/spec/finders/autocomplete/users_finder_spec.rb
index 319a1c20524..e4337e52306 100644
--- a/spec/finders/autocomplete/users_finder_spec.rb
+++ b/spec/finders/autocomplete/users_finder_spec.rb
@@ -88,13 +88,20 @@ RSpec.describe Autocomplete::UsersFinder do
let(:parent) { create(:group, :public, parent: grandparent) }
let(:child) { create(:group, :public, parent: parent) }
let(:group) { parent }
+ let(:child_project) { create(:project, group: group) }
let!(:grandparent_user) { create(:group_member, :developer, group: grandparent).user }
let!(:parent_user) { create(:group_member, :developer, group: parent).user }
let!(:child_user) { create(:group_member, :developer, group: child).user }
-
- it 'includes users from parent groups as well' do
- expect(subject).to match_array([grandparent_user, parent_user])
+ let!(:child_project_user) { create(:project_member, :developer, project: child_project).user }
+
+ it 'includes users from parent groups, descendant groups, and descendant projects' do
+ expect(subject).to contain_exactly(
+ grandparent_user,
+ parent_user,
+ child_user,
+ child_project_user
+ )
end
end
diff --git a/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js b/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js
index f0ecee5fed9..4057759b9b9 100644
--- a/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js
@@ -1,3 +1,5 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
import {
GlDropdown,
GlDropdownItem,
@@ -5,8 +7,7 @@ import {
GlLoadingIcon,
GlSearchBoxByType,
} from '@gitlab/ui';
-import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
-import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BranchSwitcher from '~/ci/pipeline_editor/components/file_nav/branch_switcher.vue';
@@ -28,55 +29,14 @@ import {
mockTotalSearchResults,
} from '../../mock_data';
-const localVue = createLocalVue();
-localVue.use(VueApollo);
-
describe('Pipeline editor branch switcher', () => {
let wrapper;
let mockApollo;
let mockAvailableBranchQuery;
- const createComponent = ({
- currentBranch = mockDefaultBranch,
- availableBranches = ['main'],
- isQueryLoading = false,
- mountFn = shallowMount,
- options = {},
- props = {},
- } = {}) => {
- wrapper = mountFn(BranchSwitcher, {
- propsData: {
- ...props,
- paginationLimit: mockBranchPaginationLimit,
- },
- provide: {
- projectFullPath: mockProjectFullPath,
- totalBranches: mockTotalBranches,
- },
- mocks: {
- $apollo: {
- queries: {
- availableBranches: {
- loading: isQueryLoading,
- },
- },
- },
- },
- data() {
- return {
- availableBranches,
- currentBranch,
- };
- },
- ...options,
- });
- };
+ Vue.use(VueApollo);
- const createComponentWithApollo = ({
- mountFn = shallowMount,
- props = {},
- availableBranches = ['main'],
- } = {}) => {
+ const createComponent = ({ props = {} } = {}) => {
const handlers = [[getAvailableBranchesQuery, mockAvailableBranchQuery]];
mockApollo = createMockApollo(handlers, resolvers);
@@ -106,16 +66,19 @@ describe('Pipeline editor branch switcher', () => {
},
});
- createComponent({
- mountFn,
- props,
- availableBranches,
- options: {
- localVue,
- apolloProvider: mockApollo,
- mocks: {},
+ wrapper = shallowMount(BranchSwitcher, {
+ propsData: {
+ ...props,
+ paginationLimit: mockBranchPaginationLimit,
+ },
+ provide: {
+ projectFullPath: mockProjectFullPath,
+ totalBranches: mockTotalBranches,
},
+ apolloProvider: mockApollo,
});
+
+ return waitForPromises();
};
const findDropdown = () => wrapper.findComponent(GlDropdown);
@@ -137,7 +100,7 @@ describe('Pipeline editor branch switcher', () => {
expect(wrapper.emitted('showError')).toBeDefined();
expect(wrapper.emitted('showError')[0]).toEqual([
{
- reasons: [wrapper.vm.$options.i18n.fetchError],
+ reasons: ['Unable to fetch branch list for this project.'],
type: DEFAULT_FAILURE,
},
]);
@@ -145,19 +108,26 @@ describe('Pipeline editor branch switcher', () => {
describe('when querying for the first time', () => {
beforeEach(() => {
- createComponentWithApollo({ availableBranches: [] });
+ createComponent();
});
it('disables the dropdown', () => {
expect(findDropdown().props('disabled')).toBe(true);
});
+
+ it('shows loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
});
describe('after querying', () => {
beforeEach(async () => {
setAvailableBranchesMock(generateMockProjectBranches());
- createComponentWithApollo({ mountFn: mount });
- await waitForPromises();
+ await createComponent();
+ });
+
+ it('does not render the loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
});
it('renders search box', () => {
@@ -185,8 +155,7 @@ describe('Pipeline editor branch switcher', () => {
describe('on fetch error', () => {
beforeEach(async () => {
setAvailableBranchesMock(new Error());
- createComponentWithApollo({ availableBranches: [] });
- await waitForPromises();
+ await createComponent();
});
it('does not render dropdown', () => {
@@ -202,8 +171,7 @@ describe('Pipeline editor branch switcher', () => {
beforeEach(async () => {
jest.spyOn(window.history, 'pushState').mockImplementation(() => {});
setAvailableBranchesMock(generateMockProjectBranches());
- createComponentWithApollo({ mountFn: mount });
- await waitForPromises();
+ await createComponent();
});
it('updates session history when selecting a different branch', async () => {
@@ -251,7 +219,7 @@ describe('Pipeline editor branch switcher', () => {
describe('with unsaved changes', () => {
beforeEach(async () => {
- createComponentWithApollo({ mountFn: mount, props: { hasUnsavedChanges: true } });
+ createComponent({ props: { hasUnsavedChanges: true } });
await waitForPromises();
});
@@ -270,8 +238,7 @@ describe('Pipeline editor branch switcher', () => {
describe('when searching', () => {
beforeEach(async () => {
setAvailableBranchesMock(generateMockProjectBranches());
- createComponentWithApollo({ mountFn: mount });
- await waitForPromises();
+ await createComponent();
});
afterEach(() => {
@@ -355,23 +322,10 @@ describe('Pipeline editor branch switcher', () => {
});
});
- describe('loading icon', () => {
- it.each`
- isQueryLoading | isRendered
- ${true} | ${true}
- ${false} | ${false}
- `('checks if query is loading before rendering', ({ isQueryLoading, isRendered }) => {
- createComponent({ isQueryLoading, mountFn: mount });
-
- expect(findLoadingIcon().exists()).toBe(isRendered);
- });
- });
-
describe('when scrolling to the bottom of the list', () => {
beforeEach(async () => {
setAvailableBranchesMock(generateMockProjectBranches());
- createComponentWithApollo();
- await waitForPromises();
+ await createComponent();
});
afterEach(() => {
diff --git a/spec/frontend/super_sidebar/utils_spec.js b/spec/frontend/super_sidebar/utils_spec.js
index 8c8673ddbc4..f7a7e8db24a 100644
--- a/spec/frontend/super_sidebar/utils_spec.js
+++ b/spec/frontend/super_sidebar/utils_spec.js
@@ -1,14 +1,21 @@
+import * as Sentry from '@sentry/browser';
import {
getTopFrequentItems,
trackContextAccess,
formatContextSwitcherItems,
+ getItemsFromLocalStorage,
+ removeItemFromLocalStorage,
ariaCurrent,
} from '~/super_sidebar/utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import AccessorUtilities from '~/lib/utils/accessor';
import { FREQUENT_ITEMS, FIFTEEN_MINUTES_IN_MS } from '~/frequent_items/constants';
import { unsortedFrequentItems, sortedFrequentItems } from '../frequent_items/mock_data';
-import { searchUserProjectsAndGroupsResponseMock } from './mock_data';
+import { cachedFrequentProjects, searchUserProjectsAndGroupsResponseMock } from './mock_data';
+
+jest.mock('@sentry/browser');
+
+useLocalStorageSpy();
describe('Super sidebar utils spec', () => {
describe('getTopFrequentItems', () => {
@@ -35,8 +42,6 @@ describe('Super sidebar utils spec', () => {
});
describe('trackContextAccess', () => {
- useLocalStorageSpy();
-
const username = 'root';
const context = {
namespace: 'groups',
@@ -159,6 +164,125 @@ describe('Super sidebar utils spec', () => {
});
});
+ describe('getItemsFromLocalStorage', () => {
+ const storageKey = 'mockStorageKey';
+ const maxItems = 5;
+ const storedItems = JSON.parse(cachedFrequentProjects);
+
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, cachedFrequentProjects);
+ });
+
+ describe('when localStorage cannot be accessed', () => {
+ beforeEach(() => {
+ jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
+ });
+
+ it('returns an empty array', () => {
+ const items = getItemsFromLocalStorage({ storageKey, maxItems });
+ expect(items).toEqual([]);
+ });
+ });
+
+ describe('when localStorage contains parseable data', () => {
+ it('returns an array of items limited by max items', () => {
+ const items = getItemsFromLocalStorage({ storageKey, maxItems });
+ expect(items.length).toEqual(maxItems);
+
+ items.forEach((item) => {
+ expect(storedItems).toContainEqual(item);
+ });
+ });
+
+      it('returns only a single item when max items is 1', () => {
+ const items = getItemsFromLocalStorage({ storageKey, maxItems: 1 });
+ expect(items.length).toEqual(1);
+
+ expect(storedItems).toContainEqual(items[0]);
+ });
+ });
+
+ describe('when localStorage contains unparseable data', () => {
+ let items;
+
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, 'unparseable');
+ items = getItemsFromLocalStorage({ storageKey, maxItems });
+ });
+
+ it('logs an error to Sentry', () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+
+ it('returns an empty array', () => {
+ expect(items).toEqual([]);
+ });
+ });
+ });
+
+ describe('removeItemFromLocalStorage', () => {
+ const storageKey = 'mockStorageKey';
+ const originalStoredItems = JSON.parse(cachedFrequentProjects);
+
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, cachedFrequentProjects);
+ });
+
+ describe('when given an item to delete', () => {
+ let items;
+ let modifiedStoredItems;
+
+ beforeEach(() => {
+ items = removeItemFromLocalStorage({ storageKey, item: { id: 3 } });
+ modifiedStoredItems = JSON.parse(window.localStorage.getItem(storageKey));
+ });
+
+ it('removes the item from localStorage', () => {
+ expect(modifiedStoredItems.length).toBe(originalStoredItems.length - 1);
+ expect(modifiedStoredItems).not.toContainEqual(originalStoredItems[2]);
+ });
+
+ it('returns the resulting stored structure', () => {
+ expect(items).toEqual(modifiedStoredItems);
+ });
+ });
+
+ describe('when given an unknown item to delete', () => {
+ let items;
+ let modifiedStoredItems;
+
+ beforeEach(() => {
+ items = removeItemFromLocalStorage({ storageKey, item: { id: 'does-not-exist' } });
+ modifiedStoredItems = JSON.parse(window.localStorage.getItem(storageKey));
+ });
+
+ it('does not change the stored value', () => {
+ expect(modifiedStoredItems).toEqual(originalStoredItems);
+ });
+
+ it('returns the stored structure', () => {
+ expect(items).toEqual(originalStoredItems);
+ });
+ });
+
+ describe('when localStorage has unparseable data', () => {
+ let items;
+
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, 'unparseable');
+ items = removeItemFromLocalStorage({ storageKey, item: { id: 3 } });
+ });
+
+ it('logs an error to Sentry', () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+
+ it('returns an empty array', () => {
+ expect(items).toEqual([]);
+ });
+ });
+ });
+
describe('ariaCurrent', () => {
it.each`
isActive | expected
diff --git a/spec/graphql/types/ci/runner_manager_type_spec.rb b/spec/graphql/types/ci/runner_manager_type_spec.rb
index 6f73171cd8f..ff7297b0a0e 100644
--- a/spec/graphql/types/ci/runner_manager_type_spec.rb
+++ b/spec/graphql/types/ci/runner_manager_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['CiRunnerManager'], feature_category: :runner_
it 'contains attributes related to a runner manager' do
expected_fields = %w[
- architecture_name contacted_at created_at executor_name id ip_address platform_name revision
+ architecture_name contacted_at created_at executor_name id ip_address job_execution_status platform_name revision
runner status system_id version
]
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 4dd3948ad5c..7bef14d5f7a 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -8,9 +8,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
- let(:default_context) { 'default' }
- let(:invalid_context) { 'invalid' }
-
around do |example|
# We need to freeze to a reference time
# because visits are grouped by the week number in the year
@@ -73,7 +70,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:no_slot) { 'no_slot' }
let(:different_aggregation) { 'different_aggregation' }
let(:custom_daily_event) { 'g_analytics_custom' }
- let(:context_event) { 'context_event' }
let(:global_category) { 'global' }
let(:compliance_category) { 'compliance' }
@@ -88,8 +84,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
{ name: category_productivity_event },
{ name: compliance_slot_event },
{ name: no_slot },
- { name: different_aggregation },
- { name: context_event }
+ { name: different_aggregation }
].map(&:with_indifferent_access)
end
@@ -191,43 +186,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
- describe '.track_event_in_context' do
- context 'with valid contex' do
- it 'increments context event counter' do
- expect(Gitlab::Redis::HLL).to receive(:add) do |kwargs|
- expect(kwargs[:key]).to match(/^#{default_context}_.*/)
- end
-
- described_class.track_event_in_context(context_event, values: entity1, context: default_context)
- end
-
- it 'tracks events with multiple values' do
- values = [entity1, entity2]
- expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_analytics_contribution/,
- value: values,
- expiry: described_class::KEY_EXPIRY_LENGTH)
-
- described_class.track_event_in_context(:g_analytics_contribution, values: values, context: default_context)
- end
- end
-
- context 'with empty context' do
- it 'does not increment a counter' do
- expect(Gitlab::Redis::HLL).not_to receive(:add)
-
- described_class.track_event_in_context(context_event, values: entity1, context: '')
- end
- end
-
- context 'when sending invalid context' do
- it 'does not increment a counter' do
- expect(Gitlab::Redis::HLL).not_to receive(:add)
-
- described_class.track_event_in_context(context_event, values: entity1, context: invalid_context)
- end
- end
- end
-
describe '.unique_events' do
before do
# events in current week, should not be counted as week is not complete
@@ -337,48 +295,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
- describe 'context level tracking' do
- using RSpec::Parameterized::TableSyntax
-
- let(:known_events) do
- [
- { name: 'event_name_1' },
- { name: 'event_name_2' },
- { name: 'event_name_3' }
- ].map(&:with_indifferent_access)
- end
-
- before do
- allow(described_class).to receive(:known_events).and_return(known_events)
- allow(described_class).to receive(:categories).and_return(%w(category1 category2))
-
- described_class.track_event_in_context('event_name_1', values: [entity1, entity3], context: default_context, time: 2.days.ago)
- described_class.track_event_in_context('event_name_1', values: entity3, context: default_context, time: 2.days.ago)
- described_class.track_event_in_context('event_name_1', values: entity3, context: invalid_context, time: 2.days.ago)
- described_class.track_event_in_context('event_name_2', values: [entity1, entity2], context: '', time: 2.weeks.ago)
- end
-
- subject(:unique_events) { described_class.unique_events(event_names: event_names, start_date: 4.weeks.ago, end_date: Date.current, context: context) }
-
- context 'with correct arguments' do
- where(:event_names, :context, :value) do
- ['event_name_1'] | 'default' | 2
- ['event_name_1'] | '' | 0
- ['event_name_2'] | '' | 0
- end
-
- with_them do
- it { is_expected.to eq value }
- end
- end
-
- context 'with invalid context' do
- it 'raise error' do
- expect { described_class.unique_events(event_names: 'event_name_1', start_date: 4.weeks.ago, end_date: Date.current, context: invalid_context) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::InvalidContext)
- end
- end
- end
-
describe '.calculate_events_union' do
let(:time_range) { { start_date: 7.days.ago, end_date: DateTime.current } }
let(:known_events) do
diff --git a/spec/models/ci/runner_manager_spec.rb b/spec/models/ci/runner_manager_spec.rb
index d69bf1a0da0..a4759f65b41 100644
--- a/spec/models/ci/runner_manager_spec.rb
+++ b/spec/models/ci/runner_manager_spec.rb
@@ -112,6 +112,23 @@ RSpec.describe Ci::RunnerManager, feature_category: :runner_fleet, type: :model
end
end
+ describe '.with_running_builds' do
+ subject(:scope) { described_class.with_running_builds }
+
+ let_it_be(:runner) { create(:ci_runner) }
+ let_it_be(:runner_manager1) { create(:ci_runner_machine, runner: runner) }
+ let_it_be(:runner_manager2) { create(:ci_runner_machine, runner: runner) }
+
+ before_all do
+ create(:ci_runner_machine_build, runner_manager: runner_manager1,
+ build: create(:ci_build, :success, runner: runner))
+ create(:ci_runner_machine_build, runner_manager: runner_manager2,
+ build: create(:ci_build, :running, runner: runner))
+ end
+
+ it { is_expected.to contain_exactly runner_manager2 }
+ end
+
describe '.order_id_desc' do
subject(:scope) { described_class.order_id_desc }
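This spec exercises a new `.with_running_builds` scope on Ci::RunnerManager. A hedged sketch of
what such a scope could look like (the association and column names are assumptions; the actual
implementation is in app/models/ci/runner_manager.rb, which this commit also changes):

    # Runner managers with at least one build currently in the running state.
    scope :with_running_builds, -> do
      where(id: Ci::RunnerManagerBuild.joins(:build)
                                      .where(ci_builds: { status: :running })
                                      .select(:runner_machine_id)) # assumed FK column
    end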
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index a26ab2501ec..9db710cb3cc 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -2031,4 +2031,12 @@ RSpec.describe Issue, feature_category: :team_planning do
expect { issue1.unsubscribe_email_participant(email) }.not_to change { issue2.issue_email_participants.count }
end
end
+
+ describe '#update_search_data!' do
+ it 'copies namespace_id to search data' do
+ issue = create(:issue)
+
+ expect(issue.search_data.namespace_id).to eq(issue.namespace_id)
+ end
+ end
end
diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb
index 0c16a725663..ed80f5c1516 100644
--- a/spec/models/loose_foreign_keys/deleted_record_spec.rb
+++ b/spec/models/loose_foreign_keys/deleted_record_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
+RSpec.describe LooseForeignKeys::DeletedRecord, type: :model, feature_category: :database do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:table) { 'public.projects' }
describe 'class methods' do
@@ -14,14 +16,30 @@ RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
let(:records) { described_class.load_batch_for_table(table, 10) }
describe '.load_batch_for_table' do
- it 'loads records and orders them by creation date' do
- expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4])
+ where(:union_feature_flag_value) do
+ [true, false]
end
- it 'supports configurable batch size' do
- records = described_class.load_batch_for_table(table, 2)
+ with_them do
+ before do
+ stub_feature_flags('loose_foreign_keys_batch_load_using_union' => union_feature_flag_value)
+ end
+
+ it 'loads records and orders them by creation date' do
+ expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4])
+ end
+
+ it 'supports configurable batch size' do
+ records = described_class.load_batch_for_table(table, 2)
+
+ expect(records).to eq([deleted_record_1, deleted_record_2])
+ end
- expect(records).to eq([deleted_record_1, deleted_record_2])
+ it 'returns the partition number in each returned record' do
+ records = described_class.load_batch_for_table(table, 4)
+
+ expect(records).to all(have_attributes(partition: (a_value > 0)))
+ end
end
end
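The spec is now table-driven over the `loose_foreign_keys_batch_load_using_union` feature flag,
asserting identical behavior whether the flag is on or off. A purely illustrative sketch of
flag-guarded branching inside `.load_batch_for_table` (the partition helpers below are invented
for the example; the real change is in app/models/loose_foreign_keys/deleted_record.rb):

    def self.load_batch_for_table(table, batch_size)
      base = where(fully_qualified_table_name: table)

      if Feature.enabled?(:loose_foreign_keys_batch_load_using_union)
        # One ordered, limited sub-query per partition, combined with SQL UNION
        # through the FromUnion concern, then limited again overall.
        scopes = partition_numbers.map do |number|                 # hypothetical helper
          base.for_partition(number).order(:id).limit(batch_size) # hypothetical scope
        end
        from_union(scopes, remove_duplicates: false).limit(batch_size)
      else
        base.order(:id).limit(batch_size)
      end
    end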
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 6acd705c982..3cfb98c57fd 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -109,9 +109,9 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
runner.maintainer_note.present? ? a_string_including('<strong>Test maintenance note</strong>') : '',
job_count: runner.builds.count,
jobs: a_hash_including(
- "count" => runner.builds.count,
- "nodes" => an_instance_of(Array),
- "pageInfo" => anything
+ 'count' => runner.builds.count,
+ 'nodes' => an_instance_of(Array),
+ 'pageInfo' => anything
),
project_count: nil,
admin_url: "http://localhost/admin/runners/#{runner.id}",
@@ -124,8 +124,21 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
'assignRunner' => true
},
managers: a_hash_including(
- "count" => runner.runner_managers.count,
- "nodes" => an_instance_of(Array),
+ 'count' => runner.runner_managers.count,
+ 'nodes' => runner.runner_managers.map do |runner_manager|
+ a_graphql_entity_for(
+ runner_manager,
+ system_id: runner_manager.system_xid,
+ version: runner_manager.version,
+ revision: runner_manager.revision,
+ ip_address: runner_manager.ip_address,
+ executor_name: runner_manager.executor_type&.dasherize,
+ architecture_name: runner_manager.architecture,
+ platform_name: runner_manager.platform,
+ status: runner_manager.status.to_s.upcase,
+ job_execution_status: runner_manager.builds.running.any? ? 'RUNNING' : 'IDLE'
+ )
+ end,
"pageInfo" => anything
)
)
@@ -215,11 +228,19 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
end
- context 'with build running' do
+ context 'with build running', :freeze_time do
+ let!(:pipeline) { create(:ci_pipeline, project: project1) }
+ let!(:runner_manager) do
+ create(:ci_runner_machine,
+ runner: runner, ip_address: '127.0.0.1', version: '16.3', revision: 'a', architecture: 'arm', platform: 'osx',
+ contacted_at: 1.second.ago, executor_type: 'docker')
+ end
+
+ let!(:runner) { create(:ci_runner) }
+ let!(:build) { create(:ci_build, :running, runner: runner, pipeline: pipeline) }
+
before do
- project = create(:project, :repository)
- pipeline = create(:ci_pipeline, project: project)
- create(:ci_build, :running, runner: runner, pipeline: pipeline)
+ create(:ci_runner_machine_build, runner_manager: runner_manager, build: build)
end
it_behaves_like 'runner details fetch'
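The managers expectation above derives jobExecutionStatus from whether the runner manager has any
running builds. A minimal sketch of the matching field resolution (hypothetical method body; the
real field is defined in app/graphql/types/ci/runner_manager_type.rb, also in this commit):

    # Resolves to the RUNNING or IDLE enum value asserted in the spec.
    def job_execution_status
      object.builds.running.any? ? :running : :idle
    end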
diff --git a/spec/services/prometheus/proxy_service_spec.rb b/spec/services/prometheus/proxy_service_spec.rb
deleted file mode 100644
index f71662f62ad..00000000000
--- a/spec/services/prometheus/proxy_service_spec.rb
+++ /dev/null
@@ -1,240 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Prometheus::ProxyService, feature_category: :metrics do
- include ReactiveCachingHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- describe 'configuration' do
- it 'ReactiveCaching refresh is not needed' do
- expect(described_class.reactive_cache_refresh_interval).to be > described_class.reactive_cache_lifetime
- end
- end
-
- describe '#initialize' do
- let(:params) { ActionController::Parameters.new(query: '1').permit! }
-
- it 'initializes attributes' do
- result = described_class.new(environment, 'GET', 'query', params)
-
- expect(result.proxyable).to eq(environment)
- expect(result.method).to eq('GET')
- expect(result.path).to eq('query')
- expect(result.params).to eq('query' => '1')
- end
-
- it 'converts ActionController::Parameters into hash' do
- result = described_class.new(environment, 'GET', 'query', params)
-
- expect(result.params).to be_an_instance_of(Hash)
- end
-
- context 'with unknown params' do
- let(:params) { ActionController::Parameters.new(query: '1', other_param: 'val').permit! }
-
- it 'filters unknown params' do
- result = described_class.new(environment, 'GET', 'query', params)
-
- expect(result.params).to eq('query' => '1')
- end
- end
-
- context 'with series method' do
- let(:params) do
- ActionController::Parameters.new(
- match: ['1'],
- start: "2020-06-11T10:15:51Z",
- end: "2020-06-11T11:16:06Z",
- unknown_param: 'val'
- ).permit!
- end
-
- it 'allows match, start and end parameters' do
- result = described_class.new(environment, 'GET', 'series', params)
-
- expect(result.params).to eq(
- 'match' => ['1'],
- 'start' => "2020-06-11T10:15:51Z",
- 'end' => "2020-06-11T11:16:06Z"
- )
- end
- end
- end
-
- describe '#execute' do
- let(:prometheus_adapter) { instance_double(::Integrations::Prometheus) }
- let(:params) { ActionController::Parameters.new(query: '1').permit! }
-
- subject { described_class.new(environment, 'GET', 'query', params) }
-
- context 'when prometheus_adapter is nil' do
- before do
- allow(environment).to receive(:prometheus_adapter).and_return(nil)
- end
-
- it 'returns error' do
- expect(subject.execute).to eq(
- status: :error,
- message: 'No prometheus server found',
- http_status: :service_unavailable
- )
- end
- end
-
- context 'when prometheus_adapter cannot query' do
- before do
- allow(environment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:can_query?).and_return(false)
- end
-
- it 'returns error' do
- expect(subject.execute).to eq(
- status: :error,
- message: 'No prometheus server found',
- http_status: :service_unavailable
- )
- end
- end
-
- context 'cannot proxy' do
- subject { described_class.new(environment, 'POST', 'garbage', params) }
-
- it 'returns error' do
- expect(subject.execute).to eq(
- message: 'Proxy support for this API is not available currently',
- status: :error
- )
- end
- end
-
- context 'with caching', :use_clean_rails_memory_store_caching do
- let(:return_value) { { 'http_status' => 200, 'body' => 'body' } }
-
- let(:opts) do
- [environment.class.name, environment.id, 'GET', 'query', { 'query' => '1' }]
- end
-
- before do
- allow(environment).to receive(:prometheus_adapter)
- .and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:can_query?).and_return(true)
- end
-
- context 'when value present in cache' do
- before do
- stub_reactive_cache(subject, return_value, opts)
- end
-
- it 'returns cached value' do
- result = subject.execute
-
- expect(result[:http_status]).to eq(return_value[:http_status])
- expect(result[:body]).to eq(return_value[:body])
- end
- end
-
- context 'when value not present in cache' do
- it 'returns nil' do
- expect(ExternalServiceReactiveCachingWorker)
- .to receive(:perform_async)
- .with(subject.class, subject.id, *opts)
-
- result = subject.execute
-
- expect(result).to eq(nil)
- end
- end
- end
-
- context 'call prometheus api' do
- let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) }
-
- before do
- synchronous_reactive_cache(subject)
-
- allow(environment).to receive(:prometheus_adapter)
- .and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:can_query?).and_return(true)
- allow(prometheus_adapter).to receive(:prometheus_client)
- .and_return(prometheus_client)
- end
-
- context 'connection to prometheus server succeeds' do
- let(:rest_client_response) { instance_double(RestClient::Response) }
- let(:prometheus_http_status_code) { 400 }
-
- let(:response_body) do
- '{"status":"error","errorType":"bad_data","error":"parse error at char 1: no expression found in input"}'
- end
-
- before do
- allow(prometheus_client).to receive(:proxy).and_return(rest_client_response)
-
- allow(rest_client_response).to receive(:code)
- .and_return(prometheus_http_status_code)
- allow(rest_client_response).to receive(:body).and_return(response_body)
- end
-
- it 'returns the http status code and body from prometheus' do
- expect(subject.execute).to eq(
- http_status: prometheus_http_status_code,
- body: response_body,
- status: :success
- )
- end
- end
-
- context 'connection to prometheus server fails' do
- context 'prometheus client raises Gitlab::PrometheusClient::Error' do
- before do
- allow(prometheus_client).to receive(:proxy)
- .and_raise(Gitlab::PrometheusClient::Error, 'Network connection error')
- end
-
- it 'returns error' do
- expect(subject.execute).to eq(
- status: :error,
- message: 'Network connection error',
- http_status: :service_unavailable
- )
- end
- end
- end
-
- context 'with series API' do
- let(:rest_client_response) { instance_double(RestClient::Response, code: 200, body: '') }
-
- let(:params) do
- ActionController::Parameters.new(match: ['1'], start: 1.hour.ago.rfc3339, end: Time.current.rfc3339).permit!
- end
-
- subject { described_class.new(environment, 'GET', 'series', params) }
-
- it 'calls PrometheusClient with given parameters' do
- expect(prometheus_client).to receive(:proxy)
- .with('series', params.to_h)
- .and_return(rest_client_response)
-
- subject.execute
- end
- end
- end
- end
-
- describe '.from_cache' do
- it 'initializes an instance of ProxyService class' do
- result = described_class.from_cache(
- environment.class.name, environment.id, 'GET', 'query', { 'query' => '1' }
- )
-
- expect(result).to be_an_instance_of(described_class)
- expect(result.proxyable).to eq(environment)
- expect(result.method).to eq('GET')
- expect(result.path).to eq('query')
- expect(result.params).to eq('query' => '1')
- end
- end
-end
diff --git a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb b/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
deleted file mode 100644
index a5395eed1b4..00000000000
--- a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
+++ /dev/null
@@ -1,204 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Prometheus::ProxyVariableSubstitutionService, feature_category: :metrics do
- describe '#execute' do
- let_it_be(:environment) { create(:environment) }
-
- let(:params_keys) { { query: 'up{environment="{{ci_environment_slug}}"}' } }
- let(:params) { ActionController::Parameters.new(params_keys).permit! }
- let(:result) { subject.execute }
-
- subject { described_class.new(environment, params) }
-
- shared_examples 'success' do
- it 'replaces variables with values' do
- expect(result[:status]).to eq(:success)
- expect(result[:params][:query]).to eq(expected_query)
- end
- end
-
- shared_examples 'error' do |message|
- it 'returns error' do
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq(message)
- end
- end
-
- context 'does not alter params passed to the service' do
- it do
- subject.execute
-
- expect(params).to eq(
- ActionController::Parameters.new(
- query: 'up{environment="{{ci_environment_slug}}"}'
- ).permit!
- )
- end
- end
-
- context 'with predefined variables' do
- context 'with nil query' do
- let(:params_keys) { {} }
-
- it_behaves_like 'success' do
- let(:expected_query) { nil }
- end
- end
-
- context 'with liquid format' do
- let(:params_keys) do
- { query: 'up{environment="{{ci_environment_slug}}"}' }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { %[up{environment="#{environment.slug}"}] }
- end
- end
- end
-
- context 'with custom variables' do
- let(:pod_name) { "pod1" }
-
- let(:params_keys) do
- {
- query: 'up{pod_name="{{pod_name}}"}',
- variables: { 'pod_name' => pod_name }
- }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { %q[up{pod_name="pod1"}] }
- end
-
- context 'with predefined variables in variables parameter' do
- let(:params_keys) do
- {
- query: 'up{pod_name="{{pod_name}}",env="{{ci_environment_slug}}"}',
- variables: { 'pod_name' => pod_name, 'ci_environment_slug' => 'custom_value' }
- }
- end
-
- it_behaves_like 'success' do
- # Predefined variable values should not be overwritten by custom variable
- # values.
- let(:expected_query) { "up{pod_name=\"#{pod_name}\",env=\"#{environment.slug}\"}" }
- end
- end
-
- context 'with invalid variables parameter' do
- let(:params_keys) do
- {
- query: 'up{pod_name="{{pod_name}}"}',
- variables: ['a']
- }
- end
-
- it_behaves_like 'error', 'Optional parameter "variables" must be a Hash. Ex: variables[key1]=value1'
- end
-
- context 'with nil variables' do
- let(:params_keys) do
- {
- query: 'up{pod_name="{{pod_name}}"}',
- variables: nil
- }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { 'up{pod_name="{{pod_name}}"}' }
- end
- end
- end
-
- context 'gsub variable substitution tolerance for weirdness' do
- context 'with whitespace around variable' do
- let(:params_keys) do
- {
- query: 'up{' \
- "env1={{ ci_environment_slug}}," \
- "env2={{ci_environment_slug }}," \
- "{{ environment_filter }}" \
- '}'
- }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) do
- 'up{' \
- "env1=#{environment.slug}," \
- "env2=#{environment.slug}," \
- "container_name!=\"POD\",environment=\"#{environment.slug}\"" \
- '}'
- end
- end
- end
-
- context 'with empty variables' do
- let(:params_keys) do
- { query: "up{env1={{}},env2={{ }}}" }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { "up{env1={{}},env2={{ }}}" }
- end
- end
-
- context 'with multiple occurrences of variable in string' do
- let(:params_keys) do
- { query: "up{env1={{ci_environment_slug}},env2={{ci_environment_slug}}}" }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { "up{env1=#{environment.slug},env2=#{environment.slug}}" }
- end
- end
-
- context 'with multiple variables in string' do
- let(:params_keys) do
- { query: "up{env={{ci_environment_slug}},{{environment_filter}}}" }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) do
- "up{env=#{environment.slug}," \
- "container_name!=\"POD\",environment=\"#{environment.slug}\"}"
- end
- end
- end
-
- context 'with unknown variables in string' do
- let(:params_keys) { { query: "up{env={{env_slug}}}" } }
-
- it_behaves_like 'success' do
- let(:expected_query) { "up{env={{env_slug}}}" }
- end
- end
-
- context 'with unknown and known variables in string' do
- let(:params_keys) do
- { query: "up{env={{ci_environment_slug}},other_env={{env_slug}}}" }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { "up{env=#{environment.slug},other_env={{env_slug}}}" }
- end
- end
- end
-
- context '__range' do
- let(:params_keys) do
- {
- query: 'topk(5, sum by (method) (rate(rest_client_requests_total[{{__range}}])))',
- start_time: '2020-05-29T08:19:07.142Z',
- end_time: '2020-05-29T16:19:07.142Z'
- }
- end
-
- it_behaves_like 'success' do
- let(:expected_query) { "topk(5, sum by (method) (rate(rest_client_requests_total[#{8.hours.to_i}s])))" }
- end
- end
- end
-end