gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2023-11-08 12:12:25 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2023-11-08 12:12:25 +0300
commit    a130bdc4dd6f78e1605b79b77929c9315b322be3 (patch)
tree      8ea70694cdfb038e8d19ea7d23bedfd2d08f212e
parent    e808a772e795130cb3c7c1cc9673a1205f4ccade (diff)

Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .rubocop_todo/layout/argument_alignment.yml | 11
-rw-r--r--  app/assets/javascripts/work_items/components/shared/work_item_token_input.vue | 120
-rw-r--r--  app/assets/javascripts/work_items/components/work_item_parent.vue | 2
-rw-r--r--  app/assets/javascripts/work_items/constants.js | 5
-rw-r--r--  app/assets/javascripts/work_items/graphql/group_work_items.query.graphql | 2
-rw-r--r--  app/assets/javascripts/work_items/graphql/project_work_items.query.graphql | 11
-rw-r--r--  app/models/concerns/use_sql_function_for_primary_key_lookups.rb | 39
-rw-r--r--  app/models/environment.rb | 7
-rw-r--r--  app/models/namespace.rb | 1
-rw-r--r--  app/models/project.rb | 1
-rw-r--r--  app/models/user.rb | 1
-rw-r--r--  app/services/environments/auto_recover_service.rb | 44
-rw-r--r--  app/workers/all_queues.yml | 9
-rw-r--r--  app/workers/environments/auto_recover_worker.rb | 21
-rw-r--r--  app/workers/environments/auto_stop_cron_worker.rb | 1
-rw-r--r--  config/feature_flags/development/use_sql_functions_for_primary_key_lookups.yml | 8
-rw-r--r--  config/sidekiq_queues.yml | 2
-rw-r--r--  db/docs/compliance_framework_security_policies.yml | 10
-rw-r--r--  db/migrate/20231025123238_create_compliance_framework_security_policies.rb | 21
-rw-r--r--  db/migrate/20231026050554_add_functions_for_primary_key_lookup.rb | 26
-rw-r--r--  db/migrate/20231031200433_add_framework_fk_to_compliance_framework_security_policies.rb | 19
-rw-r--r--  db/migrate/20231031200645_add_policy_configuration_fk_to_compliance_framework_security_policies.rb | 19
-rw-r--r--  db/schema_migrations/20231025123238 | 1
-rw-r--r--  db/schema_migrations/20231026050554 | 1
-rw-r--r--  db/schema_migrations/20231031200433 | 1
-rw-r--r--  db/schema_migrations/20231031200645 | 1
-rw-r--r--  db/structure.sql | 493
-rw-r--r--  lib/gitlab/database/dictionary.rb | 60
-rw-r--r--  lib/gitlab/database/gitlab_schema.rb | 26
-rw-r--r--  locale/gitlab.pot | 5
-rw-r--r--  spec/frontend/work_items/components/shared/work_item_token_input_spec.js | 86
-rw-r--r--  spec/frontend/work_items/components/work_item_parent_spec.js | 14
-rw-r--r--  spec/frontend/work_items/mock_data.js | 73
-rw-r--r--  spec/lib/gitlab/database/dictionary_spec.rb | 84
-rw-r--r--  spec/lib/gitlab/database/gitlab_schema_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb | 63
-rw-r--r--  spec/models/concerns/use_sql_function_for_primary_key_lookups_spec.rb | 181
-rw-r--r--  spec/models/environment_spec.rb | 72
-rw-r--r--  spec/requests/api/ci/job_artifacts_spec.rb | 70
-rw-r--r--  spec/requests/api/ci/jobs_spec.rb | 16
-rw-r--r--  spec/requests/api/ci/pipelines_spec.rb | 72
-rw-r--r--  spec/requests/api/ci/resource_groups_spec.rb | 4
-rw-r--r--  spec/requests/api/ci/runner/jobs_artifacts_spec.rb | 11
-rw-r--r--  spec/requests/api/ci/runner/jobs_request_post_spec.rb | 56
-rw-r--r--  spec/requests/api/ci/runner/jobs_trace_spec.rb | 22
-rw-r--r--  spec/requests/api/ci/runner/runners_post_spec.rb | 4
-rw-r--r--  spec/requests/api/ci/runners_spec.rb | 18
-rw-r--r--  spec/services/environments/auto_recover_service_spec.rb | 99
-rw-r--r--  spec/workers/environments/auto_recover_worker_spec.rb | 64
-rw-r--r--  spec/workers/environments/auto_stop_cron_worker_spec.rb | 8
50 files changed, 1558 insertions(+), 431 deletions(-)
diff --git a/.rubocop_todo/layout/argument_alignment.yml b/.rubocop_todo/layout/argument_alignment.yml
index 91c7f265f99..eff0b672fea 100644
--- a/.rubocop_todo/layout/argument_alignment.yml
+++ b/.rubocop_todo/layout/argument_alignment.yml
@@ -1044,8 +1044,6 @@ Layout/ArgumentAlignment:
- 'ee/spec/requests/api/analytics/project_deployment_frequency_spec.rb'
- 'ee/spec/requests/api/api_spec.rb'
- 'ee/spec/requests/api/branches_spec.rb'
- - 'ee/spec/requests/api/ci/jobs_spec.rb'
- - 'ee/spec/requests/api/ci/pipelines_spec.rb'
- 'ee/spec/requests/api/composer_packages_spec.rb'
- 'ee/spec/requests/api/deployments_spec.rb'
- 'ee/spec/requests/api/dora/metrics_spec.rb'
@@ -1503,15 +1501,6 @@ Layout/ArgumentAlignment:
- 'spec/requests/api/api_spec.rb'
- 'spec/requests/api/badges_spec.rb'
- 'spec/requests/api/branches_spec.rb'
- - 'spec/requests/api/ci/job_artifacts_spec.rb'
- - 'spec/requests/api/ci/jobs_spec.rb'
- - 'spec/requests/api/ci/pipelines_spec.rb'
- - 'spec/requests/api/ci/resource_groups_spec.rb'
- - 'spec/requests/api/ci/runner/jobs_artifacts_spec.rb'
- - 'spec/requests/api/ci/runner/jobs_request_post_spec.rb'
- - 'spec/requests/api/ci/runner/jobs_trace_spec.rb'
- - 'spec/requests/api/ci/runner/runners_post_spec.rb'
- - 'spec/requests/api/ci/runners_spec.rb'
- 'spec/requests/api/clusters/agent_tokens_spec.rb'
- 'spec/requests/api/clusters/agents_spec.rb'
- 'spec/requests/api/commit_statuses_spec.rb'
diff --git a/app/assets/javascripts/work_items/components/shared/work_item_token_input.vue b/app/assets/javascripts/work_items/components/shared/work_item_token_input.vue
index ce3d4749f17..c122db6c902 100644
--- a/app/assets/javascripts/work_items/components/shared/work_item_token_input.vue
+++ b/app/assets/javascripts/work_items/components/shared/work_item_token_input.vue
@@ -1,21 +1,28 @@
<script>
-import { GlTokenSelector } from '@gitlab/ui';
+import { GlTokenSelector, GlAlert } from '@gitlab/ui';
import { debounce } from 'lodash';
+
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { isNumeric } from '~/lib/utils/number_utils';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import SafeHtml from '~/vue_shared/directives/safe_html';
+import { highlighter } from 'ee_else_ce/gfm_auto_complete';
import groupWorkItemsQuery from '../../graphql/group_work_items.query.graphql';
import projectWorkItemsQuery from '../../graphql/project_work_items.query.graphql';
import {
WORK_ITEMS_TYPE_MAP,
I18N_WORK_ITEM_SEARCH_INPUT_PLACEHOLDER,
+ I18N_WORK_ITEM_SEARCH_ERROR,
sprintfWorkItem,
} from '../../constants';
export default {
components: {
GlTokenSelector,
+ GlAlert,
},
+ directives: { SafeHtml },
inject: ['isGroup'],
props: {
value: {
@@ -55,26 +62,31 @@ export default {
variables() {
return {
fullPath: this.fullPath,
- searchTerm: this.search?.title || this.search,
+ searchTerm: '',
types: this.childrenType ? [this.childrenType] : [],
- in: this.search ? 'TITLE' : undefined,
+ isNumber: false,
};
},
skip() {
return !this.searchStarted;
},
update(data) {
- return data.workspace.workItems.nodes.filter(
- (wi) => !this.childrenIds.includes(wi.id) && this.parentWorkItemId !== wi.id,
- );
+ return [
+ ...this.filterItems(data.workspace.workItemsByIid?.nodes),
+ ...this.filterItems(data.workspace.workItems.nodes),
+ ];
+ },
+ error() {
+ this.error = sprintfWorkItem(I18N_WORK_ITEM_SEARCH_ERROR, this.childrenTypeName);
},
},
},
data() {
return {
availableWorkItems: [],
- search: '',
+ query: '',
searchStarted: false,
+ error: '',
};
},
computed: {
@@ -105,7 +117,24 @@ export default {
methods: {
getIdFromGraphQLId,
setSearchKey(value) {
- this.search = value;
+ this.query = value;
+
+ // Query parameters for searching by text
+ const variables = {
+ searchTerm: value,
+ in: value ? 'TITLE' : undefined,
+ iid: null,
+ isNumber: false,
+ };
+
+ // Check if it is a number, add iid as query parameter
+ if (isNumeric(value) && value) {
+ variables.iid = value;
+ variables.isNumber = true;
+ }
+
+ // Fetch combined results of search by iid and search by title.
+ this.$apollo.queries.availableWorkItems.refetch(variables);
},
handleFocus() {
this.searchStarted = true;
@@ -129,33 +158,58 @@ export default {
}
});
},
+ formatResults(input) {
+ if (!this.query) {
+ return input;
+ }
+
+ return highlighter(`<span class="gl-text-black-normal">${input}</span>`, this.query);
+ },
+ unsetError() {
+ this.error = '';
+ },
+ filterItems(items) {
+ return (
+ items?.filter(
+ (wi) => !this.childrenIds.includes(wi.id) && this.parentWorkItemId !== wi.id,
+ ) || []
+ );
+ },
},
};
</script>
<template>
- <gl-token-selector
- ref="tokenSelector"
- v-model="workItemsToAdd"
- :dropdown-items="availableWorkItems"
- :loading="isLoading"
- :placeholder="addInputPlaceholder"
- menu-class="gl-dropdown-menu-wide dropdown-reduced-height gl-min-h-7!"
- :container-class="tokenSelectorContainerClass"
- data-testid="work-item-token-select-input"
- @text-input="debouncedSearchKeyUpdate"
- @focus="handleFocus"
- @mouseover.native="handleMouseOver"
- @mouseout.native="handleMouseOut"
- @token-add="focusInputText"
- @token-remove="focusInputText"
- @blur="handleBlur"
- >
- <template #token-content="{ token }"> {{ token.iid }} {{ token.title }} </template>
- <template #dropdown-item-content="{ dropdownItem }">
- <div class="gl-display-flex">
- <div class="gl-text-secondary gl-font-sm gl-mr-4">{{ dropdownItem.iid }}</div>
- <div class="gl-text-truncate">{{ dropdownItem.title }}</div>
- </div>
- </template>
- </gl-token-selector>
+ <div>
+ <gl-alert v-if="error" variant="danger" class="gl-mb-3" @dismiss="unsetError">
+ {{ error }}
+ </gl-alert>
+ <gl-token-selector
+ ref="tokenSelector"
+ v-model="workItemsToAdd"
+ :dropdown-items="availableWorkItems"
+ :loading="isLoading"
+ :placeholder="addInputPlaceholder"
+ menu-class="gl-dropdown-menu-wide dropdown-reduced-height gl-min-h-7!"
+ :container-class="tokenSelectorContainerClass"
+ data-testid="work-item-token-select-input"
+ @text-input="debouncedSearchKeyUpdate"
+ @focus="handleFocus"
+ @mouseover.native="handleMouseOver"
+ @mouseout.native="handleMouseOut"
+ @token-add="focusInputText"
+ @token-remove="focusInputText"
+ @blur="handleBlur"
+ >
+ <template #token-content="{ token }"> {{ token.iid }} {{ token.title }} </template>
+ <template #dropdown-item-content="{ dropdownItem }">
+ <div class="gl-display-flex">
+ <div
+ v-safe-html="formatResults(dropdownItem.iid)"
+ class="gl-text-secondary gl-font-sm gl-mr-4"
+ ></div>
+ <div v-safe-html="formatResults(dropdownItem.title)" class="gl-text-truncate"></div>
+ </div>
+ </template>
+ </gl-token-selector>
+ </div>
</template>
diff --git a/app/assets/javascripts/work_items/components/work_item_parent.vue b/app/assets/javascripts/work_items/components/work_item_parent.vue
index d0e83ba8c21..ce30f7985cf 100644
--- a/app/assets/javascripts/work_items/components/work_item_parent.vue
+++ b/app/assets/javascripts/work_items/components/work_item_parent.vue
@@ -107,6 +107,8 @@ export default {
searchTerm: this.search,
types: this.parentType,
in: this.search ? 'TITLE' : undefined,
+ iid: null,
+ isNumber: false,
};
},
skip() {
diff --git a/app/assets/javascripts/work_items/constants.js b/app/assets/javascripts/work_items/constants.js
index e2dbfeb55a5..c3d3d623515 100644
--- a/app/assets/javascripts/work_items/constants.js
+++ b/app/assets/javascripts/work_items/constants.js
@@ -94,8 +94,9 @@ export const I18N_WORK_ITEM_FETCH_AWARD_EMOJI_ERROR = s__(
export const I18N_WORK_ITEM_CREATE_BUTTON_LABEL = s__('WorkItem|Create %{workItemType}');
export const I18N_WORK_ITEM_ADD_BUTTON_LABEL = s__('WorkItem|Add %{workItemType}');
export const I18N_WORK_ITEM_ADD_MULTIPLE_BUTTON_LABEL = s__('WorkItem|Add %{workItemType}s');
-export const I18N_WORK_ITEM_SEARCH_INPUT_PLACEHOLDER = s__(
- 'WorkItem|Search existing %{workItemType}s',
+export const I18N_WORK_ITEM_SEARCH_INPUT_PLACEHOLDER = s__('WorkItem|Search existing items');
+export const I18N_WORK_ITEM_SEARCH_ERROR = s__(
+ 'WorkItem|Something went wrong while fetching the %{workItemType}. Please try again.',
);
export const I18N_WORK_ITEM_CONFIDENTIALITY_CHECKBOX_LABEL = s__(
'WorkItem|This %{workItemType} is confidential and should only be visible to team members with at least Reporter access',
diff --git a/app/assets/javascripts/work_items/graphql/group_work_items.query.graphql b/app/assets/javascripts/work_items/graphql/group_work_items.query.graphql
index 320bb4a2494..5332e21a0cb 100644
--- a/app/assets/javascripts/work_items/graphql/group_work_items.query.graphql
+++ b/app/assets/javascripts/work_items/graphql/group_work_items.query.graphql
@@ -11,8 +11,6 @@ query groupWorkItems(
id
iid
title
- state
- confidential
}
}
}
diff --git a/app/assets/javascripts/work_items/graphql/project_work_items.query.graphql b/app/assets/javascripts/work_items/graphql/project_work_items.query.graphql
index 2be436aa8c2..3aeaaa1116a 100644
--- a/app/assets/javascripts/work_items/graphql/project_work_items.query.graphql
+++ b/app/assets/javascripts/work_items/graphql/project_work_items.query.graphql
@@ -3,6 +3,8 @@ query projectWorkItems(
$fullPath: ID!
$types: [IssueType!]
$in: [IssuableSearchableField!]
+ $iid: String = null
+ $isNumber: Boolean!
) {
workspace: project(fullPath: $fullPath) {
id
@@ -11,8 +13,13 @@ query projectWorkItems(
id
iid
title
- state
- confidential
+ }
+ }
+ workItemsByIid: workItems(iid: $iid, types: $types) @include(if: $isNumber) {
+ nodes {
+ id
+ iid
+ title
}
}
}
diff --git a/app/models/concerns/use_sql_function_for_primary_key_lookups.rb b/app/models/concerns/use_sql_function_for_primary_key_lookups.rb
new file mode 100644
index 00000000000..c3ca3cfc038
--- /dev/null
+++ b/app/models/concerns/use_sql_function_for_primary_key_lookups.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module UseSqlFunctionForPrimaryKeyLookups
+ extend ActiveSupport::Concern
+
+ class_methods do
+ def find(*args)
+ return super unless Feature.enabled?(:use_sql_functions_for_primary_key_lookups, Feature.current_request)
+ return super unless args.one?
+ return super if block_given? || primary_key.nil? || scope_attributes?
+
+ return_array = false
+ id = args.first
+
+ if id.is_a?(Array)
+ return super if id.many?
+
+ return_array = true
+
+ id = id.first
+ end
+
+ return super if id.nil? || (id.is_a?(String) && !id.number?)
+
+ from_clause = "find_#{table_name}_by_id(?) #{quoted_table_name}"
+ filter_empty_row = "#{quoted_table_name}.#{connection.quote_column_name(primary_key)} IS NOT NULL"
+ query = from(from_clause).where(filter_empty_row).limit(1).to_sql
+ # Using find_by_sql so we get query cache working
+ record = find_by_sql([query, id]).first
+
+ unless record
+ message = "Couldn't find #{name} with '#{primary_key}'=#{id}"
+ raise(ActiveRecord::RecordNotFound.new(message, name, primary_key, id))
+ end
+
+ return_array ? [record] : record
+ end
+ end
+end
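
For context, the concern above reroutes single-record find calls through the find_<table>_by_id SQL functions created by a migration later in this commit, and falls back to the stock ActiveRecord path when a block, multiple ids, a nil id, or a non-numeric string is given. A minimal sketch of the resulting behaviour, assuming the use_sql_functions_for_primary_key_lookups feature flag is enabled (model and ids are illustrative; SQL and console output approximated, not taken from this diff):

    # Sketch only: assumes Project includes the concern, as it does later in this diff.
    Project.find(42)
    # Executes roughly:
    #   SELECT "projects".* FROM find_projects_by_id(?) "projects"
    #   WHERE "projects"."id" IS NOT NULL LIMIT 1
    # and raises ActiveRecord::RecordNotFound if the function returns an empty row.

    Project.find([42])           # single-element array in, single-element array out
    Project.find(1, 2)           # several ids: falls back to super
    Project.find('abc')          # non-numeric string: falls back to super
    Project.find(42) { |p| p }   # block given: falls back to super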
diff --git a/app/models/environment.rb b/app/models/environment.rb
index efdcf7174aa..c3066bbca0a 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -8,6 +8,8 @@ class Environment < ApplicationRecord
include NullifyIfBlank
include FromUnion
+ LONG_STOP = 1.week
+
self.reactive_cache_refresh_interval = 1.minute
self.reactive_cache_lifetime = 55.seconds
self.reactive_cache_hard_limit = 10.megabytes
@@ -104,6 +106,7 @@ class Environment < ApplicationRecord
scope :preload_project, -> { preload(:project) }
scope :auto_stoppable, -> (limit) { available.where('auto_stop_at < ?', Time.zone.now).limit(limit) }
scope :auto_deletable, -> (limit) { stopped.where('auto_delete_at < ?', Time.zone.now).limit(limit) }
+ scope :long_stopping, -> { with_state(:stopping).where('updated_at < ?', LONG_STOP.ago) }
scope :deployed_and_updated_before, -> (project_id, before) do
# this query joins deployments and filters out any environment that has recent deployments
@@ -322,6 +325,10 @@ class Environment < ApplicationRecord
last_deployment.try(:created_at)
end
+ def long_stopping?
+ stopping? && self.updated_at < LONG_STOP.ago
+ end
+
def ref_path
"refs/#{Repository::REF_ENVIRONMENTS}/#{slug}"
end
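
Taken together, the constant, scope, and predicate above treat an environment as stuck once it has been in the stopping state for more than a week. A brief sketch of how the auto-recover service added later in this commit consumes them (batch size copied from that service; this is not additional code in the diff):

    # Sketch only; mirrors Environments::AutoRecoverService#recover_in_batch further down.
    stuck = Environment.preload_project.select(:id, :project_id).long_stopping.limit(100)
    # Each enqueued worker then re-checks before acting:
    stuck.each { |environment| environment.long_stopping? }  # stopping? && updated_at older than LONG_STOP.ago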
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index 1f2224bba09..db40a908bd7 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -18,6 +18,7 @@ class Namespace < ApplicationRecord
include Referable
include CrossDatabaseIgnoredTables
include IgnorableColumns
+ include UseSqlFunctionForPrimaryKeyLookups
ignore_column :unlock_membership_to_ldap, remove_with: '16.7', remove_after: '2023-11-16'
diff --git a/app/models/project.rb b/app/models/project.rb
index 9fb7745d432..06c8ce57eb3 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -45,6 +45,7 @@ class Project < ApplicationRecord
include UpdatedAtFilterable
include IgnorableColumns
include CrossDatabaseIgnoredTables
+ include UseSqlFunctionForPrimaryKeyLookups
ignore_column :emails_disabled, remove_with: '16.3', remove_after: '2023-08-22'
diff --git a/app/models/user.rb b/app/models/user.rb
index d0aca79a57c..2fefa92f0a3 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -32,6 +32,7 @@ class User < MainClusterwide::ApplicationRecord
include EachBatch
include CrossDatabaseIgnoredTables
include IgnorableColumns
+ include UseSqlFunctionForPrimaryKeyLookups
ignore_column %i[
email_opted_in
diff --git a/app/services/environments/auto_recover_service.rb b/app/services/environments/auto_recover_service.rb
new file mode 100644
index 00000000000..d52f90bbe50
--- /dev/null
+++ b/app/services/environments/auto_recover_service.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Environments
+ class AutoRecoverService
+ include ::Gitlab::ExclusiveLeaseHelpers
+ include ::Gitlab::LoopHelpers
+
+ BATCH_SIZE = 100
+ LOOP_TIMEOUT = 45.minutes
+ LOOP_LIMIT = 1000
+ EXCLUSIVE_LOCK_KEY = 'environments:auto_recover:lock'
+ LOCK_TIMEOUT = 50.minutes
+
+ ##
+ # Recover environments that are stuck stopping on a GitLab instance
+ #
+ # This auto recover process cannot run for more than 45 minutes, which
+ # prevents runs triggered by consecutive `AutoStopCronWorker` cron jobs
+ # (scheduled every hour) from overlapping.
+ def execute
+ in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
+ loop_until(timeout: LOOP_TIMEOUT, limit: LOOP_LIMIT) do
+ recover_in_batch
+ end
+ end
+ end
+
+ private
+
+ def recover_in_batch
+ environments = Environment.preload_project.select(:id, :project_id).long_stopping.limit(BATCH_SIZE)
+
+ return false if environments.empty?
+
+ Environments::AutoRecoverWorker.bulk_perform_async_with_contexts(
+ environments,
+ arguments_proc: ->(environment) { environment.id },
+ context_proc: ->(environment) { { project: environment.project } }
+ )
+
+ true
+ end
+ end
+end
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 1ca099733e9..cd166750fdd 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -2883,6 +2883,15 @@
:weight: 2
:idempotent: false
:tags: []
+- :name: environments_auto_recover
+ :worker_name: Environments::AutoRecoverWorker
+ :feature_category: :continuous_delivery
+ :has_external_dependencies: false
+ :urgency: :low
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent: true
+ :tags: []
- :name: environments_auto_stop
:worker_name: Environments::AutoStopWorker
:feature_category: :continuous_delivery
diff --git a/app/workers/environments/auto_recover_worker.rb b/app/workers/environments/auto_recover_worker.rb
new file mode 100644
index 00000000000..76a807571b2
--- /dev/null
+++ b/app/workers/environments/auto_recover_worker.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Environments
+ class AutoRecoverWorker
+ include ApplicationWorker
+
+ data_consistency :delayed
+ idempotent!
+ feature_category :continuous_delivery
+
+ def perform(environment_id, _params = {})
+ Environment.find_by_id(environment_id).try do |environment|
+ next unless environment.long_stopping?
+
+ next unless environment.stop_actions.all?(&:complete?)
+
+ environment.recover_stuck_stopping
+ end
+ end
+ end
+end
diff --git a/app/workers/environments/auto_stop_cron_worker.rb b/app/workers/environments/auto_stop_cron_worker.rb
index 4d6453a85e7..26b18c406e5 100644
--- a/app/workers/environments/auto_stop_cron_worker.rb
+++ b/app/workers/environments/auto_stop_cron_worker.rb
@@ -13,6 +13,7 @@ module Environments
def perform
AutoStopService.new.execute
+ AutoRecoverService.new.execute
end
end
end
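
Putting the auto-recover service, worker, queue registrations, and cron hook above together, the existing hourly environment cleanup now also recovers stuck environments. A hedged sketch of the call path (names taken from this diff; queue wiring per all_queues.yml and sidekiq_queues.yml above):

    # Every hour the cron worker now runs both services:
    Environments::AutoStopCronWorker.new.perform
    # -> Environments::AutoStopService.new.execute     (existing behaviour)
    # -> Environments::AutoRecoverService.new.execute  (new in this commit)
    #      takes the 'environments:auto_recover:lock' lease for up to 50 minutes,
    #      loops for at most 1000 iterations or 45 minutes, and per iteration
    #      enqueues up to 100 Environments::AutoRecoverWorker jobs on the
    #      environments_auto_recover queue, one per Environment.long_stopping record.
    #    Each worker calls environment.recover_stuck_stopping once all of the
    #    environment's stop_actions are complete.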
diff --git a/config/feature_flags/development/use_sql_functions_for_primary_key_lookups.yml b/config/feature_flags/development/use_sql_functions_for_primary_key_lookups.yml
new file mode 100644
index 00000000000..c8ee2894aef
--- /dev/null
+++ b/config/feature_flags/development/use_sql_functions_for_primary_key_lookups.yml
@@ -0,0 +1,8 @@
+---
+name: use_sql_functions_for_primary_key_lookups
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135196
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/429479
+milestone: '16.6'
+type: development
+group: group::optimize
+default_enabled: false
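
The flag above gates the UseSqlFunctionForPrimaryKeyLookups concern and ships disabled. A minimal sketch of toggling it from a Rails console using the standard GitLab Feature helpers (these calls are not part of this diff):

    Feature.enable(:use_sql_functions_for_primary_key_lookups)
    Feature.enabled?(:use_sql_functions_for_primary_key_lookups, Feature.current_request)  # => true
    Feature.disable(:use_sql_functions_for_primary_key_lookups)  # revert to the default lookup path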
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index b9bd6e6c740..a35e5492914 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -277,6 +277,8 @@
- 2
- - emails_on_push
- 2
+- - environments_auto_recover
+ - 1
- - environments_auto_stop
- 1
- - environments_canary_ingress_update
diff --git a/db/docs/compliance_framework_security_policies.yml b/db/docs/compliance_framework_security_policies.yml
new file mode 100644
index 00000000000..9f16b703a9d
--- /dev/null
+++ b/db/docs/compliance_framework_security_policies.yml
@@ -0,0 +1,10 @@
+---
+table_name: compliance_framework_security_policies
+classes:
+- ComplianceManagement::ComplianceFramework::SecurityPolicy
+feature_categories:
+- security_policy_management
+description: Persists the relation between compliance_frameworks and security_orchestration_policy_configurations
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135291
+milestone: '16.6'
+gitlab_schema: gitlab_main
diff --git a/db/migrate/20231025123238_create_compliance_framework_security_policies.rb b/db/migrate/20231025123238_create_compliance_framework_security_policies.rb
new file mode 100644
index 00000000000..1cf970e0d6c
--- /dev/null
+++ b/db/migrate/20231025123238_create_compliance_framework_security_policies.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+class CreateComplianceFrameworkSecurityPolicies < Gitlab::Database::Migration[2.2]
+ UNIQUE_INDEX_NAME = 'unique_compliance_framework_security_policies_framework_id'
+ POLICY_CONFIGURATION_INDEX_NAME = 'idx_compliance_security_policies_on_policy_configuration_id'
+
+ milestone '16.6'
+ enable_lock_retries!
+
+ def change
+ create_table :compliance_framework_security_policies do |t|
+ t.bigint :framework_id, null: false
+ t.bigint :policy_configuration_id, null: false
+ t.timestamps_with_timezone null: false
+ t.integer :policy_index, limit: 2, null: false
+
+ t.index :policy_configuration_id, name: POLICY_CONFIGURATION_INDEX_NAME
+ t.index [:framework_id, :policy_configuration_id, :policy_index], unique: true, name: UNIQUE_INDEX_NAME
+ end
+ end
+end
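
For orientation, the table created above backs the ComplianceManagement::ComplianceFramework::SecurityPolicy class named in the db/docs entry earlier in this diff. A hypothetical sketch of such a model (the class itself is not part of this commit, and the association class names are assumptions inferred from the two foreign keys added below):

    # Hypothetical sketch only; not code from this commit.
    module ComplianceManagement
      module ComplianceFramework
        class SecurityPolicy < ApplicationRecord
          self.table_name = 'compliance_framework_security_policies'

          # Target class names are assumptions based on the referenced tables.
          belongs_to :framework, class_name: 'ComplianceManagement::Framework'
          belongs_to :policy_configuration,
            class_name: 'Security::OrchestrationPolicyConfiguration'

          validates :policy_index, presence: true
        end
      end
    end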
diff --git a/db/migrate/20231026050554_add_functions_for_primary_key_lookup.rb b/db/migrate/20231026050554_add_functions_for_primary_key_lookup.rb
new file mode 100644
index 00000000000..ecf32f74e4b
--- /dev/null
+++ b/db/migrate/20231026050554_add_functions_for_primary_key_lookup.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+class AddFunctionsForPrimaryKeyLookup < Gitlab::Database::Migration[2.2]
+ milestone '16.6'
+
+ TABLES = %i[users namespaces projects].freeze
+
+ def up
+ TABLES.each do |table|
+ execute <<~SQL
+ CREATE OR REPLACE FUNCTION find_#{table}_by_id(#{table}_id bigint)
+ RETURNS #{table} AS $$
+ BEGIN
+ return (SELECT #{table} FROM #{table} WHERE id = #{table}_id LIMIT 1);
+ END;
+ $$ LANGUAGE plpgsql STABLE PARALLEL SAFE COST 1;
+ SQL
+ end
+ end
+
+ def down
+ TABLES.each do |table|
+ execute "DROP FUNCTION IF EXISTS find_#{table}_by_id"
+ end
+ end
+end
diff --git a/db/migrate/20231031200433_add_framework_fk_to_compliance_framework_security_policies.rb b/db/migrate/20231031200433_add_framework_fk_to_compliance_framework_security_policies.rb
new file mode 100644
index 00000000000..bb7fa924d15
--- /dev/null
+++ b/db/migrate/20231031200433_add_framework_fk_to_compliance_framework_security_policies.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddFrameworkFkToComplianceFrameworkSecurityPolicies < Gitlab::Database::Migration[2.2]
+ milestone '16.6'
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_foreign_key :compliance_framework_security_policies,
+ :compliance_management_frameworks,
+ column: :framework_id,
+ on_delete: :cascade
+ end
+
+ def down
+ with_lock_retries do
+ remove_foreign_key :compliance_framework_security_policies, column: :framework_id
+ end
+ end
+end
diff --git a/db/migrate/20231031200645_add_policy_configuration_fk_to_compliance_framework_security_policies.rb b/db/migrate/20231031200645_add_policy_configuration_fk_to_compliance_framework_security_policies.rb
new file mode 100644
index 00000000000..cf6419c5128
--- /dev/null
+++ b/db/migrate/20231031200645_add_policy_configuration_fk_to_compliance_framework_security_policies.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddPolicyConfigurationFkToComplianceFrameworkSecurityPolicies < Gitlab::Database::Migration[2.2]
+ milestone '16.6'
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_foreign_key :compliance_framework_security_policies,
+ :security_orchestration_policy_configurations,
+ column: :policy_configuration_id,
+ on_delete: :cascade
+ end
+
+ def down
+ with_lock_retries do
+ remove_foreign_key :compliance_framework_security_policies, column: :policy_configuration_id
+ end
+ end
+end
diff --git a/db/schema_migrations/20231025123238 b/db/schema_migrations/20231025123238
new file mode 100644
index 00000000000..e93a7a4d3fb
--- /dev/null
+++ b/db/schema_migrations/20231025123238
@@ -0,0 +1 @@
+8a34911b504b3752071aa2f6f1eb8dbc6b91540cceb69881c12c89adb48dcc78 \ No newline at end of file
diff --git a/db/schema_migrations/20231026050554 b/db/schema_migrations/20231026050554
new file mode 100644
index 00000000000..d99dc675ab6
--- /dev/null
+++ b/db/schema_migrations/20231026050554
@@ -0,0 +1 @@
+e71f80b77121722c75125e59ec2e9c3df323b34a107304447948bed05804224c \ No newline at end of file
diff --git a/db/schema_migrations/20231031200433 b/db/schema_migrations/20231031200433
new file mode 100644
index 00000000000..1093e9edabb
--- /dev/null
+++ b/db/schema_migrations/20231031200433
@@ -0,0 +1 @@
+409134f3d8980c647bd9ecd73f6f56729c7cf6f83059b3fd32d5665c36ab1a92 \ No newline at end of file
diff --git a/db/schema_migrations/20231031200645 b/db/schema_migrations/20231031200645
new file mode 100644
index 00000000000..4d29fbfd996
--- /dev/null
+++ b/db/schema_migrations/20231031200645
@@ -0,0 +1 @@
+09f38031c5ae4a88eae80d24285163b45ee6cbc96903a4f54dc0552cb11d12a4 \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 5a618d6d636..8582b819feb 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -35,6 +35,248 @@ RETURN NULL;
END
$$;
+CREATE TABLE namespaces (
+ id integer NOT NULL,
+ name character varying NOT NULL,
+ path character varying NOT NULL,
+ owner_id integer,
+ created_at timestamp without time zone,
+ updated_at timestamp without time zone,
+ type character varying DEFAULT 'User'::character varying NOT NULL,
+ description character varying DEFAULT ''::character varying NOT NULL,
+ avatar character varying,
+ membership_lock boolean DEFAULT false,
+ share_with_group_lock boolean DEFAULT false,
+ visibility_level integer DEFAULT 20 NOT NULL,
+ request_access_enabled boolean DEFAULT true NOT NULL,
+ ldap_sync_status character varying DEFAULT 'ready'::character varying NOT NULL,
+ ldap_sync_error character varying,
+ ldap_sync_last_update_at timestamp without time zone,
+ ldap_sync_last_successful_update_at timestamp without time zone,
+ ldap_sync_last_sync_at timestamp without time zone,
+ description_html text,
+ lfs_enabled boolean,
+ parent_id integer,
+ shared_runners_minutes_limit integer,
+ repository_size_limit bigint,
+ require_two_factor_authentication boolean DEFAULT false NOT NULL,
+ two_factor_grace_period integer DEFAULT 48 NOT NULL,
+ cached_markdown_version integer,
+ project_creation_level integer,
+ runners_token character varying,
+ file_template_project_id integer,
+ saml_discovery_token character varying,
+ runners_token_encrypted character varying,
+ custom_project_templates_group_id integer,
+ auto_devops_enabled boolean,
+ extra_shared_runners_minutes_limit integer,
+ last_ci_minutes_notification_at timestamp with time zone,
+ last_ci_minutes_usage_notification_level integer,
+ subgroup_creation_level integer DEFAULT 1,
+ emails_disabled boolean,
+ max_pages_size integer,
+ max_artifacts_size integer,
+ mentions_disabled boolean,
+ default_branch_protection smallint,
+ unlock_membership_to_ldap boolean,
+ max_personal_access_token_lifetime integer,
+ push_rule_id bigint,
+ shared_runners_enabled boolean DEFAULT true NOT NULL,
+ allow_descendants_override_disabled_shared_runners boolean DEFAULT false NOT NULL,
+ traversal_ids integer[] DEFAULT '{}'::integer[] NOT NULL,
+ organization_id bigint DEFAULT 1
+);
+
+CREATE FUNCTION find_namespaces_by_id(namespaces_id bigint) RETURNS namespaces
+ LANGUAGE plpgsql STABLE COST 1 PARALLEL SAFE
+ AS $$
+BEGIN
+ return (SELECT namespaces FROM namespaces WHERE id = namespaces_id LIMIT 1);
+END;
+$$;
+
+CREATE TABLE projects (
+ id integer NOT NULL,
+ name character varying,
+ path character varying,
+ description text,
+ created_at timestamp without time zone,
+ updated_at timestamp without time zone,
+ creator_id integer,
+ namespace_id integer NOT NULL,
+ last_activity_at timestamp without time zone,
+ import_url character varying,
+ visibility_level integer DEFAULT 0 NOT NULL,
+ archived boolean DEFAULT false NOT NULL,
+ avatar character varying,
+ merge_requests_template text,
+ star_count integer DEFAULT 0 NOT NULL,
+ merge_requests_rebase_enabled boolean DEFAULT false,
+ import_type character varying,
+ import_source character varying,
+ approvals_before_merge integer DEFAULT 0 NOT NULL,
+ reset_approvals_on_push boolean DEFAULT true,
+ merge_requests_ff_only_enabled boolean DEFAULT false,
+ issues_template text,
+ mirror boolean DEFAULT false NOT NULL,
+ mirror_last_update_at timestamp without time zone,
+ mirror_last_successful_update_at timestamp without time zone,
+ mirror_user_id integer,
+ shared_runners_enabled boolean DEFAULT true NOT NULL,
+ runners_token character varying,
+ build_allow_git_fetch boolean DEFAULT true NOT NULL,
+ build_timeout integer DEFAULT 3600 NOT NULL,
+ mirror_trigger_builds boolean DEFAULT false NOT NULL,
+ pending_delete boolean DEFAULT false,
+ public_builds boolean DEFAULT true NOT NULL,
+ last_repository_check_failed boolean,
+ last_repository_check_at timestamp without time zone,
+ only_allow_merge_if_pipeline_succeeds boolean DEFAULT false NOT NULL,
+ has_external_issue_tracker boolean,
+ repository_storage character varying DEFAULT 'default'::character varying NOT NULL,
+ repository_read_only boolean,
+ request_access_enabled boolean DEFAULT true NOT NULL,
+ has_external_wiki boolean,
+ ci_config_path character varying,
+ lfs_enabled boolean,
+ description_html text,
+ only_allow_merge_if_all_discussions_are_resolved boolean,
+ repository_size_limit bigint,
+ printing_merge_request_link_enabled boolean DEFAULT true NOT NULL,
+ auto_cancel_pending_pipelines integer DEFAULT 1 NOT NULL,
+ service_desk_enabled boolean DEFAULT true,
+ cached_markdown_version integer,
+ delete_error text,
+ last_repository_updated_at timestamp without time zone,
+ disable_overriding_approvers_per_merge_request boolean,
+ storage_version smallint,
+ resolve_outdated_diff_discussions boolean,
+ remote_mirror_available_overridden boolean,
+ only_mirror_protected_branches boolean,
+ pull_mirror_available_overridden boolean,
+ jobs_cache_index integer,
+ external_authorization_classification_label character varying,
+ mirror_overwrites_diverged_branches boolean,
+ pages_https_only boolean DEFAULT true,
+ external_webhook_token character varying,
+ packages_enabled boolean,
+ merge_requests_author_approval boolean DEFAULT false,
+ pool_repository_id bigint,
+ runners_token_encrypted character varying,
+ bfg_object_map character varying,
+ detected_repository_languages boolean,
+ merge_requests_disable_committers_approval boolean,
+ require_password_to_approve boolean,
+ emails_disabled boolean,
+ max_pages_size integer,
+ max_artifacts_size integer,
+ pull_mirror_branch_prefix character varying(50),
+ remove_source_branch_after_merge boolean,
+ marked_for_deletion_at date,
+ marked_for_deletion_by_user_id integer,
+ autoclose_referenced_issues boolean,
+ suggestion_commit_message character varying(255),
+ project_namespace_id bigint,
+ hidden boolean DEFAULT false NOT NULL,
+ organization_id bigint DEFAULT 1
+);
+
+CREATE FUNCTION find_projects_by_id(projects_id bigint) RETURNS projects
+ LANGUAGE plpgsql STABLE COST 1 PARALLEL SAFE
+ AS $$
+BEGIN
+ return (SELECT projects FROM projects WHERE id = projects_id LIMIT 1);
+END;
+$$;
+
+CREATE TABLE users (
+ id integer NOT NULL,
+ email character varying DEFAULT ''::character varying NOT NULL,
+ encrypted_password character varying DEFAULT ''::character varying NOT NULL,
+ reset_password_token character varying,
+ reset_password_sent_at timestamp without time zone,
+ remember_created_at timestamp without time zone,
+ sign_in_count integer DEFAULT 0,
+ current_sign_in_at timestamp without time zone,
+ last_sign_in_at timestamp without time zone,
+ current_sign_in_ip character varying,
+ last_sign_in_ip character varying,
+ created_at timestamp without time zone,
+ updated_at timestamp without time zone,
+ name character varying,
+ admin boolean DEFAULT false NOT NULL,
+ projects_limit integer NOT NULL,
+ failed_attempts integer DEFAULT 0,
+ locked_at timestamp without time zone,
+ username character varying,
+ can_create_group boolean DEFAULT true NOT NULL,
+ can_create_team boolean DEFAULT true NOT NULL,
+ state character varying,
+ color_scheme_id integer DEFAULT 1 NOT NULL,
+ password_expires_at timestamp without time zone,
+ created_by_id integer,
+ last_credential_check_at timestamp without time zone,
+ avatar character varying,
+ confirmation_token character varying,
+ confirmed_at timestamp without time zone,
+ confirmation_sent_at timestamp without time zone,
+ unconfirmed_email character varying,
+ hide_no_ssh_key boolean DEFAULT false,
+ admin_email_unsubscribed_at timestamp without time zone,
+ notification_email character varying,
+ hide_no_password boolean DEFAULT false,
+ password_automatically_set boolean DEFAULT false,
+ encrypted_otp_secret character varying,
+ encrypted_otp_secret_iv character varying,
+ encrypted_otp_secret_salt character varying,
+ otp_required_for_login boolean DEFAULT false NOT NULL,
+ otp_backup_codes text,
+ public_email character varying,
+ dashboard integer DEFAULT 0,
+ project_view integer DEFAULT 2,
+ consumed_timestep integer,
+ layout integer DEFAULT 0,
+ hide_project_limit boolean DEFAULT false,
+ note text,
+ unlock_token character varying,
+ otp_grace_period_started_at timestamp without time zone,
+ external boolean DEFAULT false,
+ incoming_email_token character varying,
+ auditor boolean DEFAULT false NOT NULL,
+ require_two_factor_authentication_from_group boolean DEFAULT false NOT NULL,
+ two_factor_grace_period integer DEFAULT 48 NOT NULL,
+ last_activity_on date,
+ notified_of_own_activity boolean DEFAULT false,
+ preferred_language character varying,
+ theme_id smallint,
+ accepted_term_id integer,
+ feed_token character varying,
+ private_profile boolean DEFAULT false NOT NULL,
+ roadmap_layout smallint,
+ include_private_contributions boolean,
+ commit_email character varying,
+ group_view integer,
+ managing_group_id integer,
+ first_name character varying(255),
+ last_name character varying(255),
+ static_object_token character varying(255),
+ role smallint,
+ user_type smallint DEFAULT 0,
+ static_object_token_encrypted text,
+ otp_secret_expires_at timestamp with time zone,
+ onboarding_in_progress boolean DEFAULT false NOT NULL,
+ CONSTRAINT check_0dd5948e38 CHECK ((user_type IS NOT NULL)),
+ CONSTRAINT check_7bde697e8e CHECK ((char_length(static_object_token_encrypted) <= 255))
+);
+
+CREATE FUNCTION find_users_by_id(users_id bigint) RETURNS users
+ LANGUAGE plpgsql STABLE COST 1 PARALLEL SAFE
+ AS $$
+BEGIN
+ return (SELECT users FROM users WHERE id = users_id LIMIT 1);
+END;
+$$;
+
CREATE FUNCTION gitlab_schema_prevent_write() RETURNS trigger
LANGUAGE plpgsql
AS $$
@@ -14781,6 +15023,24 @@ CREATE SEQUENCE commit_user_mentions_id_seq
ALTER SEQUENCE commit_user_mentions_id_seq OWNED BY commit_user_mentions.id;
+CREATE TABLE compliance_framework_security_policies (
+ id bigint NOT NULL,
+ framework_id bigint NOT NULL,
+ policy_configuration_id bigint NOT NULL,
+ created_at timestamp with time zone NOT NULL,
+ updated_at timestamp with time zone NOT NULL,
+ policy_index smallint NOT NULL
+);
+
+CREATE SEQUENCE compliance_framework_security_policies_id_seq
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+ALTER SEQUENCE compliance_framework_security_policies_id_seq OWNED BY compliance_framework_security_policies.id;
+
CREATE TABLE compliance_management_frameworks (
id bigint NOT NULL,
name text NOT NULL,
@@ -19233,58 +19493,6 @@ CREATE SEQUENCE namespace_statistics_id_seq
ALTER SEQUENCE namespace_statistics_id_seq OWNED BY namespace_statistics.id;
-CREATE TABLE namespaces (
- id integer NOT NULL,
- name character varying NOT NULL,
- path character varying NOT NULL,
- owner_id integer,
- created_at timestamp without time zone,
- updated_at timestamp without time zone,
- type character varying DEFAULT 'User'::character varying NOT NULL,
- description character varying DEFAULT ''::character varying NOT NULL,
- avatar character varying,
- membership_lock boolean DEFAULT false,
- share_with_group_lock boolean DEFAULT false,
- visibility_level integer DEFAULT 20 NOT NULL,
- request_access_enabled boolean DEFAULT true NOT NULL,
- ldap_sync_status character varying DEFAULT 'ready'::character varying NOT NULL,
- ldap_sync_error character varying,
- ldap_sync_last_update_at timestamp without time zone,
- ldap_sync_last_successful_update_at timestamp without time zone,
- ldap_sync_last_sync_at timestamp without time zone,
- description_html text,
- lfs_enabled boolean,
- parent_id integer,
- shared_runners_minutes_limit integer,
- repository_size_limit bigint,
- require_two_factor_authentication boolean DEFAULT false NOT NULL,
- two_factor_grace_period integer DEFAULT 48 NOT NULL,
- cached_markdown_version integer,
- project_creation_level integer,
- runners_token character varying,
- file_template_project_id integer,
- saml_discovery_token character varying,
- runners_token_encrypted character varying,
- custom_project_templates_group_id integer,
- auto_devops_enabled boolean,
- extra_shared_runners_minutes_limit integer,
- last_ci_minutes_notification_at timestamp with time zone,
- last_ci_minutes_usage_notification_level integer,
- subgroup_creation_level integer DEFAULT 1,
- emails_disabled boolean,
- max_pages_size integer,
- max_artifacts_size integer,
- mentions_disabled boolean,
- default_branch_protection smallint,
- unlock_membership_to_ldap boolean,
- max_personal_access_token_lifetime integer,
- push_rule_id bigint,
- shared_runners_enabled boolean DEFAULT true NOT NULL,
- allow_descendants_override_disabled_shared_runners boolean DEFAULT false NOT NULL,
- traversal_ids integer[] DEFAULT '{}'::integer[] NOT NULL,
- organization_id bigint DEFAULT 1
-);
-
CREATE SEQUENCE namespaces_id_seq
START WITH 1
INCREMENT BY 1
@@ -21942,92 +22150,6 @@ CREATE SEQUENCE project_wiki_repositories_id_seq
ALTER SEQUENCE project_wiki_repositories_id_seq OWNED BY project_wiki_repositories.id;
-CREATE TABLE projects (
- id integer NOT NULL,
- name character varying,
- path character varying,
- description text,
- created_at timestamp without time zone,
- updated_at timestamp without time zone,
- creator_id integer,
- namespace_id integer NOT NULL,
- last_activity_at timestamp without time zone,
- import_url character varying,
- visibility_level integer DEFAULT 0 NOT NULL,
- archived boolean DEFAULT false NOT NULL,
- avatar character varying,
- merge_requests_template text,
- star_count integer DEFAULT 0 NOT NULL,
- merge_requests_rebase_enabled boolean DEFAULT false,
- import_type character varying,
- import_source character varying,
- approvals_before_merge integer DEFAULT 0 NOT NULL,
- reset_approvals_on_push boolean DEFAULT true,
- merge_requests_ff_only_enabled boolean DEFAULT false,
- issues_template text,
- mirror boolean DEFAULT false NOT NULL,
- mirror_last_update_at timestamp without time zone,
- mirror_last_successful_update_at timestamp without time zone,
- mirror_user_id integer,
- shared_runners_enabled boolean DEFAULT true NOT NULL,
- runners_token character varying,
- build_allow_git_fetch boolean DEFAULT true NOT NULL,
- build_timeout integer DEFAULT 3600 NOT NULL,
- mirror_trigger_builds boolean DEFAULT false NOT NULL,
- pending_delete boolean DEFAULT false,
- public_builds boolean DEFAULT true NOT NULL,
- last_repository_check_failed boolean,
- last_repository_check_at timestamp without time zone,
- only_allow_merge_if_pipeline_succeeds boolean DEFAULT false NOT NULL,
- has_external_issue_tracker boolean,
- repository_storage character varying DEFAULT 'default'::character varying NOT NULL,
- repository_read_only boolean,
- request_access_enabled boolean DEFAULT true NOT NULL,
- has_external_wiki boolean,
- ci_config_path character varying,
- lfs_enabled boolean,
- description_html text,
- only_allow_merge_if_all_discussions_are_resolved boolean,
- repository_size_limit bigint,
- printing_merge_request_link_enabled boolean DEFAULT true NOT NULL,
- auto_cancel_pending_pipelines integer DEFAULT 1 NOT NULL,
- service_desk_enabled boolean DEFAULT true,
- cached_markdown_version integer,
- delete_error text,
- last_repository_updated_at timestamp without time zone,
- disable_overriding_approvers_per_merge_request boolean,
- storage_version smallint,
- resolve_outdated_diff_discussions boolean,
- remote_mirror_available_overridden boolean,
- only_mirror_protected_branches boolean,
- pull_mirror_available_overridden boolean,
- jobs_cache_index integer,
- external_authorization_classification_label character varying,
- mirror_overwrites_diverged_branches boolean,
- pages_https_only boolean DEFAULT true,
- external_webhook_token character varying,
- packages_enabled boolean,
- merge_requests_author_approval boolean DEFAULT false,
- pool_repository_id bigint,
- runners_token_encrypted character varying,
- bfg_object_map character varying,
- detected_repository_languages boolean,
- merge_requests_disable_committers_approval boolean,
- require_password_to_approve boolean,
- emails_disabled boolean,
- max_pages_size integer,
- max_artifacts_size integer,
- pull_mirror_branch_prefix character varying(50),
- remove_source_branch_after_merge boolean,
- marked_for_deletion_at date,
- marked_for_deletion_by_user_id integer,
- autoclose_referenced_issues boolean,
- suggestion_commit_message character varying(255),
- project_namespace_id bigint,
- hidden boolean DEFAULT false NOT NULL,
- organization_id bigint DEFAULT 1
-);
-
CREATE SEQUENCE projects_id_seq
START WITH 1
INCREMENT BY 1
@@ -24443,86 +24565,6 @@ CREATE SEQUENCE user_synced_attributes_metadata_id_seq
ALTER SEQUENCE user_synced_attributes_metadata_id_seq OWNED BY user_synced_attributes_metadata.id;
-CREATE TABLE users (
- id integer NOT NULL,
- email character varying DEFAULT ''::character varying NOT NULL,
- encrypted_password character varying DEFAULT ''::character varying NOT NULL,
- reset_password_token character varying,
- reset_password_sent_at timestamp without time zone,
- remember_created_at timestamp without time zone,
- sign_in_count integer DEFAULT 0,
- current_sign_in_at timestamp without time zone,
- last_sign_in_at timestamp without time zone,
- current_sign_in_ip character varying,
- last_sign_in_ip character varying,
- created_at timestamp without time zone,
- updated_at timestamp without time zone,
- name character varying,
- admin boolean DEFAULT false NOT NULL,
- projects_limit integer NOT NULL,
- failed_attempts integer DEFAULT 0,
- locked_at timestamp without time zone,
- username character varying,
- can_create_group boolean DEFAULT true NOT NULL,
- can_create_team boolean DEFAULT true NOT NULL,
- state character varying,
- color_scheme_id integer DEFAULT 1 NOT NULL,
- password_expires_at timestamp without time zone,
- created_by_id integer,
- last_credential_check_at timestamp without time zone,
- avatar character varying,
- confirmation_token character varying,
- confirmed_at timestamp without time zone,
- confirmation_sent_at timestamp without time zone,
- unconfirmed_email character varying,
- hide_no_ssh_key boolean DEFAULT false,
- admin_email_unsubscribed_at timestamp without time zone,
- notification_email character varying,
- hide_no_password boolean DEFAULT false,
- password_automatically_set boolean DEFAULT false,
- encrypted_otp_secret character varying,
- encrypted_otp_secret_iv character varying,
- encrypted_otp_secret_salt character varying,
- otp_required_for_login boolean DEFAULT false NOT NULL,
- otp_backup_codes text,
- public_email character varying,
- dashboard integer DEFAULT 0,
- project_view integer DEFAULT 2,
- consumed_timestep integer,
- layout integer DEFAULT 0,
- hide_project_limit boolean DEFAULT false,
- note text,
- unlock_token character varying,
- otp_grace_period_started_at timestamp without time zone,
- external boolean DEFAULT false,
- incoming_email_token character varying,
- auditor boolean DEFAULT false NOT NULL,
- require_two_factor_authentication_from_group boolean DEFAULT false NOT NULL,
- two_factor_grace_period integer DEFAULT 48 NOT NULL,
- last_activity_on date,
- notified_of_own_activity boolean DEFAULT false,
- preferred_language character varying,
- theme_id smallint,
- accepted_term_id integer,
- feed_token character varying,
- private_profile boolean DEFAULT false NOT NULL,
- roadmap_layout smallint,
- include_private_contributions boolean,
- commit_email character varying,
- group_view integer,
- managing_group_id integer,
- first_name character varying(255),
- last_name character varying(255),
- static_object_token character varying(255),
- role smallint,
- user_type smallint DEFAULT 0,
- static_object_token_encrypted text,
- otp_secret_expires_at timestamp with time zone,
- onboarding_in_progress boolean DEFAULT false NOT NULL,
- CONSTRAINT check_0dd5948e38 CHECK ((user_type IS NOT NULL)),
- CONSTRAINT check_7bde697e8e CHECK ((char_length(static_object_token_encrypted) <= 255))
-);
-
CREATE SEQUENCE users_id_seq
START WITH 1
INCREMENT BY 1
@@ -26280,6 +26322,8 @@ ALTER TABLE ONLY clusters_kubernetes_namespaces ALTER COLUMN id SET DEFAULT next
ALTER TABLE ONLY commit_user_mentions ALTER COLUMN id SET DEFAULT nextval('commit_user_mentions_id_seq'::regclass);
+ALTER TABLE ONLY compliance_framework_security_policies ALTER COLUMN id SET DEFAULT nextval('compliance_framework_security_policies_id_seq'::regclass);
+
ALTER TABLE ONLY compliance_management_frameworks ALTER COLUMN id SET DEFAULT nextval('compliance_management_frameworks_id_seq'::regclass);
ALTER TABLE ONLY container_registry_protection_rules ALTER COLUMN id SET DEFAULT nextval('container_registry_protection_rules_id_seq'::regclass);
@@ -28291,6 +28335,9 @@ ALTER TABLE ONLY clusters
ALTER TABLE ONLY commit_user_mentions
ADD CONSTRAINT commit_user_mentions_pkey PRIMARY KEY (id);
+ALTER TABLE ONLY compliance_framework_security_policies
+ ADD CONSTRAINT compliance_framework_security_policies_pkey PRIMARY KEY (id);
+
ALTER TABLE ONLY compliance_management_frameworks
ADD CONSTRAINT compliance_management_frameworks_pkey PRIMARY KEY (id);
@@ -31169,6 +31216,8 @@ CREATE INDEX idx_build_artifacts_size_refreshes_state_updated_at ON project_buil
CREATE INDEX idx_ci_pipelines_artifacts_locked ON ci_pipelines USING btree (ci_ref_id, id) WHERE (locked = 1);
+CREATE INDEX idx_compliance_security_policies_on_policy_configuration_id ON compliance_framework_security_policies USING btree (policy_configuration_id);
+
CREATE INDEX idx_container_exp_policies_on_project_id_next_run_at ON container_expiration_policies USING btree (project_id, next_run_at) WHERE (enabled = true);
CREATE INDEX idx_container_exp_policies_on_project_id_next_run_at_enabled ON container_expiration_policies USING btree (project_id, next_run_at, enabled);
@@ -35155,6 +35204,8 @@ CREATE UNIQUE INDEX unique_batched_background_migrations_queued_migration_versio
CREATE UNIQUE INDEX unique_ci_builds_token_encrypted_and_partition_id ON ci_builds USING btree (token_encrypted, partition_id) WHERE (token_encrypted IS NOT NULL);
+CREATE UNIQUE INDEX unique_compliance_framework_security_policies_framework_id ON compliance_framework_security_policies USING btree (framework_id, policy_configuration_id, policy_index);
+
CREATE UNIQUE INDEX unique_external_audit_event_destination_namespace_id_and_name ON audit_events_external_audit_event_destinations USING btree (namespace_id, name);
CREATE UNIQUE INDEX unique_google_cloud_logging_configurations_on_namespace_id ON audit_events_google_cloud_logging_configurations USING btree (namespace_id, google_project_id_name, log_id_name);
@@ -37667,6 +37718,9 @@ ALTER TABLE ONLY issues
ALTER TABLE ONLY protected_tag_create_access_levels
ADD CONSTRAINT fk_b4eb82fe3c FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
+ALTER TABLE ONLY compliance_framework_security_policies
+ ADD CONSTRAINT fk_b5df066d8f FOREIGN KEY (framework_id) REFERENCES compliance_management_frameworks(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY catalog_resource_versions
ADD CONSTRAINT fk_b670eae96b FOREIGN KEY (catalog_resource_id) REFERENCES catalog_resources(id) ON DELETE CASCADE;
@@ -37796,6 +37850,9 @@ ALTER TABLE ONLY todos
ALTER TABLE ONLY dast_site_profiles_pipelines
ADD CONSTRAINT fk_cf05cf8fe1 FOREIGN KEY (dast_site_profile_id) REFERENCES dast_site_profiles(id) ON DELETE CASCADE;
+ALTER TABLE ONLY compliance_framework_security_policies
+ ADD CONSTRAINT fk_cf3c0ac207 FOREIGN KEY (policy_configuration_id) REFERENCES security_orchestration_policy_configurations(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY issue_assignment_events
ADD CONSTRAINT fk_cfd2073177 FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE;
diff --git a/lib/gitlab/database/dictionary.rb b/lib/gitlab/database/dictionary.rb
new file mode 100644
index 00000000000..7b0c8560a26
--- /dev/null
+++ b/lib/gitlab/database/dictionary.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ class Dictionary
+ def initialize(file_path)
+ @file_path = file_path
+ @data = YAML.load_file(file_path)
+ end
+
+ def name_and_schema
+ [key_name, gitlab_schema.to_sym]
+ end
+
+ def table_name
+ data['table_name']
+ end
+
+ def view_name
+ data['view_name']
+ end
+
+ def milestone
+ data['milestone']
+ end
+
+ def gitlab_schema
+ data['gitlab_schema']
+ end
+
+ def schema?(schema_name)
+ gitlab_schema == schema_name.to_s
+ end
+
+ def key_name
+ table_name || view_name
+ end
+
+ def validate!
+ return true unless gitlab_schema.nil?
+
+ raise(
+ GitlabSchema::UnknownSchemaError,
+ "#{file_path} must specify a valid gitlab_schema for #{key_name}. " \
+ "See #{help_page_url}"
+ )
+ end
+
+ private
+
+ attr_reader :file_path, :data
+
+ def help_page_url
+ # rubocop:disable Gitlab/DocUrl -- link directly to docs.gitlab.com, always
+ 'https://docs.gitlab.com/ee/development/database/database_dictionary.html'
+ # rubocop:enable Gitlab/DocUrl
+ end
+ end
+ end
+end
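
The class above extracts the per-file YAML handling that GitlabSchema.build_dictionary previously did inline (refactored in the next file). A brief usage sketch against the db/docs entry added earlier in this commit (return values inferred from that YAML and shown as comments):

    dictionary = Gitlab::Database::Dictionary.new('db/docs/compliance_framework_security_policies.yml')
    dictionary.table_name       # => "compliance_framework_security_policies"
    dictionary.gitlab_schema    # => "gitlab_main"
    dictionary.name_and_schema  # => ["compliance_framework_security_policies", :gitlab_main]
    dictionary.validate!        # => true; raises GitlabSchema::UnknownSchemaError when gitlab_schema is missing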
diff --git a/lib/gitlab/database/gitlab_schema.rb b/lib/gitlab/database/gitlab_schema.rb
index e885df52dfd..ecb45622061 100644
--- a/lib/gitlab/database/gitlab_schema.rb
+++ b/lib/gitlab/database/gitlab_schema.rb
@@ -139,19 +139,19 @@ module Gitlab
end
def self.deleted_tables_to_schema
- @deleted_tables_to_schema ||= self.build_dictionary('deleted_tables').to_h
+ @deleted_tables_to_schema ||= self.build_dictionary('deleted_tables').map(&:name_and_schema).to_h
end
def self.deleted_views_to_schema
- @deleted_views_to_schema ||= self.build_dictionary('deleted_views').to_h
+ @deleted_views_to_schema ||= self.build_dictionary('deleted_views').map(&:name_and_schema).to_h
end
def self.tables_to_schema
- @tables_to_schema ||= self.build_dictionary('').to_h
+ @tables_to_schema ||= self.build_dictionary('').map(&:name_and_schema).to_h
end
def self.views_to_schema
- @views_to_schema ||= self.build_dictionary('views').to_h
+ @views_to_schema ||= self.build_dictionary('views').map(&:name_and_schema).to_h
end
def self.schema_names
@@ -160,21 +160,9 @@ module Gitlab
def self.build_dictionary(scope)
Dir.glob(dictionary_path_globs(scope)).map do |file_path|
- data = YAML.load_file(file_path)
-
- key_name = data['table_name'] || data['view_name']
-
- # rubocop:disable Gitlab/DocUrl
- if data['gitlab_schema'].nil?
- raise(
- UnknownSchemaError,
- "#{file_path} must specify a valid gitlab_schema for #{key_name}. " \
- "See https://docs.gitlab.com/ee/development/database/database_dictionary.html"
- )
- end
- # rubocop:enable Gitlab/DocUrl
-
- [key_name, data['gitlab_schema'].to_sym]
+ dictionary = Dictionary.new(file_path)
+ dictionary.validate!
+ dictionary
end
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 1a0a5c3df73..67835205733 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -54522,7 +54522,7 @@ msgstr ""
msgid "WorkItem|Save and overwrite"
msgstr ""
-msgid "WorkItem|Search existing %{workItemType}s"
+msgid "WorkItem|Search existing items"
msgstr ""
msgid "WorkItem|Select type"
@@ -54582,6 +54582,9 @@ msgstr ""
msgid "WorkItem|Something went wrong while fetching milestones. Please try again."
msgstr ""
+msgid "WorkItem|Something went wrong while fetching the %{workItemType}. Please try again."
+msgstr ""
+
msgid "WorkItem|Something went wrong while fetching work item award emojis. Please try again."
msgstr ""
diff --git a/spec/frontend/work_items/components/shared/work_item_token_input_spec.js b/spec/frontend/work_items/components/shared/work_item_token_input_spec.js
index c70dbbd909d..5726aaaa2d0 100644
--- a/spec/frontend/work_items/components/shared/work_item_token_input_spec.js
+++ b/spec/frontend/work_items/components/shared/work_item_token_input_spec.js
@@ -1,5 +1,5 @@
-import Vue from 'vue';
-import { GlTokenSelector } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import { GlTokenSelector, GlAlert } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -8,7 +8,12 @@ import WorkItemTokenInput from '~/work_items/components/shared/work_item_token_i
import { WORK_ITEM_TYPE_ENUM_TASK } from '~/work_items/constants';
import groupWorkItemsQuery from '~/work_items/graphql/group_work_items.query.graphql';
import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
-import { availableWorkItemsResponse, searchedWorkItemsResponse } from '../../mock_data';
+import {
+ availableWorkItemsResponse,
+ searchWorkItemsTextResponse,
+ searchWorkItemsIidResponse,
+ searchWorkItemsTextIidResponse,
+} from '../../mock_data';
Vue.use(VueApollo);
@@ -16,15 +21,17 @@ describe('WorkItemTokenInput', () => {
let wrapper;
const availableWorkItemsResolver = jest.fn().mockResolvedValue(availableWorkItemsResponse);
- const groupSearchedWorkItemResolver = jest.fn().mockResolvedValue(searchedWorkItemsResponse);
- const searchedWorkItemResolver = jest.fn().mockResolvedValue(searchedWorkItemsResponse);
+ const groupSearchedWorkItemResolver = jest.fn().mockResolvedValue(searchWorkItemsTextResponse);
+ const searchWorkItemTextResolver = jest.fn().mockResolvedValue(searchWorkItemsTextResponse);
+ const searchWorkItemIidResolver = jest.fn().mockResolvedValue(searchWorkItemsIidResponse);
+ const searchWorkItemTextIidResolver = jest.fn().mockResolvedValue(searchWorkItemsTextIidResponse);
const createComponent = async ({
workItemsToAdd = [],
parentConfidential = false,
childrenType = WORK_ITEM_TYPE_ENUM_TASK,
areWorkItemsToAddValid = true,
- workItemsResolver = searchedWorkItemResolver,
+ workItemsResolver = searchWorkItemTextResolver,
isGroup = false,
} = {}) => {
wrapper = shallowMountExtended(WorkItemTokenInput, {
@@ -50,6 +57,7 @@ describe('WorkItemTokenInput', () => {
};
const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
+ const findGlAlert = () => wrapper.findComponent(GlAlert);
it('searches for available work items on focus', async () => {
createComponent({ workItemsResolver: availableWorkItemsResolver });
@@ -61,24 +69,34 @@ describe('WorkItemTokenInput', () => {
searchTerm: '',
types: [WORK_ITEM_TYPE_ENUM_TASK],
in: undefined,
+ iid: null,
+ isNumber: false,
});
expect(findTokenSelector().props('dropdownItems')).toHaveLength(3);
});
- it('searches for available work items when typing in input', async () => {
- createComponent({ workItemsResolver: searchedWorkItemResolver });
- findTokenSelector().vm.$emit('focus');
- findTokenSelector().vm.$emit('text-input', 'Task 2');
- await waitForPromises();
-
- expect(searchedWorkItemResolver).toHaveBeenCalledWith({
- fullPath: 'test-project-path',
- searchTerm: 'Task 2',
- types: [WORK_ITEM_TYPE_ENUM_TASK],
- in: 'TITLE',
- });
- expect(findTokenSelector().props('dropdownItems')).toHaveLength(1);
- });
+ it.each`
+ inputType | input | resolver | searchTerm | iid | isNumber | length
+ ${'iid'} | ${'101'} | ${searchWorkItemIidResolver} | ${'101'} | ${'101'} | ${true} | ${1}
+ ${'text'} | ${'Task 2'} | ${searchWorkItemTextResolver} | ${'Task 2'} | ${null} | ${false} | ${1}
+ ${'iid and text'} | ${'123'} | ${searchWorkItemTextIidResolver} | ${'123'} | ${'123'} | ${true} | ${2}
+ `(
+ 'searches by $inputType for available work items when typing in input',
+ async ({ input, resolver, searchTerm, iid, isNumber, length }) => {
+ createComponent({ workItemsResolver: resolver });
+ findTokenSelector().vm.$emit('focus');
+ findTokenSelector().vm.$emit('text-input', input);
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenCalledWith({
+ searchTerm,
+ in: 'TITLE',
+ iid,
+ isNumber,
+ });
+ expect(findTokenSelector().props('dropdownItems')).toHaveLength(length);
+ },
+ );
it('renders red border around token selector input when work item is not valid', () => {
createComponent({
@@ -95,7 +113,7 @@ describe('WorkItemTokenInput', () => {
});
it('calls the project work items query', () => {
- expect(searchedWorkItemResolver).toHaveBeenCalled();
+ expect(searchWorkItemTextResolver).toHaveBeenCalled();
});
it('skips calling the group work items query', () => {
@@ -110,11 +128,35 @@ describe('WorkItemTokenInput', () => {
});
it('skips calling the project work items query', () => {
- expect(searchedWorkItemResolver).not.toHaveBeenCalled();
+ expect(searchWorkItemTextResolver).not.toHaveBeenCalled();
});
it('calls the group work items query', () => {
expect(groupSearchedWorkItemResolver).toHaveBeenCalled();
});
});
+
+ describe('when project work items query fails', () => {
+ beforeEach(() => {
+ createComponent({
+ workItemsResolver: jest
+ .fn()
+ .mockRejectedValue('Something went wrong while fetching the results'),
+ });
+ findTokenSelector().vm.$emit('focus');
+ });
+
+ it('shows error and allows error alert to be closed', async () => {
+ await waitForPromises();
+ expect(findGlAlert().exists()).toBe(true);
+ expect(findGlAlert().text()).toBe(
+ 'Something went wrong while fetching the task. Please try again.',
+ );
+
+ findGlAlert().vm.$emit('dismiss');
+ await nextTick();
+
+ expect(findGlAlert().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_parent_spec.js b/spec/frontend/work_items/components/work_item_parent_spec.js
index 0c02f0c63ec..11fe6dffbfa 100644
--- a/spec/frontend/work_items/components/work_item_parent_spec.js
+++ b/spec/frontend/work_items/components/work_item_parent_spec.js
@@ -148,15 +148,27 @@ describe('WorkItemParent component', () => {
});
await findCollapsibleListbox().vm.$emit('shown');
- await findCollapsibleListbox().vm.$emit('search', 'Objective 101');
await waitForPromises();
expect(searchedItemQueryHandler).toHaveBeenCalledWith({
fullPath: 'full-path',
+ searchTerm: '',
+ types: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
+ in: undefined,
+ iid: null,
+ isNumber: false,
+ });
+
+ await findCollapsibleListbox().vm.$emit('search', 'Objective 101');
+
+ expect(searchedItemQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'full-path',
searchTerm: 'Objective 101',
types: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
in: 'TITLE',
+ iid: null,
+ isNumber: false,
});
await nextTick();
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index 06d59a34367..41e8a01de36 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -1600,27 +1600,18 @@ export const availableWorkItemsResponse = {
id: 'gid://gitlab/WorkItem/458',
iid: '2',
title: 'Task 1',
- state: 'OPEN',
- createdAt: '2022-08-03T12:41:54Z',
- confidential: false,
__typename: 'WorkItem',
},
{
id: 'gid://gitlab/WorkItem/459',
iid: '3',
title: 'Task 2',
- state: 'OPEN',
- createdAt: '2022-08-03T12:41:54Z',
- confidential: false,
__typename: 'WorkItem',
},
{
id: 'gid://gitlab/WorkItem/460',
iid: '4',
title: 'Task 3',
- state: 'OPEN',
- createdAt: '2022-08-03T12:41:54Z',
- confidential: true,
__typename: 'WorkItem',
},
],
@@ -1640,24 +1631,18 @@ export const availableObjectivesResponse = {
id: 'gid://gitlab/WorkItem/716',
iid: '122',
title: 'Objective 101',
- state: 'OPEN',
- confidential: false,
__typename: 'WorkItem',
},
{
id: 'gid://gitlab/WorkItem/712',
iid: '118',
title: 'Objective 103',
- state: 'OPEN',
- confidential: false,
__typename: 'WorkItem',
},
{
id: 'gid://gitlab/WorkItem/711',
iid: '117',
title: 'Objective 102',
- state: 'OPEN',
- confidential: false,
__typename: 'WorkItem',
},
],
@@ -1677,8 +1662,6 @@ export const searchedObjectiveResponse = {
id: 'gid://gitlab/WorkItem/716',
iid: '122',
title: 'Objective 101',
- state: 'OPEN',
- confidential: false,
__typename: 'WorkItem',
},
],
@@ -1687,7 +1670,7 @@ export const searchedObjectiveResponse = {
},
};
-export const searchedWorkItemsResponse = {
+export const searchWorkItemsTextResponse = {
data: {
workspace: {
__typename: 'Project',
@@ -1698,9 +1681,57 @@ export const searchedWorkItemsResponse = {
id: 'gid://gitlab/WorkItem/459',
iid: '3',
title: 'Task 2',
- state: 'OPEN',
- createdAt: '2022-08-03T12:41:54Z',
- confidential: false,
+ __typename: 'WorkItem',
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const searchWorkItemsIidResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: [],
+ },
+ workItemsByIid: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/460',
+ iid: '101',
+ title: 'Task 3',
+ __typename: 'WorkItem',
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const searchWorkItemsTextIidResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/459',
+ iid: '3',
+ title: 'Task 123',
+ __typename: 'WorkItem',
+ },
+ ],
+ },
+ workItemsByIid: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/460',
+ iid: '123',
+ title: 'Task 2',
__typename: 'WorkItem',
},
],
diff --git a/spec/lib/gitlab/database/dictionary_spec.rb b/spec/lib/gitlab/database/dictionary_spec.rb
new file mode 100644
index 00000000000..6d2de41468b
--- /dev/null
+++ b/spec/lib/gitlab/database/dictionary_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Dictionary, feature_category: :database do
+ subject(:database_dictionary) { described_class.new(file_path) }
+
+ context 'for a table' do
+ let(:file_path) { 'db/docs/application_settings.yml' }
+
+ describe '#name_and_schema' do
+ it 'returns the name of the table and its gitlab schema' do
+ expect(database_dictionary.name_and_schema).to match_array(['application_settings', :gitlab_main_clusterwide])
+ end
+ end
+
+ describe '#table_name' do
+ it 'returns the name of the table' do
+ expect(database_dictionary.table_name).to eq('application_settings')
+ end
+ end
+
+ describe '#view_name' do
+ it 'returns nil' do
+ expect(database_dictionary.view_name).to be_nil
+ end
+ end
+
+ describe '#milestone' do
+ it 'returns the milestone in which the table was introduced' do
+ expect(database_dictionary.milestone).to eq('7.7')
+ end
+ end
+
+ describe '#gitlab_schema' do
+ it 'returns the gitlab_schema of the table' do
+      expect(database_dictionary.gitlab_schema).to eq('gitlab_main_clusterwide')
+ end
+ end
+
+ describe '#schema?' do
+ it 'checks if the given schema matches the schema of the table' do
+ expect(database_dictionary.schema?('gitlab_main')).to eq(false)
+ expect(database_dictionary.schema?('gitlab_main_clusterwide')).to eq(true)
+ end
+ end
+
+ describe '#key_name' do
+ it 'returns the value of the name of the table' do
+ expect(database_dictionary.key_name).to eq('application_settings')
+ end
+ end
+
+ describe '#validate!' do
+ it 'raises an error if the gitlab_schema is empty' do
+ allow(database_dictionary).to receive(:gitlab_schema).and_return(nil)
+
+ expect { database_dictionary.validate! }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
+ end
+ end
+ end
+
+ context 'for a view' do
+ let(:file_path) { 'db/docs/views/postgres_constraints.yml' }
+
+ describe '#table_name' do
+ it 'returns nil' do
+ expect(database_dictionary.table_name).to be_nil
+ end
+ end
+
+ describe '#view_name' do
+ it 'returns the name of the view' do
+ expect(database_dictionary.view_name).to eq('postgres_constraints')
+ end
+ end
+
+ describe '#key_name' do
+ it 'returns the value of the name of the view' do
+ expect(database_dictionary.key_name).to eq('postgres_constraints')
+ end
+ end
+ end
+end
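
As the gitlab_schema_spec.rb hunk below shows, callers now receive these objects directly from build_dictionary. A console-style sketch of the new call pattern (the empty-string argument mirrors the usage in the cell spec added below):

Gitlab::Database::GitlabSchema.build_dictionary('')
  .select { |entry| entry.schema?('gitlab_main') }
  .map(&:key_name)
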
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index a6de695c345..a47e53c18a5 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -95,10 +95,10 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
# ignore gitlab_internal due to `ar_internal_metadata`, `schema_migrations`
table_and_view_names = table_and_view_names
- .reject { |_, gitlab_schema| gitlab_schema == :gitlab_internal }
+ .reject { |database_dictionary| database_dictionary.schema?('gitlab_internal') }
duplicated_tables = table_and_view_names
- .group_by(&:first)
+ .group_by(&:key_name)
.select { |_, schemas| schemas.count > 1 }
.keys
diff --git a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
new file mode 100644
index 00000000000..338475fa9c4
--- /dev/null
+++ b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'new tables with gitlab_main schema', feature_category: :cell do
+ # During the development of Cells, we will be moving tables from the `gitlab_main` schema
+ # to either the `gitlab_main_clusterwide` or `gitlab_main_cell` schema.
+ # As part of this process, starting from milestone 16.7, it will be a mandatory requirement that
+ # all newly created tables are associated with one of these two schemas.
+ # Any attempt to set the `gitlab_main` schema for a new table will result in a failure of this spec.
+
+ # Specific tables can be exempted from this requirement, and such tables must be added to the `exempted_tables` list.
+ let!(:exempted_tables) do
+ []
+ end
+
+ let!(:starting_from_milestone) { 16.7 }
+
+  it 'only allows exempted tables to have `gitlab_main` as their schema after milestone 16.7', :aggregate_failures do
+ tables_having_gitlab_main_schema(starting_from_milestone: starting_from_milestone).each do |table_name|
+ expect(exempted_tables).to include(table_name), error_message(table_name)
+ end
+ end
+
+  it 'only allows tables having `gitlab_main` as their schema in `exempted_tables`', :aggregate_failures do
+ tables_having_gitlab_main_schema = gitlab_main_schema_tables.map(&:table_name)
+
+ exempted_tables.each do |exempted_table|
+ expect(tables_having_gitlab_main_schema).to include(exempted_table),
+ "`#{exempted_table}` does not have `gitlab_main` as its schema.
+ Please remove this table from the `exempted_tables` list."
+ end
+ end
+
+ private
+
+ def error_message(table_name)
+ <<~HEREDOC
+ The table `#{table_name}` has been added with `gitlab_main` schema.
+ Starting from GitLab #{starting_from_milestone}, we expect new tables to use either the `gitlab_main_cell` or the
+ `gitlab_main_clusterwide` schema.
+
+ To choose an appropriate schema for this table from among `gitlab_main_cell` and `gitlab_main_clusterwide`, please refer
+ to our guidelines at https://docs.gitlab.com/ee/development/database/multiple_databases.html#guidelines-on-choosing-between-gitlab_main_cell-and-gitlab_main_clusterwide-schema, or consult with the Tenant Scale group.
+
+ Please see issue https://gitlab.com/gitlab-org/gitlab/-/issues/424990 to understand why this change is being enforced.
+ HEREDOC
+ end
+
+ def tables_having_gitlab_main_schema(starting_from_milestone:)
+ selected_data = gitlab_main_schema_tables.select do |database_dictionary|
+ database_dictionary.milestone.to_f >= starting_from_milestone
+ end
+
+ selected_data.map(&:table_name)
+ end
+
+ def gitlab_main_schema_tables
+ ::Gitlab::Database::GitlabSchema.build_dictionary('').select do |database_dictionary|
+ database_dictionary.schema?('gitlab_main')
+ end
+ end
+end
diff --git a/spec/models/concerns/use_sql_function_for_primary_key_lookups_spec.rb b/spec/models/concerns/use_sql_function_for_primary_key_lookups_spec.rb
new file mode 100644
index 00000000000..f6f53c9aad5
--- /dev/null
+++ b/spec/models/concerns/use_sql_function_for_primary_key_lookups_spec.rb
@@ -0,0 +1,181 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe UseSqlFunctionForPrimaryKeyLookups, feature_category: :groups_and_projects do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:another_project) { create(:project) }
+
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = :projects
+
+ include UseSqlFunctionForPrimaryKeyLookups
+ end
+ end
+
+ context 'when the use_sql_functions_for_primary_key_lookups FF is on' do
+ before do
+ stub_feature_flags(use_sql_functions_for_primary_key_lookups: true)
+ end
+
+ it 'loads the correct record' do
+ expect(model.find(project.id).id).to eq(project.id)
+ end
+
+  it 'uses the function-based finder query' do
+ query = <<~SQL
+ SELECT "projects".* FROM find_projects_by_id(#{project.id})#{' '}
+ "projects" WHERE ("projects"."id" IS NOT NULL) LIMIT 1
+ SQL
+ query_log = ActiveRecord::QueryRecorder.new { model.find(project.id) }.log
+
+ expect(query_log).to match_array(include(query.tr("\n", '')))
+ end
+
+ it 'uses query cache', :use_sql_query_cache do
+ query = <<~SQL
+ SELECT "projects".* FROM find_projects_by_id(#{project.id})#{' '}
+ "projects" WHERE ("projects"."id" IS NOT NULL) LIMIT 1
+ SQL
+
+ recorder = ActiveRecord::QueryRecorder.new do
+ model.find(project.id)
+ model.find(project.id)
+ model.find(project.id)
+ end
+
+ expect(recorder.data.each_value.first[:count]).to eq(1)
+ expect(recorder.cached).to include(query.tr("\n", ''))
+ end
+
+ context 'when the model has ignored columns' do
+ around do |example|
+ model.ignored_columns = %i[path]
+ example.run
+ model.ignored_columns = []
+ end
+
+ it 'enumerates the column names' do
+ column_list = model.columns.map do |column|
+ %("projects"."#{column.name}")
+ end.join(', ')
+
+ expect(column_list).not_to include(%("projects"."path"))
+
+ query = <<~SQL
+ SELECT #{column_list} FROM find_projects_by_id(#{project.id})#{' '}
+ "projects" WHERE ("projects"."id" IS NOT NULL) LIMIT 1
+ SQL
+ query_log = ActiveRecord::QueryRecorder.new { model.find(project.id) }.log
+
+ expect(query_log).to match_array(include(query.tr("\n", '')))
+ end
+ end
+
+ context 'when there are scope attributes' do
+ let(:scoped_model) do
+ Class.new(model) do
+ default_scope { where.not(path: nil) } # rubocop: disable Cop/DefaultScope -- Needed for testing a specific case
+ end
+ end
+
+ it 'loads the correct record' do
+ expect(scoped_model.find(project.id).id).to eq(project.id)
+ end
+
+ it 'does not use the function-based finder query' do
+ query_log = ActiveRecord::QueryRecorder.new { scoped_model.find(project.id) }.log
+
+ expect(query_log).not_to include(match(/find_projects_by_id/))
+ end
+ end
+
+ context 'when there are multiple arguments' do
+ it 'loads the correct records' do
+ expect(model.find(project.id, another_project.id).map(&:id)).to match_array([project.id, another_project.id])
+ end
+
+ it 'does not use the function-based finder query' do
+ query_log = ActiveRecord::QueryRecorder.new { model.find(project.id, another_project.id) }.log
+
+ expect(query_log).not_to include(match(/find_projects_by_id/))
+ end
+ end
+
+ context 'when there is block given' do
+ it 'loads the correct records' do
+ expect(model.find(0) { |p| p.path == project.path }.id).to eq(project.id)
+ end
+
+ it 'does not use the function-based finder query' do
+ query_log = ActiveRecord::QueryRecorder.new { model.find(0) { |p| p.path == project.path } }.log
+
+ expect(query_log).not_to include(match(/find_projects_by_id/))
+ end
+ end
+
+ context 'when there is no primary key defined' do
+ let(:model_without_pk) do
+ Class.new(model) do
+ def self.primary_key
+ nil
+ end
+ end
+ end
+
+ it 'raises ActiveRecord::UnknownPrimaryKey' do
+ expect { model_without_pk.find(0) }.to raise_error ActiveRecord::UnknownPrimaryKey
+ end
+ end
+
+ context 'when id is provided as an array' do
+ it 'returns the correct record as an array' do
+ expect(model.find([project.id]).map(&:id)).to eq([project.id])
+ end
+
+ it 'does use the function-based finder query' do
+ query_log = ActiveRecord::QueryRecorder.new { model.find([project.id]) }.log
+
+ expect(query_log).to include(match(/find_projects_by_id/))
+ end
+
+ context 'when array has multiple elements' do
+ it 'does not use the function-based finder query' do
+ query_log = ActiveRecord::QueryRecorder.new { model.find([project.id, another_project.id]) }.log
+
+ expect(query_log).not_to include(match(/find_projects_by_id/))
+ end
+ end
+ end
+
+ context 'when the provided id is null' do
+ it 'raises ActiveRecord::RecordNotFound' do
+ expect { model.find(nil) }.to raise_error ActiveRecord::RecordNotFound, "Couldn't find without an ID"
+ end
+ end
+
+ context 'when the provided id is not a string that can cast to numeric' do
+ it 'raises ActiveRecord::RecordNotFound' do
+ expect { model.find('foo') }.to raise_error ActiveRecord::RecordNotFound, "Couldn't find with 'id'=foo"
+ end
+ end
+ end
+
+ context 'when the use_sql_functions_for_primary_key_lookups FF is off' do
+ before do
+ stub_feature_flags(use_sql_functions_for_primary_key_lookups: false)
+ end
+
+ it 'loads the correct record' do
+ expect(model.find(project.id).id).to eq(project.id)
+ end
+
+ it 'uses the SQL-based finder query' do
+ expected_query = %(SELECT "projects".* FROM \"projects\" WHERE "projects"."id" = #{project.id} LIMIT 1)
+ query_log = ActiveRecord::QueryRecorder.new { model.find(project.id) }.log
+
+ expect(query_log).to match_array(include(expected_query))
+ end
+ end
+end
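
The concern itself is not part of this hunk. As a rough sketch of the behaviour the examples above pin down (the guard helper name and its exact conditions are assumptions; only the fast-path query shape is taken from the expectations), the override could look like:

module UseSqlFunctionForPrimaryKeyLookups
  extend ActiveSupport::Concern

  class_methods do
    def find(*args, &block)
      return super unless function_lookup_applicable?(args, block)

      id = Array.wrap(args.first).first
      # Non-numeric ids (for example find('foo')) fall back to the regular
      # finder, which raises the usual ActiveRecord::RecordNotFound.
      return super unless id.to_s.match?(/\A\d+\z/)

      # Produces: SELECT "projects".* FROM find_projects_by_id(<id>) "projects"
      #           WHERE ("projects"."id" IS NOT NULL) LIMIT 1
      record = from("find_#{table_name}_by_id(#{connection.quote(id)}) #{quoted_table_name}")
                 .where.not(primary_key => nil)
                 .limit(1)
                 .first

      raise ActiveRecord::RecordNotFound, "Couldn't find #{name} with '#{primary_key}'=#{id}" unless record

      # find([id]) returns an array, find(id) returns the record itself.
      args.first.is_a?(Array) ? [record] : record
    end

    private

    # Assumed guard: only take the function-based path for a plain single-id
    # lookup, with the feature flag enabled and no extra scoping in play.
    def function_lookup_applicable?(args, block)
      Feature.enabled?(:use_sql_functions_for_primary_key_lookups) &&
        primary_key.present? &&
        block.nil? &&
        !scope_attributes? &&
        args.size == 1 &&
        Array.wrap(args.first).size == 1
    end
  end
end
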
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 9cf82c707f1..33142922670 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -166,6 +166,37 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
end
end
+ describe '#long_stopping?' do
+ subject { environment1.long_stopping? }
+
+ let(:long_ago) { (described_class::LONG_STOP + 1.day).ago }
+ let(:not_long_ago) { (described_class::LONG_STOP - 1.day).ago }
+
+ context 'when a stopping environment has not been updated recently' do
+ let!(:environment1) { create(:environment, state: 'stopping', project: project, updated_at: long_ago) }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when a stopping environment has been updated recently' do
+ let!(:environment1) { create(:environment, state: 'stopping', project: project, updated_at: not_long_ago) }
+
+ it { is_expected.to eq(false) }
+ end
+
+    context 'when a non-stopping environment has not been updated recently' do
+ let!(:environment1) { create(:environment, project: project, updated_at: long_ago) }
+
+ it { is_expected.to eq(false) }
+ end
+
+    context 'when a non-stopping environment has been updated recently' do
+ let!(:environment1) { create(:environment, project: project, updated_at: not_long_ago) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
describe ".stopped_review_apps" do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:old_stopped_review_env) { create(:environment, :with_review_app, :stopped, created_at: 31.days.ago, project: project) }
@@ -406,6 +437,47 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
end
end
+ describe '.long_stopping' do
+ subject { described_class.long_stopping }
+
+ let_it_be(:project) { create(:project) }
+ let(:environment) { create(:environment, project: project) }
+ let(:long) { (described_class::LONG_STOP + 1.day).ago }
+ let(:short) { (described_class::LONG_STOP - 1.day).ago }
+
+ context 'when a stopping environment has not been updated recently' do
+ before do
+ environment.update!(state: :stopping, updated_at: long)
+ end
+
+ it { is_expected.to eq([environment]) }
+ end
+
+ context 'when a stopping environment has been updated recently' do
+ before do
+ environment.update!(state: :stopping, updated_at: short)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+    context 'when a non-stopping environment has not been updated recently' do
+ before do
+ environment.update!(state: :available, updated_at: long)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+    context 'when a non-stopping environment has been updated recently' do
+ before do
+ environment.update!(state: :available, updated_at: short)
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.pluck_names' do
subject { described_class.pluck_names }
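
These examples rely on a small addition to the Environment model (the diff for app/models/environment.rb sits elsewhere in this commit). A sketch of the pieces used here, with a placeholder LONG_STOP value:

class Environment < ApplicationRecord
  # Stopping environments that have not been updated for this long are
  # considered stuck and become candidates for auto-recovery.
  LONG_STOP = 1.week # placeholder value, not the shipped constant

  scope :long_stopping, -> do
    with_state(:stopping).where('environments.updated_at < ?', LONG_STOP.ago)
  end

  def long_stopping?
    stopping? && updated_at < LONG_STOP.ago
  end
end
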
diff --git a/spec/requests/api/ci/job_artifacts_spec.rb b/spec/requests/api/ci/job_artifacts_spec.rb
index 6f4e7fd66ed..b96ba356855 100644
--- a/spec/requests/api/ci/job_artifacts_spec.rb
+++ b/spec/requests/api/ci/job_artifacts_spec.rb
@@ -14,9 +14,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
end
let_it_be(:pipeline, reload: true) do
- create(:ci_pipeline, project: project,
- sha: project.commit.id,
- ref: project.default_branch)
+ create(:ci_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
end
let(:user) { create(:user) }
@@ -179,8 +177,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
context 'when project is public' do
it 'allows to access artifacts' do
- project.update_column(:visibility_level,
- Gitlab::VisibilityLevel::PUBLIC)
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
@@ -193,8 +190,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
let(:job) { create(:ci_build, :artifacts, :with_private_artifacts_config, pipeline: pipeline) }
it 'rejects access to artifacts' do
- project.update_column(:visibility_level,
- Gitlab::VisibilityLevel::PUBLIC)
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
@@ -208,8 +204,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
end
it 'allows access to artifacts' do
- project.update_column(:visibility_level,
- Gitlab::VisibilityLevel::PUBLIC)
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
@@ -221,8 +216,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
context 'when project is public with builds access disabled' do
it 'rejects access to artifacts' do
- project.update_column(:visibility_level,
- Gitlab::VisibilityLevel::PUBLIC)
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, false)
get_artifact_file(artifact)
@@ -233,8 +227,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
context 'when project is private' do
it 'rejects access and hides existence of artifacts' do
- project.update_column(:visibility_level,
- Gitlab::VisibilityLevel::PRIVATE)
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
@@ -254,8 +247,7 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
- .to include('Content-Type' => 'application/json',
- 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
+ .to include('Content-Type' => 'application/json', 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Detect-Content-Type' => 'true')
expect(response.parsed_body).to be_empty
@@ -404,10 +396,12 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
end
before do
- stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
- uploader: JobArtifactUploader,
- proxy_download: proxy_download,
- cdn: cdn_config)
+ stub_object_storage_uploader(
+ config: Gitlab.config.artifacts.object_store,
+ uploader: JobArtifactUploader,
+ proxy_download: proxy_download,
+ cdn: cdn_config
+ )
allow(Gitlab::ApplicationContext).to receive(:push).and_call_original
end
@@ -624,10 +618,11 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
it 'allows to access artifacts', :sidekiq_might_not_need_inline do
expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers.to_h)
- .to include('Content-Type' => 'application/json',
- 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
- 'Gitlab-Workhorse-Detect-Content-Type' => 'true')
+ expect(response.headers.to_h).to include(
+ 'Content-Type' => 'application/json',
+ 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
+ 'Gitlab-Workhorse-Detect-Content-Type' => 'true'
+ )
end
end
@@ -695,10 +690,11 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers.to_h)
- .to include('Content-Type' => 'application/json',
- 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
- 'Gitlab-Workhorse-Detect-Content-Type' => 'true')
+ expect(response.headers.to_h).to include(
+ 'Content-Type' => 'application/json',
+ 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
+ 'Gitlab-Workhorse-Detect-Content-Type' => 'true'
+ )
expect(response.parsed_body).to be_empty
end
end
@@ -713,10 +709,11 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
get_artifact_file(artifact, 'improve/awesome')
expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers.to_h)
- .to include('Content-Type' => 'application/json',
- 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
- 'Gitlab-Workhorse-Detect-Content-Type' => 'true')
+ expect(response.headers.to_h).to include(
+ 'Content-Type' => 'application/json',
+ 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
+ 'Gitlab-Workhorse-Detect-Content-Type' => 'true'
+ )
end
end
@@ -765,8 +762,15 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
context 'artifacts did not expire' do
let(:job) do
- create(:ci_build, :trace_artifact, :artifacts, :success,
- project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days)
+ create(
+ :ci_build,
+ :trace_artifact,
+ :artifacts,
+ :success,
+ project: project,
+ pipeline: pipeline,
+ artifacts_expire_at: Time.now + 7.days
+ )
end
it 'keeps artifacts' do
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index 386718cba49..2ab112a8527 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -14,9 +14,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
end
let_it_be(:pipeline, reload: true) do
- create(:ci_pipeline, project: project,
- sha: project.commit.id,
- ref: project.default_branch)
+ create(:ci_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
end
let(:user) { create(:user) }
@@ -25,10 +23,14 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
let(:guest) { create(:project_member, :guest, project: project).user }
let(:running_job) do
- create(:ci_build, :running, project: project,
- user: user,
- pipeline: pipeline,
- artifacts_expire_at: 1.day.since)
+ create(
+ :ci_build,
+ :running,
+ project: project,
+ user: user,
+ pipeline: pipeline,
+ artifacts_expire_at: 1.day.since
+ )
end
let!(:job) do
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index 34503b3fd16..eef125e1bc3 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -13,8 +13,14 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
end
let_it_be(:pipeline) do
- create(:ci_empty_pipeline, project: project, sha: project.commit.id,
- ref: project.default_branch, user: user, name: 'Build pipeline')
+ create(
+ :ci_empty_pipeline,
+ project: project,
+ sha: project.commit.id,
+ ref: project.default_branch,
+ user: user,
+ name: 'Build pipeline'
+ )
end
before do
@@ -357,8 +363,13 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
let(:query) { {} }
let(:api_user) { user }
let_it_be(:job) do
- create(:ci_build, :success, name: 'build', pipeline: pipeline,
- artifacts_expire_at: 1.day.since)
+ create(
+ :ci_build,
+ :success,
+ name: 'build',
+ pipeline: pipeline,
+ artifacts_expire_at: 1.day.since
+ )
end
let(:guest) { create(:project_member, :guest, project: project).user }
@@ -540,12 +551,14 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
let(:downstream_pipeline) { create(:ci_pipeline) }
let!(:pipeline_source) do
- create(:ci_sources_pipeline,
- source_pipeline: pipeline,
- source_project: project,
- source_job: bridge,
- pipeline: downstream_pipeline,
- project: downstream_pipeline.project)
+ create(
+ :ci_sources_pipeline,
+ source_pipeline: pipeline,
+ source_project: project,
+ source_job: bridge,
+ pipeline: downstream_pipeline,
+ project: downstream_pipeline.project
+ )
end
let(:query) { {} }
@@ -713,12 +726,14 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
def create_bridge(pipeline, status = :created)
create(:ci_bridge, status: status, pipeline: pipeline).tap do |bridge|
downstream_pipeline = create(:ci_pipeline)
- create(:ci_sources_pipeline,
- source_pipeline: pipeline,
- source_project: pipeline.project,
- source_job: bridge,
- pipeline: downstream_pipeline,
- project: downstream_pipeline.project)
+ create(
+ :ci_sources_pipeline,
+ source_pipeline: pipeline,
+ source_project: pipeline.project,
+ source_job: bridge,
+ pipeline: downstream_pipeline,
+ project: downstream_pipeline.project
+ )
end
end
end
@@ -914,13 +929,24 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
let(:second_branch) { project.repository.branches[2] }
let!(:second_pipeline) do
- create(:ci_empty_pipeline, project: project, sha: second_branch.target,
- ref: second_branch.name, user: user, name: 'Build pipeline')
+ create(
+ :ci_empty_pipeline,
+ project: project,
+ sha: second_branch.target,
+ ref: second_branch.name,
+ user: user,
+ name: 'Build pipeline'
+ )
end
before do
- create(:ci_empty_pipeline, project: project, sha: project.commit.parent.id,
- ref: project.default_branch, user: user)
+ create(
+ :ci_empty_pipeline,
+ project: project,
+ sha: project.commit.parent.id,
+ ref: project.default_branch,
+ user: user
+ )
end
context 'default repository branch' do
@@ -1182,8 +1208,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
describe 'POST /projects/:id/pipelines/:pipeline_id/retry' do
context 'authorized user' do
let_it_be(:pipeline) do
- create(:ci_pipeline, project: project, sha: project.commit.id,
- ref: project.default_branch)
+ create(:ci_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
end
let_it_be(:build) { create(:ci_build, :failed, pipeline: pipeline) }
@@ -1228,8 +1253,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
describe 'POST /projects/:id/pipelines/:pipeline_id/cancel' do
let_it_be(:pipeline) do
- create(:ci_empty_pipeline, project: project, sha: project.commit.id,
- ref: project.default_branch)
+ create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
end
let_it_be(:build) { create(:ci_build, :running, pipeline: pipeline) }
diff --git a/spec/requests/api/ci/resource_groups_spec.rb b/spec/requests/api/ci/resource_groups_spec.rb
index 26265aec1dc..809b1c7c3d3 100644
--- a/spec/requests/api/ci/resource_groups_spec.rb
+++ b/spec/requests/api/ci/resource_groups_spec.rb
@@ -126,9 +126,7 @@ RSpec.describe API::Ci::ResourceGroups, feature_category: :continuous_delivery d
context 'when resource group key contains a slash' do
let_it_be(:resource_group) { create(:ci_resource_group, project: project, key: 'test/test') }
let_it_be(:upcoming_processable) do
- create(:ci_processable,
- :waiting_for_resource,
- resource_group: resource_group)
+ create(:ci_processable, :waiting_for_resource, resource_group: resource_group)
end
let(:key) { 'test%2Ftest' }
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
index 2e0be23ba90..637469411d5 100644
--- a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -30,8 +30,15 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
describe '/api/v4/jobs' do
let(:job) do
- create(:ci_build, :artifacts, :extended_options,
- pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ create(
+ :ci_build,
+ :artifacts,
+ :extended_options,
+ pipeline: pipeline,
+ name: 'spinach',
+ stage: 'test',
+ stage_idx: 0
+ )
end
describe 'artifacts' do
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 23d6a1eeccc..2a870a25ea6 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -24,8 +24,17 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
let(:runner) { create(:ci_runner, :project, projects: [project]) }
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
let(:job) do
- create(:ci_build, :pending, :queued, :artifacts, :extended_options,
- pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ create(
+ :ci_build,
+ :pending,
+ :queued,
+ :artifacts,
+ :extended_options,
+ pipeline: pipeline,
+ name: 'spinach',
+ stage: 'test',
+ stage_idx: 0
+ )
end
describe 'POST /api/v4/jobs/request' do
@@ -342,10 +351,11 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
request_job
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
- '+refs/tags/*:refs/tags/*',
- '+refs/heads/*:refs/remotes/origin/*')
+ expect(json_response['git_info']['refspecs']).to contain_exactly(
+ "+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ '+refs/tags/*:refs/tags/*',
+ '+refs/heads/*:refs/remotes/origin/*'
+ )
end
end
end
@@ -383,10 +393,11 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
request_job
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
- '+refs/tags/*:refs/tags/*',
- '+refs/heads/*:refs/remotes/origin/*')
+ expect(json_response['git_info']['refspecs']).to contain_exactly(
+ "+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ '+refs/tags/*:refs/tags/*',
+ '+refs/heads/*:refs/remotes/origin/*'
+ )
end
end
end
@@ -646,8 +657,16 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
context 'when job has code coverage report' do
let(:job) do
- create(:ci_build, :pending, :queued, :coverage_report_cobertura,
- pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ create(
+ :ci_build,
+ :pending,
+ :queued,
+ :coverage_report_cobertura,
+ pipeline: pipeline,
+ name: 'spinach',
+ stage: 'test',
+ stage_idx: 0
+ )
end
let(:expected_artifacts) do
@@ -788,9 +807,16 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
describe 'time_in_queue_seconds support' do
let(:job) do
- create(:ci_build, :pending, :queued, pipeline: pipeline,
- name: 'spinach', stage: 'test', stage_idx: 0,
- queued_at: 60.seconds.ago)
+ create(
+ :ci_build,
+ :pending,
+ :queued,
+ pipeline: pipeline,
+ name: 'spinach',
+ stage: 'test',
+ stage_idx: 0,
+ queued_at: 60.seconds.ago
+ )
end
it 'presents the time_in_queue_seconds info in the payload' do
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
index ee00fc5a793..8c596d2338f 100644
--- a/spec/requests/api/ci/runner/jobs_trace_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -23,14 +23,28 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks, feature_catego
let(:runner) { create(:ci_runner, :project, projects: [project]) }
let(:user) { create(:user) }
let(:job) do
- create(:ci_build, :artifacts, :extended_options,
- pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ create(
+ :ci_build,
+ :artifacts,
+ :extended_options,
+ pipeline: pipeline,
+ name: 'spinach',
+ stage: 'test',
+ stage_idx: 0
+ )
end
describe 'PATCH /api/v4/jobs/:id/trace' do
let(:job) do
- create(:ci_build, :running, :trace_live,
- project: project, user: user, runner_id: runner.id, pipeline: pipeline)
+ create(
+ :ci_build,
+ :running,
+ :trace_live,
+ project: project,
+ user: user,
+ runner_id: runner.id,
+ pipeline: pipeline
+ )
end
let(:headers) { { API::Ci::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index c82bdf1af57..1490172d1c3 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -185,8 +185,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
it "sets the runner's ip_address" do
post api('/runners'),
- params: { token: registration_token },
- headers: { 'X-Forwarded-For' => '123.111.123.111' }
+ params: { token: registration_token },
+ headers: { 'X-Forwarded-For' => '123.111.123.111' }
expect(response).to have_gitlab_http_status(:created)
expect(::Ci::Runner.last.ip_address).to eq('123.111.123.111')
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index 7b69e9d1cb0..ba80684e89e 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -500,13 +500,17 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :runner_
active = shared_runner.active
runner_queue_value = shared_runner.ensure_runner_queue_value
- update_runner(shared_runner.id, admin, description: "#{description}_updated",
- active: !active,
- tag_list: ['ruby2.1', 'pgsql', 'mysql'],
- run_untagged: 'false',
- locked: 'true',
- access_level: 'ref_protected',
- maximum_timeout: 1234)
+ update_runner(
+ shared_runner.id,
+ admin,
+ description: "#{description}_updated",
+ active: !active,
+ tag_list: ['ruby2.1', 'pgsql', 'mysql'],
+ run_untagged: 'false',
+ locked: 'true',
+ access_level: 'ref_protected',
+ maximum_timeout: 1234
+ )
shared_runner.reload
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/services/environments/auto_recover_service_spec.rb b/spec/services/environments/auto_recover_service_spec.rb
new file mode 100644
index 00000000000..9807e8f9314
--- /dev/null
+++ b/spec/services/environments/auto_recover_service_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::AutoRecoverService, :clean_gitlab_redis_shared_state, :sidekiq_inline,
+ feature_category: :continuous_delivery do
+ include CreateEnvironmentsHelpers
+ include ExclusiveLeaseHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:service) { described_class.new }
+
+ before_all do
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:environments) { Environment.all }
+
+ before_all do
+ project.add_developer(user)
+ project.repository.add_branch(user, 'review/feature-1', 'master')
+ project.repository.add_branch(user, 'review/feature-2', 'master')
+ end
+
+ before do
+ create_review_app(user, project, 'review/feature-1')
+ create_review_app(user, project, 'review/feature-2')
+
+ Environment.all.map do |e|
+ e.stop_actions.map(&:drop)
+ e.stop!
+ e.update!(updated_at: (Environment::LONG_STOP + 1.day).ago)
+ e.reload
+ end
+ end
+
+ it 'stops environments that have been stuck stopping too long' do
+ expect { subject }
+ .to change { Environment.all.map(&:state).uniq }
+ .from(['stopping']).to(['available'])
+ end
+
+ it 'schedules stop processes in bulk' do
+ args = [[Environment.find_by_name('review/feature-1').id], [Environment.find_by_name('review/feature-2').id]]
+
+ expect(Environments::AutoRecoverWorker)
+ .to receive(:bulk_perform_async).with(args).once.and_call_original
+
+ subject
+ end
+
+ context 'when the other sidekiq worker has already been running' do
+ before do
+ stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY)
+ end
+
+ it 'does not execute recover_in_batch' do
+ expect_next_instance_of(described_class) do |service|
+ expect(service).not_to receive(:recover_in_batch)
+ end
+
+ expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
+
+ context 'when loop reached timeout' do
+ before do
+ stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
+ stub_const("#{described_class}::LOOP_LIMIT", 100_000)
+ allow_next_instance_of(described_class) do |service|
+ allow(service).to receive(:recover_in_batch).and_return(true)
+ end
+ end
+
+ it 'returns false and does not continue the process' do
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'when loop reached loop limit' do
+ before do
+ stub_const("#{described_class}::LOOP_LIMIT", 1)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'stops only one available environment' do
+ expect { subject }.to change { Environment.long_stopping.count }.by(-1)
+ end
+ end
+ end
+end
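
A sketch of the service these examples drive, pieced together from the constants and calls they stub (constant values, the lock TTL and the exact batching are assumptions; the shipped file is app/services/environments/auto_recover_service.rb):

module Environments
  class AutoRecoverService
    include ::Gitlab::ExclusiveLeaseHelpers
    include ::Gitlab::LoopHelpers

    BATCH_SIZE = 100
    LOOP_TIMEOUT = 45.minutes
    LOOP_LIMIT = 1000
    LOCK_TIMEOUT = 50.minutes
    EXCLUSIVE_LOCK_KEY = 'environments_auto_recover_service'

    def execute
      in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
        loop_until(timeout: LOOP_TIMEOUT, limit: LOOP_LIMIT) do
          recover_in_batch
        end
      end
    end

    private

    # Schedules a recovery worker per stuck environment; returns false once
    # there is nothing left to process so loop_until can stop.
    def recover_in_batch
      environment_ids = Environment.long_stopping.limit(BATCH_SIZE).pluck(:id)

      return false if environment_ids.empty?

      Environments::AutoRecoverWorker.bulk_perform_async(environment_ids.map { |id| [id] })
      true
    end
  end
end
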
diff --git a/spec/workers/environments/auto_recover_worker_spec.rb b/spec/workers/environments/auto_recover_worker_spec.rb
new file mode 100644
index 00000000000..7b25eb5d616
--- /dev/null
+++ b/spec/workers/environments/auto_recover_worker_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::AutoRecoverWorker, feature_category: :continuous_delivery do
+ include CreateEnvironmentsHelpers
+
+ subject { worker.perform(environment_id) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+
+ let!(:environment) { create_review_app(user, project, 'review/feature').environment }
+ let(:environment_id) { environment.id }
+ let(:worker) { described_class.new }
+ let(:user) { developer }
+
+ before_all do
+ project.repository.add_branch(developer, 'review/feature', 'master')
+ end
+
+ context 'when environment has been updated recently' do
+    it 'does not recover the environment' do
+ environment.stop!
+ environment.update!(updated_at: (Environment::LONG_STOP - 1.day).ago)
+
+ expect { subject }
+ .not_to change { environment.reload.state }
+ .from('stopping')
+ end
+ end
+
+  context 'when all stop actions are complete' do
+    it 'recovers the environment' do
+ environment.stop!
+ environment.stop_actions.map(&:drop)
+ environment.update!(updated_at: (Environment::LONG_STOP + 1.day).ago)
+
+ expect { subject }
+ .to change { environment.reload.state }
+ .from('stopping').to('available')
+ end
+ end
+
+  context 'when all stop actions are not complete' do
+    it 'does not recover the environment' do
+ environment.stop!
+ environment.update!(updated_at: (Environment::LONG_STOP + 1.day).ago)
+
+ expect { subject }
+ .not_to change { environment.reload.state }
+ .from('stopping')
+ end
+ end
+
+  context 'when there is no corresponding environment record' do
+ let!(:environment) { instance_double('Environment', id: non_existing_record_id) }
+
+ it 'ignores the invalid record' do
+ expect { subject }.not_to raise_error
+ end
+ end
+end
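
Roughly, the worker only intervenes when the environment is still stuck and every stop action has finished. A sketch under those assumptions (the recover_stuck_stopping! transition is an assumed name, and the worker boilerplate is abbreviated):

module Environments
  class AutoRecoverWorker
    include ApplicationWorker

    idempotent!
    feature_category :continuous_delivery

    def perform(environment_id)
      environment = Environment.find_by_id(environment_id)
      return unless environment                         # ignore stale ids
      return unless environment.long_stopping?          # still stuck in "stopping"
      return unless environment.stop_actions.all?(&:complete?)

      environment.recover_stuck_stopping! # assumed transition back to "available"
    end
  end
end
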
diff --git a/spec/workers/environments/auto_stop_cron_worker_spec.rb b/spec/workers/environments/auto_stop_cron_worker_spec.rb
index ad44cf97e07..14a74022a1f 100644
--- a/spec/workers/environments/auto_stop_cron_worker_spec.rb
+++ b/spec/workers/environments/auto_stop_cron_worker_spec.rb
@@ -14,4 +14,12 @@ RSpec.describe Environments::AutoStopCronWorker, feature_category: :continuous_d
subject
end
+
+ it 'executes Environments::AutoRecoverService' do
+ expect_next_instance_of(Environments::AutoRecoverService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject
+ end
end
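
The cron worker change this example covers is a one-line addition; sketched here under the assumption that the existing worker already calls AutoStopService, which the example above it exercises:

module Environments
  class AutoStopCronWorker
    include ApplicationWorker
    include CronjobQueue # rubocop:disable Scalability/CronWorkerContext

    idempotent!
    feature_category :continuous_delivery

    def perform
      AutoStopService.new.execute
      AutoRecoverService.new.execute # new: also recover environments stuck stopping
    end
  end
end
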