gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com> 2024-01-11 18:09:58 +0300
committer GitLab Bot <gitlab-bot@gitlab.com> 2024-01-11 18:09:58 +0300
commit    d8877c12347443fa02e0ba53ad8d5cd318f6fa28 (patch)
tree      be066b259837e82a5ca51b3f8feb3285594d8f66
parent    97feef1f73fd0a3237d1a2aef4e977e7e27ecace (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r-- .rubocop_todo/layout/line_length.yml | 10
-rw-r--r-- .rubocop_todo/lint/symbol_conversion.yml | 2
-rw-r--r-- .rubocop_todo/naming/heredoc_delimiter_naming.yml | 2
-rw-r--r-- .rubocop_todo/performance/map_compact.yml | 2
-rw-r--r-- .rubocop_todo/rspec/before_all_role_assignment.yml | 2
-rw-r--r-- .rubocop_todo/rspec/context_wording.yml | 2
-rw-r--r-- .rubocop_todo/rspec/named_subject.yml | 4
-rw-r--r-- .rubocop_todo/style/if_unless_modifier.yml | 2
-rw-r--r-- .rubocop_todo/style/inline_disable_annotation.yml | 2
-rw-r--r-- app/assets/javascripts/kubernetes_dashboard/components/workload_details.vue | 2
-rw-r--r-- app/assets/javascripts/kubernetes_dashboard/components/workload_layout.vue | 14
-rw-r--r-- app/assets/javascripts/kubernetes_dashboard/components/workload_table.vue | 9
-rw-r--r-- app/assets/javascripts/kubernetes_dashboard/constants.js | 5
-rw-r--r-- app/assets/javascripts/releases/components/app_edit_new.vue | 3
-rw-r--r-- app/assets/javascripts/releases/components/tag_field_new.vue | 3
-rw-r--r-- app/assets/javascripts/releases/stores/modules/edit_new/actions.js | 15
-rw-r--r-- app/assets/javascripts/releases/stores/modules/edit_new/getters.js | 12
-rw-r--r-- app/assets/javascripts/releases/stores/modules/edit_new/state.js | 1
-rw-r--r-- app/controllers/projects/google_cloud/configuration_controller.rb | 2
-rw-r--r-- app/controllers/projects/google_cloud/databases_controller.rb | 6
-rw-r--r-- app/controllers/projects/google_cloud/deployments_controller.rb | 6
-rw-r--r-- app/controllers/projects/google_cloud/gcp_regions_controller.rb | 2
-rw-r--r-- app/controllers/projects/google_cloud/service_accounts_controller.rb | 2
-rw-r--r-- app/models/application_setting.rb | 240
-rw-r--r-- app/models/ci/pipeline_config.rb | 4
-rw-r--r-- app/models/concerns/ci/partitionable/testing.rb | 1
-rw-r--r-- app/models/group.rb | 6
-rw-r--r-- app/models/member.rb | 5
-rw-r--r-- app/services/cloud_seed/google_cloud/base_service.rb | 67
-rw-r--r-- app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb | 80
-rw-r--r-- app/services/cloud_seed/google_cloud/create_service_accounts_service.rb | 42
-rw-r--r-- app/services/cloud_seed/google_cloud/enable_cloud_run_service.rb | 23
-rw-r--r-- app/services/cloud_seed/google_cloud/enable_cloudsql_service.rb | 27
-rw-r--r-- app/services/cloud_seed/google_cloud/enable_vision_ai_service.rb | 21
-rw-r--r-- app/services/cloud_seed/google_cloud/fetch_google_ip_list_service.rb | 93
-rw-r--r-- app/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service.rb | 25
-rw-r--r-- app/services/cloud_seed/google_cloud/generate_pipeline_service.rb | 100
-rw-r--r-- app/services/cloud_seed/google_cloud/get_cloudsql_instances_service.rb | 20
-rw-r--r-- app/services/cloud_seed/google_cloud/service_accounts_service.rb | 53
-rw-r--r-- app/services/cloud_seed/google_cloud/setup_cloudsql_instance_service.rb | 120
-rw-r--r-- app/services/google_cloud/base_service.rb | 65
-rw-r--r-- app/services/google_cloud/create_cloudsql_instance_service.rb | 78
-rw-r--r-- app/services/google_cloud/create_service_accounts_service.rb | 40
-rw-r--r-- app/services/google_cloud/enable_cloud_run_service.rb | 21
-rw-r--r-- app/services/google_cloud/enable_cloudsql_service.rb | 25
-rw-r--r-- app/services/google_cloud/enable_vision_ai_service.rb | 19
-rw-r--r-- app/services/google_cloud/fetch_google_ip_list_service.rb | 91
-rw-r--r-- app/services/google_cloud/gcp_region_add_or_replace_service.rb | 23
-rw-r--r-- app/services/google_cloud/generate_pipeline_service.rb | 98
-rw-r--r-- app/services/google_cloud/get_cloudsql_instances_service.rb | 18
-rw-r--r-- app/services/google_cloud/service_accounts_service.rb | 51
-rw-r--r-- app/services/google_cloud/setup_cloudsql_instance_service.rb | 118
-rw-r--r-- app/workers/google_cloud/create_cloudsql_instance_worker.rb | 2
-rw-r--r-- app/workers/google_cloud/fetch_google_ip_list_worker.rb | 2
-rw-r--r-- data/deprecations/16-8-deprecate-pg_schema-backup-setting.yml | 14
-rw-r--r-- db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml | 9
-rw-r--r-- db/migrate/20240109145839_add_partition_id_to_pipeline_config.rb | 10
-rw-r--r-- db/post_migrate/20240110090352_queue_backfill_partition_id_ci_pipeline_config.rb | 26
-rw-r--r-- db/post_migrate/20240110093654_schedule_index_removal_ci_build_trace_metadata.rb | 18
-rw-r--r-- db/post_migrate/20240110094002_drop_index_from_ci_job_artifact_state.rb | 17
-rw-r--r-- db/post_migrate/20240110094510_remove_fk_from_ci_job_artifact_state.rb | 36
-rw-r--r-- db/schema_migrations/20240109145839 | 1
-rw-r--r-- db/schema_migrations/20240110090352 | 1
-rw-r--r-- db/schema_migrations/20240110093654 | 1
-rw-r--r-- db/schema_migrations/20240110094002 | 1
-rw-r--r-- db/schema_migrations/20240110094510 | 1
-rw-r--r-- db/structure.sql | 8
-rw-r--r-- doc/architecture/blueprints/cells/impacted_features/git-access.md | 37
-rw-r--r-- doc/architecture/blueprints/ci_builds_runner_fleet_metrics/ci_insights.md | 154
-rw-r--r-- doc/architecture/blueprints/ci_builds_runner_fleet_metrics/img/current_page.png | bin 0 -> 132200 bytes
-rw-r--r-- doc/architecture/blueprints/ci_builds_runner_fleet_metrics/index.md | 4
-rw-r--r-- doc/ci/yaml/index.md | 6
-rw-r--r-- doc/update/background_migrations.md | 207
-rw-r--r-- doc/update/background_migrations_troubleshooting.md | 210
-rw-r--r-- doc/update/deprecations.md | 19
-rw-r--r-- doc/update/versions/gitlab_14_changes.md | 2
-rw-r--r-- doc/user/application_security/dependency_scanning/index.md | 35
-rw-r--r-- doc/user/project/merge_requests/approvals/settings.md | 5
-rw-r--r-- lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config.rb | 37
-rw-r--r-- lib/gitlab/github_import/importer/replay_events_importer.rb | 4
-rw-r--r-- locale/gitlab.pot | 23
-rw-r--r-- spec/db/schema_spec.rb | 1
-rw-r--r-- spec/factories/ci/pipeline_config.rb | 8
-rw-r--r-- spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap | 2
-rw-r--r-- spec/frontend/custom_emoji/components/list_spec.js | 3
-rw-r--r-- spec/frontend/emoji/components/emoji_group_spec.js | 1
-rw-r--r-- spec/frontend/kubernetes_dashboard/components/workload_table_spec.js | 11
-rw-r--r-- spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap | 8
-rw-r--r-- spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js | 2
-rw-r--r-- spec/frontend/releases/components/app_edit_new_spec.js | 19
-rw-r--r-- spec/frontend/releases/stores/modules/detail/actions_spec.js | 52
-rw-r--r-- spec/frontend/releases/stores/modules/detail/getters_spec.js | 15
-rw-r--r-- spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb | 73
-rw-r--r-- spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb | 87
-rw-r--r-- spec/migrations/20240110090352_queue_backfill_partition_id_ci_pipeline_config_spec.rb | 56
-rw-r--r-- spec/models/ci/pipeline_config_spec.rb | 17
-rw-r--r-- spec/models/group_spec.rb | 10
-rw-r--r-- spec/requests/projects/google_cloud/databases_controller_spec.rb | 8
-rw-r--r-- spec/requests/projects/google_cloud/deployments_controller_spec.rb | 10
-rw-r--r-- spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb (renamed from spec/services/google_cloud/create_cloudsql_instance_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb (renamed from spec/services/google_cloud/create_service_accounts_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb (renamed from spec/services/google_cloud/enable_cloud_run_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb (renamed from spec/services/google_cloud/enable_cloudsql_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb (renamed from spec/services/google_cloud/enable_vision_ai_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb (renamed from spec/services/google_cloud/fetch_google_ip_list_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb (renamed from spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb (renamed from spec/services/google_cloud/generate_pipeline_service_spec.rb) | 22
-rw-r--r-- spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb (renamed from spec/services/google_cloud/get_cloudsql_instances_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb (renamed from spec/services/google_cloud/service_accounts_service_spec.rb) | 2
-rw-r--r-- spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb (renamed from spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb) | 2
-rw-r--r-- spec/support/helpers/database/duplicate_indexes.yml | 3
-rw-r--r-- spec/support/shared_examples/models/member_shared_examples.rb | 32
-rw-r--r-- spec/uploaders/object_storage/cdn/google_cdn_spec.rb | 2
-rw-r--r-- spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb | 8
-rw-r--r-- spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb | 2
115 files changed, 1858 insertions(+), 1188 deletions(-)
diff --git a/.rubocop_todo/layout/line_length.yml b/.rubocop_todo/layout/line_length.yml
index 1b241836da3..8ca02a6a1d0 100644
--- a/.rubocop_todo/layout/line_length.yml
+++ b/.rubocop_todo/layout/line_length.yml
@@ -496,8 +496,8 @@ Layout/LineLength:
- 'app/services/environments/schedule_to_delete_review_apps_service.rb'
- 'app/services/feature_flags/update_service.rb'
- 'app/services/git/process_ref_changes_service.rb'
- - 'app/services/google_cloud/create_service_accounts_service.rb'
- - 'app/services/google_cloud/generate_pipeline_service.rb'
+ - 'app/services/cloud_seed/google_cloud/create_service_accounts_service.rb'
+ - 'app/services/cloud_seed/google_cloud/generate_pipeline_service.rb'
- 'app/services/groups/create_service.rb'
- 'app/services/groups/destroy_service.rb'
- 'app/services/groups/transfer_service.rb'
@@ -4457,9 +4457,9 @@ Layout/LineLength:
- 'spec/services/files/update_service_spec.rb'
- 'spec/services/git/branch_hooks_service_spec.rb'
- 'spec/services/git/branch_push_service_spec.rb'
- - 'spec/services/google_cloud/enable_cloud_run_service_spec.rb'
- - 'spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb'
- - 'spec/services/google_cloud/service_accounts_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb'
- 'spec/services/groups/autocomplete_service_spec.rb'
- 'spec/services/groups/transfer_service_spec.rb'
- 'spec/services/groups/update_service_spec.rb'
diff --git a/.rubocop_todo/lint/symbol_conversion.yml b/.rubocop_todo/lint/symbol_conversion.yml
index eae5c89eef7..893e457ddff 100644
--- a/.rubocop_todo/lint/symbol_conversion.yml
+++ b/.rubocop_todo/lint/symbol_conversion.yml
@@ -5,7 +5,7 @@ Lint/SymbolConversion:
- 'app/helpers/breadcrumbs_helper.rb'
- 'app/helpers/environments_helper.rb'
- 'app/helpers/tooling/visual_review_helper.rb'
- - 'app/services/google_cloud/create_cloudsql_instance_service.rb'
+ - 'app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb'
- 'config/puma.rb'
- 'ee/app/components/billing/plan_component.rb'
- 'ee/app/controllers/projects/security/scanned_resources_controller.rb'
diff --git a/.rubocop_todo/naming/heredoc_delimiter_naming.yml b/.rubocop_todo/naming/heredoc_delimiter_naming.yml
index 92f5994092b..50f359fa448 100644
--- a/.rubocop_todo/naming/heredoc_delimiter_naming.yml
+++ b/.rubocop_todo/naming/heredoc_delimiter_naming.yml
@@ -105,7 +105,7 @@ Naming/HeredocDelimiterNaming:
- 'spec/services/ci/create_web_ide_terminal_service_spec.rb'
- 'spec/services/ci/parse_dotenv_artifact_service_spec.rb'
- 'spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb'
- - 'spec/services/google_cloud/generate_pipeline_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb'
- 'spec/services/task_list_toggle_service_spec.rb'
- 'spec/support/helpers/repo_helpers.rb'
- 'spec/support/helpers/seed_repo.rb'
diff --git a/.rubocop_todo/performance/map_compact.yml b/.rubocop_todo/performance/map_compact.yml
index 22cefbab067..576a610006e 100644
--- a/.rubocop_todo/performance/map_compact.yml
+++ b/.rubocop_todo/performance/map_compact.yml
@@ -31,7 +31,7 @@ Performance/MapCompact:
- 'app/services/events/render_service.rb'
- 'app/services/feature_flags/update_service.rb'
- 'app/services/git/base_hooks_service.rb'
- - 'app/services/google_cloud/fetch_google_ip_list_service.rb'
+ - 'app/services/cloud_seed/google_cloud/fetch_google_ip_list_service.rb'
- 'app/services/jira_import/start_import_service.rb'
- 'app/services/jira_import/users_mapper_service.rb'
- 'app/services/labels/available_labels_service.rb'
diff --git a/.rubocop_todo/rspec/before_all_role_assignment.yml b/.rubocop_todo/rspec/before_all_role_assignment.yml
index 8e1a2a28f1f..d1078cf4735 100644
--- a/.rubocop_todo/rspec/before_all_role_assignment.yml
+++ b/.rubocop_todo/rspec/before_all_role_assignment.yml
@@ -1402,7 +1402,7 @@ RSpec/BeforeAllRoleAssignment:
- 'spec/services/environments/schedule_to_delete_review_apps_service_spec.rb'
- 'spec/services/files/base_service_spec.rb'
- 'spec/services/git/branch_push_service_spec.rb'
- - 'spec/services/google_cloud/generate_pipeline_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb'
- 'spec/services/groups/auto_devops_service_spec.rb'
- 'spec/services/groups/autocomplete_service_spec.rb'
- 'spec/services/groups/group_links/create_service_spec.rb'
diff --git a/.rubocop_todo/rspec/context_wording.yml b/.rubocop_todo/rspec/context_wording.yml
index de893e11a73..fa8f44aeb9f 100644
--- a/.rubocop_todo/rspec/context_wording.yml
+++ b/.rubocop_todo/rspec/context_wording.yml
@@ -2558,7 +2558,7 @@ RSpec/ContextWording:
- 'spec/services/git/tag_push_service_spec.rb'
- 'spec/services/git/wiki_push_service/change_spec.rb'
- 'spec/services/git/wiki_push_service_spec.rb'
- - 'spec/services/google_cloud/generate_pipeline_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb'
- 'spec/services/gpg_keys/create_service_spec.rb'
- 'spec/services/groups/deploy_tokens/revoke_service_spec.rb'
- 'spec/services/groups/destroy_service_spec.rb'
diff --git a/.rubocop_todo/rspec/named_subject.yml b/.rubocop_todo/rspec/named_subject.yml
index 58076addbcf..ab6ddc4c89a 100644
--- a/.rubocop_todo/rspec/named_subject.yml
+++ b/.rubocop_todo/rspec/named_subject.yml
@@ -3269,8 +3269,8 @@ RSpec/NamedSubject:
- 'spec/services/git/process_ref_changes_service_spec.rb'
- 'spec/services/git/tag_push_service_spec.rb'
- 'spec/services/git/wiki_push_service_spec.rb'
- - 'spec/services/google_cloud/fetch_google_ip_list_service_spec.rb'
- - 'spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb'
- 'spec/services/gpg_keys/create_service_spec.rb'
- 'spec/services/gpg_keys/destroy_service_spec.rb'
- 'spec/services/groups/autocomplete_service_spec.rb'
diff --git a/.rubocop_todo/style/if_unless_modifier.yml b/.rubocop_todo/style/if_unless_modifier.yml
index db8f6d8d851..c99e96f3e55 100644
--- a/.rubocop_todo/style/if_unless_modifier.yml
+++ b/.rubocop_todo/style/if_unless_modifier.yml
@@ -245,7 +245,7 @@ Style/IfUnlessModifier:
- 'app/services/environments/canary_ingress/update_service.rb'
- 'app/services/environments/reset_auto_stop_service.rb'
- 'app/services/git/process_ref_changes_service.rb'
- - 'app/services/google_cloud/generate_pipeline_service.rb'
+ - 'app/services/cloud_seed/google_cloud/generate_pipeline_service.rb'
- 'app/services/groups/create_service.rb'
- 'app/services/groups/transfer_service.rb'
- 'app/services/groups/update_statistics_service.rb'
diff --git a/.rubocop_todo/style/inline_disable_annotation.yml b/.rubocop_todo/style/inline_disable_annotation.yml
index 473378c24fb..6c2c802d516 100644
--- a/.rubocop_todo/style/inline_disable_annotation.yml
+++ b/.rubocop_todo/style/inline_disable_annotation.yml
@@ -3126,7 +3126,7 @@ Style/InlineDisableAnnotation:
- 'spec/services/ci/pipeline_schedules/calculate_next_run_service_spec.rb'
- 'spec/services/draft_notes/destroy_service_spec.rb'
- 'spec/services/event_create_service_spec.rb'
- - 'spec/services/google_cloud/enable_cloudsql_service_spec.rb'
+ - 'spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb'
- 'spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb'
- 'spec/services/merge_requests/create_service_spec.rb'
- 'spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb'
diff --git a/app/assets/javascripts/kubernetes_dashboard/components/workload_details.vue b/app/assets/javascripts/kubernetes_dashboard/components/workload_details.vue
index 41fb2527036..bcc0ddf824a 100644
--- a/app/assets/javascripts/kubernetes_dashboard/components/workload_details.vue
+++ b/app/assets/javascripts/kubernetes_dashboard/components/workload_details.vue
@@ -50,7 +50,7 @@ export default {
<template>
<ul class="gl-list-style-none">
<workload-details-item :label="$options.i18n.name">
- {{ item.name }}
+ <span class="gl-word-break-word"> {{ item.name }}</span>
</workload-details-item>
<workload-details-item :label="$options.i18n.kind">
{{ item.kind }}
diff --git a/app/assets/javascripts/kubernetes_dashboard/components/workload_layout.vue b/app/assets/javascripts/kubernetes_dashboard/components/workload_layout.vue
index 8b1436b5486..6579e0229e6 100644
--- a/app/assets/javascripts/kubernetes_dashboard/components/workload_layout.vue
+++ b/app/assets/javascripts/kubernetes_dashboard/components/workload_layout.vue
@@ -1,6 +1,7 @@
<script>
import { GlLoadingIcon, GlAlert, GlDrawer } from '@gitlab/ui';
import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
+import { getContentWrapperHeight } from '~/lib/utils/dom_utils';
import WorkloadStats from './workload_stats.vue';
import WorkloadTable from './workload_table.vue';
import WorkloadDetails from './workload_details.vue';
@@ -36,7 +37,7 @@ export default {
fields: {
type: Array,
required: false,
- default: null,
+ default: undefined,
},
},
data() {
@@ -45,6 +46,11 @@ export default {
selectedItem: {},
};
},
+ computed: {
+ getDrawerHeaderHeight() {
+ return getContentWrapperHeight();
+ },
+ },
methods: {
closeDetailsDrawer() {
this.showDetailsDrawer = false;
@@ -68,12 +74,14 @@ export default {
<gl-drawer
:open="showDetailsDrawer"
- header-height="calc(var(--top-bar-height) + var(--performance-bar-height))"
+ :header-height="getDrawerHeaderHeight"
:z-index="$options.DRAWER_Z_INDEX"
@close="closeDetailsDrawer"
>
<template #title>
- <h4 class="gl-font-weight-bold gl-font-size-h2 gl-m-0">{{ selectedItem.name }}</h4>
+ <h4 class="gl-font-weight-bold gl-font-size-h2 gl-m-0 gl-word-break-word">
+ {{ selectedItem.name }}
+ </h4>
</template>
<template #default>
<workload-details :item="selectedItem" />
diff --git a/app/assets/javascripts/kubernetes_dashboard/components/workload_table.vue b/app/assets/javascripts/kubernetes_dashboard/components/workload_table.vue
index d3704863538..83940fb91c8 100644
--- a/app/assets/javascripts/kubernetes_dashboard/components/workload_table.vue
+++ b/app/assets/javascripts/kubernetes_dashboard/components/workload_table.vue
@@ -1,9 +1,9 @@
<script>
import { GlTable, GlBadge, GlPagination } from '@gitlab/ui';
+import { __ } from '~/locale';
import {
WORKLOAD_STATUS_BADGE_VARIANTS,
PAGE_SIZE,
- TABLE_HEADING_CLASSES,
DEFAULT_WORKLOAD_TABLE_FIELDS,
} from '../constants';
@@ -34,7 +34,6 @@ export default {
return this.fields.map((field) => {
return {
...field,
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
};
});
@@ -45,6 +44,9 @@ export default {
this.$emit('select-item', item);
},
},
+ i18n: {
+ emptyText: __('No results found'),
+ },
PAGE_SIZE,
WORKLOAD_STATUS_BADGE_VARIANTS,
TABLE_CELL_CLASSES: 'gl-p-2',
@@ -58,9 +60,10 @@ export default {
:fields="tableFields"
:per-page="$options.PAGE_SIZE"
:current-page="currentPage"
+ :empty-text="$options.i18n.emptyText"
tbody-tr-class="gl-hover-cursor-pointer"
+ show-empty
stacked="md"
- bordered
hover
@row-clicked="selectItem"
>
diff --git a/app/assets/javascripts/kubernetes_dashboard/constants.js b/app/assets/javascripts/kubernetes_dashboard/constants.js
index 0696fcab875..458a79cbcb6 100644
--- a/app/assets/javascripts/kubernetes_dashboard/constants.js
+++ b/app/assets/javascripts/kubernetes_dashboard/constants.js
@@ -30,20 +30,21 @@ export const WORKLOAD_STATUS_BADGE_VARIANTS = {
export const PAGE_SIZE = 20;
-export const TABLE_HEADING_CLASSES = 'gl-bg-gray-50! gl-font-weight-bold gl-white-space-nowrap';
-
export const DEFAULT_WORKLOAD_TABLE_FIELDS = [
{
key: 'name',
label: s__('KubernetesDashboard|Name'),
+ tdClass: 'gl-md-w-half gl-lg-w-40p gl-word-break-word',
},
{
key: 'status',
label: s__('KubernetesDashboard|Status'),
+ tdClass: 'gl-md-w-15',
},
{
key: 'namespace',
label: s__('KubernetesDashboard|Namespace'),
+ tdClass: 'gl-md-w-30p gl-lg-w-40p gl-word-break-word',
},
{
key: 'age',
diff --git a/app/assets/javascripts/releases/components/app_edit_new.vue b/app/assets/javascripts/releases/components/app_edit_new.vue
index 228007dd7d6..6fce9b4a129 100644
--- a/app/assets/javascripts/releases/components/app_edit_new.vue
+++ b/app/assets/javascripts/releases/components/app_edit_new.vue
@@ -55,6 +55,7 @@ export default {
'groupId',
'groupMilestonesAvailable',
'tagNotes',
+ 'isFetchingTagNotes',
]),
...mapGetters('editNew', ['isValid', 'formattedReleaseNotes']),
showForm() {
@@ -113,7 +114,7 @@ export default {
return this.isExistingRelease ? __('Save changes') : __('Create release');
},
isFormSubmissionDisabled() {
- return this.isUpdatingRelease || !this.isValid;
+ return this.isUpdatingRelease || !this.isValid || this.isFetchingTagNotes;
},
milestoneComboboxExtraLinks() {
return [
diff --git a/app/assets/javascripts/releases/components/tag_field_new.vue b/app/assets/javascripts/releases/components/tag_field_new.vue
index 04f3d73235b..370e920be02 100644
--- a/app/assets/javascripts/releases/components/tag_field_new.vue
+++ b/app/assets/javascripts/releases/components/tag_field_new.vue
@@ -43,6 +43,9 @@ export default {
return this.newTagName ? this.$options.i18n.createTag : this.$options.i18n.typeNew;
},
},
+ mounted() {
+ this.newTagName = this.release?.tagName || '';
+ },
methods: {
...mapActions('editNew', [
'setSearching',
diff --git a/app/assets/javascripts/releases/stores/modules/edit_new/actions.js b/app/assets/javascripts/releases/stores/modules/edit_new/actions.js
index 8bdfb057adc..a0d782a02a1 100644
--- a/app/assets/javascripts/releases/stores/modules/edit_new/actions.js
+++ b/app/assets/javascripts/releases/stores/modules/edit_new/actions.js
@@ -3,6 +3,7 @@ import { getTag } from '~/rest_api';
import { createAlert } from '~/alert';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
import AccessorUtilities from '~/lib/utils/accessor';
+import { HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
import { s__ } from '~/locale';
import createReleaseMutation from '~/releases/graphql/mutations/create_release.mutation.graphql';
import deleteReleaseMutation from '~/releases/graphql/mutations/delete_release.mutation.graphql';
@@ -245,7 +246,7 @@ export const updateRelease = async ({ commit, dispatch, state, getters }) => {
}
};
-export const fetchTagNotes = ({ commit, state }, tagName) => {
+export const fetchTagNotes = ({ commit, state, dispatch }, tagName) => {
commit(types.REQUEST_TAG_NOTES);
return getTag(state.projectId, tagName)
@@ -253,11 +254,15 @@ export const fetchTagNotes = ({ commit, state }, tagName) => {
commit(types.RECEIVE_TAG_NOTES_SUCCESS, data);
})
.catch((error) => {
+ if (error?.response?.status === HTTP_STATUS_NOT_FOUND) {
+ commit(types.RECEIVE_TAG_NOTES_SUCCESS, {});
+ return Promise.all([dispatch('setNewTag'), dispatch('setCreating')]);
+ }
createAlert({
message: s__('Release|Unable to fetch the tag notes.'),
});
- commit(types.RECEIVE_TAG_NOTES_ERROR, error);
+ return commit(types.RECEIVE_TAG_NOTES_ERROR, error);
});
};
@@ -326,7 +331,7 @@ export const clearDraftRelease = ({ getters }) => {
}
};
-export const loadDraftRelease = ({ commit, getters, state }) => {
+export const loadDraftRelease = ({ commit, getters, state, dispatch }) => {
try {
const release = window.localStorage.getItem(getters.localStorageKey);
const createFrom = window.localStorage.getItem(getters.localStorageCreateFromKey);
@@ -340,6 +345,10 @@ export const loadDraftRelease = ({ commit, getters, state }) => {
: state.originalReleasedAt,
});
commit(types.UPDATE_CREATE_FROM, JSON.parse(createFrom));
+
+ if (parsedRelease.tagName) {
+ dispatch('fetchTagNotes', parsedRelease.tagName);
+ }
} else {
commit(types.INITIALIZE_EMPTY_RELEASE);
}
diff --git a/app/assets/javascripts/releases/stores/modules/edit_new/getters.js b/app/assets/javascripts/releases/stores/modules/edit_new/getters.js
index 0b37c2b81d1..d1cde8b9029 100644
--- a/app/assets/javascripts/releases/stores/modules/edit_new/getters.js
+++ b/app/assets/javascripts/releases/stores/modules/edit_new/getters.js
@@ -170,13 +170,11 @@ export const releaseDeleteMutationVariables = (state) => ({
},
});
-export const formattedReleaseNotes = ({
- includeTagNotes,
- release: { description, tagMessage },
- tagNotes,
- showCreateFrom,
-}) => {
- const notes = showCreateFrom ? tagMessage : tagNotes;
+export const formattedReleaseNotes = (
+ { includeTagNotes, release: { description, tagMessage }, tagNotes },
+ { isNewTag },
+) => {
+ const notes = isNewTag ? tagMessage : tagNotes;
return includeTagNotes && notes
? `${description}\n\n### ${s__('Releases|Tag message')}\n\n${notes}\n`
: description;
diff --git a/app/assets/javascripts/releases/stores/modules/edit_new/state.js b/app/assets/javascripts/releases/stores/modules/edit_new/state.js
index 7bd3968dd93..a02949568b2 100644
--- a/app/assets/javascripts/releases/stores/modules/edit_new/state.js
+++ b/app/assets/javascripts/releases/stores/modules/edit_new/state.js
@@ -61,6 +61,7 @@ export default ({
updateError: null,
tagNotes: '',
+ isFetchingTagNotes: false,
includeTagNotes: false,
existingRelease: null,
originalReleasedAt: new Date(),
diff --git a/app/controllers/projects/google_cloud/configuration_controller.rb b/app/controllers/projects/google_cloud/configuration_controller.rb
index d35b2d54c53..3baa1210ec2 100644
--- a/app/controllers/projects/google_cloud/configuration_controller.rb
+++ b/app/controllers/projects/google_cloud/configuration_controller.rb
@@ -8,7 +8,7 @@ module Projects
configurationUrl: project_google_cloud_configuration_path(project),
deploymentsUrl: project_google_cloud_deployments_path(project),
databasesUrl: project_google_cloud_databases_path(project),
- serviceAccounts: ::GoogleCloud::ServiceAccountsService.new(project).find_for_project,
+ serviceAccounts: ::CloudSeed::GoogleCloud::ServiceAccountsService.new(project).find_for_project,
createServiceAccountUrl: project_google_cloud_service_accounts_path(project),
emptyIllustrationUrl:
ActionController::Base.helpers.image_path('illustrations/empty-state/empty-pipeline-md.svg'),
diff --git a/app/controllers/projects/google_cloud/databases_controller.rb b/app/controllers/projects/google_cloud/databases_controller.rb
index ea79efd9f4f..9023b8a5fa6 100644
--- a/app/controllers/projects/google_cloud/databases_controller.rb
+++ b/app/controllers/projects/google_cloud/databases_controller.rb
@@ -14,7 +14,7 @@ module Projects
cloudsqlPostgresUrl: new_project_google_cloud_database_path(project, :postgres),
cloudsqlMysqlUrl: new_project_google_cloud_database_path(project, :mysql),
cloudsqlSqlserverUrl: new_project_google_cloud_database_path(project, :sqlserver),
- cloudsqlInstances: ::GoogleCloud::GetCloudsqlInstancesService.new(project).execute,
+ cloudsqlInstances: ::CloudSeed::GoogleCloud::GetCloudsqlInstancesService.new(project).execute,
emptyIllustrationUrl:
ActionController::Base.helpers.image_path('illustrations/empty-state/empty-pipeline-md.svg')
}
@@ -46,7 +46,7 @@ module Projects
end
def create
- enable_response = ::GoogleCloud::EnableCloudsqlService
+ enable_response = ::CloudSeed::GoogleCloud::EnableCloudsqlService
.new(project, current_user, enable_service_params)
.execute
@@ -54,7 +54,7 @@ module Projects
track_event(:error_enable_cloudsql_services)
flash[:alert] = error_message(enable_response[:message])
else
- create_response = ::GoogleCloud::CreateCloudsqlInstanceService
+ create_response = ::CloudSeed::GoogleCloud::CreateCloudsqlInstanceService
.new(project, current_user, create_service_params)
.execute
diff --git a/app/controllers/projects/google_cloud/deployments_controller.rb b/app/controllers/projects/google_cloud/deployments_controller.rb
index 92c99ad4271..e4666f9335c 100644
--- a/app/controllers/projects/google_cloud/deployments_controller.rb
+++ b/app/controllers/projects/google_cloud/deployments_controller.rb
@@ -17,7 +17,7 @@ class Projects::GoogleCloud::DeploymentsController < Projects::GoogleCloud::Base
def cloud_run
params = { google_oauth2_token: token_in_session }
- enable_cloud_run_response = GoogleCloud::EnableCloudRunService
+ enable_cloud_run_response = CloudSeed::GoogleCloud::EnableCloudRunService
.new(project, current_user, params).execute
if enable_cloud_run_response[:status] == :error
@@ -25,8 +25,8 @@ class Projects::GoogleCloud::DeploymentsController < Projects::GoogleCloud::Base
flash[:alert] = enable_cloud_run_response[:message]
redirect_to project_google_cloud_deployments_path(project)
else
- params = { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
- generate_pipeline_response = GoogleCloud::GeneratePipelineService
+ params = { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ generate_pipeline_response = CloudSeed::GoogleCloud::GeneratePipelineService
.new(project, current_user, params).execute
if generate_pipeline_response[:status] == :error
diff --git a/app/controllers/projects/google_cloud/gcp_regions_controller.rb b/app/controllers/projects/google_cloud/gcp_regions_controller.rb
index c51261721b2..593e27eeebf 100644
--- a/app/controllers/projects/google_cloud/gcp_regions_controller.rb
+++ b/app/controllers/projects/google_cloud/gcp_regions_controller.rb
@@ -20,7 +20,7 @@ class Projects::GoogleCloud::GcpRegionsController < Projects::GoogleCloud::BaseC
def create
permitted_params = params.permit(:ref, :gcp_region)
- GoogleCloud::GcpRegionAddOrReplaceService.new(project).execute(permitted_params[:ref], permitted_params[:gcp_region])
+ CloudSeed::GoogleCloud::GcpRegionAddOrReplaceService.new(project).execute(permitted_params[:ref], permitted_params[:gcp_region])
track_event(:configure_region)
redirect_to project_google_cloud_configuration_path(project), notice: _('GCP region configured')
end
diff --git a/app/controllers/projects/google_cloud/service_accounts_controller.rb b/app/controllers/projects/google_cloud/service_accounts_controller.rb
index 7b029e25ea2..5a5f53943c0 100644
--- a/app/controllers/projects/google_cloud/service_accounts_controller.rb
+++ b/app/controllers/projects/google_cloud/service_accounts_controller.rb
@@ -27,7 +27,7 @@ class Projects::GoogleCloud::ServiceAccountsController < Projects::GoogleCloud::
def create
permitted_params = params.permit(:gcp_project, :ref)
- response = GoogleCloud::CreateServiceAccountsService.new(
+ response = CloudSeed::GoogleCloud::CreateServiceAccountsService.new(
project,
current_user,
google_oauth2_token: token_in_session,
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index bb980b9327a..71dc5521a4d 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -99,7 +99,9 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
validates :default_branch_protection_defaults, json_schema: { filename: 'default_branch_protection_defaults' }
validates :default_branch_protection_defaults, bytesize: { maximum: -> { DEFAULT_BRANCH_PROTECTIONS_DEFAULT_MAX_SIZE } }
- validates :failed_login_attempts_unlock_period_in_minutes,
+ validates :external_pipeline_validation_service_timeout,
+ :failed_login_attempts_unlock_period_in_minutes,
+ :max_login_attempts,
allow_nil: true,
numericality: { only_integer: true, greater_than: 0 }
@@ -118,10 +120,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
allow_nil: false,
qualified_domain_array: true
- validates :session_expire_delay,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
validates :minimum_password_length,
presence: true,
numericality: {
@@ -222,38 +220,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
hostname: true,
length: { maximum: 255 }
- validates :max_attachment_size,
- presence: true,
- numericality: { only_integer: true, greater_than: 0 }
-
- validates :max_artifacts_size,
- presence: true,
- numericality: { only_integer: true, greater_than: 0 }
-
- validates :max_export_size,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :max_import_size,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :max_import_remote_file_size,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :bulk_import_max_download_file_size,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :max_decompressed_archive_size,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :max_login_attempts,
- allow_nil: true,
- numericality: { only_integer: true, greater_than: 0 }
-
validates :max_pages_size,
presence: true,
numericality: {
@@ -261,31 +227,11 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
less_than: ::Gitlab::Pages::MAX_SIZE / 1.megabyte
}
- validates :max_pages_custom_domains_per_project,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :jobs_per_stage_page_size,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :max_terraform_state_size_bytes,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
validates :default_artifacts_expire_in, presence: true, duration: true
validates :container_expiration_policies_enable_historic_entries,
inclusion: { in: [true, false], message: N_('must be a boolean value') }
- validates :container_registry_token_expire_delay,
- presence: true,
- numericality: { only_integer: true, greater_than: 0 }
-
- validates :decompress_archive_file_timeout,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
validate :check_repository_storages_weighted
validates :auto_devops_domain,
@@ -300,14 +246,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
presence: { message: 'Domain denylist cannot be empty if denylist is enabled.' },
if: :domain_denylist_enabled?
- validates :housekeeping_optimize_repository_period,
- presence: true,
- numericality: { only_integer: true, greater_than: 0 }
-
- validates :terminal_max_session_time,
- presence: true,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
validates :polling_interval_multiplier,
presence: true,
numericality: { greater_than_or_equal_to: 0 }
@@ -413,59 +351,26 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
length: { maximum: 100, message: N_('is too long (maximum is 100 entries)') },
allow_nil: false
- validates :push_event_hooks_limit,
- numericality: { greater_than_or_equal_to: 0 }
-
validates :push_event_activities_limit,
+ :push_event_hooks_limit,
numericality: { greater_than_or_equal_to: 0 }
- validates :snippet_size_limit, numericality: { only_integer: true, greater_than: 0 }
validates :wiki_page_max_content_bytes, numericality: { only_integer: true, greater_than_or_equal_to: 1.kilobytes }
validates :wiki_asciidoc_allow_uri_includes, inclusion: { in: [true, false], message: N_('must be a boolean value') }
- validates :max_yaml_size_bytes, numericality: { only_integer: true, greater_than: 0 }, presence: true
- validates :max_yaml_depth, numericality: { only_integer: true, greater_than: 0 }, presence: true
-
- validates :ci_max_total_yaml_size_bytes, numericality: { only_integer: true, greater_than_or_equal_to: 0 }, presence: true
-
- validates :ci_max_includes, numericality: { only_integer: true, greater_than_or_equal_to: 0 }, presence: true
validates :email_restrictions, untrusted_regexp: true
validates :hashed_storage_enabled, inclusion: { in: [true], message: N_("Hashed storage can't be disabled anymore for new projects") }
- validates :container_registry_delete_tags_service_timeout,
- :container_registry_cleanup_tags_service_max_list_size,
- :container_registry_data_repair_detail_worker_max_concurrency,
- :container_registry_expiration_policies_worker_capacity,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
validates :container_registry_expiration_policies_caching,
inclusion: { in: [true, false], message: N_('must be a boolean value') }
- validates :container_registry_import_max_tags_count,
- :container_registry_import_max_retries,
- :container_registry_import_start_max_retries,
- :container_registry_import_max_step_duration,
- :container_registry_pre_import_timeout,
- :container_registry_import_timeout,
- allow_nil: false,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
validates :container_registry_pre_import_tags_rate,
allow_nil: false,
numericality: { greater_than_or_equal_to: 0 }
validates :container_registry_import_target_plan, presence: true
validates :container_registry_import_created_before, presence: true
- validates :dependency_proxy_ttl_group_policy_worker_capacity,
- allow_nil: false,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :packages_cleanup_package_file_worker_capacity,
- :package_registry_cleanup_policies_worker_capacity,
- allow_nil: false,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
validates :invisible_captcha_enabled,
inclusion: { in: [true, false], message: N_('must be a boolean value') }
@@ -584,15 +489,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
length: { maximum: 255 },
allow_blank: true
- validates :issues_create_limit,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :raw_blob_request_limit,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :pipeline_limit_per_project_user_sha,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
validates :ci_jwt_signing_key,
rsa_key: true, allow_nil: true
@@ -619,40 +515,83 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
validates :slack_app_verification_token
end
- with_options(presence: true, numericality: { only_integer: true, greater_than: 0 }) do
- validates :throttle_unauthenticated_api_requests_per_period
- validates :throttle_unauthenticated_api_period_in_seconds
- validates :throttle_unauthenticated_requests_per_period
- validates :throttle_unauthenticated_period_in_seconds
- validates :throttle_unauthenticated_packages_api_requests_per_period
- validates :throttle_unauthenticated_packages_api_period_in_seconds
- validates :throttle_unauthenticated_files_api_requests_per_period
- validates :throttle_unauthenticated_files_api_period_in_seconds
- validates :throttle_unauthenticated_deprecated_api_requests_per_period
- validates :throttle_unauthenticated_deprecated_api_period_in_seconds
- validates :throttle_authenticated_api_requests_per_period
- validates :throttle_authenticated_api_period_in_seconds
- validates :throttle_authenticated_git_lfs_requests_per_period
- validates :throttle_authenticated_git_lfs_period_in_seconds
- validates :throttle_authenticated_web_requests_per_period
- validates :throttle_authenticated_web_period_in_seconds
- validates :throttle_authenticated_packages_api_requests_per_period
- validates :throttle_authenticated_packages_api_period_in_seconds
- validates :throttle_authenticated_files_api_requests_per_period
- validates :throttle_authenticated_files_api_period_in_seconds
- validates :throttle_authenticated_deprecated_api_requests_per_period
- validates :throttle_authenticated_deprecated_api_period_in_seconds
- validates :throttle_protected_paths_requests_per_period
- validates :throttle_protected_paths_period_in_seconds
- validates :project_jobs_api_rate_limit
+ with_options(numericality: { only_integer: true, greater_than: 0 }) do
+ validates :bulk_import_concurrent_pipeline_batch_limit,
+ :container_registry_token_expire_delay,
+ :housekeeping_optimize_repository_period,
+ :inactive_projects_delete_after_months,
+ :max_artifacts_size,
+ :max_attachment_size,
+ :max_yaml_depth,
+ :max_yaml_size_bytes,
+ :namespace_aggregation_schedule_lease_duration_in_seconds,
+ :project_jobs_api_rate_limit,
+ :snippet_size_limit,
+ :throttle_authenticated_api_period_in_seconds,
+ :throttle_authenticated_api_requests_per_period,
+ :throttle_authenticated_deprecated_api_period_in_seconds,
+ :throttle_authenticated_deprecated_api_requests_per_period,
+ :throttle_authenticated_files_api_period_in_seconds,
+ :throttle_authenticated_files_api_requests_per_period,
+ :throttle_authenticated_git_lfs_period_in_seconds,
+ :throttle_authenticated_git_lfs_requests_per_period,
+ :throttle_authenticated_packages_api_period_in_seconds,
+ :throttle_authenticated_packages_api_requests_per_period,
+ :throttle_authenticated_web_period_in_seconds,
+ :throttle_authenticated_web_requests_per_period,
+ :throttle_protected_paths_period_in_seconds,
+ :throttle_protected_paths_requests_per_period,
+ :throttle_unauthenticated_api_period_in_seconds,
+ :throttle_unauthenticated_api_requests_per_period,
+ :throttle_unauthenticated_deprecated_api_period_in_seconds,
+ :throttle_unauthenticated_deprecated_api_requests_per_period,
+ :throttle_unauthenticated_files_api_period_in_seconds,
+ :throttle_unauthenticated_files_api_requests_per_period,
+ :throttle_unauthenticated_packages_api_period_in_seconds,
+ :throttle_unauthenticated_packages_api_requests_per_period,
+ :throttle_unauthenticated_period_in_seconds,
+ :throttle_unauthenticated_requests_per_period
end
with_options(numericality: { only_integer: true, greater_than_or_equal_to: 0 }) do
- validates :notes_create_limit
- validates :search_rate_limit
- validates :search_rate_limit_unauthenticated
- validates :projects_api_rate_limit_unauthenticated
- validates :gitlab_shell_operation_limit
+ validates :bulk_import_max_download_file_size,
+ :ci_max_includes,
+ :ci_max_total_yaml_size_bytes,
+ :container_registry_cleanup_tags_service_max_list_size,
+ :container_registry_data_repair_detail_worker_max_concurrency,
+ :container_registry_delete_tags_service_timeout,
+ :container_registry_expiration_policies_worker_capacity,
+ :container_registry_import_max_retries,
+ :container_registry_import_max_step_duration,
+ :container_registry_import_max_tags_count,
+ :container_registry_import_start_max_retries,
+ :container_registry_import_timeout,
+ :container_registry_pre_import_timeout,
+ :decompress_archive_file_timeout,
+ :dependency_proxy_ttl_group_policy_worker_capacity,
+ :gitlab_shell_operation_limit,
+ :inactive_projects_min_size_mb,
+ :issues_create_limit,
+ :jobs_per_stage_page_size,
+ :max_decompressed_archive_size,
+ :max_export_size,
+ :max_import_remote_file_size,
+ :max_import_size,
+ :max_pages_custom_domains_per_project,
+ :max_terraform_state_size_bytes,
+ :notes_create_limit,
+ :package_registry_cleanup_policies_worker_capacity,
+ :packages_cleanup_package_file_worker_capacity,
+ :pipeline_limit_per_project_user_sha,
+ :projects_api_rate_limit_unauthenticated,
+ :raw_blob_request_limit,
+ :search_rate_limit,
+ :search_rate_limit_unauthenticated,
+ :session_expire_delay,
+ :sidekiq_job_limiter_compression_threshold_bytes,
+ :sidekiq_job_limiter_limit_bytes,
+ :terminal_max_session_time,
+ :users_get_by_id_limit
end
validates :search_rate_limit_allowlist,
@@ -669,10 +608,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
validates :external_pipeline_validation_service_url,
addressable_url: ADDRESSABLE_URL_VALIDATION_OPTIONS, allow_blank: true
- validates :external_pipeline_validation_service_timeout,
- allow_nil: true,
- numericality: { only_integer: true, greater_than: 0 }
-
validates :whats_new_variant,
inclusion: { in: ApplicationSetting.whats_new_variants.keys }
@@ -686,10 +621,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
validates :sidekiq_job_limiter_mode,
inclusion: { in: self.sidekiq_job_limiter_modes }
- validates :sidekiq_job_limiter_compression_threshold_bytes,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
- validates :sidekiq_job_limiter_limit_bytes,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :sentry_enabled,
inclusion: { in: [true, false], message: N_('must be a boolean value') }
@@ -711,8 +642,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
length: { maximum: 255 },
if: :error_tracking_enabled?
- validates :users_get_by_id_limit,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :users_get_by_id_limit_allowlist,
length: { maximum: 100, message: N_('is too long (maximum is 100 entries)') },
allow_nil: false
@@ -724,20 +653,11 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
presence: true,
if: :update_runner_versions_enabled?
- validates :inactive_projects_min_size_mb,
- numericality: { only_integer: true, greater_than_or_equal_to: 0 }
-
- validates :inactive_projects_delete_after_months,
- numericality: { only_integer: true, greater_than: 0 }
-
validates :inactive_projects_send_warning_email_after_months,
numericality: { only_integer: true, greater_than: 0, less_than: :inactive_projects_delete_after_months }
validates :prometheus_alert_db_indicators_settings, json_schema: { filename: 'application_setting_prometheus_alert_db_indicators_settings' }, allow_nil: true
- validates :namespace_aggregation_schedule_lease_duration_in_seconds,
- numericality: { only_integer: true, greater_than: 0 }
-
validates :sentry_clientside_traces_sample_rate,
presence: true,
numericality: { greater_than_or_equal_to: 0, less_than_or_equal_to: 1, message: N_('must be a value between 0 and 1') }
@@ -815,10 +735,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
allow_nil: false,
inclusion: { in: [true, false], message: N_('must be a boolean value') }
- validates :bulk_import_concurrent_pipeline_batch_limit,
- presence: true,
- numericality: { only_integer: true, greater_than: 0 }
-
validates :allow_runner_registration_token,
allow_nil: false,
inclusion: { in: [true, false], message: N_('must be a boolean value') }
diff --git a/app/models/ci/pipeline_config.rb b/app/models/ci/pipeline_config.rb
index e2dcad653d7..11decd3fc66 100644
--- a/app/models/ci/pipeline_config.rb
+++ b/app/models/ci/pipeline_config.rb
@@ -2,11 +2,15 @@
module Ci
class PipelineConfig < Ci::ApplicationRecord
+ include Ci::Partitionable
+
self.table_name = 'ci_pipelines_config'
self.primary_key = :pipeline_id
belongs_to :pipeline, class_name: "Ci::Pipeline", inverse_of: :pipeline_config
validates :pipeline, presence: true
validates :content, presence: true
+
+ partitionable scope: :pipeline
end
end
diff --git a/app/models/concerns/ci/partitionable/testing.rb b/app/models/concerns/ci/partitionable/testing.rb
index 86a7d5fcf38..9f0d55329ad 100644
--- a/app/models/concerns/ci/partitionable/testing.rb
+++ b/app/models/concerns/ci/partitionable/testing.rb
@@ -23,6 +23,7 @@ module Ci
Ci::RunnerManagerBuild
Ci::PipelineArtifact
Ci::PipelineChatData
+ Ci::PipelineConfig
Ci::PipelineMetadata
Ci::PipelineVariable
Ci::Sources::Pipeline
diff --git a/app/models/group.rb b/app/models/group.rb
index bbd1ec7cca0..bbf34ce21c0 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -707,7 +707,11 @@ class Group < Namespace
end
def highest_group_member(user)
- GroupMember.where(source_id: self_and_ancestors_ids, user_id: user.id).order(:access_level).last
+ GroupMember
+ .where(source_id: self_and_ancestors_ids, user_id: user.id)
+ .non_request
+ .order(:access_level)
+ .last
end
def bots
diff --git a/app/models/member.rb b/app/models/member.rb
index d3101656739..8bec64932b3 100644
--- a/app/models/member.rb
+++ b/app/models/member.rb
@@ -496,7 +496,10 @@ class Member < ApplicationRecord
strong_memoize(:highest_group_member) do
next unless user_id && source&.ancestors&.any?
- GroupMember.where(source: source.ancestors, user_id: user_id).order(:access_level).last
+ GroupMember
+ .where(source: source.ancestors, user_id: user_id)
+ .non_request
+ .order(:access_level).last
end
end
diff --git a/app/services/cloud_seed/google_cloud/base_service.rb b/app/services/cloud_seed/google_cloud/base_service.rb
new file mode 100644
index 00000000000..e59031c5371
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/base_service.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class BaseService < ::BaseService
+ protected
+
+ def google_oauth2_token
+ @params[:google_oauth2_token]
+ end
+
+ def gcp_project_id
+ @params[:gcp_project_id]
+ end
+
+ def environment_name
+ @params[:environment_name]
+ end
+
+ def google_api_client
+ @google_api_client_instance ||= GoogleApi::CloudPlatform::Client.new(google_oauth2_token, nil)
+ end
+
+ def unique_gcp_project_ids
+ filter_params = { key: 'GCP_PROJECT_ID' }
+ @unique_gcp_project_ids ||= ::Ci::VariablesFinder.new(project, filter_params).execute.map(&:value).uniq
+ end
+
+ def group_vars_by_environment(keys)
+ filtered_vars = project.variables.filter { |variable| keys.include? variable.key }
+ filtered_vars.each_with_object({}) do |variable, grouped|
+ grouped[variable.environment_scope] ||= {}
+ grouped[variable.environment_scope][variable.key] = variable.value
+ end
+ end
+
+ def create_or_replace_project_vars(environment_scope, key, value, is_protected, is_masked = false)
+ change_params = {
+ variable_params: {
+ key: key,
+ value: value,
+ environment_scope: environment_scope,
+ protected: is_protected,
+ masked: is_masked
+ }
+ }
+ existing_variable = find_existing_variable(environment_scope, key)
+
+ if existing_variable
+ change_params[:action] = :update
+ change_params[:variable] = existing_variable
+ else
+ change_params[:action] = :create
+ end
+
+ ::Ci::ChangeVariableService.new(container: project, current_user: current_user, params: change_params).execute
+ end
+
+ private
+
+ def find_existing_variable(environment_scope, key)
+ filter_params = { key: key, filter: { environment_scope: environment_scope } }
+ ::Ci::VariablesFinder.new(project, filter_params).execute.first
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb b/app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb
new file mode 100644
index 00000000000..8b967a2d551
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ DEFAULT_REGION = 'us-east1'
+
+ class CreateCloudsqlInstanceService < ::CloudSeed::GoogleCloud::BaseService
+ WORKER_INTERVAL = 30.seconds
+
+ def execute
+ create_cloud_instance
+ trigger_instance_setup_worker
+ success
+ rescue Google::Apis::Error => err
+ error(err.message)
+ end
+
+ private
+
+ def create_cloud_instance
+ google_api_client.create_cloudsql_instance(
+ gcp_project_id,
+ instance_name,
+ root_password,
+ database_version,
+ region,
+ tier
+ )
+ end
+
+ def trigger_instance_setup_worker
+ ::GoogleCloud::CreateCloudsqlInstanceWorker.perform_in(
+ WORKER_INTERVAL,
+ current_user.id,
+ project.id,
+ {
+ 'google_oauth2_token': google_oauth2_token,
+ 'gcp_project_id': gcp_project_id,
+ 'instance_name': instance_name,
+ 'database_version': database_version,
+ 'environment_name': environment_name,
+ 'is_protected': protected?
+ }
+ )
+ end
+
+ def protected?
+ project.protected_for?(environment_name)
+ end
+
+ def instance_name
+ # Generates an `instance_name` for the to-be-created Cloud SQL instance
+ # Example: `gitlab-34647-postgres-14-staging`
+ environment_alias = environment_name == '*' ? 'ALL' : environment_name
+ name = "gitlab-#{project.id}-#{database_version}-#{environment_alias}"
+ name.tr("_", "-").downcase
+ end
+
+ def root_password
+ SecureRandom.hex(16)
+ end
+
+ def database_version
+ params[:database_version]
+ end
+
+ def region
+ region = ::Ci::VariablesFinder
+ .new(project, { key: Projects::GoogleCloud::GcpRegionsController::GCP_REGION_CI_VAR_KEY,
+ environment_scope: environment_name })
+ .execute.first
+ region&.value || DEFAULT_REGION
+ end
+
+ def tier
+ params[:tier]
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/create_service_accounts_service.rb b/app/services/cloud_seed/google_cloud/create_service_accounts_service.rb
new file mode 100644
index 00000000000..f15779cc14b
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/create_service_accounts_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class CreateServiceAccountsService < ::CloudSeed::GoogleCloud::BaseService
+ def execute
+ service_account = google_api_client.create_service_account(gcp_project_id, service_account_name, service_account_desc)
+ service_account_key = google_api_client.create_service_account_key(gcp_project_id, service_account.unique_id)
+ google_api_client.grant_service_account_roles(gcp_project_id, service_account.email)
+
+ service_accounts_service.add_for_project(
+ environment_name,
+ service_account.project_id,
+ Gitlab::Json.dump(service_account),
+ Gitlab::Json.dump(service_account_key),
+ ProtectedBranch.protected?(project, environment_name) || ProtectedTag.protected?(project, environment_name)
+ )
+
+ ServiceResponse.success(message: _('Service account generated successfully'), payload: {
+ service_account: service_account,
+ service_account_key: service_account_key
+ })
+ end
+
+ private
+
+ def service_accounts_service
+ GoogleCloud::ServiceAccountsService.new(project)
+ end
+
+ def service_account_name
+ "GitLab :: #{project.name} :: #{environment_name}"
+ end
+
+ def service_account_desc
+ "GitLab generated service account for project '#{project.name}' and environment '#{environment_name}'"
+ end
+ end
+ end
+end
+
+CloudSeed::GoogleCloud::CreateServiceAccountsService.prepend_mod
diff --git a/app/services/cloud_seed/google_cloud/enable_cloud_run_service.rb b/app/services/cloud_seed/google_cloud/enable_cloud_run_service.rb
new file mode 100644
index 00000000000..3ab5608c937
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/enable_cloud_run_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class EnableCloudRunService < ::CloudSeed::GoogleCloud::BaseService
+ def execute
+ gcp_project_ids = unique_gcp_project_ids
+
+ if gcp_project_ids.empty?
+ error("No GCP projects found. Configure a service account or GCP_PROJECT_ID ci variable.")
+ else
+ gcp_project_ids.each do |gcp_project_id|
+ google_api_client.enable_cloud_run(gcp_project_id)
+ google_api_client.enable_artifacts_registry(gcp_project_id)
+ google_api_client.enable_cloud_build(gcp_project_id)
+ end
+
+ success({ gcp_project_ids: gcp_project_ids })
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/enable_cloudsql_service.rb b/app/services/cloud_seed/google_cloud/enable_cloudsql_service.rb
new file mode 100644
index 00000000000..d36f3ffd7c2
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/enable_cloudsql_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class EnableCloudsqlService < ::CloudSeed::GoogleCloud::BaseService
+ def execute
+ create_or_replace_project_vars(environment_name, 'GCP_PROJECT_ID', gcp_project_id, ci_var_protected?)
+
+ unique_gcp_project_ids.each do |gcp_project_id|
+ google_api_client.enable_cloud_sql_admin(gcp_project_id)
+ google_api_client.enable_compute(gcp_project_id)
+ google_api_client.enable_service_networking(gcp_project_id)
+ end
+
+ success({ gcp_project_ids: unique_gcp_project_ids })
+ rescue Google::Apis::Error => err
+ error(err.message)
+ end
+
+ private
+
+ def ci_var_protected?
+ ProtectedBranch.protected?(project, environment_name) || ProtectedTag.protected?(project, environment_name)
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/enable_vision_ai_service.rb b/app/services/cloud_seed/google_cloud/enable_vision_ai_service.rb
new file mode 100644
index 00000000000..865c11cba6a
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/enable_vision_ai_service.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class EnableVisionAiService < ::CloudSeed::GoogleCloud::BaseService
+ def execute
+ gcp_project_ids = unique_gcp_project_ids
+
+ if gcp_project_ids.empty?
+ error("No GCP projects found. Configure a service account or GCP_PROJECT_ID ci variable.")
+ else
+ gcp_project_ids.each do |gcp_project_id|
+ google_api_client.enable_vision_api(gcp_project_id)
+ end
+
+ success({ gcp_project_ids: gcp_project_ids })
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/fetch_google_ip_list_service.rb b/app/services/cloud_seed/google_cloud/fetch_google_ip_list_service.rb
new file mode 100644
index 00000000000..c02b3a87352
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/fetch_google_ip_list_service.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class FetchGoogleIpListService
+ include BaseServiceUtility
+
+ GOOGLE_IP_RANGES_URL = 'https://www.gstatic.com/ipranges/cloud.json'
+ RESPONSE_BODY_LIMIT = 1.megabyte
+ EXPECTED_CONTENT_TYPE = 'application/json'
+
+ IpListNotRetrievedError = Class.new(StandardError)
+
+ def execute
+ # Prevent too many workers from hitting the same HTTP endpoint
+ if ::Gitlab::ApplicationRateLimiter.throttled?(:fetch_google_ip_list, scope: nil)
+ return error("#{self.class} was rate limited")
+ end
+
+ subnets = fetch_and_update_cache!
+
+ Gitlab::AppJsonLogger.info(
+ class: self.class.name,
+ message: 'Successfully retrieved Google IP list',
+ subnet_count: subnets.count
+ )
+
+ success({ subnets: subnets })
+ rescue IpListNotRetrievedError => err
+ Gitlab::ErrorTracking.log_exception(err)
+ error('Google IP list not retrieved')
+ end
+
+ private
+
+ # Attempts to retrieve and parse the list of IPs from Google. Updates
+ # the internal cache so that the data is accessible.
+ #
+ # Returns an array of IPAddr objects consisting of subnets.
+ def fetch_and_update_cache!
+ parsed_response = fetch_google_ip_list
+
+ parse_google_prefixes(parsed_response).tap do |subnets|
+ ::ObjectStorage::CDN::GoogleIpCache.update!(subnets)
+ end
+ end
+
+ def fetch_google_ip_list
+ response = Gitlab::HTTP.get(GOOGLE_IP_RANGES_URL, follow_redirects: false, allow_local_requests: false)
+
+ validate_response!(response)
+
+ response.parsed_response
+ end
+
+ def validate_response!(response)
+ raise IpListNotRetrievedError, "response was #{response.code}" unless response.code == 200
+ raise IpListNotRetrievedError, "response was nil" unless response.body
+
+ parsed_response = response.parsed_response
+
+ unless response.content_type == EXPECTED_CONTENT_TYPE && parsed_response.is_a?(Hash)
+ raise IpListNotRetrievedError, "response was not JSON"
+ end
+
+ if response.body&.bytesize.to_i > RESPONSE_BODY_LIMIT
+ raise IpListNotRetrievedError, "response was too large: #{response.body.bytesize}"
+ end
+
+ prefixes = parsed_response['prefixes']
+
+ raise IpListNotRetrievedError, "JSON was type #{prefixes.class}, expected Array" unless prefixes.is_a?(Array)
+ raise IpListNotRetrievedError, "#{GOOGLE_IP_RANGES_URL} did not return any IP ranges" if prefixes.empty?
+
+ response.parsed_response
+ end
+
+ def parse_google_prefixes(parsed_response)
+ ranges = parsed_response['prefixes'].map do |prefix|
+ ip_range = prefix['ipv4Prefix'] || prefix['ipv6Prefix']
+
+ next unless ip_range
+
+ IPAddr.new(ip_range)
+ end.compact
+
+ raise IpListNotRetrievedError, "#{GOOGLE_IP_RANGES_URL} did not return any IP ranges" if ranges.empty?
+
+ ranges
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service.rb b/app/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service.rb
new file mode 100644
index 00000000000..11a644b3e9d
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class GcpRegionAddOrReplaceService < ::CloudSeed::GoogleCloud::BaseService
+ def execute(environment, region)
+ gcp_region_key = Projects::GoogleCloud::GcpRegionsController::GCP_REGION_CI_VAR_KEY
+
+ change_params = { variable_params: { key: gcp_region_key, value: region, environment_scope: environment } }
+ filter_params = { key: gcp_region_key, filter: { environment_scope: environment } }
+
+ existing_variable = ::Ci::VariablesFinder.new(project, filter_params).execute.first
+
+ if existing_variable
+ change_params[:action] = :update
+ change_params[:variable] = existing_variable
+ else
+ change_params[:action] = :create
+ end
+
+ ::Ci::ChangeVariableService.new(container: project, current_user: current_user, params: change_params).execute
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/generate_pipeline_service.rb b/app/services/cloud_seed/google_cloud/generate_pipeline_service.rb
new file mode 100644
index 00000000000..d8b45f301ec
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/generate_pipeline_service.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class GeneratePipelineService < ::CloudSeed::GoogleCloud::BaseService
+ ACTION_DEPLOY_TO_CLOUD_RUN = 'DEPLOY_TO_CLOUD_RUN'
+ ACTION_DEPLOY_TO_CLOUD_STORAGE = 'DEPLOY_TO_CLOUD_STORAGE'
+ ACTION_VISION_AI_PIPELINE = 'VISION_AI_PIPELINE'
+
+ def execute
+ commit_attributes = generate_commit_attributes
+ create_branch_response = ::Branches::CreateService.new(project, current_user)
+ .execute(commit_attributes[:branch_name], project.default_branch)
+
+ if create_branch_response[:status] == :error
+ return create_branch_response
+ end
+
+ branch = create_branch_response[:branch]
+
+ service = default_branch_gitlab_ci_yml.present? ? ::Files::UpdateService : ::Files::CreateService
+
+ commit_response = service.new(project, current_user, commit_attributes).execute
+
+ if commit_response[:status] == :error
+ return commit_response
+ end
+
+ success({ branch_name: branch.name, commit: commit_response })
+ end
+
+ private
+
+ def action
+ @params[:action]
+ end
+
+ def generate_commit_attributes
+ case action
+ when ACTION_DEPLOY_TO_CLOUD_RUN
+ branch_name = "deploy-to-cloud-run-#{SecureRandom.hex(8)}"
+ {
+ commit_message: 'Enable Cloud Run deployments',
+ file_path: '.gitlab-ci.yml',
+ file_content: pipeline_content('gcp/cloud-run.gitlab-ci.yml'),
+ branch_name: branch_name,
+ start_branch: branch_name
+ }
+ when ACTION_DEPLOY_TO_CLOUD_STORAGE
+ branch_name = "deploy-to-cloud-storage-#{SecureRandom.hex(8)}"
+ {
+ commit_message: 'Enable Cloud Storage deployments',
+ file_path: '.gitlab-ci.yml',
+ file_content: pipeline_content('gcp/cloud-storage.gitlab-ci.yml'),
+ branch_name: branch_name,
+ start_branch: branch_name
+ }
+ when ACTION_VISION_AI_PIPELINE
+ branch_name = "vision-ai-pipeline-#{SecureRandom.hex(8)}"
+ {
+ commit_message: 'Enable Vision AI Pipeline',
+ file_path: '.gitlab-ci.yml',
+ file_content: pipeline_content('gcp/vision-ai.gitlab-ci.yml'),
+ branch_name: branch_name,
+ start_branch: branch_name
+ }
+ end
+ end
+
+ def default_branch_gitlab_ci_yml
+ @default_branch_gitlab_ci_yml ||= project.ci_config_for(project.default_branch)
+ end
+
+ def pipeline_content(include_path)
+ gitlab_ci_yml = ::Gitlab::Ci::Config::Yaml::Loader.new(default_branch_gitlab_ci_yml || '{}').load
+
+ append_remote_include(
+ gitlab_ci_yml.content,
+ "https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/library/-/raw/main/#{include_path}"
+ )
+ end
+
+ def append_remote_include(gitlab_ci_yml, include_url)
+ stages = gitlab_ci_yml['stages'] || []
+ gitlab_ci_yml['stages'] = if action == ACTION_VISION_AI_PIPELINE
+ (stages + %w[validate detect render]).uniq
+ else
+ (stages + %w[build test deploy]).uniq
+ end
+
+ includes = gitlab_ci_yml['include'] || []
+ includes = Array.wrap(includes)
+ includes << { 'remote' => include_url }
+ gitlab_ci_yml['include'] = includes.uniq
+
+ gitlab_ci_yml.deep_stringify_keys.to_yaml
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/get_cloudsql_instances_service.rb b/app/services/cloud_seed/google_cloud/get_cloudsql_instances_service.rb
new file mode 100644
index 00000000000..b037298c8cb
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/get_cloudsql_instances_service.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class GetCloudsqlInstancesService < ::CloudSeed::GoogleCloud::BaseService
+ CLOUDSQL_KEYS = %w[GCP_PROJECT_ID GCP_CLOUDSQL_INSTANCE_NAME GCP_CLOUDSQL_VERSION].freeze
+
+ def execute
+ group_vars_by_environment(CLOUDSQL_KEYS).map do |environment_scope, value|
+ {
+ ref: environment_scope,
+ gcp_project: value['GCP_PROJECT_ID'],
+ instance_name: value['GCP_CLOUDSQL_INSTANCE_NAME'],
+ version: value['GCP_CLOUDSQL_VERSION']
+ }
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/service_accounts_service.rb b/app/services/cloud_seed/google_cloud/service_accounts_service.rb
new file mode 100644
index 00000000000..4881c440c9c
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/service_accounts_service.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ ##
+ # GCP keys used to store Google Cloud Service Accounts
+ GCP_KEYS = %w[GCP_PROJECT_ID GCP_SERVICE_ACCOUNT GCP_SERVICE_ACCOUNT_KEY].freeze
+
+ ##
+ # This service deals with GCP Service Accounts in GitLab
+
+ class ServiceAccountsService < ::CloudSeed::GoogleCloud::BaseService
+ ##
+ # Find GCP Service Accounts in a GitLab project
+ #
+ # This method looks up GitLab project's CI vars
+ # and returns Google Cloud Service Accounts combinations
+ # aligning GitLab project and ref to GCP projects
+
+ def find_for_project
+ group_vars_by_environment(GCP_KEYS).map do |environment_scope, value|
+ {
+ ref: environment_scope,
+ gcp_project: value['GCP_PROJECT_ID'],
+ service_account_exists: value['GCP_SERVICE_ACCOUNT'].present?,
+ service_account_key_exists: value['GCP_SERVICE_ACCOUNT_KEY'].present?
+ }
+ end
+ end
+
+ def add_for_project(ref, gcp_project_id, service_account, service_account_key, is_protected)
+ create_or_replace_project_vars(
+ ref,
+ 'GCP_PROJECT_ID',
+ gcp_project_id,
+ is_protected
+ )
+ create_or_replace_project_vars(
+ ref,
+ 'GCP_SERVICE_ACCOUNT',
+ service_account,
+ is_protected
+ )
+ create_or_replace_project_vars(
+ ref,
+ 'GCP_SERVICE_ACCOUNT_KEY',
+ service_account_key,
+ is_protected
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/cloud_seed/google_cloud/setup_cloudsql_instance_service.rb b/app/services/cloud_seed/google_cloud/setup_cloudsql_instance_service.rb
new file mode 100644
index 00000000000..b8c160f0683
--- /dev/null
+++ b/app/services/cloud_seed/google_cloud/setup_cloudsql_instance_service.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+module CloudSeed
+ module GoogleCloud
+ class SetupCloudsqlInstanceService < ::CloudSeed::GoogleCloud::BaseService
+ INSTANCE_STATE_RUNNABLE = 'RUNNABLE'
+ OPERATION_STATE_DONE = 'DONE'
+ DEFAULT_DATABASE_NAME = 'main_db'
+ DEFAULT_DATABASE_USER = 'main_user'
+
+ def execute
+ return error('Unauthorized user') unless Ability.allowed?(current_user, :admin_project_google_cloud, project)
+
+ get_instance_response = google_api_client.get_cloudsql_instance(gcp_project_id, instance_name)
+
+ if get_instance_response.state != INSTANCE_STATE_RUNNABLE
+ return error("CloudSQL instance not RUNNABLE: #{Gitlab::Json.dump(get_instance_response)}")
+ end
+
+ save_instance_ci_vars(get_instance_response)
+
+ list_database_response = google_api_client.list_cloudsql_databases(gcp_project_id, instance_name)
+ list_user_response = google_api_client.list_cloudsql_users(gcp_project_id, instance_name)
+
+ existing_database = list_database_response.items.find { |database| database.name == database_name }
+ existing_user = list_user_response.items.find { |user| user.name == username }
+
+ if existing_database && existing_user
+ save_database_ci_vars
+ save_user_ci_vars(existing_user)
+ return success
+ end
+
+ database_response = execute_database_setup(existing_database)
+ return database_response if database_response[:status] == :error
+
+ save_database_ci_vars
+
+ user_response = execute_user_setup(existing_user)
+ return user_response if user_response[:status] == :error
+
+ save_user_ci_vars(existing_user)
+
+ success
+ rescue Google::Apis::Error => err
+ error(message: Gitlab::Json.dump(err))
+ end
+
+ private
+
+ def instance_name
+ @params[:instance_name]
+ end
+
+ def database_version
+ @params[:database_version]
+ end
+
+ def database_name
+ @params.fetch(:database_name, DEFAULT_DATABASE_NAME)
+ end
+
+ def username
+ @params.fetch(:username, DEFAULT_DATABASE_USER)
+ end
+
+ def password
+ @password ||= SecureRandom.hex(16)
+ end
+
+ def save_ci_var(key, value, is_masked = false)
+ create_or_replace_project_vars(environment_name, key, value, @params[:is_protected], is_masked)
+ end
+
+ def save_instance_ci_vars(cloudsql_instance)
+ primary_ip_address = cloudsql_instance.ip_addresses.first.ip_address
+ connection_name = cloudsql_instance.connection_name
+
+ save_ci_var('GCP_PROJECT_ID', gcp_project_id)
+ save_ci_var('GCP_CLOUDSQL_INSTANCE_NAME', instance_name)
+ save_ci_var('GCP_CLOUDSQL_CONNECTION_NAME', connection_name)
+ save_ci_var('GCP_CLOUDSQL_PRIMARY_IP_ADDRESS', primary_ip_address)
+ save_ci_var('GCP_CLOUDSQL_VERSION', database_version)
+ end
+
+ def save_database_ci_vars
+ save_ci_var('GCP_CLOUDSQL_DATABASE_NAME', database_name)
+ end
+
+ def save_user_ci_vars(user_exists)
+ save_ci_var('GCP_CLOUDSQL_DATABASE_USER', username)
+ save_ci_var('GCP_CLOUDSQL_DATABASE_PASS', user_exists ? user_exists.password : password, true)
+ end
+
+ def execute_database_setup(database_exists)
+ return success if database_exists
+
+ database_response = google_api_client.create_cloudsql_database(gcp_project_id, instance_name, database_name)
+
+ if database_response.status != OPERATION_STATE_DONE
+ return error("Database creation failed: #{Gitlab::Json.dump(database_response)}")
+ end
+
+ success
+ end
+
+ def execute_user_setup(existing_user)
+ return success if existing_user
+
+ user_response = google_api_client.create_cloudsql_user(gcp_project_id, instance_name, username, password)
+
+ if user_response.status != OPERATION_STATE_DONE
+ return error("User creation failed: #{Gitlab::Json.dump(user_response)}")
+ end
+
+ success
+ end
+ end
+ end
+end
diff --git a/app/services/google_cloud/base_service.rb b/app/services/google_cloud/base_service.rb
deleted file mode 100644
index 01aee2231c9..00000000000
--- a/app/services/google_cloud/base_service.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class BaseService < ::BaseService
- protected
-
- def google_oauth2_token
- @params[:google_oauth2_token]
- end
-
- def gcp_project_id
- @params[:gcp_project_id]
- end
-
- def environment_name
- @params[:environment_name]
- end
-
- def google_api_client
- @google_api_client_instance ||= GoogleApi::CloudPlatform::Client.new(google_oauth2_token, nil)
- end
-
- def unique_gcp_project_ids
- filter_params = { key: 'GCP_PROJECT_ID' }
- @unique_gcp_project_ids ||= ::Ci::VariablesFinder.new(project, filter_params).execute.map(&:value).uniq
- end
-
- def group_vars_by_environment(keys)
- filtered_vars = project.variables.filter { |variable| keys.include? variable.key }
- filtered_vars.each_with_object({}) do |variable, grouped|
- grouped[variable.environment_scope] ||= {}
- grouped[variable.environment_scope][variable.key] = variable.value
- end
- end
-
- def create_or_replace_project_vars(environment_scope, key, value, is_protected, is_masked = false)
- change_params = {
- variable_params: {
- key: key,
- value: value,
- environment_scope: environment_scope,
- protected: is_protected,
- masked: is_masked
- }
- }
- existing_variable = find_existing_variable(environment_scope, key)
-
- if existing_variable
- change_params[:action] = :update
- change_params[:variable] = existing_variable
- else
- change_params[:action] = :create
- end
-
- ::Ci::ChangeVariableService.new(container: project, current_user: current_user, params: change_params).execute
- end
-
- private
-
- def find_existing_variable(environment_scope, key)
- filter_params = { key: key, filter: { environment_scope: environment_scope } }
- ::Ci::VariablesFinder.new(project, filter_params).execute.first
- end
- end
-end
diff --git a/app/services/google_cloud/create_cloudsql_instance_service.rb b/app/services/google_cloud/create_cloudsql_instance_service.rb
deleted file mode 100644
index 9a1263f0796..00000000000
--- a/app/services/google_cloud/create_cloudsql_instance_service.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- DEFAULT_REGION = 'us-east1'
-
- class CreateCloudsqlInstanceService < ::GoogleCloud::BaseService
- WORKER_INTERVAL = 30.seconds
-
- def execute
- create_cloud_instance
- trigger_instance_setup_worker
- success
- rescue Google::Apis::Error => err
- error(err.message)
- end
-
- private
-
- def create_cloud_instance
- google_api_client.create_cloudsql_instance(
- gcp_project_id,
- instance_name,
- root_password,
- database_version,
- region,
- tier
- )
- end
-
- def trigger_instance_setup_worker
- GoogleCloud::CreateCloudsqlInstanceWorker.perform_in(
- WORKER_INTERVAL,
- current_user.id,
- project.id,
- {
- 'google_oauth2_token': google_oauth2_token,
- 'gcp_project_id': gcp_project_id,
- 'instance_name': instance_name,
- 'database_version': database_version,
- 'environment_name': environment_name,
- 'is_protected': protected?
- }
- )
- end
-
- def protected?
- project.protected_for?(environment_name)
- end
-
- def instance_name
- # Generates an `instance_name` for the to-be-created Cloud SQL instance
- # Example: `gitlab-34647-postgres-14-staging`
- environment_alias = environment_name == '*' ? 'ALL' : environment_name
- name = "gitlab-#{project.id}-#{database_version}-#{environment_alias}"
- name.tr("_", "-").downcase
- end
-
- def root_password
- SecureRandom.hex(16)
- end
-
- def database_version
- params[:database_version]
- end
-
- def region
- region = ::Ci::VariablesFinder
- .new(project, { key: Projects::GoogleCloud::GcpRegionsController::GCP_REGION_CI_VAR_KEY,
- environment_scope: environment_name })
- .execute.first
- region&.value || DEFAULT_REGION
- end
-
- def tier
- params[:tier]
- end
- end
-end
diff --git a/app/services/google_cloud/create_service_accounts_service.rb b/app/services/google_cloud/create_service_accounts_service.rb
deleted file mode 100644
index ca0aa7c91df..00000000000
--- a/app/services/google_cloud/create_service_accounts_service.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class CreateServiceAccountsService < ::GoogleCloud::BaseService
- def execute
- service_account = google_api_client.create_service_account(gcp_project_id, service_account_name, service_account_desc)
- service_account_key = google_api_client.create_service_account_key(gcp_project_id, service_account.unique_id)
- google_api_client.grant_service_account_roles(gcp_project_id, service_account.email)
-
- service_accounts_service.add_for_project(
- environment_name,
- service_account.project_id,
- Gitlab::Json.dump(service_account),
- Gitlab::Json.dump(service_account_key),
- ProtectedBranch.protected?(project, environment_name) || ProtectedTag.protected?(project, environment_name)
- )
-
- ServiceResponse.success(message: _('Service account generated successfully'), payload: {
- service_account: service_account,
- service_account_key: service_account_key
- })
- end
-
- private
-
- def service_accounts_service
- GoogleCloud::ServiceAccountsService.new(project)
- end
-
- def service_account_name
- "GitLab :: #{project.name} :: #{environment_name}"
- end
-
- def service_account_desc
- "GitLab generated service account for project '#{project.name}' and environment '#{environment_name}'"
- end
- end
-end
-
-GoogleCloud::CreateServiceAccountsService.prepend_mod
diff --git a/app/services/google_cloud/enable_cloud_run_service.rb b/app/services/google_cloud/enable_cloud_run_service.rb
deleted file mode 100644
index 4fd92f423c5..00000000000
--- a/app/services/google_cloud/enable_cloud_run_service.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class EnableCloudRunService < ::GoogleCloud::BaseService
- def execute
- gcp_project_ids = unique_gcp_project_ids
-
- if gcp_project_ids.empty?
- error("No GCP projects found. Configure a service account or GCP_PROJECT_ID ci variable.")
- else
- gcp_project_ids.each do |gcp_project_id|
- google_api_client.enable_cloud_run(gcp_project_id)
- google_api_client.enable_artifacts_registry(gcp_project_id)
- google_api_client.enable_cloud_build(gcp_project_id)
- end
-
- success({ gcp_project_ids: gcp_project_ids })
- end
- end
- end
-end
diff --git a/app/services/google_cloud/enable_cloudsql_service.rb b/app/services/google_cloud/enable_cloudsql_service.rb
deleted file mode 100644
index 911cccca5ca..00000000000
--- a/app/services/google_cloud/enable_cloudsql_service.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class EnableCloudsqlService < ::GoogleCloud::BaseService
- def execute
- create_or_replace_project_vars(environment_name, 'GCP_PROJECT_ID', gcp_project_id, ci_var_protected?)
-
- unique_gcp_project_ids.each do |gcp_project_id|
- google_api_client.enable_cloud_sql_admin(gcp_project_id)
- google_api_client.enable_compute(gcp_project_id)
- google_api_client.enable_service_networking(gcp_project_id)
- end
-
- success({ gcp_project_ids: unique_gcp_project_ids })
- rescue Google::Apis::Error => err
- error(err.message)
- end
-
- private
-
- def ci_var_protected?
- ProtectedBranch.protected?(project, environment_name) || ProtectedTag.protected?(project, environment_name)
- end
- end
-end
diff --git a/app/services/google_cloud/enable_vision_ai_service.rb b/app/services/google_cloud/enable_vision_ai_service.rb
deleted file mode 100644
index f7adea706ed..00000000000
--- a/app/services/google_cloud/enable_vision_ai_service.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class EnableVisionAiService < ::GoogleCloud::BaseService
- def execute
- gcp_project_ids = unique_gcp_project_ids
-
- if gcp_project_ids.empty?
- error("No GCP projects found. Configure a service account or GCP_PROJECT_ID ci variable.")
- else
- gcp_project_ids.each do |gcp_project_id|
- google_api_client.enable_vision_api(gcp_project_id)
- end
-
- success({ gcp_project_ids: gcp_project_ids })
- end
- end
- end
-end
diff --git a/app/services/google_cloud/fetch_google_ip_list_service.rb b/app/services/google_cloud/fetch_google_ip_list_service.rb
deleted file mode 100644
index 54af841d002..00000000000
--- a/app/services/google_cloud/fetch_google_ip_list_service.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class FetchGoogleIpListService
- include BaseServiceUtility
-
- GOOGLE_IP_RANGES_URL = 'https://www.gstatic.com/ipranges/cloud.json'
- RESPONSE_BODY_LIMIT = 1.megabyte
- EXPECTED_CONTENT_TYPE = 'application/json'
-
- IpListNotRetrievedError = Class.new(StandardError)
-
- def execute
- # Prevent too many workers from hitting the same HTTP endpoint
- if ::Gitlab::ApplicationRateLimiter.throttled?(:fetch_google_ip_list, scope: nil)
- return error("#{self.class} was rate limited")
- end
-
- subnets = fetch_and_update_cache!
-
- Gitlab::AppJsonLogger.info(
- class: self.class.name,
- message: 'Successfully retrieved Google IP list',
- subnet_count: subnets.count
- )
-
- success({ subnets: subnets })
- rescue IpListNotRetrievedError => err
- Gitlab::ErrorTracking.log_exception(err)
- error('Google IP list not retrieved')
- end
-
- private
-
- # Attempts to retrieve and parse the list of IPs from Google. Updates
- # the internal cache so that the data is accessible.
- #
- # Returns an array of IPAddr objects consisting of subnets.
- def fetch_and_update_cache!
- parsed_response = fetch_google_ip_list
-
- parse_google_prefixes(parsed_response).tap do |subnets|
- ::ObjectStorage::CDN::GoogleIpCache.update!(subnets)
- end
- end
-
- def fetch_google_ip_list
- response = Gitlab::HTTP.get(GOOGLE_IP_RANGES_URL, follow_redirects: false, allow_local_requests: false)
-
- validate_response!(response)
-
- response.parsed_response
- end
-
- def validate_response!(response)
- raise IpListNotRetrievedError, "response was #{response.code}" unless response.code == 200
- raise IpListNotRetrievedError, "response was nil" unless response.body
-
- parsed_response = response.parsed_response
-
- unless response.content_type == EXPECTED_CONTENT_TYPE && parsed_response.is_a?(Hash)
- raise IpListNotRetrievedError, "response was not JSON"
- end
-
- if response.body&.bytesize.to_i > RESPONSE_BODY_LIMIT
- raise IpListNotRetrievedError, "response was too large: #{response.body.bytesize}"
- end
-
- prefixes = parsed_response['prefixes']
-
- raise IpListNotRetrievedError, "JSON was type #{prefixes.class}, expected Array" unless prefixes.is_a?(Array)
- raise IpListNotRetrievedError, "#{GOOGLE_IP_RANGES_URL} did not return any IP ranges" if prefixes.empty?
-
- response.parsed_response
- end
-
- def parse_google_prefixes(parsed_response)
- ranges = parsed_response['prefixes'].map do |prefix|
- ip_range = prefix['ipv4Prefix'] || prefix['ipv6Prefix']
-
- next unless ip_range
-
- IPAddr.new(ip_range)
- end.compact
-
- raise IpListNotRetrievedError, "#{GOOGLE_IP_RANGES_URL} did not return any IP ranges" if ranges.empty?
-
- ranges
- end
- end
-end
diff --git a/app/services/google_cloud/gcp_region_add_or_replace_service.rb b/app/services/google_cloud/gcp_region_add_or_replace_service.rb
deleted file mode 100644
index f79df707a08..00000000000
--- a/app/services/google_cloud/gcp_region_add_or_replace_service.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class GcpRegionAddOrReplaceService < ::GoogleCloud::BaseService
- def execute(environment, region)
- gcp_region_key = Projects::GoogleCloud::GcpRegionsController::GCP_REGION_CI_VAR_KEY
-
- change_params = { variable_params: { key: gcp_region_key, value: region, environment_scope: environment } }
- filter_params = { key: gcp_region_key, filter: { environment_scope: environment } }
-
- existing_variable = ::Ci::VariablesFinder.new(project, filter_params).execute.first
-
- if existing_variable
- change_params[:action] = :update
- change_params[:variable] = existing_variable
- else
- change_params[:action] = :create
- end
-
- ::Ci::ChangeVariableService.new(container: project, current_user: current_user, params: change_params).execute
- end
- end
-end
diff --git a/app/services/google_cloud/generate_pipeline_service.rb b/app/services/google_cloud/generate_pipeline_service.rb
deleted file mode 100644
index 97d008db76b..00000000000
--- a/app/services/google_cloud/generate_pipeline_service.rb
+++ /dev/null
@@ -1,98 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class GeneratePipelineService < ::GoogleCloud::BaseService
- ACTION_DEPLOY_TO_CLOUD_RUN = 'DEPLOY_TO_CLOUD_RUN'
- ACTION_DEPLOY_TO_CLOUD_STORAGE = 'DEPLOY_TO_CLOUD_STORAGE'
- ACTION_VISION_AI_PIPELINE = 'VISION_AI_PIPELINE'
-
- def execute
- commit_attributes = generate_commit_attributes
- create_branch_response = ::Branches::CreateService.new(project, current_user)
- .execute(commit_attributes[:branch_name], project.default_branch)
-
- if create_branch_response[:status] == :error
- return create_branch_response
- end
-
- branch = create_branch_response[:branch]
-
- service = default_branch_gitlab_ci_yml.present? ? ::Files::UpdateService : ::Files::CreateService
-
- commit_response = service.new(project, current_user, commit_attributes).execute
-
- if commit_response[:status] == :error
- return commit_response
- end
-
- success({ branch_name: branch.name, commit: commit_response })
- end
-
- private
-
- def action
- @params[:action]
- end
-
- def generate_commit_attributes
- case action
- when ACTION_DEPLOY_TO_CLOUD_RUN
- branch_name = "deploy-to-cloud-run-#{SecureRandom.hex(8)}"
- {
- commit_message: 'Enable Cloud Run deployments',
- file_path: '.gitlab-ci.yml',
- file_content: pipeline_content('gcp/cloud-run.gitlab-ci.yml'),
- branch_name: branch_name,
- start_branch: branch_name
- }
- when ACTION_DEPLOY_TO_CLOUD_STORAGE
- branch_name = "deploy-to-cloud-storage-#{SecureRandom.hex(8)}"
- {
- commit_message: 'Enable Cloud Storage deployments',
- file_path: '.gitlab-ci.yml',
- file_content: pipeline_content('gcp/cloud-storage.gitlab-ci.yml'),
- branch_name: branch_name,
- start_branch: branch_name
- }
- when ACTION_VISION_AI_PIPELINE
- branch_name = "vision-ai-pipeline-#{SecureRandom.hex(8)}"
- {
- commit_message: 'Enable Vision AI Pipeline',
- file_path: '.gitlab-ci.yml',
- file_content: pipeline_content('gcp/vision-ai.gitlab-ci.yml'),
- branch_name: branch_name,
- start_branch: branch_name
- }
- end
- end
-
- def default_branch_gitlab_ci_yml
- @default_branch_gitlab_ci_yml ||= project.ci_config_for(project.default_branch)
- end
-
- def pipeline_content(include_path)
- gitlab_ci_yml = ::Gitlab::Ci::Config::Yaml::Loader.new(default_branch_gitlab_ci_yml || '{}').load
-
- append_remote_include(
- gitlab_ci_yml.content,
- "https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/library/-/raw/main/#{include_path}"
- )
- end
-
- def append_remote_include(gitlab_ci_yml, include_url)
- stages = gitlab_ci_yml['stages'] || []
- gitlab_ci_yml['stages'] = if action == ACTION_VISION_AI_PIPELINE
- (stages + %w[validate detect render]).uniq
- else
- (stages + %w[build test deploy]).uniq
- end
-
- includes = gitlab_ci_yml['include'] || []
- includes = Array.wrap(includes)
- includes << { 'remote' => include_url }
- gitlab_ci_yml['include'] = includes.uniq
-
- gitlab_ci_yml.deep_stringify_keys.to_yaml
- end
- end
-end
diff --git a/app/services/google_cloud/get_cloudsql_instances_service.rb b/app/services/google_cloud/get_cloudsql_instances_service.rb
deleted file mode 100644
index 701e83d556d..00000000000
--- a/app/services/google_cloud/get_cloudsql_instances_service.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class GetCloudsqlInstancesService < ::GoogleCloud::BaseService
- CLOUDSQL_KEYS = %w[GCP_PROJECT_ID GCP_CLOUDSQL_INSTANCE_NAME GCP_CLOUDSQL_VERSION].freeze
-
- def execute
- group_vars_by_environment(CLOUDSQL_KEYS).map do |environment_scope, value|
- {
- ref: environment_scope,
- gcp_project: value['GCP_PROJECT_ID'],
- instance_name: value['GCP_CLOUDSQL_INSTANCE_NAME'],
- version: value['GCP_CLOUDSQL_VERSION']
- }
- end
- end
- end
-end
diff --git a/app/services/google_cloud/service_accounts_service.rb b/app/services/google_cloud/service_accounts_service.rb
deleted file mode 100644
index e90fd112e2e..00000000000
--- a/app/services/google_cloud/service_accounts_service.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- ##
- # GCP keys used to store Google Cloud Service Accounts
- GCP_KEYS = %w[GCP_PROJECT_ID GCP_SERVICE_ACCOUNT GCP_SERVICE_ACCOUNT_KEY].freeze
-
- ##
- # This service deals with GCP Service Accounts in GitLab
-
- class ServiceAccountsService < ::GoogleCloud::BaseService
- ##
- # Find GCP Service Accounts in a GitLab project
- #
- # This method looks up GitLab project's CI vars
- # and returns Google Cloud Service Accounts combinations
- # aligning GitLab project and ref to GCP projects
-
- def find_for_project
- group_vars_by_environment(GCP_KEYS).map do |environment_scope, value|
- {
- ref: environment_scope,
- gcp_project: value['GCP_PROJECT_ID'],
- service_account_exists: value['GCP_SERVICE_ACCOUNT'].present?,
- service_account_key_exists: value['GCP_SERVICE_ACCOUNT_KEY'].present?
- }
- end
- end
-
- def add_for_project(ref, gcp_project_id, service_account, service_account_key, is_protected)
- create_or_replace_project_vars(
- ref,
- 'GCP_PROJECT_ID',
- gcp_project_id,
- is_protected
- )
- create_or_replace_project_vars(
- ref,
- 'GCP_SERVICE_ACCOUNT',
- service_account,
- is_protected
- )
- create_or_replace_project_vars(
- ref,
- 'GCP_SERVICE_ACCOUNT_KEY',
- service_account_key,
- is_protected
- )
- end
- end
-end
diff --git a/app/services/google_cloud/setup_cloudsql_instance_service.rb b/app/services/google_cloud/setup_cloudsql_instance_service.rb
deleted file mode 100644
index 40184b927ad..00000000000
--- a/app/services/google_cloud/setup_cloudsql_instance_service.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloud
- class SetupCloudsqlInstanceService < ::GoogleCloud::BaseService
- INSTANCE_STATE_RUNNABLE = 'RUNNABLE'
- OPERATION_STATE_DONE = 'DONE'
- DEFAULT_DATABASE_NAME = 'main_db'
- DEFAULT_DATABASE_USER = 'main_user'
-
- def execute
- return error('Unauthorized user') unless Ability.allowed?(current_user, :admin_project_google_cloud, project)
-
- get_instance_response = google_api_client.get_cloudsql_instance(gcp_project_id, instance_name)
-
- if get_instance_response.state != INSTANCE_STATE_RUNNABLE
- return error("CloudSQL instance not RUNNABLE: #{Gitlab::Json.dump(get_instance_response)}")
- end
-
- save_instance_ci_vars(get_instance_response)
-
- list_database_response = google_api_client.list_cloudsql_databases(gcp_project_id, instance_name)
- list_user_response = google_api_client.list_cloudsql_users(gcp_project_id, instance_name)
-
- existing_database = list_database_response.items.find { |database| database.name == database_name }
- existing_user = list_user_response.items.find { |user| user.name == username }
-
- if existing_database && existing_user
- save_database_ci_vars
- save_user_ci_vars(existing_user)
- return success
- end
-
- database_response = execute_database_setup(existing_database)
- return database_response if database_response[:status] == :error
-
- save_database_ci_vars
-
- user_response = execute_user_setup(existing_user)
- return user_response if user_response[:status] == :error
-
- save_user_ci_vars(existing_user)
-
- success
- rescue Google::Apis::Error => err
- error(message: Gitlab::Json.dump(err))
- end
-
- private
-
- def instance_name
- @params[:instance_name]
- end
-
- def database_version
- @params[:database_version]
- end
-
- def database_name
- @params.fetch(:database_name, DEFAULT_DATABASE_NAME)
- end
-
- def username
- @params.fetch(:username, DEFAULT_DATABASE_USER)
- end
-
- def password
- @password ||= SecureRandom.hex(16)
- end
-
- def save_ci_var(key, value, is_masked = false)
- create_or_replace_project_vars(environment_name, key, value, @params[:is_protected], is_masked)
- end
-
- def save_instance_ci_vars(cloudsql_instance)
- primary_ip_address = cloudsql_instance.ip_addresses.first.ip_address
- connection_name = cloudsql_instance.connection_name
-
- save_ci_var('GCP_PROJECT_ID', gcp_project_id)
- save_ci_var('GCP_CLOUDSQL_INSTANCE_NAME', instance_name)
- save_ci_var('GCP_CLOUDSQL_CONNECTION_NAME', connection_name)
- save_ci_var('GCP_CLOUDSQL_PRIMARY_IP_ADDRESS', primary_ip_address)
- save_ci_var('GCP_CLOUDSQL_VERSION', database_version)
- end
-
- def save_database_ci_vars
- save_ci_var('GCP_CLOUDSQL_DATABASE_NAME', database_name)
- end
-
- def save_user_ci_vars(user_exists)
- save_ci_var('GCP_CLOUDSQL_DATABASE_USER', username)
- save_ci_var('GCP_CLOUDSQL_DATABASE_PASS', user_exists ? user_exists.password : password, true)
- end
-
- def execute_database_setup(database_exists)
- return success if database_exists
-
- database_response = google_api_client.create_cloudsql_database(gcp_project_id, instance_name, database_name)
-
- if database_response.status != OPERATION_STATE_DONE
- return error("Database creation failed: #{Gitlab::Json.dump(database_response)}")
- end
-
- success
- end
-
- def execute_user_setup(existing_user)
- return success if existing_user
-
- user_response = google_api_client.create_cloudsql_user(gcp_project_id, instance_name, username, password)
-
- if user_response.status != OPERATION_STATE_DONE
- return error("User creation failed: #{Gitlab::Json.dump(user_response)}")
- end
-
- success
- end
- end
-end
diff --git a/app/workers/google_cloud/create_cloudsql_instance_worker.rb b/app/workers/google_cloud/create_cloudsql_instance_worker.rb
index 8c4f4c83339..e0d0747e227 100644
--- a/app/workers/google_cloud/create_cloudsql_instance_worker.rb
+++ b/app/workers/google_cloud/create_cloudsql_instance_worker.rb
@@ -13,7 +13,7 @@ module GoogleCloud
project = Project.find(project_id)
params = params.with_indifferent_access
- response = ::GoogleCloud::SetupCloudsqlInstanceService.new(project, user, params).execute
+ response = ::CloudSeed::GoogleCloud::SetupCloudsqlInstanceService.new(project, user, params).execute
if response[:status] == :error
raise "Error SetupCloudsqlInstanceService: #{response.to_json}"
diff --git a/app/workers/google_cloud/fetch_google_ip_list_worker.rb b/app/workers/google_cloud/fetch_google_ip_list_worker.rb
index b14b4e735dc..de725709bea 100644
--- a/app/workers/google_cloud/fetch_google_ip_list_worker.rb
+++ b/app/workers/google_cloud/fetch_google_ip_list_worker.rb
@@ -11,7 +11,7 @@ module GoogleCloud
idempotent!
def perform
- GoogleCloud::FetchGoogleIpListService.new.execute
+ CloudSeed::GoogleCloud::FetchGoogleIpListService.new.execute
end
end
end
diff --git a/data/deprecations/16-8-deprecate-pg_schema-backup-setting.yml b/data/deprecations/16-8-deprecate-pg_schema-backup-setting.yml
new file mode 100644
index 00000000000..4edb2c8fde2
--- /dev/null
+++ b/data/deprecations/16-8-deprecate-pg_schema-backup-setting.yml
@@ -0,0 +1,14 @@
+- title: "Support for setting custom schema for backup is deprecated"
+ announcement_milestone: "16.8"
+ removal_milestone: "17.0"
+ breaking_change: true
+ reporter: sranasinghe
+ stage: core platform
+ issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/435210
+ body: |
+ You could configure GitLab to use a custom schema for backup, by setting
+ `gitlab_rails['backup_pg_schema'] = '<schema_name>'` in `/etc/gitlab/gitlab.rb` for Linux package installations,
+ or by editing `config/gitlab.yml` for self-compiled installations.
+
+    While the configuration setting was available, it had no effect and did not serve its intended purpose.
+ This configuration setting will be removed in GitLab 17.0.
diff --git a/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml b/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml
new file mode 100644
index 00000000000..e51bef28419
--- /dev/null
+++ b/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml
@@ -0,0 +1,9 @@
+---
+migration_job_name: BackfillPartitionIdCiPipelineConfig
+description: Fixes incorrect partition_id values for ci_pipelines_config records in the wrong partition
+feature_category: continuous_integration
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141461
+milestone: '16.8'
+queued_migration_version: 20240110090352
+finalize_after: '2024-01-22'
+finalized_by: # version of the migration that finalized this BBM
diff --git a/db/migrate/20240109145839_add_partition_id_to_pipeline_config.rb b/db/migrate/20240109145839_add_partition_id_to_pipeline_config.rb
new file mode 100644
index 00000000000..e4f661a5e31
--- /dev/null
+++ b/db/migrate/20240109145839_add_partition_id_to_pipeline_config.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class AddPartitionIdToPipelineConfig < Gitlab::Database::Migration[2.2]
+ milestone '16.8'
+ enable_lock_retries!
+
+ def change
+ add_column(:ci_pipelines_config, :partition_id, :bigint, default: 100, null: false)
+ end
+end
diff --git a/db/post_migrate/20240110090352_queue_backfill_partition_id_ci_pipeline_config.rb b/db/post_migrate/20240110090352_queue_backfill_partition_id_ci_pipeline_config.rb
new file mode 100644
index 00000000000..d5ae025b385
--- /dev/null
+++ b/db/post_migrate/20240110090352_queue_backfill_partition_id_ci_pipeline_config.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+class QueueBackfillPartitionIdCiPipelineConfig < Gitlab::Database::Migration[2.2]
+ milestone '16.8'
+ restrict_gitlab_migration gitlab_schema: :gitlab_ci
+
+ MIGRATION = 'BackfillPartitionIdCiPipelineConfig'
+ DELAY_INTERVAL = 2.minutes
+ BATCH_SIZE = 1000
+ SUB_BATCH_SIZE = 100
+
+ def up
+ queue_batched_background_migration(
+ MIGRATION,
+ :ci_pipelines_config,
+ :pipeline_id,
+ job_interval: DELAY_INTERVAL,
+ batch_size: BATCH_SIZE,
+ sub_batch_size: SUB_BATCH_SIZE
+ )
+ end
+
+ def down
+ delete_batched_background_migration(MIGRATION, :ci_pipelines_config, :pipeline_id, [])
+ end
+end
diff --git a/db/post_migrate/20240110093654_schedule_index_removal_ci_build_trace_metadata.rb b/db/post_migrate/20240110093654_schedule_index_removal_ci_build_trace_metadata.rb
new file mode 100644
index 00000000000..8f2faa32c81
--- /dev/null
+++ b/db/post_migrate/20240110093654_schedule_index_removal_ci_build_trace_metadata.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class ScheduleIndexRemovalCiBuildTraceMetadata < Gitlab::Database::Migration[2.2]
+ milestone '16.8'
+ disable_ddl_transaction!
+
+ INDEX_NAME = :index_ci_build_trace_metadata_on_trace_artifact_id
+ TABLE_NAME = :ci_build_trace_metadata
+ COLUMN_NAME = :trace_artifact_id
+
+ def up
+ prepare_async_index_removal(TABLE_NAME, COLUMN_NAME, name: INDEX_NAME)
+ end
+
+ def down
+ unprepare_async_index(TABLE_NAME, COLUMN_NAME, name: INDEX_NAME)
+ end
+end
diff --git a/db/post_migrate/20240110094002_drop_index_from_ci_job_artifact_state.rb b/db/post_migrate/20240110094002_drop_index_from_ci_job_artifact_state.rb
new file mode 100644
index 00000000000..0b92c73ad55
--- /dev/null
+++ b/db/post_migrate/20240110094002_drop_index_from_ci_job_artifact_state.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class DropIndexFromCiJobArtifactState < Gitlab::Database::Migration[2.2]
+ milestone '16.8'
+ disable_ddl_transaction!
+
+ INDEX_NAME = :index_ci_job_artifact_states_on_job_artifact_id
+ TABLE_NAME = :ci_job_artifact_states
+
+ def up
+ remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME)
+ end
+
+ def down
+ add_concurrent_index(TABLE_NAME, :job_artifact_id, name: INDEX_NAME)
+ end
+end
diff --git a/db/post_migrate/20240110094510_remove_fk_from_ci_job_artifact_state.rb b/db/post_migrate/20240110094510_remove_fk_from_ci_job_artifact_state.rb
new file mode 100644
index 00000000000..bbe1f39e3ed
--- /dev/null
+++ b/db/post_migrate/20240110094510_remove_fk_from_ci_job_artifact_state.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+class RemoveFkFromCiJobArtifactState < Gitlab::Database::Migration[2.2]
+ milestone '16.8'
+ disable_ddl_transaction!
+
+ SOURCE_TABLE_NAME = :ci_job_artifact_states
+ TARGET_TABLE_NAME = :ci_job_artifacts
+ COLUMN = :job_artifact_id
+ TARGET_COLUMN = :id
+ FK_NAME = :fk_rails_80a9cba3b2
+
+ def up
+ with_lock_retries do
+ remove_foreign_key_if_exists(
+ SOURCE_TABLE_NAME,
+ TARGET_TABLE_NAME,
+ name: FK_NAME,
+ reverse_lock_order: true
+ )
+ end
+ end
+
+ def down
+ add_concurrent_foreign_key(
+ SOURCE_TABLE_NAME,
+ TARGET_TABLE_NAME,
+ column: COLUMN,
+ target_column: TARGET_COLUMN,
+ validate: true,
+ reverse_lock_order: true,
+ on_delete: :cascade,
+ name: FK_NAME
+ )
+ end
+end
diff --git a/db/schema_migrations/20240109145839 b/db/schema_migrations/20240109145839
new file mode 100644
index 00000000000..0657c41254a
--- /dev/null
+++ b/db/schema_migrations/20240109145839
@@ -0,0 +1 @@
+6b33ceb05ab45bd0a13cd6706a9cf006f756574a08152ab6de87ff840f56b95d \ No newline at end of file
diff --git a/db/schema_migrations/20240110090352 b/db/schema_migrations/20240110090352
new file mode 100644
index 00000000000..836d868f513
--- /dev/null
+++ b/db/schema_migrations/20240110090352
@@ -0,0 +1 @@
+cc4ea0747894b9e896c888c9c7ba8bbae2aea8aa55041ac4b8a0be18b5b38908 \ No newline at end of file
diff --git a/db/schema_migrations/20240110093654 b/db/schema_migrations/20240110093654
new file mode 100644
index 00000000000..1c5f4a1208d
--- /dev/null
+++ b/db/schema_migrations/20240110093654
@@ -0,0 +1 @@
+41964f40e257d2d7fb0427f7f2911ea0857d67f51e9a1f4ed26d36d42394dd19 \ No newline at end of file
diff --git a/db/schema_migrations/20240110094002 b/db/schema_migrations/20240110094002
new file mode 100644
index 00000000000..5b6fcb1080a
--- /dev/null
+++ b/db/schema_migrations/20240110094002
@@ -0,0 +1 @@
+ec56796b8c3514380e716259119e7def5a958e7bfade25c69e3c99ffd52a265f \ No newline at end of file
diff --git a/db/schema_migrations/20240110094510 b/db/schema_migrations/20240110094510
new file mode 100644
index 00000000000..e533dc38912
--- /dev/null
+++ b/db/schema_migrations/20240110094510
@@ -0,0 +1 @@
+6ed6cfa3e8b8b8fdbfacadcdff55dda07365f113d4f84e33c6bab878352cda11 \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index bd165b96799..deca0111644 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -14720,7 +14720,8 @@ CREATE TABLE ci_pipelines (
CREATE TABLE ci_pipelines_config (
pipeline_id bigint NOT NULL,
- content text NOT NULL
+ content text NOT NULL,
+ partition_id bigint DEFAULT 100 NOT NULL
);
CREATE SEQUENCE ci_pipelines_id_seq
@@ -32854,8 +32855,6 @@ CREATE UNIQUE INDEX index_ci_group_variables_on_group_id_and_key_and_environment
CREATE UNIQUE INDEX index_ci_instance_variables_on_key ON ci_instance_variables USING btree (key);
-CREATE INDEX index_ci_job_artifact_states_on_job_artifact_id ON ci_job_artifact_states USING btree (job_artifact_id);
-
CREATE INDEX index_ci_job_artifact_states_on_job_artifact_id_partition_id ON ci_job_artifact_states USING btree (job_artifact_id, partition_id);
CREATE INDEX index_ci_job_artifacts_expire_at_unlocked_non_trace ON ci_job_artifacts USING btree (expire_at) WHERE ((locked = 0) AND (file_type <> 3) AND (expire_at IS NOT NULL));
@@ -40088,9 +40087,6 @@ ALTER TABLE ONLY dependency_proxy_manifest_states
ADD CONSTRAINT fk_rails_806cf07a3c FOREIGN KEY (dependency_proxy_manifest_id) REFERENCES dependency_proxy_manifests(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_job_artifact_states
- ADD CONSTRAINT fk_rails_80a9cba3b2 FOREIGN KEY (job_artifact_id) REFERENCES ci_job_artifacts(id) ON DELETE CASCADE;
-
-ALTER TABLE ONLY ci_job_artifact_states
ADD CONSTRAINT fk_rails_80a9cba3b2_p FOREIGN KEY (partition_id, job_artifact_id) REFERENCES ci_job_artifacts(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY approval_merge_request_rules_users
diff --git a/doc/architecture/blueprints/cells/impacted_features/git-access.md b/doc/architecture/blueprints/cells/impacted_features/git-access.md
index 611b4db5f43..d2d357d4178 100644
--- a/doc/architecture/blueprints/cells/impacted_features/git-access.md
+++ b/doc/architecture/blueprints/cells/impacted_features/git-access.md
@@ -6,12 +6,10 @@ description: 'Cells: Git Access'
<!-- vale gitlab.FutureTense = NO -->
-This document is a work-in-progress and represents a very early state of the
-Cells design. Significant aspects are not documented, though we expect to add
-them in the future. This is one possible architecture for Cells, and we intend to
-contrast this with alternatives before deciding which approach to implement.
-This documentation will be kept even if we decide not to implement this so that
-we can document the reasons for not choosing this approach.
+This document is a work-in-progress and represents a very early state of the Cells design.
+Significant aspects are not documented, though we expect to add them in the future.
+This is one possible architecture for Cells, and we intend to contrast this with alternatives before deciding which approach to implement.
+This documentation will be kept even if we decide not to implement this so that we can document the reasons for not choosing this approach.
# Cells: Git Access
@@ -146,11 +144,34 @@ Where:
Supporting Git repositories if a Cell can access only its own repositories does not appear to be complex.
The one major complication is supporting snippets, but this likely falls in the same category as for the approach to support a user's Personal Namespace.
-## 4.1. Pros
+### 4.1. Pros
1. The API used for supporting HTTPS/SSH and Hooks are well defined and can easily be made routable.
-## 4.2. Cons
+### 4.2. Cons
1. The sharing of repositories objects is limited to the given Cell and Gitaly node.
1. Cross-Cells forks are likely impossible to be supported (discover: How this works today across different Gitaly node).
+
+## 5. Forking and object pools
+
+One of the biggest struggles that need to be addressed with the Cells architecture is how to handle forking. At present, Gitaly utilizes object pools to deduplicate fork storage. If forks are not created on the same storage node as the upstream repository being forked, we end up with significant storage inefficiencies: we effectively have two complete copies of the repository and cannot utilize object pools to improve performance.
+
+The storage nodes from one Cell cannot talk to the storage nodes of another Cell, making forking across Cells impossible. Therefore, it will be necessary to ensure that forked repositories end up in the same Cell (and on the same Gitaly nodes) as their upstream parent repository. This will also enable Gitaly to continue to utilize object pools to provide storage and performance efficiency.
+
+### 5.1. How this works today
+
+**Single Gitaly storage node**
+
+Currently, for a GitLab instance backed with a single Gitaly storage node, forking works just fine.
+Any forks must reside on the same storage node as there is only one, and therefore object deduplication (and object pools) all function as expected.
+
+**Sharded Gitaly storage**
+
+A sharded Gitaly storage is when multiple Gitaly storage nodes are attached to a single instance, and repositories are assigned based on a priority weighting between the nodes.
+
+Since Gitaly knows how to do cross-storage fetches, forking across shards works without issue.
+
+**Gitaly Cluster**
+
+For Gitaly Cluster, we recently resolved [the issue](https://gitlab.com/gitlab-org/gitaly/-/issues/5094) of object pools not being created on the same storage nodes as the parent repository. This enables forking to work correctly from an efficiency perspective (forks can share an object pool) and from an object deduplication perspective (Git can properly deduplicate storage).
diff --git a/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/ci_insights.md b/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/ci_insights.md
new file mode 100644
index 00000000000..72d82558eb7
--- /dev/null
+++ b/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/ci_insights.md
@@ -0,0 +1,154 @@
+---
+status: proposed
+creation-date: "2023-01-25"
+authors: [ "@pedropombeiro", "@vshushlin"]
+coach: "@grzesiek"
+approvers: [ ]
+stage: Verify
+group: Runner
+participating-stages: []
+description: 'CI Insights design'
+---
+
+# CI Insights
+
+## Summary
+
+As part of the Fleet Metrics, we would like to have a section dedicated to CI insights to help users monitor pipelines and summarize findings about pipeline speed, common job failures and more. It would eventually offer actionable insights to help users optimize and fix issues with their CI/CD.
+
+## Motivation
+
+We have a [page for CI/CD Analytics](https://gitlab.com/gitlab-org/gitlab/-/pipelines/charts?chart=pipelines) that contains some very basic analytics on pipelines. Most of this information relates to the **total** number of pipelines over time, which does not give any real value to customers: projects will always see an increase in the number of pipelines over time, so the total number of pipelines is of little consequence.
+
+![Current page](img/current_page.png)
+
+Because this page lacks real insights, understanding pipeline slowdowns or failures becomes a very manual task that is hard to track. We want to empower users to optimize their workflow in a centralized place and avoid all of the manual labor associated with either querying the API for data and parsing it by hand, or navigating the UI through dozens of pages until the insights or required action can be found.
+
+As we are going to process large quantities of data relating to a project's pipelines, there is potential to eventually summarize findings with an AI tool to give insights into job failures, pipeline slowdowns and flaky specs. As AI has become a crucial part of our product roadmap and Verify lacks any promising lead in that area, this page could be the center of this new addition.
+
+### Goals
+
+- Deliver a new Pipelines Analysis Dashboard page
+- Have excellent data visualization to help digest information quickly
+- Flexible querying to let users get the information they want
+
+- Clear actionables based on the information presented on the page
+- Show some default information on landing, like pipeline durations over time and the slowest jobs
+- Make the CI/CD Analytics more accessible, liked and remembered (AKA, more page views)
+
+### Non-Goals
+
+We do not aim to improve the GitLab project's pipeline speed. This feature could help us achieve this, but it is not a direct objective of this blueprint.
+
+We are also not aiming to have AI in the first iteration and should instead focus on making as much information available and digestible as possible.
+
+## Proposal
+
+Revamp the [page for CI/CD Analytics](https://gitlab.com/gitlab-org/gitlab/-/pipelines/charts?chart=pipelines) to include more meaningful data so that users can troubleshoot their pipelines with ease. Here is a list of the main improvements:
+
+### Overall statistics
+
+The current "overall statistics" will become a one-line header in a smaller font to keep this information available without taking as much visual space. We will replace the pipelines chart with a stacked bar plot where each segment of a bar represents a status and each bar is a unit of time (a day in the day view, a month in the month view, a year in the year view), so users can keep track of how many pipelines ran in that specific unit of time and what percentage of those pipelines failed or succeeded.
+
+### Pipeline duration graph
+
+A new pipeline duration graph that can be customized by type (MR pipelines, pipelines on a specific branch, etc.), number of runs and status (success, failed, etc.) will replace the current `Pipeline durations for the last 30 commits` chart. The existing chart checks the latest 30 commits made on the repository with no filtering, so the results presented are not very valuable.
+
+We will also surface jobs that failed multiple times and the slowest jobs in the last x pipelines on master. All of this is to support the effort of allowing users to query their pipeline data to figure out what they need to improve or what kind of problems they are facing with their CI/CD configuration.
+
+### Visibility
+
+Add a link on the `pipelines` page to increase the visibility of this feature. We can add a new option alongside the `Run pipeline` primary button.
+
+### Master Broken
+
+Add an "Is master broken?" quick option that scans the last x pipelines on the main branch and checks for failed jobs. All jobs that failed multiple times will be listed in a table with the option to create an incident from that list.
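+
+The scan could work roughly as sketched below. This is an illustrative sketch only: the model scopes, pipeline count, and failure threshold are assumptions, not the final implementation.
+
+```ruby
+# Hypothetical sketch: look at the last 50 pipelines on the default branch
+# and count how often each job name failed across those pipelines.
+recent_pipelines = project.ci_pipelines
+  .where(ref: project.default_branch)
+  .order(id: :desc)
+  .limit(50)
+
+failure_counts = Hash.new(0)
+
+recent_pipelines.each do |pipeline|
+  pipeline.builds.failed.each { |job| failure_counts[job.name] += 1 }
+end
+
+# Jobs failing repeatedly are candidates for the "Is master broken?" table.
+repeat_failures = failure_counts.select { |_name, count| count > 1 }
+```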
+
+### Color scheme
+
+Rethink our current color schemes for data visualization when it comes to pipeline statuses. We currently use the default visualization colors, but they don't match the colors users have grown accustomed to for pipeline and job statuses. There is an opportunity here to help users better understand their data through more relevant color schemes and better visualization.
+
+### Routing
+
+Change the routing from `pipelines/charts` to `pipelines/analytics`, since `charts` is overly restrictive terminology when talking about data visualization. It also doesn't really convey what this page is: a way to get information, not just nice charts. We can then also get rid of the query parameter for the tabs and instead support first-class routing.
+
+## Design and implementation details
+
+### New API for aggregated data
+
+This feature depends on having a new set of data available to us that aggregates job and pipeline insights and makes them available to the client.
+
+We'll start by aggregating data from ClickHouse, and probably only for `gitlab.com`, as the MVC. We will aggregate the data on the backend on the fly. So far ClickHouse has been very capable of such things.
+
+We won't store the aggregated data anywhere (we'll probably have materialized views in ClickHouse, but nothing more complex). Then, if the features get traction, we can explore ways to bring them to environments without ClickHouse.
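+
+As a rough illustration, such an on-the-fly aggregation could be a single ClickHouse query issued from the Rails backend. This is a minimal sketch: the table name, columns, and the `ClickHouse::Client.select` call are assumptions, not a committed schema or API.
+
+```ruby
+# Hypothetical sketch: average job duration per day for one project,
+# aggregated on the fly in ClickHouse. Table and column names are
+# assumptions for illustration only.
+sql = <<~SQL
+  SELECT
+    toStartOfDay(finished_at) AS day,
+    count() AS jobs_count,
+    avg(duration) AS avg_duration
+  FROM ci_finished_builds
+  WHERE project_id = {project_id:UInt64}
+  GROUP BY day
+  ORDER BY day
+SQL
+
+ClickHouse::Client.select(sql, :main) # parameter binding elided for brevity
+# => [{ "day" => "2024-01-01 00:00:00", "jobs_count" => 120, "avg_duration" => 233.4 }, ...]
+```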
+
+This way we can move fast, test our ideas with real users, and get feedback.
+
+### Feature flag
+
+To develop this new analytics page, we will gate it behind a feature flag named `ci_insights` and conditionally render the old or new analytics page. Potentially, we could even check the flag in the controller to decide which route to render: the new `/analytics` when the flag is on, and the old `/charts` when it isn't.
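+
+A minimal sketch of that controller-level check, assuming the existing `Feature.enabled?` helper; the controller and action names are illustrative only:
+
+```ruby
+# Hypothetical sketch: render the new CI Insights page only when the
+# `ci_insights` feature flag is enabled for the project.
+class Projects::PipelinesAnalyticsController < Projects::ApplicationController
+  def show
+    if Feature.enabled?(:ci_insights, project)
+      render :analytics # new CI Insights page
+    else
+      render :charts    # legacy charts page
+    end
+  end
+end
+```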
+
+### Add analytics on page view
+
+Make sure that we can get information on how often this page is viewed. If we do not have it, let's implement tracking so we know how visible this page is. The changes described here should make the view count go up, and we want to track this as a measure of success.
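+
+For example, a page view could be recorded through the internal events framework. This is a sketch under the assumption that an event name like the one below gets registered first; it is not an existing metric:
+
+```ruby
+# Hypothetical sketch: record a view of the CI analytics page.
+# The event name is an assumption and would need to be defined
+# in the internal events/metrics definitions before use.
+Gitlab::InternalEvents.track_event(
+  'view_ci_pipelines_analytics',
+  user: current_user,
+  project: project
+)
+```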
+
+### Routing
+
+We are planning to have new routes for the page and some redirects to set up. To read more about the routing proposal, see the [related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/437556).
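+
+As a rough illustration only (the actual routes are being defined in the issue above), the new path and the redirect could look like this in Rails routing; scoping is simplified compared to GitLab's real project routes:
+
+```ruby
+# Hypothetical sketch: serve the new analytics path and redirect the
+# legacy charts path to it. Route helpers and scoping are simplified.
+get 'pipelines/analytics', to: 'projects/pipelines#analytics', as: :pipelines_analytics
+get 'pipelines/charts', to: redirect { |_params, request| request.path.sub('charts', 'analytics') }
+```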
+
+### Pipelines duration graph
+
+We want a way for users to query data about pipelines with many different criteria. Most notably, query only pipelines with the scope `finished`, or by status `success` or `failed`. There is also the possibility to scope this to a ref, so users could either look at the main branch or even a branch that has introduced a CI/CD change. We want branch comparison for pipeline speed.
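+
+As an illustration of the kind of filtering involved, the sketch below uses finder-style parameters; the finder name and parameter list are assumptions rather than a committed API:
+
+```ruby
+# Hypothetical sketch: fetch the last finished pipelines on a ref,
+# filtered by status, to feed the duration graph. The finder and its
+# parameters are assumptions for illustration only.
+params = {
+  ref: 'master',     # or a branch that introduced a CI/CD change
+  scope: 'finished',
+  status: 'success'
+}
+
+pipelines = Ci::PipelinesFinder.new(project, current_user, params).execute.limit(100)
+durations = pipelines.map { |pipeline| [pipeline.finished_at, pipeline.duration] }
+```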
+
+To get more accurate data, we want to increase the count of pipelines requested. In GraphQL, we have a limit of 100 items per page, and we will probably hit performance degradation quite quickly. We need to define how we could fetch a larger data set for more accurate data visualization.
+
+### Jobs insights
+
+Currently, there is no way to query a single job across multiple pipelines, and it prevents us from doing a query that would look like this:
+
+```graphql
+query getJob($projectPath: ID!, $jobName: String!) {
+  project(fullPath: $projectPath) {
+    job(name: $jobName, last: 100) {
+      nodes {
+        id
+        duration
+      }
+    }
+  }
+}
+```
+
+There are plans to create a new unified table to log job analytics, and it is not yet defined what this API will look like. Without committing to an API definition yet, we want a unified way to query information for analytics that may look roughly like so:
+
+```ruby
+get_jobs(project_id:, job_name: nil, stage: nil, stage_index: nil, **other_filters)
+# =>
+# [{ id: 1, duration: 134, status: 'failed' }, ...]
+
+get_jobs_statistics(project_id:, job_name:, **other_filters)
+# =>
+# [{ time_bucket: '2024-01-01 00:00:00', avg_duration: 234, count: 123, statuses_count: { success: 123, failed: 45, canceled: 45 } }]
+```
+
+### Revamping our charts
+
+Explore a new color scheme and a nicer look for our charts. Collaborate with UX to determine whether this is something already on their minds, and support any initiative to have nicer, more modern-looking charts, as our charts are quite forgettable.
+
+## Alternative Solutions
+
+### New page
+
+We could create a brand new page and leave this section as it is. The pro is that we could perhaps have a more prominent placement in the navigation under `Build`, while the con is that we'd have clear overlap with the existing section.
+
+### Pipeline analysis per pipeline
+
+There was an [experiment](https://gitlab.com/gitlab-org/gitlab/-/issues/365902) in the past to add performance insights **per pipeline**. The experiment was removed and deemed not viable. Some of the findings were that:
+
+- Users did not interact with the page as much as expected and would not click on the button to view insights.
+- Users who did click on the button did not try to get more insights into a job.
+- Users did not leave feedback in the issue.
+
+This experiment mostly reveals that users who go to the pipeline graph page `pipelines/:id` are **not** trying to improve the performance of pipelines. Instead, this page is most likely used to debug pipeline failures, which means those users are from the IC/developer persona, not DevOps engineers trying to improve the workflow. By having this section in a broader area, we expect much better adoption and more useful actionables.
+
+### Do nothing
+
+We could leave this section untouched and not add any new form of analytics. The pro here would be the saved resources and time. The con is that we currently have no way to help customers improve the speed of their CI/CD configurations apart from reading our documentation. The revamped section would also be a great gateway for AI features and would help users iterate on their setup.
diff --git a/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/img/current_page.png b/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/img/current_page.png
new file mode 100644
index 00000000000..42b09d37785
--- /dev/null
+++ b/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/img/current_page.png
Binary files differ
diff --git a/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/index.md b/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/index.md
index 104a6ee2136..016db5f5766 100644
--- a/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/index.md
+++ b/doc/architecture/blueprints/ci_builds_runner_fleet_metrics/index.md
@@ -61,6 +61,10 @@ The following customer problems should be solved when addressing this question.
#### Which runners have failures in the past hour?
+## CI Insights
+
+CI Insights is a page that would mostly expose data on pipeline and job durations, with a multitude of different filters, search options, and dynamic graphs. To read more on this, see [this related sub-section](ci_insights.md).
+
## Implementation
The current implementation plan is based on a
diff --git a/doc/ci/yaml/index.md b/doc/ci/yaml/index.md
index a2490767350..447e0bd93da 100644
--- a/doc/ci/yaml/index.md
+++ b/doc/ci/yaml/index.md
@@ -807,12 +807,12 @@ scan-website:
Inputs are mandatory when included, unless you set a default value with `spec:inputs:default`.
-Use `default: null` to have no default value.
+Use `default: ''` to have no default value.
**Keyword type**: Header keyword. `specs` must be declared at the top of the configuration file,
in a header section.
-**Possible inputs**: A string representing the default value, or `null`.
+**Possible inputs**: A string representing the default value, or `''`.
**Example of `spec:inputs:default`**:
@@ -823,7 +823,7 @@ spec:
user:
default: 'test-user'
flags:
- default: null
+ default: ''
---
# The pipeline configuration would follow...
diff --git a/doc/update/background_migrations.md b/doc/update/background_migrations.md
index 0cd622f68ee..f1a3f7e7839 100644
--- a/doc/update/background_migrations.md
+++ b/doc/update/background_migrations.md
@@ -56,7 +56,7 @@ as 'finished', but it is 'active':
```
If you get this error,
-[review the options](#database-migrations-failing-because-of-batched-background-migration-not-finished) for
+[review the options](background_migrations_troubleshooting.md#database-migrations-failing-because-of-batched-background-migration-not-finished) for
how to complete the batched background migrations needed for the GitLab upgrade.
#### From the GitLab UI
@@ -419,208 +419,3 @@ sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::Database::Ba
```
::EndTabs
-
-## Troubleshooting
-
-<!-- Linked from lib/gitlab/database/migrations/batched_background_migration_helpers.rb -->
-
-### Database migrations failing because of batched background migration not finished
-
-When updating to GitLab version 14.2 or later, database migrations might fail with a message like:
-
-```plaintext
-StandardError: An error has occurred, all later migrations canceled:
-
-Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':
- {:job_class_name=>"CopyColumnUsingBackgroundMigrationJob",
- :table_name=>"push_event_payloads",
- :column_name=>"event_id",
- :job_arguments=>[["event_id"],
- ["event_id_convert_to_bigint"]]
- }
-```
-
-First, check if you have followed the [version-specific upgrade instructions for 14.2](../update/versions/gitlab_14_changes.md#1420).
-If you have, you can [manually finish the batched background migration](#finish-a-failed-migration-manually)).
-If you haven't, choose one of the following methods:
-
-1. [Rollback and upgrade](#roll-back-and-follow-the-required-upgrade-path) through one of the required
- versions before updating to 14.2+.
-1. [Roll forward](#roll-forward-and-finish-the-migrations-on-the-upgraded-version), staying on the current
- version and manually ensuring that the batched migrations complete successfully.
-
-#### Roll back and follow the required upgrade path
-
-1. [Rollback and restore the previously installed version](../administration/backup_restore/index.md)
-1. Update to either 14.0.5 or 14.1 **before** updating to 14.2+
-1. [Check the status](#check-the-status-of-batched-background-migrations) of the batched background migrations and
- make sure they are all marked as finished before attempting to upgrade again. If any remain marked as active,
- you can [manually finish them](#finish-a-failed-migration-manually).
-
-#### Roll forward and finish the migrations on the upgraded version
-
-##### For a deployment with downtime
-
-To run all the batched background migrations, it can take a significant amount of time
-depending on the size of your GitLab installation.
-
-1. [Check the status](#check-the-status-of-batched-background-migrations) of the batched background migrations in the
- database, and [manually run them](#finish-a-failed-migration-manually) with the appropriate
- arguments until the status query returns no rows.
-1. When the status of all of all them is marked as complete, re-run migrations for your installation.
-1. [Complete the database migrations](../administration/raketasks/maintenance.md#run-incomplete-database-migrations) from your GitLab upgrade:
-
- ```plaintext
- sudo gitlab-rake db:migrate
- ```
-
-1. Run a reconfigure:
-
- ```plaintext
- sudo gitlab-ctl reconfigure
- ```
-
-1. Finish the upgrade for your installation.
-
-##### For a no-downtime deployment
-
-As the failing migrations are post-deployment migrations, you can remain on a running instance of the upgraded
-version and wait for the batched background migrations to finish.
-
-1. [Check the status](#check-the-status-of-batched-background-migrations) of the batched background migration from
- the error message, and make sure it is listed as finished. If it is still active, either wait until it is done,
- or [manually finish it](#finish-a-failed-migration-manually).
-1. Re-run migrations for your installation, so the remaining post-deployment migrations finish.
-
-### The `BackfillNamespaceIdForNamespaceRoute` batched migration job fails
-
-In GitLab 14.8, the `BackfillNamespaceIdForNamespaceRoute` batched background migration job
-may fail to complete. When retried, a `500 Server Error` is returned. This issue was
-[resolved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/82387) in GitLab 14.9.
-
-To resolve this issue, [upgrade GitLab](../update/index.md) from 14.8 to 14.9.
-You can ignore the failed batch migration until after you update to GitLab 14.9.
-
-### Background migrations remain in the Sidekiq queue
-
-WARNING:
-The following operations can disrupt your GitLab performance. They run a number of Sidekiq jobs that perform various database or file updates.
-
-Run the following check. If it returns non-zero and the count does not decrease over time, follow the rest of the steps in this section.
-
-```shell
-# For Linux package installations:
-sudo gitlab-rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
-
-# For self-compiled installations:
-cd /home/git/gitlab
-sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
-```
-
-It is safe to re-execute the following commands, especially if you have 1000+ pending jobs which would likely overflow your runtime memory.
-
-::Tabs
-
-:::TabTitle Linux package (Omnibus)
-
-```shell
-# Start the rails console
-sudo gitlab-rails c
-
-# Execute the following in the rails console
-scheduled_queue = Sidekiq::ScheduledSet.new
-pending_job_classes = scheduled_queue.select { |job| job["class"] == "BackgroundMigrationWorker" }.map { |job| job["args"].first }.uniq
-pending_job_classes.each { |job_class| Gitlab::BackgroundMigration.steal(job_class) }
-```
-
-:::TabTitle Self-compiled (source)
-
-```shell
-# Start the rails console
-sudo -u git -H bundle exec rails RAILS_ENV=production
-
-# Execute the following in the rails console
-scheduled_queue = Sidekiq::ScheduledSet.new
-pending_job_classes = scheduled_queue.select { |job| job["class"] == "BackgroundMigrationWorker" }.map { |job| job["args"].first }.uniq
-pending_job_classes.each { |job_class| Gitlab::BackgroundMigration.steal(job_class) }
-```
-
-::EndTabs
-
-### Background migrations stuck in 'pending' state
-
-WARNING:
-The following operations can disrupt your GitLab performance. They run a number
-of Sidekiq jobs that perform various database or file updates.
-
-- GitLab 14.2 introduced an issue where a background migration named
- `BackfillDraftStatusOnMergeRequests` can be permanently stuck in a
- **pending** state across upgrades when the instance lacks records that match
- the migration's target. To clean up this stuck migration, see the
- [14.2.0 version-specific instructions](versions/gitlab_14_changes.md#1420).
-- GitLab 14.4 introduced an issue where a background migration named
- `PopulateTopicsTotalProjectsCountCache` can be permanently stuck in a
- **pending** state across upgrades when the instance lacks records that match
- the migration's target. To clean up this stuck migration, see the
- [14.4.0 version-specific instructions](versions/gitlab_14_changes.md#1440).
-- GitLab 14.5 introduced an issue where a background migration named
- `UpdateVulnerabilityOccurrencesLocation` can be permanently stuck in a
- **pending** state across upgrades when the instance lacks records that match
- the migration's target. To clean up this stuck migration, see the
- [14.5.0 version-specific instructions](versions/gitlab_14_changes.md#1450).
-- GitLab 14.8 introduced an issue where a background migration named
- `PopulateTopicsNonPrivateProjectsCount` can be permanently stuck in a
- **pending** state across upgrades. To clean up this stuck migration, see the
- [14.8.0 version-specific instructions](versions/gitlab_14_changes.md#1480).
-- GitLab 14.9 introduced an issue where a background migration named
- `ResetDuplicateCiRunnersTokenValuesOnProjects` can be permanently stuck in a
- **pending** state across upgrades when the instance lacks records that match
- the migration's target. To clean up this stuck migration, see the
- [14.9.0 version-specific instructions](versions/gitlab_14_changes.md#1490).
-
-For other background migrations stuck in pending, run the following check. If
-it returns non-zero and the count does not decrease over time, follow the rest
-of the steps in this section.
-
-```shell
-# For Linux package installations:
-sudo gitlab-rails runner -e production 'puts Gitlab::Database::BackgroundMigrationJob.pending.count'
-
-# For self-compiled installations:
-cd /home/git/gitlab
-sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::Database::BackgroundMigrationJob.pending.count'
-```
-
-It is safe to re-attempt these migrations to clear them out from a pending status:
-
-::Tabs
-
-:::TabTitle Linux package (Omnibus)
-
-```shell
-# Start the rails console
-sudo gitlab-rails c
-
-# Execute the following in the rails console
-Gitlab::Database::BackgroundMigrationJob.pending.find_each do |job|
- puts "Running pending job '#{job.class_name}' with arguments #{job.arguments}"
- result = Gitlab::BackgroundMigration.perform(job.class_name, job.arguments)
- puts "Result: #{result}"
-end
-```
-
-:::TabTitle Self-compiled (source)
-
-```shell
-# Start the rails console
-sudo -u git -H bundle exec rails RAILS_ENV=production
-
-# Execute the following in the rails console
-Gitlab::Database::BackgroundMigrationJob.pending.find_each do |job|
- puts "Running pending job '#{job.class_name}' with arguments #{job.arguments}"
- result = Gitlab::BackgroundMigration.perform(job.class_name, job.arguments)
- puts "Result: #{result}"
-end
-```
-
-::EndTabs
diff --git a/doc/update/background_migrations_troubleshooting.md b/doc/update/background_migrations_troubleshooting.md
new file mode 100644
index 00000000000..f4ea9c2a556
--- /dev/null
+++ b/doc/update/background_migrations_troubleshooting.md
@@ -0,0 +1,210 @@
+---
+stage: Data Stores
+group: Database
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+---
+
+# Troubleshooting **(FREE SELF)**
+
+<!-- Linked from lib/gitlab/database/migrations/batched_background_migration_helpers.rb -->
+
+## Database migrations failing because of batched background migration not finished
+
+When updating to GitLab version 14.2 or later, database migrations might fail with a message like:
+
+```plaintext
+StandardError: An error has occurred, all later migrations canceled:
+
+Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':
+ {:job_class_name=>"CopyColumnUsingBackgroundMigrationJob",
+ :table_name=>"push_event_payloads",
+ :column_name=>"event_id",
+ :job_arguments=>[["event_id"],
+ ["event_id_convert_to_bigint"]]
+ }
+```
+
+First, check if you have followed the [version-specific upgrade instructions for 14.2](../update/versions/gitlab_14_changes.md#1420).
+If you have, you can [manually finish the batched background migration](background_migrations.md#finish-a-failed-migration-manually).
+If you haven't, choose one of the following methods:
+
+1. [Roll back and upgrade](#roll-back-and-follow-the-required-upgrade-path) through one of the required
+versions before updating to 14.2+.
+1. [Roll forward](#roll-forward-and-finish-the-migrations-on-the-upgraded-version), staying on the current
+version and manually ensuring that the batched migrations complete successfully.
+
+### Roll back and follow the required upgrade path
+
+1. [Roll back and restore the previously installed version](../administration/backup_restore/index.md)
+1. Update to either 14.0.5 or 14.1 **before** updating to 14.2+
+1. [Check the status](background_migrations.md#check-the-status-of-batched-background-migrations) of the batched background migrations and
+make sure they are all marked as finished before attempting to upgrade again. If any remain marked as active,
+you can [manually finish them](background_migrations.md#finish-a-failed-migration-manually).
+
+### Roll forward and finish the migrations on the upgraded version
+
+#### For a deployment with downtime
+
+To run all the batched background migrations, it can take a significant amount of time
+depending on the size of your GitLab installation.
+
+1. [Check the status](background_migrations.md#check-the-status-of-batched-background-migrations) of the batched background migrations in the
+database, and [manually run them](background_migrations.md#finish-a-failed-migration-manually) with the appropriate
+arguments until the status query returns no rows.
+1. When the status of all of them is marked as complete, re-run migrations for your installation.
+1. [Complete the database migrations](../administration/raketasks/maintenance.md#run-incomplete-database-migrations) from your GitLab upgrade:
+
+ ```plaintext
+ sudo gitlab-rake db:migrate
+ ```
+
+1. Run a reconfigure:
+
+ ```plaintext
+ sudo gitlab-ctl reconfigure
+ ```
+
+1. Finish the upgrade for your installation.
+
+#### For a no-downtime deployment
+
+As the failing migrations are post-deployment migrations, you can remain on a running instance of the upgraded
+version and wait for the batched background migrations to finish.
+
+1. [Check the status](background_migrations.md#check-the-status-of-batched-background-migrations) of the batched background migration from
+the error message, and make sure it is listed as finished. If it is still active, either wait until it is done,
+or [manually finish it](background_migrations.md#finish-a-failed-migration-manually).
+1. Re-run migrations for your installation, so the remaining post-deployment migrations finish.
+
+## The `BackfillNamespaceIdForNamespaceRoute` batched migration job fails
+
+In GitLab 14.8, the `BackfillNamespaceIdForNamespaceRoute` batched background migration job
+may fail to complete. When retried, a `500 Server Error` is returned. This issue was
+[resolved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/82387) in GitLab 14.9.
+
+To resolve this issue, [upgrade GitLab](../update/index.md) from 14.8 to 14.9.
+You can ignore the failed batch migration until after you update to GitLab 14.9.
+
+## Background migrations remain in the Sidekiq queue
+
+WARNING:
+The following operations can disrupt your GitLab performance. They run a number of Sidekiq jobs that perform various database or file updates.
+
+Run the following check. If it returns non-zero and the count does not decrease over time, follow the rest of the steps in this section.
+
+```shell
+# For Linux package installations:
+sudo gitlab-rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
+
+# For self-compiled installations:
+cd /home/git/gitlab
+sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
+```
+
+It is safe to re-execute the following commands, especially if you have 1000+ pending jobs which would likely overflow your runtime memory.
+
+::Tabs
+
+:::TabTitle Linux package (Omnibus)
+
+```shell
+# Start the rails console
+sudo gitlab-rails c
+
+# Execute the following in the rails console
+scheduled_queue = Sidekiq::ScheduledSet.new
+pending_job_classes = scheduled_queue.select { |job| job["class"] == "BackgroundMigrationWorker" }.map { |job| job["args"].first }.uniq
+pending_job_classes.each { |job_class| Gitlab::BackgroundMigration.steal(job_class) }
+```
+
+:::TabTitle Self-compiled (source)
+
+```shell
+# Start the rails console
+sudo -u git -H bundle exec rails RAILS_ENV=production
+
+# Execute the following in the rails console
+scheduled_queue = Sidekiq::ScheduledSet.new
+pending_job_classes = scheduled_queue.select { |job| job["class"] == "BackgroundMigrationWorker" }.map { |job| job["args"].first }.uniq
+pending_job_classes.each { |job_class| Gitlab::BackgroundMigration.steal(job_class) }
+```
+
+::EndTabs
+
+## Background migrations stuck in 'pending' state
+
+WARNING:
+The following operations can disrupt your GitLab performance. They run a number
+of Sidekiq jobs that perform various database or file updates.
+
+- GitLab 14.2 introduced an issue where a background migration named
+ `BackfillDraftStatusOnMergeRequests` can be permanently stuck in a
+ **pending** state across upgrades when the instance lacks records that match
+ the migration's target. To clean up this stuck migration, see the
+ [14.2.0 version-specific instructions](versions/gitlab_14_changes.md#1420).
+- GitLab 14.4 introduced an issue where a background migration named
+ `PopulateTopicsTotalProjectsCountCache` can be permanently stuck in a
+ **pending** state across upgrades when the instance lacks records that match
+ the migration's target. To clean up this stuck migration, see the
+ [14.4.0 version-specific instructions](versions/gitlab_14_changes.md#1440).
+- GitLab 14.5 introduced an issue where a background migration named
+ `UpdateVulnerabilityOccurrencesLocation` can be permanently stuck in a
+ **pending** state across upgrades when the instance lacks records that match
+ the migration's target. To clean up this stuck migration, see the
+ [14.5.0 version-specific instructions](versions/gitlab_14_changes.md#1450).
+- GitLab 14.8 introduced an issue where a background migration named
+ `PopulateTopicsNonPrivateProjectsCount` can be permanently stuck in a
+ **pending** state across upgrades. To clean up this stuck migration, see the
+ [14.8.0 version-specific instructions](versions/gitlab_14_changes.md#1480).
+- GitLab 14.9 introduced an issue where a background migration named
+ `ResetDuplicateCiRunnersTokenValuesOnProjects` can be permanently stuck in a
+ **pending** state across upgrades when the instance lacks records that match
+ the migration's target. To clean up this stuck migration, see the
+ [14.9.0 version-specific instructions](versions/gitlab_14_changes.md#1490).
+
+For other background migrations stuck in pending, run the following check. If
+it returns non-zero and the count does not decrease over time, follow the rest
+of the steps in this section.
+
+```shell
+# For Linux package installations:
+sudo gitlab-rails runner -e production 'puts Gitlab::Database::BackgroundMigrationJob.pending.count'
+
+# For self-compiled installations:
+cd /home/git/gitlab
+sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::Database::BackgroundMigrationJob.pending.count'
+```
+
+It is safe to re-attempt these migrations to clear them out from a pending status:
+
+::Tabs
+
+:::TabTitle Linux package (Omnibus)
+
+```shell
+# Start the rails console
+sudo gitlab-rails c
+
+# Execute the following in the rails console
+Gitlab::Database::BackgroundMigrationJob.pending.find_each do |job|
+ puts "Running pending job '#{job.class_name}' with arguments #{job.arguments}"
+ result = Gitlab::BackgroundMigration.perform(job.class_name, job.arguments)
+ puts "Result: #{result}"
+end
+```
+
+:::TabTitle Self-compiled (source)
+
+```shell
+# Start the rails console
+sudo -u git -H bundle exec rails RAILS_ENV=production
+
+# Execute the following in the rails console
+Gitlab::Database::BackgroundMigrationJob.pending.find_each do |job|
+ puts "Running pending job '#{job.class_name}' with arguments #{job.arguments}"
+ result = Gitlab::BackgroundMigration.perform(job.class_name, job.arguments)
+ puts "Result: #{result}"
+end
+```
+
+::EndTabs
diff --git a/doc/update/deprecations.md b/doc/update/deprecations.md
index b4694bdf0d9..02fd4b3454e 100644
--- a/doc/update/deprecations.md
+++ b/doc/update/deprecations.md
@@ -1180,6 +1180,25 @@ automatically from GitLab 16.0 onwards.
<div class="deprecation breaking-change" data-milestone="17.0">
+### Support for setting custom schema for backup is deprecated
+
+<div class="deprecation-notes">
+- Announced in GitLab <span class="milestone">16.8</span>
+- Removal in GitLab <span class="milestone">17.0</span> ([breaking change](https://docs.gitlab.com/ee/update/terminology.html#breaking-change))
+- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/435210).
+</div>
+
+You could configure GitLab to use a custom schema for backup, by setting
+`gitlab_rails['backup_pg_schema'] = '<schema_name>'` in `/etc/gitlab/gitlab.rb` for Linux package installations,
+or by editing `config/gitlab.yml` for self-compiled installations.
+
+While the configuration setting was available, it had no effect and did not serve the purpose for which it was intended.
+This configuration setting will be removed in GitLab 17.0.
+
+</div>
+
+<div class="deprecation breaking-change" data-milestone="17.0">
+
### The GitHub importer Rake task
<div class="deprecation-notes">
diff --git a/doc/update/versions/gitlab_14_changes.md b/doc/update/versions/gitlab_14_changes.md
index 4d82ce86fed..68784a27e6b 100644
--- a/doc/update/versions/gitlab_14_changes.md
+++ b/doc/update/versions/gitlab_14_changes.md
@@ -805,7 +805,7 @@ Long running batched background database migrations:
Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':
```
- See how to [resolve this error](../background_migrations.md#database-migrations-failing-because-of-batched-background-migration-not-finished).
+ See how to [resolve this error](../background_migrations_troubleshooting.md#database-migrations-failing-because-of-batched-background-migration-not-finished).
Other issues:
diff --git a/doc/user/application_security/dependency_scanning/index.md b/doc/user/application_security/dependency_scanning/index.md
index 4e1c394c582..2570ce03005 100644
--- a/doc/user/application_security/dependency_scanning/index.md
+++ b/doc/user/application_security/dependency_scanning/index.md
@@ -806,7 +806,7 @@ The following variables allow configuration of global dependency scanning settin
| CI/CD variables | Description |
| ----------------------------|------------ |
-| `ADDITIONAL_CA_CERT_BUNDLE` | Bundle of CA certs to trust. The bundle of certificates provided here is also used by other tools during the scanning process, such as `git`, `yarn`, or `npm`. For more details, see [Using a custom SSL CA certificate authority](#using-a-custom-ssl-ca-certificate-authority). |
+| `ADDITIONAL_CA_CERT_BUNDLE` | Bundle of CA certificates to trust. The bundle of certificates provided here is also used by other tools during the scanning process, such as `git`, `yarn`, or `npm`. For more details, see [Custom TLS certificate authority](#custom-tls-certificate-authority). |
| `DS_EXCLUDED_ANALYZERS` | Specify the analyzers (by name) to exclude from Dependency Scanning. For more information, see [Analyzers](#analyzers). |
| `DS_EXCLUDED_PATHS` | Exclude files and directories from the scan based on the paths. A comma-separated list of patterns. Patterns can be globs (see [`doublestar.Match`](https://pkg.go.dev/github.com/bmatcuk/doublestar/v4@v4.0.2#Match) for supported patterns), or file or folder paths (for example, `doc,spec`). Parent directories also match patterns. Default: `"spec, test, tests, tmp"`. |
| `DS_IMAGE_SUFFIX` | Suffix added to the image name. (GitLab team members can view more information in this confidential issue: `https://gitlab.com/gitlab-org/gitlab/-/issues/354796`). Automatically set to `"-fips"` when FIPS mode is enabled. |
@@ -871,9 +871,26 @@ If one does not work and you need it we suggest
[submitting a feature request](https://gitlab.com/gitlab-org/gitlab/-/issues/new?issuable_template=Feature%20proposal%20-%20detailed&issue[title]=Docs%20feedback%20-%20feature%20proposal:%20Write%20your%20title)
or [contributing to the code](../../../development/index.md) to enable it to be used.
-### Using a custom SSL CA certificate authority
+### Custom TLS certificate authority
-You can use the `ADDITIONAL_CA_CERT_BUNDLE` CI/CD variable to configure a custom SSL CA certificate authority. The `ADDITIONAL_CA_CERT_BUNDLE` value should contain the [text representation of the X.509 PEM public-key certificate](https://www.rfc-editor.org/rfc/rfc7468#section-5.1). For example, to configure this value in the `.gitlab-ci.yml` file, use the following:
+Dependency Scanning allows the use of custom TLS certificates for SSL/TLS connections instead of the
+default bundle shipped with the analyzer container image.
+
+Support for custom certificate authorities was introduced in the following versions.
+
+| Analyzer | Version |
+|--------------------|--------------------------------------------------------------------------------------------------------|
+| `gemnasium` | [v2.8.0](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/releases/v2.8.0) |
+| `gemnasium-maven` | [v2.9.0](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/releases/v2.9.0) |
+| `gemnasium-python` | [v2.7.0](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/releases/v2.7.0) |
+
+#### Using a custom TLS certificate authority
+
+To use a custom TLS certificate authority, assign the
+[text representation of the X.509 PEM public-key certificate](https://www.rfc-editor.org/rfc/rfc7468#section-5.1)
+to the CI/CD variable `ADDITIONAL_CA_CERT_BUNDLE`.
+
+For example, to configure the certificate in the `.gitlab-ci.yml` file:
```yaml
variables:
@@ -885,8 +902,6 @@ variables:
-----END CERTIFICATE-----
```
-The `ADDITIONAL_CA_CERT_BUNDLE` value can also be configured as a [custom variable in the UI](../../../ci/variables/index.md#for-a-project), either as a `file`, which requires the path to the certificate, or as a variable, which requires the text representation of the certificate.
-
### Using private Maven repositories
If your private Maven repository requires login credentials,
@@ -1073,16 +1088,6 @@ For details on saving and transporting Docker images as a file, see the Docker d
[`docker save`](https://docs.docker.com/engine/reference/commandline/save/), [`docker load`](https://docs.docker.com/engine/reference/commandline/load/),
[`docker export`](https://docs.docker.com/engine/reference/commandline/export/), and [`docker import`](https://docs.docker.com/engine/reference/commandline/import/).
-#### Support for Custom Certificate Authorities
-
-Support for custom certificate authorities was introduced in the following versions.
-
-| Analyzer | Version |
-| -------- | ------- |
-| `gemnasium` | [v2.8.0](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/releases/v2.8.0) |
-| `gemnasium-maven` | [v2.9.0](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/releases/v2.9.0) |
-| `gemnasium-python` | [v2.7.0](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/releases/v2.7.0) |
-
### Set dependency scanning CI/CD job variables to use local dependency scanning analyzers
Add the following configuration to your `.gitlab-ci.yml` file. You must change the value of
diff --git a/doc/user/project/merge_requests/approvals/settings.md b/doc/user/project/merge_requests/approvals/settings.md
index fa06fb59bc1..0120be0cf17 100644
--- a/doc/user/project/merge_requests/approvals/settings.md
+++ b/doc/user/project/merge_requests/approvals/settings.md
@@ -116,10 +116,11 @@ When this field is changed, it can affect all open merge requests depending on t
> - Requiring re-authentication by using SAML authentication for GitLab.com groups [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/5981) in GitLab 16.6 [with a flag](../../../../administration/feature_flags.md) named `ff_require_saml_auth_to_approve`. Disabled by default.
> - Requiring re-authentication by using SAML authentication for self-managed instances [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/431415) in GitLab 16.7 [with a flag](../../../../administration/feature_flags.md) named `ff_require_saml_auth_to_approve`. Disabled by default.
+> - [Enabled `ff_require_saml_auth_to_approve` by default](https://gitlab.com/gitlab-org/gitlab/-/issues/431714) in GitLab 16.8 for GitLab.com and self-managed instances.
FLAG:
-On self-managed GitLab, by default requiring re-authentication by using SAML authentication is not available. To make it available, an administrator can
-[enable the feature flag](../../../../administration/feature_flags.md) named `ff_require_saml_auth_to_approve`. On GitLab.com, this feature is not available.
+On self-managed GitLab, by default requiring re-authentication by using SAML authentication is available. To hide the feature, an administrator can
+[disable the feature flag](../../../../administration/feature_flags.md) named `ff_require_saml_auth_to_approve`. On GitLab.com, this feature is available.
You can force potential approvers to first authenticate with either:
diff --git a/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config.rb b/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config.rb
new file mode 100644
index 00000000000..de20eae8cf0
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ class BackfillPartitionIdCiPipelineConfig < BatchedMigrationJob
+ operation_name :update_all
+ feature_category :continuous_integration
+ scope_to ->(relation) { relation.where('ci_pipelines_config.pipeline_id >= ?', first_pipeline_id) }
+
+ def perform
+ each_sub_batch do |sub_batch|
+ sub_batch
+ .where('ci_pipelines_config.pipeline_id = ci_pipelines.id')
+ .update_all('partition_id = ci_pipelines.partition_id FROM ci_pipelines')
+ end
+ end
+
+ private
+
+ def first_pipeline_id
+ first_pipeline_with_partition_101 || max_pipeline_id
+ end
+
+ def first_pipeline_with_partition_101
+ connection.select_value(<<~SQL)
+ SELECT MIN(commit_id) FROM p_ci_builds WHERE partition_id = 101;
+ SQL
+ end
+
+ def max_pipeline_id
+ connection.select_value(<<~SQL)
+ SELECT MAX(pipeline_id) FROM ci_pipelines_config;
+ SQL
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/replay_events_importer.rb b/lib/gitlab/github_import/importer/replay_events_importer.rb
index bcdaa4d3ce5..83578cf7672 100644
--- a/lib/gitlab/github_import/importer/replay_events_importer.rb
+++ b/lib/gitlab/github_import/importer/replay_events_importer.rb
@@ -40,9 +40,9 @@ module Gitlab
events.each do |event|
case event.event
when 'review_requested'
- reviewers[event.requested_reviewer.login] = event.requested_reviewer.to_hash
+ reviewers[event.requested_reviewer.login] = event.requested_reviewer.to_hash if event.requested_reviewer
when 'review_request_removed'
- reviewers[event.requested_reviewer.login] = nil
+ reviewers[event.requested_reviewer.login] = nil if event.requested_reviewer
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 65bd7e16e1b..574b52eb337 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -1940,6 +1940,9 @@ msgstr ""
msgid "AIAgents|New agent"
msgstr ""
+msgid "AIAgent|AI Agent: %{agentId}"
+msgstr ""
+
msgid "AIPoweredSM|AI-powered features"
msgstr ""
@@ -4474,6 +4477,12 @@ msgstr ""
msgid "After you've reviewed these contribution guidelines, you'll be all set to"
msgstr ""
+msgid "AiAgents|AI Agents"
+msgstr ""
+
+msgid "AiAgents|AI agent"
+msgstr ""
+
msgid "Akismet"
msgstr ""
@@ -51578,7 +51587,19 @@ msgid_plural "Tracing|%{count} spans"
msgstr[0] ""
msgstr[1] ""
-msgid "Tracing|%{ms} ms"
+msgid "Tracing|%{h}h"
+msgstr ""
+
+msgid "Tracing|%{ms}ms"
+msgstr ""
+
+msgid "Tracing|%{m}m"
+msgstr ""
+
+msgid "Tracing|%{s}s"
+msgstr ""
+
+msgid "Tracing|0ms"
msgstr ""
msgid "Tracing|Attribute"
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 24ec5467c86..74267874eeb 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -50,6 +50,7 @@ RSpec.describe 'Database schema', feature_category: :database do
ci_namespace_monthly_usages: %w[namespace_id],
ci_pipeline_artifacts: %w[partition_id],
ci_pipeline_chat_data: %w[partition_id],
+ ci_pipelines_config: %w[partition_id],
ci_pipeline_metadata: %w[partition_id],
ci_pipeline_variables: %w[partition_id],
ci_pipelines: %w[partition_id],
diff --git a/spec/factories/ci/pipeline_config.rb b/spec/factories/ci/pipeline_config.rb
new file mode 100644
index 00000000000..a415efaf185
--- /dev/null
+++ b/spec/factories/ci/pipeline_config.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_pipeline_config, class: 'Ci::PipelineConfig' do
+ pipeline factory: :ci_empty_pipeline
+ content { "content" }
+ end
+end
diff --git a/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap b/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
index c69547deb1c..a43b4aae586 100644
--- a/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
+++ b/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
@@ -141,7 +141,7 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
class="gl-vertical-align-middle!"
role="cell"
>
- <gl-emoji
+ <div
data-fallback-src="https://gitlab.com/custom_emoji/custom_emoji/-/raw/main/img/confused_husky.gif"
data-name="confused_husky"
data-unicode-version="custom"
diff --git a/spec/frontend/custom_emoji/components/list_spec.js b/spec/frontend/custom_emoji/components/list_spec.js
index b5729d59464..4177aea2d33 100644
--- a/spec/frontend/custom_emoji/components/list_spec.js
+++ b/spec/frontend/custom_emoji/components/list_spec.js
@@ -21,6 +21,9 @@ function createComponent(propsData = {}) {
userPermissions: { createCustomEmoji: true },
...propsData,
},
+ stubs: {
+ GlEmoji: { template: '<div/>' },
+ },
});
}
diff --git a/spec/frontend/emoji/components/emoji_group_spec.js b/spec/frontend/emoji/components/emoji_group_spec.js
index a2a46bedd7b..a2e3643356f 100644
--- a/spec/frontend/emoji/components/emoji_group_spec.js
+++ b/spec/frontend/emoji/components/emoji_group_spec.js
@@ -13,6 +13,7 @@ function factory(propsData = {}) {
propsData,
stubs: {
GlButton,
+ GlEmoji: { template: '<div/>' },
},
}),
);
diff --git a/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js b/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js
index 369b8f32c2d..e873da07a2a 100644
--- a/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js
+++ b/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
import { GlTable, GlBadge, GlPagination } from '@gitlab/ui';
import WorkloadTable from '~/kubernetes_dashboard/components/workload_table.vue';
-import { TABLE_HEADING_CLASSES, PAGE_SIZE } from '~/kubernetes_dashboard/constants';
+import { PAGE_SIZE } from '~/kubernetes_dashboard/constants';
import { mockPodsTableItems } from '../graphql/mock_data';
let wrapper;
@@ -26,25 +26,24 @@ describe('Workload table component', () => {
{
key: 'name',
label: 'Name',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
+ tdClass: 'gl-md-w-half gl-lg-w-40p gl-word-break-word',
},
{
key: 'status',
label: 'Status',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
+ tdClass: 'gl-md-w-15',
},
{
key: 'namespace',
label: 'Namespace',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
+ tdClass: 'gl-md-w-30p gl-lg-w-40p gl-word-break-word',
},
{
key: 'age',
label: 'Age',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
},
];
@@ -57,13 +56,11 @@ describe('Workload table component', () => {
{
key: 'field-1',
label: 'Field-1',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
},
{
key: 'field-2',
label: 'Field-2',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
},
];
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
index cbf2184d879..78d7f4183b7 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
@@ -28,19 +28,15 @@ exports[`packages_list_row renders 1`] = `
<div
class="gl-align-items-center gl-display-flex gl-min-w-0 gl-mr-3"
>
- <router-link-stub
- ariacurrentvalue="page"
+ <a
class="gl-min-w-0 gl-text-body"
data-testid="details-link"
- event="click"
- tag="a"
- to="[object Object]"
>
<gl-truncate-stub
position="end"
text="@gitlab-org/package-15"
/>
- </router-link-stub>
+ </a>
</div>
</div>
<div
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
index 9f8fd4e28e7..afcb1798878 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
@@ -1,6 +1,7 @@
import { GlFormCheckbox, GlSprintf, GlTruncate } from '@gitlab/ui';
import Vue from 'vue';
import VueRouter from 'vue-router';
+import { RouterLinkStub } from '@vue/test-utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
@@ -58,6 +59,7 @@ describe('packages_list_row', () => {
ListItem,
GlSprintf,
TimeagoTooltip,
+ RouterLink: RouterLinkStub,
},
propsData: {
packageEntity,
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 15436832be8..90f31dca232 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -319,6 +319,25 @@ describe('Release edit/new component', () => {
expect(actions.saveRelease).not.toHaveBeenCalled();
});
});
+
+ describe('when tag notes are loading', () => {
+ beforeEach(async () => {
+ await factory({
+ store: {
+ modules: {
+ editNew: {
+ state: {
+ isFetchingTagNotes: true,
+ },
+ },
+ },
+ },
+ });
+ });
+ it('renders the submit button as disabled', () => {
+ expect(findSubmitButton().attributes('disabled')).toBeDefined();
+ });
+ });
});
describe('delete', () => {
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index a55b6cdef92..4dc55c12464 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -4,6 +4,7 @@ import testAction from 'helpers/vuex_action_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { getTag } from '~/api/tags_api';
import { createAlert } from '~/alert';
+import { HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
import AccessorUtilities from '~/lib/utils/accessor';
import { s__ } from '~/locale';
@@ -128,6 +129,38 @@ describe('Release edit/new actions', () => {
{ type: types.INITIALIZE_RELEASE, payload: release },
{ type: types.UPDATE_CREATE_FROM, payload: createFrom },
],
+ expectedActions: [{ type: 'fetchTagNotes', payload: release.tagName }],
+ });
+ });
+
+ it('with no tag name, does not fetch tag information', () => {
+ const release = {
+ tagName: '',
+ tagMessage: 'hello',
+ name: '',
+ description: '',
+ milestones: [],
+ groupMilestones: [],
+ releasedAt: new Date(),
+ assets: {
+ links: [],
+ },
+ };
+ const createFrom = 'main';
+
+ window.localStorage.setItem(`${state.projectPath}/release/new`, JSON.stringify(release));
+ window.localStorage.setItem(
+ `${state.projectPath}/release/new/createFrom`,
+ JSON.stringify(createFrom),
+ );
+
+ return testAction({
+ action: actions.loadDraftRelease,
+ state,
+ expectedMutations: [
+ { type: types.INITIALIZE_RELEASE, payload: release },
+ { type: types.UPDATE_CREATE_FROM, payload: createFrom },
+ ],
});
});
});
@@ -988,6 +1021,7 @@ describe('Release edit/new actions', () => {
expect(getTag).toHaveBeenCalledWith(state.projectId, tagName);
});
+
it('creates an alert on error', async () => {
error = new Error();
getTag.mockRejectedValue(error);
@@ -1007,5 +1041,23 @@ describe('Release edit/new actions', () => {
});
expect(getTag).toHaveBeenCalledWith(state.projectId, tagName);
});
+
+ it('assumes creating a tag on 404', async () => {
+ error = { response: { status: HTTP_STATUS_NOT_FOUND } };
+ getTag.mockRejectedValue(error);
+
+ await testAction({
+ action: actions.fetchTagNotes,
+ payload: tagName,
+ state,
+ expectedMutations: [
+ { type: types.REQUEST_TAG_NOTES },
+ { type: types.RECEIVE_TAG_NOTES_SUCCESS, payload: {} },
+ ],
+ expectedActions: [{ type: 'setNewTag' }, { type: 'setCreating' }],
+ });
+
+ expect(getTag).toHaveBeenCalledWith(state.projectId, tagName);
+ });
});
});
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 24490e19296..30a3c78641c 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -424,7 +424,7 @@ describe('Release edit/new getters', () => {
describe('formattedReleaseNotes', () => {
it.each`
- description | includeTagNotes | tagNotes | included | showCreateFrom
+ description | includeTagNotes | tagNotes | included | isNewTag
${'release notes'} | ${true} | ${'tag notes'} | ${true} | ${false}
${'release notes'} | ${true} | ${''} | ${false} | ${false}
${'release notes'} | ${false} | ${'tag notes'} | ${false} | ${false}
@@ -432,25 +432,24 @@ describe('Release edit/new getters', () => {
${'release notes'} | ${true} | ${''} | ${false} | ${true}
${'release notes'} | ${false} | ${'tag notes'} | ${false} | ${true}
`(
- 'should include tag notes=$included when includeTagNotes=$includeTagNotes and tagNotes=$tagNotes and showCreateFrom=$showCreateFrom',
- ({ description, includeTagNotes, tagNotes, included, showCreateFrom }) => {
+ 'should include tag notes=$included when includeTagNotes=$includeTagNotes and tagNotes=$tagNotes and isNewTag=$isNewTag',
+ ({ description, includeTagNotes, tagNotes, included, isNewTag }) => {
let state;
- if (showCreateFrom) {
+ if (isNewTag) {
state = {
release: { description, tagMessage: tagNotes },
includeTagNotes,
- showCreateFrom,
};
} else {
- state = { release: { description }, includeTagNotes, tagNotes, showCreateFrom };
+ state = { release: { description }, includeTagNotes, tagNotes };
}
const text = `### ${s__('Releases|Tag message')}\n\n${tagNotes}\n`;
if (included) {
- expect(getters.formattedReleaseNotes(state)).toContain(text);
+ expect(getters.formattedReleaseNotes(state, { isNewTag })).toContain(text);
} else {
- expect(getters.formattedReleaseNotes(state)).not.toContain(text);
+ expect(getters.formattedReleaseNotes(state, { isNewTag })).not.toContain(text);
}
},
);
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb
new file mode 100644
index 00000000000..fad3e277888
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineConfig,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_config_table) { table(:ci_pipelines_config, database: :ci) }
+ let!(:pipeline_1) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_2) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_3) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_config_100) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_1.id,
+ content: "content",
+ partition_id: pipeline_1.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_config_101) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_2.id,
+ content: "content",
+ partition_id: pipeline_2.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_config) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_3.id,
+ content: "content",
+ partition_id: pipeline_1.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_config_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_config_table.maximum(:pipeline_id),
+ batch_table: :ci_pipelines_config,
+ batch_column: :pipeline_id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ before do
+ pipeline_3.update!(partition_id: 100)
+ end
+
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_config.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ it 'fixes invalid records in the wrong the partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_config_100.reload.partition_id }
+ .and not_change { ci_pipeline_config_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_config.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb
index a2233ffe8cc..2b21232c642 100644
--- a/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb
@@ -16,37 +16,39 @@ RSpec.describe Gitlab::GithubImport::Importer::ReplayEventsImporter, feature_cat
)
end
- let(:importer) { described_class.new(representation, project, client) }
+ let(:events) do
+ [
+ {
+ requested_reviewer: { id: 1, login: 'user1' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 1, login: 'user1' },
+ event: 'review_request_removed'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_request_removed'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 3, login: 'user3' },
+ event: 'review_requested'
+ }
+ ]
+ end
+
+ subject(:importer) { described_class.new(representation, project, client) }
describe '#execute' do
before do
- events = [
- {
- requested_reviewer: { id: 1, login: 'user1' },
- event: 'review_requested'
- },
- {
- requested_reviewer: { id: 1, login: 'user1' },
- event: 'review_request_removed'
- },
- {
- requested_reviewer: { id: 2, login: 'user2' },
- event: 'review_requested'
- },
- {
- requested_reviewer: { id: 2, login: 'user2' },
- event: 'review_request_removed'
- },
- {
- requested_reviewer: { id: 2, login: 'user2' },
- event: 'review_requested'
- },
- {
- requested_reviewer: { id: 3, login: 'user3' },
- event: 'review_requested'
- }
- ]
-
representations = events.map { |e| Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(e) }
allow_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
@@ -75,6 +77,35 @@ RSpec.describe Gitlab::GithubImport::Importer::ReplayEventsImporter, feature_cat
importer.execute
end
+
+ context 'when reviewer is a team' do
+ let(:events) do
+ [
+ {
+ event: 'review_requested',
+ requested_team: { name: 'backend-team' }
+ },
+ {
+ event: 'review_requested',
+ requested_team: { name: 'frontend-team' }
+ },
+ {
+ event: 'review_request_removed',
+ requested_team: { name: 'frontend-team' }
+ }
+ ]
+ end
+
+ it 'ignores the events and do not assign the reviewers' do
+ expect(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests).to receive(:from_json_hash).with(
+ merge_request_id: association.id,
+ merge_request_iid: association.iid,
+ users: []
+ ).and_call_original
+
+ importer.execute
+ end
+ end
end
context 'when association is not found' do
diff --git a/spec/migrations/20240110090352_queue_backfill_partition_id_ci_pipeline_config_spec.rb b/spec/migrations/20240110090352_queue_backfill_partition_id_ci_pipeline_config_spec.rb
new file mode 100644
index 00000000000..9d64708eb29
--- /dev/null
+++ b/spec/migrations/20240110090352_queue_backfill_partition_id_ci_pipeline_config_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillPartitionIdCiPipelineConfig, migration: :gitlab_ci, feature_category: :continuous_integration do
+ let!(:batched_migrations) { table(:batched_background_migrations) }
+ let!(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ context 'with migration present' do
+ let!(:ci_backfill_partition_id_ci_pipeline_config_migration) do
+ batched_migrations.create!(
+ job_class_name: 'QueueBackfillPartitionIdCiPipelineConfig',
+ table_name: :ci_pipelines_config,
+ column_name: :pipeline_id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 100,
+ gitlab_schema: :gitlab_ci,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+
+ it 'schedules background jobs for each batch of ci_pipelines_config' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_pipelines_config,
+ column_name: :pipeline_id,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
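The new migration spec above follows GitLab's standard pattern for queueing a batched background migration. For orientation only, a minimal, hypothetical sketch of the kind of migration such a spec exercises, assuming the stock queue_batched_background_migration / delete_batched_background_migration helpers; the constant values and migration version are illustrative, not taken from this commit:

  # Hypothetical sketch -- not the migration shipped in this commit.
  class QueueBackfillPartitionIdCiPipelineConfig < Gitlab::Database::Migration[2.2]
    MIGRATION = 'BackfillPartitionIdCiPipelineConfig'
    BATCH_SIZE = 1_000
    SUB_BATCH_SIZE = 100

    restrict_gitlab_migration gitlab_schema: :gitlab_ci

    def up
      queue_batched_background_migration(
        MIGRATION,
        :ci_pipelines_config,
        :pipeline_id,
        batch_size: BATCH_SIZE,
        sub_batch_size: SUB_BATCH_SIZE
      )
    end

    def down
      delete_batched_background_migration(MIGRATION, :ci_pipelines_config, :pipeline_id, [])
    end
  end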
diff --git a/spec/models/ci/pipeline_config_spec.rb b/spec/models/ci/pipeline_config_spec.rb
index 3d033d33df3..3368c40fb57 100644
--- a/spec/models/ci/pipeline_config_spec.rb
+++ b/spec/models/ci/pipeline_config_spec.rb
@@ -2,9 +2,24 @@
require 'spec_helper'
-RSpec.describe Ci::PipelineConfig, type: :model do
+RSpec.describe Ci::PipelineConfig, type: :model, feature_category: :continuous_integration do
it { is_expected.to belong_to(:pipeline) }
it { is_expected.to validate_presence_of(:pipeline) }
it { is_expected.to validate_presence_of(:content) }
+
+ describe 'partitioning', :ci_partitionable do
+ include Ci::PartitioningHelpers
+
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:pipeline_config) { create(:ci_pipeline_config, pipeline: pipeline) }
+
+ before do
+ stub_current_partition_id
+ end
+
+ it 'assigns the same partition_id as its pipeline' do
+ expect(pipeline_config.partition_id).to eq(ci_testing_partition_id)
+ end
+ end
end
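The partitioning examples above assume the model derives its partition_id from the owning pipeline. A hypothetical sketch of that model-side wiring, using the Ci::Partitionable concern's partitionable DSL; this is illustrative only and does not reproduce the commit's actual app/models/ci/pipeline_config.rb change:

  # Hypothetical sketch of the model wiring the spec implies.
  module Ci
    class PipelineConfig < Ci::ApplicationRecord
      include Ci::Partitionable

      self.table_name = 'ci_pipelines_config'
      self.primary_key = :pipeline_id

      belongs_to :pipeline, class_name: 'Ci::Pipeline', inverse_of: :pipeline_config
      partitionable scope: :pipeline  # partition_id is copied from the owning pipeline

      validates :pipeline, presence: true
      validates :content, presence: true
    end
  end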
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 118d6050174..26a9a364ea6 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -2511,9 +2511,15 @@ RSpec.describe Group, feature_category: :groups_and_projects do
subject(:highest_group_member) { nested_group_2.highest_group_member(user) }
context 'when the user is not a member of any group in the hierarchy' do
- it 'returns nil' do
- expect(highest_group_member).to be_nil
+ it { is_expected.to be_nil }
+ end
+
+ context 'when access request to group is pending' do
+ before do
+ create(:group_member, requested_at: Time.current.utc, source: nested_group, user: user)
end
+
+ it { is_expected.to be_nil }
end
context 'when the user is only a member of one group in the hierarchy' do
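The new group_spec example asserts that a pending access request never counts toward the highest role in the hierarchy. A rough, hypothetical sketch of the lookup behaviour that example pins down, not the actual implementation in this commit:

  # Hypothetical sketch -- a pending access request has requested_at set and is excluded.
  def highest_group_member(user)
    GroupMember
      .where(source_id: self_and_ancestors_ids, user_id: user.id)
      .where(requested_at: nil)
      .order(:access_level)
      .last
  end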
diff --git a/spec/requests/projects/google_cloud/databases_controller_spec.rb b/spec/requests/projects/google_cloud/databases_controller_spec.rb
index fa978a3921f..46f6d27644c 100644
--- a/spec/requests/projects/google_cloud/databases_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/databases_controller_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_ca
context 'when EnableCloudsqlService fails' do
before do
- allow_next_instance_of(::GoogleCloud::EnableCloudsqlService) do |service|
+ allow_next_instance_of(::CloudSeed::GoogleCloud::EnableCloudsqlService) do |service|
allow(service).to receive(:execute)
.and_return({ status: :error, message: 'error' })
end
@@ -125,7 +125,7 @@ RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_ca
context 'when EnableCloudsqlService is successful' do
before do
- allow_next_instance_of(::GoogleCloud::EnableCloudsqlService) do |service|
+ allow_next_instance_of(::CloudSeed::GoogleCloud::EnableCloudsqlService) do |service|
allow(service).to receive(:execute)
.and_return({ status: :success, message: 'success' })
end
@@ -133,7 +133,7 @@ RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_ca
context 'when CreateCloudsqlInstanceService fails' do
before do
- allow_next_instance_of(::GoogleCloud::CreateCloudsqlInstanceService) do |service|
+ allow_next_instance_of(::CloudSeed::GoogleCloud::CreateCloudsqlInstanceService) do |service|
allow(service).to receive(:execute)
.and_return({ status: :error, message: 'error' })
end
@@ -162,7 +162,7 @@ RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_ca
context 'when CreateCloudsqlInstanceService is successful' do
before do
- allow_next_instance_of(::GoogleCloud::CreateCloudsqlInstanceService) do |service|
+ allow_next_instance_of(::CloudSeed::GoogleCloud::CreateCloudsqlInstanceService) do |service|
allow(service).to receive(:execute)
.and_return({ status: :success, message: 'success' })
end
diff --git a/spec/requests/projects/google_cloud/deployments_controller_spec.rb b/spec/requests/projects/google_cloud/deployments_controller_spec.rb
index e9eac1e7ecd..1a6482477ef 100644
--- a/spec/requests/projects/google_cloud/deployments_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/deployments_controller_spec.rb
@@ -110,7 +110,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :
context 'when enable service fails' do
before do
- allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::EnableCloudRunService) do |service|
allow(service)
.to receive(:execute)
.and_return(
@@ -146,7 +146,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :
before do
mock_gcp_error = Google::Apis::ClientError.new('some_error')
- allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::EnableCloudRunService) do |service|
allow(service).to receive(:execute).and_raise(mock_gcp_error)
end
end
@@ -173,14 +173,14 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :
context 'GCP_PROJECT_IDs are defined' do
before do
- allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |enable_cloud_run_service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::EnableCloudRunService) do |enable_cloud_run_service|
allow(enable_cloud_run_service).to receive(:execute).and_return({ status: :success })
end
end
context 'when generate pipeline service fails' do
before do
- allow_next_instance_of(GoogleCloud::GeneratePipelineService) do |generate_pipeline_service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::GeneratePipelineService) do |generate_pipeline_service|
allow(generate_pipeline_service).to receive(:execute).and_return({ status: :error })
end
end
@@ -206,7 +206,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :
end
it 'redirects to create merge request form' do
- allow_next_instance_of(GoogleCloud::GeneratePipelineService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::GeneratePipelineService) do |service|
allow(service).to receive(:execute).and_return({ status: :success })
end
diff --git a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb b/spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb
index c31e76170d5..f6f1206e753 100644
--- a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::CreateCloudsqlInstanceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::CreateCloudsqlInstanceService, feature_category: :deployment_management do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:gcp_project_id) { 'gcp_project_120' }
diff --git a/spec/services/google_cloud/create_service_accounts_service_spec.rb b/spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb
index 3b57f2a9e5f..da30037963b 100644
--- a/spec/services/google_cloud/create_service_accounts_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::CreateServiceAccountsService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::CreateServiceAccountsService, feature_category: :deployment_management do
describe '#execute' do
before do
mock_google_oauth2_creds = Struct.new(:app_id, :app_secret)
diff --git a/spec/services/google_cloud/enable_cloud_run_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb
index 3de9e7fcd5c..09f1b3460cc 100644
--- a/spec/services/google_cloud/enable_cloud_run_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableCloudRunService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableCloudRunService, feature_category: :deployment_management do
describe 'when a project does not have any gcp projects' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/google_cloud/enable_cloudsql_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb
index b14b827e8b8..137393e4544 100644
--- a/spec/services/google_cloud/enable_cloudsql_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableCloudsqlService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableCloudsqlService, feature_category: :deployment_management do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:params) do
diff --git a/spec/services/google_cloud/enable_vision_ai_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb
index 5adafcffe69..c37b5681a4b 100644
--- a/spec/services/google_cloud/enable_vision_ai_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableVisionAiService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableVisionAiService, feature_category: :deployment_management do
describe 'when a project does not have any gcp projects' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb b/spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb
index f8d5ba99bf6..c4a0be78213 100644
--- a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
+RSpec.describe CloudSeed::GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
:clean_gitlab_redis_rate_limiting, feature_category: :build_artifacts do
include StubRequests
diff --git a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb b/spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb
index a748fed7134..2af03291484 100644
--- a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GcpRegionAddOrReplaceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GcpRegionAddOrReplaceService, feature_category: :deployment_management do
it 'adds and replaces GCP region vars' do
project = create(:project, :public)
service = described_class.new(project)
diff --git a/spec/services/google_cloud/generate_pipeline_service_spec.rb b/spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb
index 8f49e1af901..14c1e6bae7f 100644
--- a/spec/services/google_cloud/generate_pipeline_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GeneratePipelineService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GeneratePipelineService, feature_category: :deployment_management do
describe 'for cloud-run' do
describe 'when there is no existing pipeline' do
let_it_be(:project) { create(:project, :repository) }
@@ -64,7 +64,10 @@ RSpec.describe GoogleCloud::GeneratePipelineService, feature_category: :deployme
describe 'when there is an existing pipeline without `deploy` stage' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before_all do
@@ -119,7 +122,10 @@ EOF
describe 'when there is an existing pipeline with `deploy` stage' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
@@ -166,7 +172,10 @@ EOF
describe 'when there is an existing pipeline with `includes`' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
@@ -210,7 +219,10 @@ EOF
describe 'when there is no existing pipeline' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_STORAGE } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_STORAGE }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
diff --git a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb b/spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb
index cd2ad00ac3f..fb17d578af7 100644
--- a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GetCloudsqlInstancesService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GetCloudsqlInstancesService, feature_category: :deployment_management do
let(:service) { described_class.new(project) }
let(:project) { create(:project) }
diff --git a/spec/services/google_cloud/service_accounts_service_spec.rb b/spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb
index c900bf7d300..62d58b3198a 100644
--- a/spec/services/google_cloud/service_accounts_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::ServiceAccountsService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::ServiceAccountsService, feature_category: :deployment_management do
let(:service) { described_class.new(project) }
describe 'find_for_project' do
diff --git a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb b/spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb
index 5095277f61a..ce02672e3fa 100644
--- a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::SetupCloudsqlInstanceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::SetupCloudsqlInstanceService, feature_category: :deployment_management do
let(:random_user) { create(:user) }
let(:project) { create(:project) }
let(:list_databases_empty) { Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: []) }
diff --git a/spec/support/helpers/database/duplicate_indexes.yml b/spec/support/helpers/database/duplicate_indexes.yml
index acfda313020..80d409f233d 100644
--- a/spec/support/helpers/database/duplicate_indexes.yml
+++ b/spec/support/helpers/database/duplicate_indexes.yml
@@ -37,9 +37,6 @@ ci_job_artifacts:
- index_ci_job_artifacts_on_project_id
index_ci_job_artifacts_on_project_id_and_id:
- index_ci_job_artifacts_on_project_id
-ci_job_artifact_states:
- index_ci_job_artifact_states_on_job_artifact_id:
- - index_ci_job_artifact_states_on_job_artifact_id_partition_id
ci_pipeline_artifacts:
index_ci_pipeline_artifacts_on_pipeline_id_and_file_type:
- index_ci_pipeline_artifacts_on_pipeline_id
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index 6f00a5485a2..01d6642e814 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -54,6 +54,25 @@ RSpec.shared_examples 'inherited access level as a member of entity' do
expect { non_member.update!(access_level: Gitlab::Access::GUEST) }
.to change { non_member.reload.access_level }
end
+
+ context 'when access request to entity is pending' do
+ before do
+ parent_entity.members.where(user: user).update!(requested_at: Time.current)
+ end
+
+ it 'is allowed to be a reporter of the entity' do
+ entity.add_reporter(user)
+
+ expect(member.access_level).to eq(Gitlab::Access::REPORTER)
+ end
+
+ it 'is allowed to be changed to a guest of the entity' do
+ entity.add_maintainer(user)
+
+ expect { member.update!(access_level: Gitlab::Access::GUEST) }
+ .to change { member.reload.access_level }.from(Gitlab::Access::MAINTAINER).to(Gitlab::Access::GUEST)
+ end
+ end
end
end
@@ -63,10 +82,9 @@ RSpec.shared_examples '#valid_level_roles' do |entity_name|
let(:entity) { create(entity_name) } # rubocop:disable Rails/SaveBang
let(:entity_member) { create("#{entity_name}_member", :developer, source: entity, user: member_user) }
let(:presenter) { described_class.new(entity_member, current_user: member_user) }
+ let(:all_permissible_roles) { entity_member.class.permissible_access_level_roles(member_user, entity) }
context 'when no parent member is present' do
- let(:all_permissible_roles) { entity_member.class.permissible_access_level_roles(member_user, entity) }
-
it 'returns all permissible roles' do
expect(presenter.valid_level_roles).to eq(all_permissible_roles)
end
@@ -80,6 +98,16 @@ RSpec.shared_examples '#valid_level_roles' do |entity_name|
it 'returns higher roles when a parent member is present' do
expect(presenter.valid_level_roles).to eq(expected_roles)
end
+
+ context 'when access request to parent is pending' do
+ before do
+ group.members.with_user(member_user).update!(requested_at: Time.current)
+ end
+
+ it 'returns all permissible roles' do
+ expect(presenter.valid_level_roles).to eq(all_permissible_roles)
+ end
+ end
end
end
diff --git a/spec/uploaders/object_storage/cdn/google_cdn_spec.rb b/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
index 96413f622e8..04c6cf4bde9 100644
--- a/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
+++ b/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe ObjectStorage::CDN::GoogleCDN,
subject { described_class.new(options) }
before do
- WebMock.stub_request(:get, GoogleCloud::FetchGoogleIpListService::GOOGLE_IP_RANGES_URL)
+ WebMock.stub_request(:get, CloudSeed::GoogleCloud::FetchGoogleIpListService::GOOGLE_IP_RANGES_URL)
.to_return(status: 200, body: google_cloud_ips, headers: headers)
end
diff --git a/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb b/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb
index 7aea40807e8..e86d6771386 100644
--- a/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb
+++ b/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb
@@ -23,15 +23,15 @@ RSpec.describe GoogleCloud::CreateCloudsqlInstanceWorker, feature_category: :sha
described_class.new.perform(user_id, project_id, worker_options)
end
- it 'calls GoogleCloud::SetupCloudsqlInstanceService' do
- allow_next_instance_of(GoogleCloud::SetupCloudsqlInstanceService) do |service|
+ it 'calls CloudSeed::GoogleCloud::SetupCloudsqlInstanceService' do
+ allow_next_instance_of(CloudSeed::GoogleCloud::SetupCloudsqlInstanceService) do |service|
expect(service).to receive(:execute).and_return({ status: :success })
end
subject
end
- context 'when GoogleCloud::SetupCloudsqlInstanceService fails' do
+ context 'when CloudSeed::GoogleCloud::SetupCloudsqlInstanceService fails' do
subject do
user_id = random_user.id
project_id = project.id
@@ -39,7 +39,7 @@ RSpec.describe GoogleCloud::CreateCloudsqlInstanceWorker, feature_category: :sha
end
it 'raises error' do
- allow_next_instance_of(GoogleCloud::SetupCloudsqlInstanceService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::SetupCloudsqlInstanceService) do |service|
expect(service).to receive(:execute).and_return({ status: :error })
end
diff --git a/spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb b/spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb
index bdafc076465..2a7d52d987f 100644
--- a/spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb
+++ b/spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe GoogleCloud::FetchGoogleIpListWorker, feature_category: :build_artifacts do
describe '#perform' do
it 'returns success' do
- allow_next_instance_of(GoogleCloud::FetchGoogleIpListService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::FetchGoogleIpListService) do |service|
expect(service).to receive(:execute).and_return({ status: :success })
end