gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r-- .gitlab/ci/database.gitlab-ci.yml | 2
-rw-r--r-- .gitlab/ci/frontend.gitlab-ci.yml | 2
-rw-r--r-- .gitlab/ci/global.gitlab-ci.yml | 8
-rw-r--r-- .gitlab/ci/rails.gitlab-ci.yml | 48
-rw-r--r-- .gitlab/ci/review-apps/main.gitlab-ci.yml | 8
-rw-r--r-- .gitlab/ci/rules.gitlab-ci.yml | 17
-rw-r--r-- .gitlab/ci/templates/gem.gitlab-ci.yml | 2
-rw-r--r-- .gitlab/ci/test-on-gdk/main.gitlab-ci.yml | 2
-rw-r--r-- .rubocop_todo/gitlab/namespaced_class.yml | 1
-rw-r--r-- .rubocop_todo/layout/argument_alignment.yml | 1
-rw-r--r-- .rubocop_todo/lint/redundant_cop_disable_directive.yml | 1
-rw-r--r-- .rubocop_todo/lint/symbol_conversion.yml | 1
-rw-r--r-- .rubocop_todo/rails/pluck.yml | 1
-rw-r--r-- .rubocop_todo/rspec/feature_category.yml | 2
-rw-r--r-- .rubocop_todo/style/inline_disable_annotation.yml | 1
-rw-r--r-- .rubocop_todo/style/redundant_self.yml | 1
-rw-r--r-- app/assets/javascripts/admin/abuse_report/components/reported_content.vue | 2
-rw-r--r-- app/assets/javascripts/boards/components/board_content.vue | 1
-rw-r--r-- app/assets/javascripts/diffs/store/actions.js | 4
-rw-r--r-- app/assets/javascripts/diffs/store/mutations.js | 2
-rw-r--r-- app/assets/javascripts/diffs/utils/file_reviews.js | 2
-rw-r--r-- app/assets/javascripts/ide/stores/modules/terminal/mutations.js | 2
-rw-r--r-- app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js | 2
-rw-r--r-- app/assets/javascripts/import_entities/import_projects/store/actions.js | 2
-rw-r--r-- app/assets/javascripts/invite_members/components/invite_modal_base.vue | 10
-rw-r--r-- app/assets/javascripts/mr_notes/init.js | 1
-rw-r--r-- app/assets/javascripts/notes/components/discussion_resolve_button.vue | 6
-rw-r--r-- app/assets/javascripts/notes/components/discussion_resolve_with_issue_button.vue | 2
-rw-r--r-- app/assets/javascripts/performance_bar/components/performance_bar_app.vue | 5
-rw-r--r-- app/assets/javascripts/projects/commit/components/commit_options_dropdown.vue | 2
-rw-r--r-- app/assets/javascripts/search/sidebar/components/label_filter/index.vue | 61
-rw-r--r-- app/assets/javascripts/search/store/getters.js | 39
-rw-r--r-- app/assets/javascripts/vue_shared/directives/safe_html.js | 2
-rw-r--r-- app/assets/javascripts/vue_shared/gl_feature_flags_plugin.js | 11
-rw-r--r-- app/assets/stylesheets/page_bundles/boards.scss | 6
-rw-r--r-- app/controllers/projects/merge_requests_controller.rb | 3
-rw-r--r-- app/graphql/resolvers/ci/catalog/resource_resolver.rb | 31
-rw-r--r-- app/graphql/types/packages/pypi/metadatum_type.rb | 9
-rw-r--r-- app/helpers/merge_requests_helper.rb | 4
-rw-r--r-- app/models/ci/build.rb | 14
-rw-r--r-- app/models/packages/pypi/metadatum.rb | 16
-rw-r--r-- app/services/packages/pypi/create_package_service.rb | 8
-rw-r--r-- app/validators/json_schemas/vulnerability_cvss_vectors.json | 4
-rw-r--r-- app/views/dashboard/todos/index.html.haml | 12
-rw-r--r-- app/views/groups/settings/_permissions.html.haml | 1
-rw-r--r-- app/views/projects/_invite_members_empty_project.html.haml | 2
-rw-r--r-- app/views/projects/commit/_commit_box.html.haml | 2
-rw-r--r-- app/views/projects/find_file/show.html.haml | 2
-rw-r--r-- config/feature_flags/development/admin_group_member.yml | 8
-rw-r--r-- config/feature_flags/development/bulk_import_details_page.yml | 2
-rw-r--r-- config/initializers/peek.rb | 1
-rw-r--r-- db/click_house/main/20230705124511_create_events.sql | 16
-rw-r--r-- db/click_house/main/20230707151359_create_ci_finished_builds.sql | 33
-rw-r--r-- db/click_house/main/20230719101806_create_ci_finished_builds_aggregated_queueing_delay_percentiles.sql | 11
-rw-r--r-- db/click_house/main/20230724064832_create_contribution_analytics_events.sql | 13
-rw-r--r-- db/click_house/main/20230724064918_contribution_analytics_events_materialized_view.sql | 16
-rw-r--r-- db/click_house/main/20230808070520_create_events_cursor.sql | 9
-rw-r--r-- db/click_house/main/20230808140217_create_ci_finished_builds_aggregated_queueing_delay_percentiles_mv.sql | 12
-rw-r--r-- db/click_house/migrate/20230705124511_create_events.rb | 30
-rw-r--r-- db/click_house/migrate/20230707151359_create_ci_finished_builds.rb | 47
-rw-r--r-- db/click_house/migrate/20230719101806_create_ci_finished_builds_aggregated_queueing_delay_percentiles.rb | 25
-rw-r--r-- db/click_house/migrate/20230724064832_create_contribution_analytics_events.rb | 27
-rw-r--r-- db/click_house/migrate/20230724064918_create_contribution_analytics_events_materialized_view.rb | 30
-rw-r--r-- db/click_house/migrate/20230808070520_create_sync_cursors.rb | 23
-rw-r--r-- db/click_house/migrate/20230808140217_create_ci_finished_builds_aggregated_queueing_delay_percentiles_mv.rb | 26
-rw-r--r-- db/click_house/migrate/20231106202300_modify_ci_finished_builds_settings.rb | 15
-rw-r--r-- db/migrate/20231106145853_add_product_analytics_enabled_to_namespace_settings.rb | 9
-rw-r--r-- db/schema_migrations/20231106145853 | 1
-rw-r--r-- db/structure.sql | 1
-rw-r--r-- doc/administration/audit_event_streaming/audit_event_types.md | 1
-rw-r--r-- doc/administration/logs/log_parsing.md | 4
-rw-r--r-- doc/administration/monitoring/performance/performance_bar.md | 4
-rw-r--r-- doc/api/graphql/reference/index.md | 42
-rw-r--r-- doc/api/member_roles.md | 2
-rw-r--r-- doc/development/code_review.md | 4
-rw-r--r-- doc/development/documentation/versions.md | 5
-rw-r--r-- doc/development/gitaly.md | 4
-rw-r--r-- doc/development/github_importer.md | 15
-rw-r--r-- doc/development/img/runner_fleet_dashboard.png | bin 0 -> 38440 bytes
-rw-r--r-- doc/development/runner_fleet_dashboard.md | 245
-rw-r--r-- doc/install/requirements.md | 1
-rw-r--r-- doc/user/application_security/vulnerability_report/index.md | 4
-rw-r--r-- doc/user/group/import/index.md | 11
-rw-r--r-- lib/api/internal/base.rb | 12
-rw-r--r-- lib/api/invitations.rb | 24
-rw-r--r-- lib/api/members.rb | 13
-rw-r--r-- lib/api/pypi_packages.rb | 7
-rw-r--r-- lib/click_house/migration.rb | 89
-rw-r--r-- lib/click_house/migration_support/migration_context.rb | 94
-rw-r--r-- lib/click_house/migration_support/migration_error.rb | 54
-rw-r--r-- lib/click_house/migration_support/migrator.rb | 160
-rw-r--r-- lib/click_house/migration_support/schema_migration.rb | 71
-rw-r--r-- lib/gitlab/ci/parsers/security/common.rb | 2
-rw-r--r-- lib/gitlab/ci/templates/Cosign.gitlab-ci.yml | 4
-rw-r--r-- lib/gitlab/encrypted_command_base.rb | 14
-rw-r--r-- lib/gitlab/encrypted_ldap_command.rb | 2
-rw-r--r-- lib/gitlab/encrypted_redis_command.rb | 56
-rw-r--r-- lib/gitlab/gitaly_client.rb | 14
-rw-r--r-- lib/gitlab/instrumentation_helper.rb | 10
-rw-r--r-- lib/gitlab/redis/wrapper.rb | 40
-rw-r--r-- lib/gitlab/rugged_instrumentation.rb | 45
-rw-r--r-- lib/peek/views/rugged.rb | 46
-rw-r--r-- lib/tasks/gitlab/click_house/migration.rake | 64
-rw-r--r-- lib/tasks/gitlab/redis.rake | 23
-rw-r--r-- locale/gitlab.pot | 27
-rw-r--r-- rubocop/cop/gitlab/mark_used_feature_flags.rb | 11
-rw-r--r-- rubocop/rubocop-code_reuse.yml | 1
-rw-r--r-- scripts/internal_events/monitor.rb | 45
-rwxr-xr-x scripts/lint-rugged | 50
-rwxr-xr-x scripts/static-analysis | 1
-rw-r--r-- spec/click_house/migration_support/migration_context_spec.rb | 233
-rw-r--r-- spec/factories/ci/reports/security/findings.rb | 2
-rw-r--r-- spec/fixtures/api/schemas/graphql/packages/package_pypi_metadata.json | 40
-rw-r--r-- spec/fixtures/click_house/migrations/drop_table/1_create_some_table.rb | 14
-rw-r--r-- spec/fixtures/click_house/migrations/drop_table/2_drop_some_table.rb | 11
-rw-r--r-- spec/fixtures/click_house/migrations/duplicate_name/1_create_some_table.rb | 14
-rw-r--r-- spec/fixtures/click_house/migrations/duplicate_name/2_create_some_table.rb | 14
-rw-r--r-- spec/fixtures/click_house/migrations/duplicate_version/1_create_some_table.rb | 14
-rw-r--r-- spec/fixtures/click_house/migrations/duplicate_version/1_drop_some_table.rb | 11
-rw-r--r-- spec/fixtures/click_house/migrations/migration_with_error/1_migration_with_error.rb | 9
-rw-r--r-- spec/fixtures/click_house/migrations/migrations_over_multiple_databases/1_create_some_table_on_main_db.rb | 15
-rw-r--r-- spec/fixtures/click_house/migrations/migrations_over_multiple_databases/2_create_some_table_on_another_db.rb | 16
-rw-r--r-- spec/fixtures/click_house/migrations/migrations_over_multiple_databases/3_change_some_table_on_main_db.rb | 11
-rw-r--r-- spec/fixtures/click_house/migrations/plain_table_creation/1_create_some_table.rb | 14
-rw-r--r-- spec/fixtures/click_house/migrations/plain_table_creation_on_invalid_database/1_create_some_table.rb | 16
-rw-r--r-- spec/fixtures/click_house/migrations/table_creation_with_down_method/1_create_some_table.rb | 20
-rw-r--r-- spec/fixtures/security_reports/master/gl-common-scanning-report.json | 4
-rw-r--r-- spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js | 2
-rw-r--r-- spec/frontend/diffs/components/app_spec.js | 2
-rw-r--r-- spec/frontend/diffs/components/diff_file_header_spec.js | 2
-rw-r--r-- spec/frontend/diffs/store/actions_spec.js | 2
-rw-r--r-- spec/frontend/packages_and_registries/package_registry/mock_data.js | 2
-rw-r--r-- spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js | 2
-rw-r--r-- spec/frontend/search/sidebar/components/label_filter_spec.js | 67
-rw-r--r-- spec/frontend/search/store/getters_spec.js | 19
-rw-r--r-- spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js | 2
-rw-r--r-- spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb | 103
-rw-r--r-- spec/graphql/types/packages/pypi/metadatum_type_spec.rb | 9
-rw-r--r-- spec/lib/gitlab/gitaly_client_spec.rb | 38
-rw-r--r-- spec/lib/gitlab/rugged_instrumentation_spec.rb | 27
-rw-r--r-- spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 6
-rw-r--r-- spec/lib/peek/views/rugged_spec.rb | 42
-rw-r--r-- spec/models/ci/build_spec.rb | 36
-rw-r--r-- spec/models/packages/pypi/metadatum_spec.rb | 27
-rw-r--r-- spec/requests/api/internal/base_spec.rb | 14
-rw-r--r-- spec/requests/api/members_spec.rb | 48
-rw-r--r-- spec/requests/api/pypi_packages_spec.rb | 17
-rw-r--r-- spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb | 34
-rw-r--r-- spec/services/packages/pypi/create_package_service_spec.rb | 24
-rw-r--r-- spec/support/database/click_house/hooks.rb | 48
-rw-r--r-- spec/support/helpers/api_internal_base_helpers.rb | 10
-rw-r--r-- spec/support/helpers/click_house_helpers.rb | 74
-rw-r--r-- spec/support/rspec_order_todo.yml | 2
-rw-r--r-- spec/support/shared_examples/redis/redis_shared_examples.rb | 15
-rw-r--r-- spec/tasks/gitlab/click_house/migration_rake_spec.rb | 118
-rw-r--r-- spec/tasks/gitlab/redis_rake_spec.rb | 188
-rw-r--r-- spec/tooling/quality/test_level_spec.rb | 4
-rw-r--r-- spec/views/admin/application_settings/_repository_storage.html.haml_spec.rb | 2
-rw-r--r-- tooling/quality/test_level.rb | 1
159 files changed, 2786 insertions(+), 703 deletions(-)
diff --git a/.gitlab/ci/database.gitlab-ci.yml b/.gitlab/ci/database.gitlab-ci.yml
index 082d44633f8..285c99d2cbe 100644
--- a/.gitlab/ci/database.gitlab-ci.yml
+++ b/.gitlab/ci/database.gitlab-ci.yml
@@ -72,7 +72,7 @@ db:check-schema-single-db:
db:check-migrations:
extends:
- .db-job-base
- - .use-pg14 # Should match the db same version used by GDK
+ - .use-pg14 # Should match the db same version used by GDK
- .rails:rules:ee-and-foss-mr-with-migration
script:
- git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME --depth 20
diff --git a/.gitlab/ci/frontend.gitlab-ci.yml b/.gitlab/ci/frontend.gitlab-ci.yml
index a1c209abd98..2afa69bbff8 100644
--- a/.gitlab/ci/frontend.gitlab-ci.yml
+++ b/.gitlab/ci/frontend.gitlab-ci.yml
@@ -67,7 +67,7 @@ compile-test-assets:
paths:
- public/assets/
- node_modules/@gitlab/svgs/dist/icons.json # app/helpers/icons_helper.rb uses this file
- - node_modules/@gitlab/svgs/dist/file_icons/file_icons.json # app/helpers/icons_helper.rb uses this file
+ - node_modules/@gitlab/svgs/dist/file_icons/file_icons.json # app/helpers/icons_helper.rb uses this file
- "${WEBPACK_COMPILE_LOG_PATH}"
when: always
diff --git a/.gitlab/ci/global.gitlab-ci.yml b/.gitlab/ci/global.gitlab-ci.yml
index 1d2bbed96d3..37d91fae595 100644
--- a/.gitlab/ci/global.gitlab-ci.yml
+++ b/.gitlab/ci/global.gitlab-ci.yml
@@ -36,7 +36,7 @@
.ruby-gems-cache-push: &ruby-gems-cache-push
<<: *ruby-gems-cache
- policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.
+ policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.
.ruby-coverage-gems-cache: &ruby-coverage-gems-cache
key: "ruby-coverage-gems-debian-${DEBIAN_VERSION}-ruby-${RUBY_VERSION}"
@@ -46,7 +46,7 @@
.ruby-coverage-gems-cache-push: &ruby-coverage-gems-cache-push
<<: *ruby-coverage-gems-cache
- policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.
+ policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.
.gitaly-binaries-cache: &gitaly-binaries-cache
key:
@@ -229,7 +229,7 @@
.redis-services:
services:
- name: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images:redis-cluster-6.2.12
- alias: rediscluster # configure connections in config/redis.yml
+ alias: rediscluster # configure connections in config/redis.yml
- name: redis:${REDIS_VERSION}-alpine
.pg-base-variables:
@@ -499,7 +499,7 @@
.fast-no-clone-job:
variables:
- GIT_STRATEGY: none # We will download the required files for the job from the API
+ GIT_STRATEGY: none # We will download the required files for the job from the API
before_script:
# Logic taken from scripts/utils.sh in download_files function
- |
diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml
index 4e27baf2fbe..385b0e8b68b 100644
--- a/.gitlab/ci/rails.gitlab-ci.yml
+++ b/.gitlab/ci/rails.gitlab-ci.yml
@@ -61,7 +61,7 @@ update-ruby-gems-coverage-cache-push:
- .ruby-gems-coverage-cache-push
- .shared:rules:update-cache
variables:
- BUNDLE_WITHOUT: "" # This is to override the variable defined in .gitlab-ci.yml
+ BUNDLE_WITHOUT: "" # This is to override the variable defined in .gitlab-ci.yml
BUNDLE_ONLY: "coverage"
script:
- source scripts/utils.sh
@@ -75,7 +75,7 @@ update-ruby-gems-coverage-cache-push:
- .default-retry
- .ruby-gems-coverage-cache
variables:
- BUNDLE_WITHOUT: "" # This is to override the variable defined in .gitlab-ci.yml
+ BUNDLE_WITHOUT: "" # This is to override the variable defined in .gitlab-ci.yml
BUNDLE_ONLY: "coverage"
before_script:
- source scripts/utils.sh
@@ -348,8 +348,8 @@ rspec:artifact-collector unit:
- .artifact-collector
- .rails:rules:ee-and-foss-unit
needs:
- - rspec unit pg14 # 24 jobs
- - job: rspec unit clickhouse # 1 job
+ - rspec unit pg14 # 24 jobs
+ - job: rspec unit clickhouse # 1 job
optional: true
rspec:artifact-collector system:
@@ -357,17 +357,17 @@ rspec:artifact-collector system:
- .artifact-collector
- .rails:rules:ee-and-foss-system
needs:
- - rspec system pg14 # 26 jobs
+ - rspec system pg14 # 26 jobs
rspec:artifact-collector remainder:
extends:
- .artifact-collector
needs:
- - job: rspec integration pg14 # 13 jobs
+ - job: rspec integration pg14 # 13 jobs
optional: true
- - job: rspec migration pg14 # 12 jobs
+ - job: rspec migration pg14 # 12 jobs
optional: true
- - job: rspec background_migration pg14 # 4 jobs
+ - job: rspec background_migration pg14 # 4 jobs
optional: true
rules:
- !reference ['.rails:rules:ee-and-foss-integration', rules]
@@ -379,7 +379,7 @@ rspec:artifact-collector as-if-foss unit:
- .artifact-collector
- .rails:rules:as-if-foss-unit
needs:
- - rspec unit pg14-as-if-foss # 28 jobs
+ - rspec unit pg14-as-if-foss # 28 jobs
rspec:artifact-collector as-if-foss system:
extends:
@@ -392,11 +392,11 @@ rspec:artifact-collector as-if-foss remainder:
extends:
- .artifact-collector
needs:
- - job: rspec integration pg14-as-if-foss # 12 jobs
+ - job: rspec integration pg14-as-if-foss # 12 jobs
optional: true
- - job: rspec migration pg14-as-if-foss # 8 jobs
+ - job: rspec migration pg14-as-if-foss # 8 jobs
optional: true
- - job: rspec background_migration pg14-as-if-foss # 4 jobs
+ - job: rspec background_migration pg14-as-if-foss # 4 jobs
optional: true
rules:
- !reference ['.rails:rules:as-if-foss-integration', rules]
@@ -408,43 +408,43 @@ rspec:artifact-collector single-redis:
- .artifact-collector
- .rails:rules:single-redis
needs:
- - rspec unit pg14 single-redis # 28 jobs
- - rspec integration pg14 single-redis # 12 jobs
+ - rspec unit pg14 single-redis # 28 jobs
+ - rspec integration pg14 single-redis # 12 jobs
rspec:artifact-collector system single-redis:
extends:
- .artifact-collector
- .rails:rules:single-redis
needs:
- - rspec system pg14 single-redis # 28 jobs
+ - rspec system pg14 single-redis # 28 jobs
rspec:artifact-collector ee single-redis:
extends:
- .artifact-collector
- .rails:rules:single-redis
needs:
- - job: rspec-ee unit pg14 single-redis # 18 jobs
+ - job: rspec-ee unit pg14 single-redis # 18 jobs
optional: true
- - job: rspec-ee integration pg14 single-redis # 6 jobs
+ - job: rspec-ee integration pg14 single-redis # 6 jobs
optional: true
- - job: rspec-ee system pg14 single-redis # 10 jobs
+ - job: rspec-ee system pg14 single-redis # 10 jobs
optional: true
rspec:artifact-collector ee:
extends:
- .artifact-collector
needs:
- - job: rspec-ee migration pg14 # 2 jobs
+ - job: rspec-ee migration pg14 # 2 jobs
optional: true
- - job: rspec-ee background_migration pg14 # 2 jobs
+ - job: rspec-ee background_migration pg14 # 2 jobs
optional: true
- - job: rspec-ee unit pg14 # 22 jobs
+ - job: rspec-ee unit pg14 # 22 jobs
optional: true
- - job: rspec-ee unit clickhouse # 1 job
+ - job: rspec-ee unit clickhouse # 1 job
optional: true
- - job: rspec-ee integration pg14 # 5 jobs
+ - job: rspec-ee integration pg14 # 5 jobs
optional: true
- - job: rspec-ee system pg14 # 12 jobs
+ - job: rspec-ee system pg14 # 12 jobs
optional: true
rules:
- !reference ['.rails:rules:ee-only-migration', rules]
diff --git a/.gitlab/ci/review-apps/main.gitlab-ci.yml b/.gitlab/ci/review-apps/main.gitlab-ci.yml
index 5d8ea803d19..dfcd65238ec 100644
--- a/.gitlab/ci/review-apps/main.gitlab-ci.yml
+++ b/.gitlab/ci/review-apps/main.gitlab-ci.yml
@@ -45,10 +45,10 @@ review-build-cng-env:
scripts/trigger-build.rb
VERSION
before_script:
- - apk add --no-cache --update curl # Not present in ruby-alpine, so we add it manually
+ - apk add --no-cache --update curl # Not present in ruby-alpine, so we add it manually
- !reference [".fast-no-clone-job", before_script]
- !reference [".build-cng-env", before_script]
- - mv VERSION GITLAB_WORKHORSE_VERSION # GITLAB_WORKHORSE_VERSION is a symlink to VERSION
+ - mv VERSION GITLAB_WORKHORSE_VERSION # GITLAB_WORKHORSE_VERSION is a symlink to VERSION
review-build-cng:
extends:
@@ -98,9 +98,9 @@ review-deploy:
scripts/review_apps/seed-dast-test-data.sh
VERSION
before_script:
- - apk add --no-cache --update curl # Not present in ruby-alpine, so we add it manually
+ - apk add --no-cache --update curl # Not present in ruby-alpine, so we add it manually
- !reference [".fast-no-clone-job", before_script]
- - mv VERSION GITLAB_WORKHORSE_VERSION # GITLAB_WORKHORSE_VERSION is a symlink to VERSION
+ - mv VERSION GITLAB_WORKHORSE_VERSION # GITLAB_WORKHORSE_VERSION is a symlink to VERSION
- export GITLAB_SHELL_VERSION=$(<GITLAB_SHELL_VERSION)
- export GITALY_VERSION=$(<GITALY_SERVER_VERSION)
- export GITLAB_WORKHORSE_VERSION=$(<GITLAB_WORKHORSE_VERSION)
diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml
index a6eb019cc3f..7b160a0efb5 100644
--- a/.gitlab/ci/rules.gitlab-ci.yml
+++ b/.gitlab/ci/rules.gitlab-ci.yml
@@ -931,7 +931,7 @@
variables:
BUILD_GDK_BASE: "true"
- !reference [".qa:rules:package-and-test-never-run", rules]
- - <<: *if-default-branch-schedule-nightly # already executed in the 2-hourly schedule
+ - <<: *if-default-branch-schedule-nightly # already executed in the 2-hourly schedule
when: never
- <<: *if-default-branch-refs
- <<: *if-merge-request
@@ -1589,9 +1589,9 @@
- <<: *if-merge-request-approved-and-specific-devops-stage
changes: *code-patterns
allow_failure: true
- # We used to have a rule at the end here that would catch any remaining code MRs and allow the job to be run
- # manually. That rule is now in ".qa:rules:code-merge-request-manual" so it can be included when needed and we can
- # still use ".qa:rules:package-and-test-common" in jobs we don't want to be manual.
+ # We used to have a rule at the end here that would catch any remaining code MRs and allow the job to be run
+ # manually. That rule is now in ".qa:rules:code-merge-request-manual" so it can be included when needed and we can
+ # still use ".qa:rules:package-and-test-common" in jobs we don't want to be manual.
# Like .qa:rules:package-and-test-common but not allowed to fail.
# It's named `e2e` instead of `package-and-test` because it's used for e2e tests on GDK (and could be used
@@ -1628,7 +1628,7 @@
variables:
MR_CODE_PATTERNS: "true"
- <<: *if-merge-request
- changes: *code-qa-patterns # Includes all CI changes
+ changes: *code-qa-patterns # Includes all CI changes
- <<: *if-force-ci
when: manual
@@ -1684,7 +1684,7 @@
rules:
- if: '$QA_RUN_TESTS_ON_GDK !~ /true|yes|1/i'
when: never
- - <<: *if-default-branch-schedule-nightly # already executed in the 2-hourly schedule
+ - <<: *if-default-branch-schedule-nightly # already executed in the 2-hourly schedule
when: never
- !reference [".qa:rules:e2e-blocking", rules]
- !reference [".qa:rules:e2e-schedule-blocking", rules]
@@ -2325,13 +2325,10 @@
rules:
- <<: *if-not-ee
when: never
- - <<: *if-merge-request-labels-pipeline-expedite
+ - <<: *if-merge-request
when: never
- if: '$FAST_QUARANTINE == "false" && $RETRY_FAILED_TESTS_IN_NEW_PROCESS != "true"'
when: never
- - <<: *if-merge-request
- changes: *code-backstage-patterns
- when: always
- <<: *if-default-branch-refs
changes: *code-backstage-patterns
when: always
diff --git a/.gitlab/ci/templates/gem.gitlab-ci.yml b/.gitlab/ci/templates/gem.gitlab-ci.yml
index 8c1fa8f3a6b..449150bde6c 100644
--- a/.gitlab/ci/templates/gem.gitlab-ci.yml
+++ b/.gitlab/ci/templates/gem.gitlab-ci.yml
@@ -21,7 +21,7 @@ spec:
# Ensure dependency updates don't fail child pipelines: https://gitlab.com/gitlab-org/gitlab/-/issues/417428
- "Gemfile.lock"
- "gems/gem.gitlab-ci.yml"
- # Ensure new cop in the monolith don't break internal gems Rubocop checks: https://gitlab.com/gitlab-org/gitlab/-/issues/419915
+ # Ensure new cop in the monolith don't break internal gems Rubocop checks: https://gitlab.com/gitlab-org/gitlab/-/issues/419915
- ".rubocop.yml"
- "rubocop/**/*"
- ".rubocop_todo/**/*"
diff --git a/.gitlab/ci/test-on-gdk/main.gitlab-ci.yml b/.gitlab/ci/test-on-gdk/main.gitlab-ci.yml
index 591595f66bf..f0a5ea5090f 100644
--- a/.gitlab/ci/test-on-gdk/main.gitlab-ci.yml
+++ b/.gitlab/ci/test-on-gdk/main.gitlab-ci.yml
@@ -27,7 +27,7 @@ include:
variables:
COLORIZED_LOGS: "true"
GIT_DEPTH: "20"
- GIT_STRATEGY: "clone" # 'GIT_STRATEGY: clone' optimizes the pack-objects cache hit ratio
+ GIT_STRATEGY: "clone" # 'GIT_STRATEGY: clone' optimizes the pack-objects cache hit ratio
GIT_SUBMODULE_STRATEGY: "none"
.rules:gdk:qa-selective:
diff --git a/.rubocop_todo/gitlab/namespaced_class.yml b/.rubocop_todo/gitlab/namespaced_class.yml
index de89c1dc576..8b59571249c 100644
--- a/.rubocop_todo/gitlab/namespaced_class.yml
+++ b/.rubocop_todo/gitlab/namespaced_class.yml
@@ -1109,6 +1109,7 @@ Gitlab/NamespacedClass:
- 'lib/gitlab/encrypted_configuration.rb'
- 'lib/gitlab/encrypted_incoming_email_command.rb'
- 'lib/gitlab/encrypted_ldap_command.rb'
+ - 'lib/gitlab/encrypted_redis_command.rb'
- 'lib/gitlab/encrypted_service_desk_email_command.rb'
- 'lib/gitlab/encrypted_smtp_command.rb'
- 'lib/gitlab/environment_logger.rb'
diff --git a/.rubocop_todo/layout/argument_alignment.yml b/.rubocop_todo/layout/argument_alignment.yml
index 9dc839e4bcf..b2dc0b1f570 100644
--- a/.rubocop_todo/layout/argument_alignment.yml
+++ b/.rubocop_todo/layout/argument_alignment.yml
@@ -1488,7 +1488,6 @@ Layout/ArgumentAlignment:
- 'spec/lib/gitlab/workhorse_spec.rb'
- 'spec/lib/google_api/cloud_platform/client_spec.rb'
- 'spec/lib/peek/views/detailed_view_spec.rb'
- - 'spec/lib/peek/views/rugged_spec.rb'
- 'spec/lib/security/weak_passwords_spec.rb'
- 'spec/lib/sidebars/projects/menus/repository_menu_spec.rb'
- 'spec/lib/uploaded_file_spec.rb'
diff --git a/.rubocop_todo/lint/redundant_cop_disable_directive.yml b/.rubocop_todo/lint/redundant_cop_disable_directive.yml
index bbc9aecf637..6a9b68a832a 100644
--- a/.rubocop_todo/lint/redundant_cop_disable_directive.yml
+++ b/.rubocop_todo/lint/redundant_cop_disable_directive.yml
@@ -202,6 +202,7 @@ Lint/RedundantCopDisableDirective:
- 'lib/gitlab/diff/parser.rb'
- 'lib/gitlab/encrypted_incoming_email_command.rb'
- 'lib/gitlab/encrypted_ldap_command.rb'
+ - 'lib/gitlab/encrypted_redis_command.rb'
- 'lib/gitlab/encrypted_service_desk_email_command.rb'
- 'lib/gitlab/encrypted_smtp_command.rb'
- 'lib/gitlab/git/commit.rb'
diff --git a/.rubocop_todo/lint/symbol_conversion.yml b/.rubocop_todo/lint/symbol_conversion.yml
index ec9ff07fc0c..1fcb4eecf8b 100644
--- a/.rubocop_todo/lint/symbol_conversion.yml
+++ b/.rubocop_todo/lint/symbol_conversion.yml
@@ -140,5 +140,4 @@ Lint/SymbolConversion:
- 'spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb'
- 'spec/support/shared_examples/harbor/tags_controller_shared_examples.rb'
- 'spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb'
- - 'spec/views/admin/application_settings/_repository_storage.html.haml_spec.rb'
- 'spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb'
diff --git a/.rubocop_todo/rails/pluck.yml b/.rubocop_todo/rails/pluck.yml
index 0f2a7edbeb1..6ed8c935f82 100644
--- a/.rubocop_todo/rails/pluck.yml
+++ b/.rubocop_todo/rails/pluck.yml
@@ -176,7 +176,6 @@ Rails/Pluck:
- 'spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb'
- 'spec/lib/gitlab/tree_summary_spec.rb'
- - 'spec/lib/peek/views/rugged_spec.rb'
- 'spec/models/bulk_imports/entity_spec.rb'
- 'spec/models/ci/bridge_spec.rb'
- 'spec/models/ci/build_spec.rb'
diff --git a/.rubocop_todo/rspec/feature_category.yml b/.rubocop_todo/rspec/feature_category.yml
index 7f806aebbf5..48e0329054a 100644
--- a/.rubocop_todo/rspec/feature_category.yml
+++ b/.rubocop_todo/rspec/feature_category.yml
@@ -3967,7 +3967,6 @@ RSpec/FeatureCategory:
- 'spec/lib/gitlab/robots_txt/parser_spec.rb'
- 'spec/lib/gitlab/route_map_spec.rb'
- 'spec/lib/gitlab/routing_spec.rb'
- - 'spec/lib/gitlab/rugged_instrumentation_spec.rb'
- 'spec/lib/gitlab/safe_request_loader_spec.rb'
- 'spec/lib/gitlab/safe_request_purger_spec.rb'
- 'spec/lib/gitlab/sample_data_template_spec.rb'
@@ -4262,7 +4261,6 @@ RSpec/FeatureCategory:
- 'spec/lib/peek/views/external_http_spec.rb'
- 'spec/lib/peek/views/memory_spec.rb'
- 'spec/lib/peek/views/redis_detailed_spec.rb'
- - 'spec/lib/peek/views/rugged_spec.rb'
- 'spec/lib/product_analytics/event_params_spec.rb'
- 'spec/lib/prometheus/cleanup_multiproc_dir_service_spec.rb'
- 'spec/lib/prometheus/pid_provider_spec.rb'
diff --git a/.rubocop_todo/style/inline_disable_annotation.yml b/.rubocop_todo/style/inline_disable_annotation.yml
index 87c4ca68ad0..136c5fe9a0c 100644
--- a/.rubocop_todo/style/inline_disable_annotation.yml
+++ b/.rubocop_todo/style/inline_disable_annotation.yml
@@ -2572,6 +2572,7 @@ Style/InlineDisableAnnotation:
- 'lib/gitlab/encrypted_command_base.rb'
- 'lib/gitlab/encrypted_incoming_email_command.rb'
- 'lib/gitlab/encrypted_ldap_command.rb'
+ - 'lib/gitlab/encrypted_redis_command.rb'
- 'lib/gitlab/encrypted_service_desk_email_command.rb'
- 'lib/gitlab/encrypted_smtp_command.rb'
- 'lib/gitlab/error_tracking/processor/context_payload_processor.rb'
diff --git a/.rubocop_todo/style/redundant_self.yml b/.rubocop_todo/style/redundant_self.yml
index 7653091a3f6..701ce4db7df 100644
--- a/.rubocop_todo/style/redundant_self.yml
+++ b/.rubocop_todo/style/redundant_self.yml
@@ -339,7 +339,6 @@ Style/RedundantSelf:
- 'lib/gitlab/quick_actions/dsl.rb'
- 'lib/gitlab/redis/hll.rb'
- 'lib/gitlab/routing.rb'
- - 'lib/gitlab/rugged_instrumentation.rb'
- 'lib/gitlab/search/query.rb'
- 'lib/gitlab/session.rb'
- 'lib/gitlab/sidekiq_config/cli_methods.rb'
diff --git a/app/assets/javascripts/admin/abuse_report/components/reported_content.vue b/app/assets/javascripts/admin/abuse_report/components/reported_content.vue
index bc1384b021d..99c8b3ece10 100644
--- a/app/assets/javascripts/admin/abuse_report/components/reported_content.vue
+++ b/app/assets/javascripts/admin/abuse_report/components/reported_content.vue
@@ -128,7 +128,7 @@ export default {
</gl-link>
<time-ago-tooltip
:time="report.reportedAt"
- class="gl-ml-3 gl-text-secondary gl-xs-w-full"
+ class="gl-ml-3 gl-text-secondary gl-w-full gl-sm-w-auto"
/>
</div>
</div>
diff --git a/app/assets/javascripts/boards/components/board_content.vue b/app/assets/javascripts/boards/components/board_content.vue
index 554f3bfa416..a6ff1653c17 100644
--- a/app/assets/javascripts/boards/components/board_content.vue
+++ b/app/assets/javascripts/boards/components/board_content.vue
@@ -249,7 +249,6 @@ export default {
<transition name="slide" @after-enter="afterFormEnters">
<board-add-new-column
v-if="addColumnFormVisible"
- class="gl-xs-w-full!"
:board-id="boardId"
:list-query-variables="listQueryVariables"
:lists="boardListsById"
diff --git a/app/assets/javascripts/diffs/store/actions.js b/app/assets/javascripts/diffs/store/actions.js
index 756f76569dc..fcaf8e99b2d 100644
--- a/app/assets/javascripts/diffs/store/actions.js
+++ b/app/assets/javascripts/diffs/store/actions.js
@@ -90,6 +90,7 @@ export const setBaseConfig = ({ commit }, options) => {
viewDiffsFileByFile,
mrReviews,
diffViewType,
+ perPage,
} = options;
commit(types.SET_BASE_CONFIG, {
endpoint,
@@ -105,6 +106,7 @@ export const setBaseConfig = ({ commit }, options) => {
viewDiffsFileByFile,
mrReviews,
diffViewType,
+ perPage,
});
Array.from(new Set(Object.values(mrReviews).flat())).forEach((id) => {
@@ -207,7 +209,7 @@ export const fetchFileByFile = async ({ state, getters, commit }) => {
};
export const fetchDiffFilesBatch = ({ commit, state, dispatch }) => {
- let perPage = state.viewDiffsFileByFile ? 1 : 5;
+ let perPage = state.viewDiffsFileByFile ? 1 : state.perPage;
let increaseAmount = 1.4;
const startPage = 0;
const id = window?.location?.hash;
diff --git a/app/assets/javascripts/diffs/store/mutations.js b/app/assets/javascripts/diffs/store/mutations.js
index a9a2c35faa4..08c195469e3 100644
--- a/app/assets/javascripts/diffs/store/mutations.js
+++ b/app/assets/javascripts/diffs/store/mutations.js
@@ -39,6 +39,7 @@ export default {
viewDiffsFileByFile,
mrReviews,
diffViewType,
+ perPage,
} = options;
Object.assign(state, {
endpoint,
@@ -54,6 +55,7 @@ export default {
viewDiffsFileByFile,
mrReviews,
diffViewType,
+ perPage,
});
},
diff --git a/app/assets/javascripts/diffs/utils/file_reviews.js b/app/assets/javascripts/diffs/utils/file_reviews.js
index 227be4e4a6c..581d0b6055b 100644
--- a/app/assets/javascripts/diffs/utils/file_reviews.js
+++ b/app/assets/javascripts/diffs/utils/file_reviews.js
@@ -43,7 +43,7 @@ export function reviewable(file) {
}
export function markFileReview(reviews, file, reviewed = true) {
- const usableReviews = { ...(reviews || {}) };
+ const usableReviews = { ...reviews };
const updatedReviews = usableReviews;
let fileReviews;
diff --git a/app/assets/javascripts/ide/stores/modules/terminal/mutations.js b/app/assets/javascripts/ide/stores/modules/terminal/mutations.js
index 37f40af9c2e..8adde8f6b4e 100644
--- a/app/assets/javascripts/ide/stores/modules/terminal/mutations.js
+++ b/app/assets/javascripts/ide/stores/modules/terminal/mutations.js
@@ -48,7 +48,7 @@ export default {
},
[types.SET_SESSION_STATUS](state, status) {
const session = {
- ...(state.session || {}),
+ ...state.session,
status,
};
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js b/app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js
index 1aad22f0f3f..c2e35ce8270 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js
@@ -60,7 +60,7 @@ export class LocalStorageCache {
updateStatusByJobId(jobId, status) {
this.getCacheKeysByJobId(jobId).forEach((webUrl) =>
this.set(webUrl, {
- ...(this.get(webUrl) ?? {}),
+ ...this.get(webUrl),
progress: {
id: jobId,
status,
diff --git a/app/assets/javascripts/import_entities/import_projects/store/actions.js b/app/assets/javascripts/import_entities/import_projects/store/actions.js
index 4305f8d4db5..e5cbac71ce0 100644
--- a/app/assets/javascripts/import_entities/import_projects/store/actions.js
+++ b/app/assets/javascripts/import_entities/import_projects/store/actions.js
@@ -83,7 +83,7 @@ const fetchReposFactory = ({ reposPath = isRequired() }) => ({ state, commit })
.get(
pathWithParams({
path: reposPath,
- ...(filter ?? {}),
+ ...filter,
...paginationParams({ state }),
}),
)
diff --git a/app/assets/javascripts/invite_members/components/invite_modal_base.vue b/app/assets/javascripts/invite_members/components/invite_modal_base.vue
index 18d22395104..a14dcd38aa7 100644
--- a/app/assets/javascripts/invite_members/components/invite_modal_base.vue
+++ b/app/assets/javascripts/invite_members/components/invite_modal_base.vue
@@ -297,7 +297,7 @@ export default {
</gl-form-group>
<gl-form-group
- class="gl-w-half gl-xs-w-full"
+ class="gl-sm-w-half gl-w-full"
:label="$options.ACCESS_LEVEL"
:label-for="dropdownId"
>
@@ -317,7 +317,7 @@ export default {
</gl-form-group>
<gl-form-group
- class="gl-w-half gl-xs-w-full"
+ class="gl-sm-w-half gl-w-full"
:label="$options.ACCESS_EXPIRE_DATE"
:label-for="datepickerId"
>
@@ -338,10 +338,10 @@ export default {
<template #modal-footer>
<div
- class="gl-m-0 gl-xs-w-full gl-display-flex gl-xs-flex-direction-column! gl-flex-direction-row-reverse"
+ class="gl-m-0 gl-w-full gl-display-flex gl-xs-flex-direction-column! gl-flex-direction-row-reverse"
>
<gl-button
- class="gl-xs-w-full gl-xs-mb-3! gl-sm-ml-3!"
+ class="gl-w-full gl-sm-w-auto gl-xs-mb-3! gl-sm-ml-3!"
data-testid="invite-modal-submit"
v-bind="actionPrimary.attributes"
@click="onSubmit"
@@ -350,7 +350,7 @@ export default {
</gl-button>
<gl-button
- class="gl-xs-w-full"
+ class="gl-w-full gl-sm-w-auto"
data-testid="invite-modal-cancel"
v-bind="actionCancel.attributes"
@click="onCancel"
diff --git a/app/assets/javascripts/mr_notes/init.js b/app/assets/javascripts/mr_notes/init.js
index 28f294589ae..5594e71641b 100644
--- a/app/assets/javascripts/mr_notes/init.js
+++ b/app/assets/javascripts/mr_notes/init.js
@@ -40,6 +40,7 @@ function setupMrNotesState(store, notesDataset, diffsDataset) {
mrReviews: getReviewsForMergeRequest(mrPath),
diffViewType:
getParameterValues('view')[0] || getCookie(DIFF_VIEW_COOKIE_NAME) || INLINE_DIFF_VIEW_TYPE,
+ perPage: Number(diffsDataset.perPage),
});
}
diff --git a/app/assets/javascripts/notes/components/discussion_resolve_button.vue b/app/assets/javascripts/notes/components/discussion_resolve_button.vue
index b1aee19d5b2..cc4f360a694 100644
--- a/app/assets/javascripts/notes/components/discussion_resolve_button.vue
+++ b/app/assets/javascripts/notes/components/discussion_resolve_button.vue
@@ -21,7 +21,11 @@ export default {
</script>
<template>
- <gl-button :loading="isResolving" class="gl-xs-w-full ml-sm-2" @click="$emit('onClick')">
+ <gl-button
+ :loading="isResolving"
+ class="gl-w-full gl-sm-w-auto ml-sm-2"
+ @click="$emit('onClick')"
+ >
{{ buttonTitle }}
</gl-button>
</template>
diff --git a/app/assets/javascripts/notes/components/discussion_resolve_with_issue_button.vue b/app/assets/javascripts/notes/components/discussion_resolve_with_issue_button.vue
index 4ccba011014..34cbba8ce43 100644
--- a/app/assets/javascripts/notes/components/discussion_resolve_with_issue_button.vue
+++ b/app/assets/javascripts/notes/components/discussion_resolve_with_issue_button.vue
@@ -29,7 +29,7 @@ export default {
:href="url"
:title="$options.i18n.buttonLabel"
:aria-label="$options.i18n.buttonLabel"
- class="new-issue-for-discussion discussion-create-issue-btn gl-xs-w-full"
+ class="new-issue-for-discussion discussion-create-issue-btn gl-w-full gl-sm-w-auto"
icon="issue-new"
/>
</div>
diff --git a/app/assets/javascripts/performance_bar/components/performance_bar_app.vue b/app/assets/javascripts/performance_bar/components/performance_bar_app.vue
index 720c1e0d7f2..c5f8fd1904f 100644
--- a/app/assets/javascripts/performance_bar/components/performance_bar_app.vue
+++ b/app/assets/javascripts/performance_bar/components/performance_bar_app.vue
@@ -61,11 +61,6 @@ export default {
keys: ['feature', 'request'],
},
{
- metric: 'rugged',
- header: s__('PerformanceBar|Rugged calls'),
- keys: ['feature', 'args'],
- },
- {
metric: 'redis',
header: s__('PerformanceBar|Redis calls'),
keys: ['cmd', 'instance'],
diff --git a/app/assets/javascripts/projects/commit/components/commit_options_dropdown.vue b/app/assets/javascripts/projects/commit/components/commit_options_dropdown.vue
index 0009bd4c9a5..377310b087e 100644
--- a/app/assets/javascripts/projects/commit/components/commit_options_dropdown.vue
+++ b/app/assets/javascripts/projects/commit/components/commit_options_dropdown.vue
@@ -144,7 +144,7 @@ export default {
:toggle-text="__('Options')"
right
data-testid="commit-options-dropdown"
- class="gl-xs-w-full gl-line-height-20"
+ class="gl-line-height-20"
>
<gl-disclosure-dropdown-group :group="optionsGroup" @action="closeDropdown" />
diff --git a/app/assets/javascripts/search/sidebar/components/label_filter/index.vue b/app/assets/javascripts/search/sidebar/components/label_filter/index.vue
index ebd0406bcec..97583730958 100644
--- a/app/assets/javascripts/search/sidebar/components/label_filter/index.vue
+++ b/app/assets/javascripts/search/sidebar/components/label_filter/index.vue
@@ -55,12 +55,15 @@ export default {
},
i18n: I18N,
computed: {
- ...mapState(['useSidebarNavigation', 'searchLabelString', 'query', 'aggregations']),
+ ...mapState(['useSidebarNavigation', 'searchLabelString', 'query', 'urlQuery', 'aggregations']),
...mapGetters([
'filteredLabels',
'filteredUnselectedLabels',
'filteredAppliedSelectedLabels',
'appliedSelectedLabels',
+ 'unselectedLabels',
+ 'unappliedNewLabels',
+ 'labelAggregationBuckets',
]),
searchInputDescribeBy() {
if (this.isLoggedIn) {
@@ -100,10 +103,10 @@ export default {
return FIRST_DROPDOWN_INDEX;
},
hasSelectedLabels() {
- return this.filteredAppliedSelectedLabels.length > 0;
+ return this.filteredAppliedSelectedLabels?.length > 0;
},
hasUnselectedLabels() {
- return this.filteredUnselectedLabels.length > 0;
+ return this.filteredUnselectedLabels?.length > 0;
},
labelSearchBox() {
return this.$refs.searchLabelInputBox?.$el.querySelector('[role=searchbox]');
@@ -122,25 +125,30 @@ export default {
this.setLabelFilterSearch({ value });
},
},
- selectedFilters: {
+ selectedLabels: {
get() {
return this.combinedSelectedFilters;
},
set(value) {
this.setQuery({ key: this.$options.labelFilterData?.filterParam, value });
-
trackSelectCheckbox(value);
},
},
},
async created() {
- await this.fetchAllAggregation();
+ if (this.urlQuery?.[labelFilterData.filterParam]?.length > 0) {
+ await this.fetchAllAggregation();
+ }
},
methods: {
...mapActions(['fetchAllAggregation', 'setQuery', 'closeLabel', 'setLabelFilterSearch']),
- openDropdown() {
+ async openDropdown() {
this.isFocused = true;
+ if (!this.aggregations.error && this.filteredLabels?.length === 0) {
+ await this.fetchAllAggregation();
+ }
+
trackOpenDropdown();
},
closeDropdown(event) {
@@ -158,16 +166,8 @@ export default {
const { key } = event.target.closest('.gl-label').dataset;
this.closeLabel({ key });
},
- reactiveLabelColor(label) {
- const { color, key } = label;
-
- return this.query?.labels?.some((labelKey) => labelKey === key)
- ? color
- : `rgba(${rgbFromHex(color)}, 0.3)`;
- },
- isLabelClosable(label) {
- const { key } = label;
- return this.query?.labels?.some((labelKey) => labelKey === key);
+ inactiveLabelColor(label) {
+ return `rgba(${rgbFromHex(label.color)}, 0.3)`;
},
},
FIRST_DROPDOWN_INDEX,
@@ -188,13 +188,34 @@ export default {
</h5>
<div class="gl-my-5">
<gl-label
+ v-for="label in unappliedNewLabels"
+ :key="label.key"
+ class="gl-mr-2 gl-mb-2 gl-bg-gray-10"
+ :data-key="label.key"
+ :background-color="inactiveLabelColor(label)"
+ :title="label.title"
+ :show-close-button="false"
+ data-testid="unapplied-label"
+ />
+ <gl-label
+ v-for="label in unselectedLabels"
+ :key="label.key"
+ class="gl-mr-2 gl-mb-2 gl-bg-gray-10"
+ :data-key="label.key"
+ :background-color="inactiveLabelColor(label)"
+ :title="label.title"
+ :show-close-button="false"
+ data-testid="unselected-label"
+ />
+ <gl-label
v-for="label in appliedSelectedLabels"
:key="label.key"
class="gl-mr-2 gl-mb-2 gl-bg-gray-10"
:data-key="label.key"
- :background-color="reactiveLabelColor(label)"
+ :background-color="label.color"
:title="label.title"
- :show-close-button="isLabelClosable(label)"
+ :show-close-button="true"
+ data-testid="label"
@close="onLabelClose"
/>
</div>
@@ -245,7 +266,7 @@ export default {
$options.i18n.DROPDOWN_HEADER
}}</gl-dropdown-section-header>
<gl-dropdown-form>
- <gl-form-checkbox-group v-model="selectedFilters">
+ <gl-form-checkbox-group v-model="selectedLabels">
<label-dropdown-items
v-if="hasSelectedLabels"
:labels="filteredAppliedSelectedLabels"
diff --git a/app/assets/javascripts/search/store/getters.js b/app/assets/javascripts/search/store/getters.js
index d01fd884bad..de05e9b80b2 100644
--- a/app/assets/javascripts/search/store/getters.js
+++ b/app/assets/javascripts/search/store/getters.js
@@ -1,10 +1,24 @@
-import { findKey } from 'lodash';
+import { findKey, intersection } from 'lodash';
import { languageFilterData } from '~/search/sidebar/components/language_filter/data';
import { labelFilterData } from '~/search/sidebar/components/label_filter/data';
import { formatSearchResultCount, addCountOverLimit } from '~/search/store/utils';
import { GROUPS_LOCAL_STORAGE_KEY, PROJECTS_LOCAL_STORAGE_KEY, ICON_MAP } from './constants';
+const queryLabelFilters = (state) => state?.query?.[labelFilterData.filterParam] || [];
+const urlQueryLabelFilters = (state) => state?.urlQuery?.[labelFilterData.filterParam] || [];
+
+const appliedSelectedLabelsKeys = (state) =>
+ intersection(urlQueryLabelFilters(state), queryLabelFilters(state));
+
+const unselectedLabelsKeys = (state) =>
+ urlQueryLabelFilters(state)?.filter((label) => !queryLabelFilters(state)?.includes(label));
+
+const unappliedNewLabelKeys = (state) =>
+ state?.query?.labels?.filter((label) => !urlQueryLabelFilters(state)?.includes(label));
+
+export const queryLanguageFilters = (state) => state.query[languageFilterData.filterParam] || [];
+
export const frequentGroups = (state) => {
return state.frequentItems[GROUPS_LOCAL_STORAGE_KEY];
};
@@ -39,25 +53,28 @@ export const filteredLabels = (state) => {
};
export const filteredAppliedSelectedLabels = (state) =>
- filteredLabels(state)?.filter((label) => state?.urlQuery?.labels?.includes(label.key));
+ filteredLabels(state)?.filter((label) => urlQueryLabelFilters(state)?.includes(label.key));
export const appliedSelectedLabels = (state) => {
return labelAggregationBuckets(state)?.filter((label) =>
- state?.urlQuery?.labels?.includes(label.key),
+ appliedSelectedLabelsKeys(state)?.includes(label.key),
);
};
-export const filteredUnselectedLabels = (state) => {
- if (!state?.urlQuery?.labels) {
- return filteredLabels(state);
- }
+export const filteredUnselectedLabels = (state) =>
+ filteredLabels(state)?.filter((label) => !urlQueryLabelFilters(state)?.includes(label.key));
- return filteredLabels(state)?.filter((label) => !state?.urlQuery?.labels?.includes(label.key));
-};
+export const unselectedLabels = (state) =>
+ labelAggregationBuckets(state).filter((label) =>
+ unselectedLabelsKeys(state)?.includes(label.key),
+ );
-export const currentScope = (state) => findKey(state.navigation, { active: true });
+export const unappliedNewLabels = (state) =>
+ labelAggregationBuckets(state).filter((label) =>
+ unappliedNewLabelKeys(state)?.includes(label.key),
+ );
-export const queryLanguageFilters = (state) => state.query[languageFilterData.filterParam] || [];
+export const currentScope = (state) => findKey(state.navigation, { active: true });
export const navigationItems = (state) =>
Object.values(state.navigation).map((item) => ({
diff --git a/app/assets/javascripts/vue_shared/directives/safe_html.js b/app/assets/javascripts/vue_shared/directives/safe_html.js
index 450c7fc1bc5..c731f742771 100644
--- a/app/assets/javascripts/vue_shared/directives/safe_html.js
+++ b/app/assets/javascripts/vue_shared/directives/safe_html.js
@@ -11,7 +11,7 @@ const DEFAULT_CONFIG = {
const transform = (el, binding) => {
if (binding.oldValue !== binding.value) {
- const config = { ...DEFAULT_CONFIG, ...(binding.arg ?? {}) };
+ const config = { ...DEFAULT_CONFIG, ...binding.arg };
el.textContent = '';
diff --git a/app/assets/javascripts/vue_shared/gl_feature_flags_plugin.js b/app/assets/javascripts/vue_shared/gl_feature_flags_plugin.js
index 79946ebaecd..a1abb079cc2 100644
--- a/app/assets/javascripts/vue_shared/gl_feature_flags_plugin.js
+++ b/app/assets/javascripts/vue_shared/gl_feature_flags_plugin.js
@@ -2,12 +2,11 @@ export default (Vue) => {
Vue.mixin({
provide() {
return {
- glFeatures:
- {
- ...window.gon?.features,
- // TODO: extract into glLicensedFeatures https://gitlab.com/gitlab-org/gitlab/-/issues/322460
- ...window.gon?.licensed_features,
- } || {},
+ glFeatures: {
+ ...window.gon?.features,
+ // TODO: extract into glLicensedFeatures https://gitlab.com/gitlab-org/gitlab/-/issues/322460
+ ...window.gon?.licensed_features,
+ },
};
},
});
diff --git a/app/assets/stylesheets/page_bundles/boards.scss b/app/assets/stylesheets/page_bundles/boards.scss
index 5aca697ae26..22e42d0a7f7 100644
--- a/app/assets/stylesheets/page_bundles/boards.scss
+++ b/app/assets/stylesheets/page_bundles/boards.scss
@@ -39,6 +39,12 @@
width: 400px;
}
+ &.board-add-new-list {
+ @include media-breakpoint-down(sm) {
+ width: 100%;
+ }
+ }
+
&.is-collapsed {
.board-title-text > span,
.issue-count-badge > span {
diff --git a/app/controllers/projects/merge_requests_controller.rb b/app/controllers/projects/merge_requests_controller.rb
index 8a92db36311..fc7f56c7dd1 100644
--- a/app/controllers/projects/merge_requests_controller.rb
+++ b/app/controllers/projects/merge_requests_controller.rb
@@ -11,6 +11,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
include SourcegraphDecorator
include DiffHelper
include Gitlab::Cache::Helpers
+ include MergeRequestsHelper
prepend_before_action(only: [:index]) { authenticate_sessionless_user!(:rss) }
skip_before_action :merge_request, only: [:index, :bulk_update, :export_csv]
@@ -625,7 +626,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
end
def endpoint_diff_batch_url(project, merge_request)
- per_page = current_user&.view_diffs_file_by_file ? '1' : '5'
+ per_page = current_user&.view_diffs_file_by_file ? '1' : DIFF_BATCH_ENDPOINT_PER_PAGE.to_s
params = request
.query_parameters
.merge(view: 'inline', diff_head: true, w: show_whitespace, page: '0', per_page: per_page)
diff --git a/app/graphql/resolvers/ci/catalog/resource_resolver.rb b/app/graphql/resolvers/ci/catalog/resource_resolver.rb
index 822448c27cb..4b722bd3ec7 100644
--- a/app/graphql/resolvers/ci/catalog/resource_resolver.rb
+++ b/app/graphql/resolvers/ci/catalog/resource_resolver.rb
@@ -11,15 +11,36 @@ module Resolvers
type ::Types::Ci::Catalog::ResourceType, null: true
argument :id, ::Types::GlobalIDType[::Ci::Catalog::Resource],
- required: true,
+ required: false,
description: 'CI/CD Catalog resource global ID.'
- def resolve(id:)
- catalog_resource = ::Gitlab::Graphql::Lazy.force(GitlabSchema.find_by_gid(id))
+ argument :full_path, GraphQL::Types::ID,
+ required: false,
+ description: 'CI/CD Catalog resource full path.'
- authorize!(catalog_resource&.project)
+ def ready?(**args)
+ unless args[:id].present? ^ args[:full_path].present?
+ raise Gitlab::Graphql::Errors::ArgumentError,
+ "Exactly one of 'id' or 'full_path' arguments is required."
+ end
- catalog_resource
+ super
+ end
+
+ def resolve(id: nil, full_path: nil)
+ if full_path.present?
+ project = Project.find_by_full_path(full_path)
+ authorize!(project)
+
+ raise_resource_not_available_error! unless project.catalog_resource
+
+ project.catalog_resource
+ else
+ catalog_resource = ::Gitlab::Graphql::Lazy.force(GitlabSchema.find_by_gid(id))
+ authorize!(catalog_resource&.project)
+
+ catalog_resource
+ end
end
end
end
diff --git a/app/graphql/types/packages/pypi/metadatum_type.rb b/app/graphql/types/packages/pypi/metadatum_type.rb
index 63452d8ab6e..8ccdb592c52 100644
--- a/app/graphql/types/packages/pypi/metadatum_type.rb
+++ b/app/graphql/types/packages/pypi/metadatum_type.rb
@@ -9,8 +9,17 @@ module Types
authorize :read_package
+ field :author_email, GraphQL::Types::String, null: true,
+ description: 'Author email address(es) in RFC-822 format.'
+ field :description, GraphQL::Types::String, null: true,
+ description: 'Longer description that can run to several paragraphs.'
+ field :description_content_type, GraphQL::Types::String, null: true,
+ description: 'Markup syntax used in the description field.'
field :id, ::Types::GlobalIDType[::Packages::Pypi::Metadatum], null: false, description: 'ID of the metadatum.'
+ field :keywords, GraphQL::Types::String, null: true, description: 'List of keywords, separated by commas.'
+ field :metadata_version, GraphQL::Types::String, null: true, description: 'Metadata version.'
field :required_python, GraphQL::Types::String, null: true, description: 'Required Python version of the Pypi package.'
+ field :summary, GraphQL::Types::String, null: true, description: 'One-line summary of the description.'
end
end
end
diff --git a/app/helpers/merge_requests_helper.rb b/app/helpers/merge_requests_helper.rb
index 4cb6260c03e..558ef9c798f 100644
--- a/app/helpers/merge_requests_helper.rb
+++ b/app/helpers/merge_requests_helper.rb
@@ -3,6 +3,7 @@
module MergeRequestsHelper
include Gitlab::Utils::StrongMemoize
include CompareHelper
+ DIFF_BATCH_ENDPOINT_PER_PAGE = 5
def create_mr_button_from_event?(event)
create_mr_button?(from: event.branch_name, source_project: event.project)
@@ -202,7 +203,8 @@ module MergeRequestsHelper
source_project_full_path: merge_request.source_project&.full_path,
is_forked: project.forked?.to_s,
new_comment_template_path: profile_comment_templates_path,
- iid: merge_request.iid
+ iid: merge_request.iid,
+ per_page: DIFF_BATCH_ENDPOINT_PER_PAGE
}
end
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 05122c96948..0bb93a68470 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -921,13 +921,25 @@ module Ci
# Consider this object to have a structural integrity problems
def doom!
transaction do
- update_columns(status: :failed, failure_reason: :data_integrity_failure)
+ now = Time.current
+ attributes = {
+ status: :failed,
+ failure_reason: :data_integrity_failure,
+ updated_at: now
+ }
+ attributes[:finished_at] = now unless finished_at.present?
+
+ update_columns(attributes)
all_queuing_entries.delete_all
all_runtime_metadata.delete_all
end
deployment&.sync_status_with(self)
+ ::Gitlab::Ci::Pipeline::Metrics
+ .job_failure_reason_counter
+ .increment(reason: :data_integrity_failure)
+
Gitlab::AppLogger.info(
message: 'Build doomed',
class: self.class.name,
diff --git a/app/models/packages/pypi/metadatum.rb b/app/models/packages/pypi/metadatum.rb
index ff247fedb59..f7360409507 100644
--- a/app/models/packages/pypi/metadatum.rb
+++ b/app/models/packages/pypi/metadatum.rb
@@ -3,10 +3,24 @@
class Packages::Pypi::Metadatum < ApplicationRecord
self.primary_key = :package_id
+ MAX_REQUIRED_PYTHON_LENGTH = 255
+ MAX_KEYWORDS_LENGTH = 255
+ MAX_METADATA_VERSION_LENGTH = 16
+ MAX_AUTHOR_EMAIL_LENGTH = 2048
+ MAX_SUMMARY_LENGTH = 255
+ MAX_DESCRIPTION_LENGTH = 4000
+ MAX_DESCRIPTION_CONTENT_TYPE = 128
+
belongs_to :package, -> { where(package_type: :pypi) }, inverse_of: :pypi_metadatum
validates :package, presence: true
- validates :required_python, length: { maximum: 255 }, allow_nil: false
+ validates :required_python, length: { maximum: MAX_REQUIRED_PYTHON_LENGTH }, allow_nil: false
+ validates :keywords, length: { maximum: MAX_KEYWORDS_LENGTH }, allow_nil: true
+ validates :metadata_version, length: { maximum: MAX_METADATA_VERSION_LENGTH }, allow_nil: true
+ validates :author_email, length: { maximum: MAX_AUTHOR_EMAIL_LENGTH }, allow_nil: true
+ validates :summary, length: { maximum: MAX_SUMMARY_LENGTH }, allow_nil: true
+ validates :description, length: { maximum: MAX_DESCRIPTION_LENGTH }, allow_nil: true
+ validates :description_content_type, length: { maximum: MAX_DESCRIPTION_CONTENT_TYPE }, allow_nil: true
validate :pypi_package_type
diff --git a/app/services/packages/pypi/create_package_service.rb b/app/services/packages/pypi/create_package_service.rb
index 087a8e42a66..fca7b1bca37 100644
--- a/app/services/packages/pypi/create_package_service.rb
+++ b/app/services/packages/pypi/create_package_service.rb
@@ -9,7 +9,13 @@ module Packages
::Packages::Package.transaction do
meta = Packages::Pypi::Metadatum.new(
package: created_package,
- required_python: params[:requires_python] || ''
+ required_python: params[:requires_python] || '',
+ metadata_version: params[:metadata_version],
+ author_email: params[:author_email],
+ description: params[:description],
+ description_content_type: params[:description_content_type],
+ summary: params[:summary],
+ keywords: params[:keywords]
)
unless meta.valid?
diff --git a/app/validators/json_schemas/vulnerability_cvss_vectors.json b/app/validators/json_schemas/vulnerability_cvss_vectors.json
index 7ec1339e974..0da6de0a69d 100644
--- a/app/validators/json_schemas/vulnerability_cvss_vectors.json
+++ b/app/validators/json_schemas/vulnerability_cvss_vectors.json
@@ -9,14 +9,14 @@
"type": "string",
"default": "unknown"
},
- "vector_string": {
+ "vector": {
"type": "string",
"example": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H"
}
},
"required": [
"vendor",
- "vector_string"
+ "vector"
]
}
}
diff --git a/app/views/dashboard/todos/index.html.haml b/app/views/dashboard/todos/index.html.haml
index e4d894ede1c..4f3ca9fd71b 100644
--- a/app/views/dashboard/todos/index.html.haml
+++ b/app/views/dashboard/todos/index.html.haml
@@ -39,26 +39,26 @@
.filter-item.gl-m-2
- if params[:group_id].present?
= hidden_field_tag(:group_id, params[:group_id])
- = dropdown_tag(group_dropdown_label(params[:group_id], _("Group")), options: { toggle_class: 'js-group-search js-filter-submit gl-xs-w-full!', title: s_("Todos|Filter by group"), filter: true, filterInput: 'input#group-search', dropdown_class: 'dropdown-menu-selectable dropdown-menu-group js-filter-submit', placeholder: _("Search groups"), data: { default_label: _("Group"), display: 'static', testid: 'group-dropdown' } })
+ = dropdown_tag(group_dropdown_label(params[:group_id], _("Group")), options: { toggle_class: 'js-group-search js-filter-submit gl-w-full gl-sm-w-auto', title: s_("Todos|Filter by group"), filter: true, filterInput: 'input#group-search', dropdown_class: 'dropdown-menu-selectable dropdown-menu-group js-filter-submit', placeholder: _("Search groups"), data: { default_label: _("Group"), display: 'static', testid: 'group-dropdown' } })
.filter-item.gl-m-2
- if params[:project_id].present?
= hidden_field_tag(:project_id, params[:project_id])
- = dropdown_tag(project_dropdown_label(params[:project_id], _("Project")), options: { toggle_class: 'js-project-search js-filter-submit gl-xs-w-full!', title: s_("Todos|Filter by project"), filter: true, filterInput: 'input#project-search', dropdown_class: 'dropdown-menu-selectable dropdown-menu-project js-filter-submit', placeholder: _("Search projects"), data: { default_label: _("Project"), display: 'static' } })
+ = dropdown_tag(project_dropdown_label(params[:project_id], _("Project")), options: { toggle_class: 'js-project-search js-filter-submit gl-w-full gl-sm-w-auto', title: s_("Todos|Filter by project"), filter: true, filterInput: 'input#project-search', dropdown_class: 'dropdown-menu-selectable dropdown-menu-project js-filter-submit', placeholder: _("Search projects"), data: { default_label: _("Project"), display: 'static' } })
.filter-item.gl-m-2
- if params[:author_id].present?
= hidden_field_tag(:author_id, params[:author_id])
- = dropdown_tag(user_dropdown_label(params[:author_id], _("Author")), options: { toggle_class: 'js-user-search js-filter-submit js-author-search gl-xs-w-full!', title: s_("Todos|Filter by author"), filter: true, filterInput: 'input#author-search', dropdown_class: 'dropdown-menu-user dropdown-menu-selectable dropdown-menu-author js-filter-submit', placeholder: _("Search authors"), data: { any_user: _("Any Author"), first_user: (current_user.username if current_user), project_id: (@project.id if @project), selected: params[:author_id], field_name: 'author_id', default_label: _("Author"), todo_filter: true, todo_state_filter: params[:state] || 'pending' } })
+ = dropdown_tag(user_dropdown_label(params[:author_id], _("Author")), options: { toggle_class: 'js-user-search js-filter-submit js-author-search gl-w-full gl-sm-w-auto', title: s_("Todos|Filter by author"), filter: true, filterInput: 'input#author-search', dropdown_class: 'dropdown-menu-user dropdown-menu-selectable dropdown-menu-author js-filter-submit', placeholder: _("Search authors"), data: { any_user: _("Any Author"), first_user: (current_user.username if current_user), project_id: (@project.id if @project), selected: params[:author_id], field_name: 'author_id', default_label: _("Author"), todo_filter: true, todo_state_filter: params[:state] || 'pending' } })
.filter-item.gl-m-2
- if params[:type].present?
= hidden_field_tag(:type, params[:type])
- = dropdown_tag(todo_types_dropdown_label(params[:type], _("Type")), options: { toggle_class: 'js-type-search js-filter-submit gl-xs-w-full!', dropdown_class: 'dropdown-menu-selectable dropdown-menu-type js-filter-submit', data: { data: todo_types_options, default_label: _("Type") } })
+ = dropdown_tag(todo_types_dropdown_label(params[:type], _("Type")), options: { toggle_class: 'js-type-search js-filter-submit gl-w-full gl-sm-w-auto', dropdown_class: 'dropdown-menu-selectable dropdown-menu-type js-filter-submit', data: { data: todo_types_options, default_label: _("Type") } })
.filter-item.actions-filter.gl-m-2
- if params[:action_id].present?
= hidden_field_tag(:action_id, params[:action_id])
- = dropdown_tag(todo_actions_dropdown_label(params[:action_id], _("Action")), options: { toggle_class: 'js-action-search js-filter-submit gl-xs-w-full!', dropdown_class: 'dropdown-menu-selectable dropdown-menu-action js-filter-submit', data: { data: todo_actions_options, default_label: _("Action") } })
+ = dropdown_tag(todo_actions_dropdown_label(params[:action_id], _("Action")), options: { toggle_class: 'js-action-search js-filter-submit gl-w-full gl-sm-w-auto', dropdown_class: 'dropdown-menu-selectable dropdown-menu-action js-filter-submit', data: { data: todo_actions_options, default_label: _("Action") } })
.filter-item.sort-filter.gl-my-2
.dropdown
- %button.dropdown-menu-toggle.dropdown-menu-toggle-sort{ type: 'button', class: 'gl-xs-w-full!', 'data-toggle' => 'dropdown' }
+ %button.dropdown-menu-toggle.dropdown-menu-toggle-sort{ type: 'button', class: 'gl-w-full gl-sm-w-auto', 'data-toggle' => 'dropdown' }
%span.light
- if @sort.present?
= sort_options_hash[@sort]
diff --git a/app/views/groups/settings/_permissions.html.haml b/app/views/groups/settings/_permissions.html.haml
index e7fb8d290cd..19abacd038a 100644
--- a/app/views/groups/settings/_permissions.html.haml
+++ b/app/views/groups/settings/_permissions.html.haml
@@ -38,6 +38,7 @@
= render 'groups/settings/lfs', f: f
= render_if_exists 'groups/settings/code_suggestions', f: f, group: @group
= render_if_exists 'groups/settings/experimental_settings', f: f, group: @group
+ = render_if_exists 'groups/settings/product_analytics_settings', f: f, group: @group
= render 'groups/settings/git_access_protocols', f: f, group: @group
= render 'groups/settings/project_creation_level', f: f, group: @group
= render 'groups/settings/subgroup_creation_level', f: f, group: @group
diff --git a/app/views/projects/_invite_members_empty_project.html.haml b/app/views/projects/_invite_members_empty_project.html.haml
index d6cab06f773..14b0e82e021 100644
--- a/app/views/projects/_invite_members_empty_project.html.haml
+++ b/app/views/projects/_invite_members_empty_project.html.haml
@@ -4,6 +4,6 @@
= s_('InviteMember|Invite your team')
%p= s_('InviteMember|Add members to this project and start collaborating with your team.')
.js-invite-members-trigger{ data: { variant: 'confirm',
- classes: 'gl-mb-8 gl-xs-w-full',
+ classes: 'gl-mb-8 gl-w-full gl-sm-w-auto',
display_text: s_('InviteMember|Invite members'),
trigger_source: 'project_empty_page' } }
diff --git a/app/views/projects/commit/_commit_box.html.haml b/app/views/projects/commit/_commit_box.html.haml
index ee9c49a47bf..42482a773be 100644
--- a/app/views/projects/commit/_commit_box.html.haml
+++ b/app/views/projects/commit/_commit_box.html.haml
@@ -19,7 +19,7 @@
#{time_ago_with_tooltip(@commit.committed_date)}
#js-commit-comments-button{ data: { comments_count: @notes_count.to_i } }
- = link_button_to _('Browse files'), project_tree_path(@project, @commit), class: 'gl-mr-3 gl-xs-w-full gl-xs-mb-3'
+ = link_button_to _('Browse files'), project_tree_path(@project, @commit), class: 'gl-mr-3 gl-w-full gl-sm-w-auto gl-xs-mb-3'
#js-commit-options-dropdown{ data: commit_options_dropdown_data(@project, @commit) }
.commit-box{ data: { project_path: project_path(@project) } }
diff --git a/app/views/projects/find_file/show.html.haml b/app/views/projects/find_file/show.html.haml
index 0c760ab82c9..997e7b7f24d 100644
--- a/app/views/projects/find_file/show.html.haml
+++ b/app/views/projects/find_file/show.html.haml
@@ -6,7 +6,7 @@
- blob_path = project_blob_path(@project, @ref)
.file-finder-holder.tree-holder.clearfix.js-file-finder.gl-pt-4{ data: { file_find_url: "#{escape_javascript(project_files_path(@project, @ref, ref_type: @ref_type, format: :json))}", find_tree_url: escape_javascript(tree_path), blob_url_template: escape_javascript(blob_path), ref_type: @ref_type } }
.nav-block.gl-xs-mr-0
- .tree-ref-holder.gl-xs-mb-3.gl-xs-w-full.gl-max-w-26
+ .tree-ref-holder.gl-xs-mb-3.gl-max-w-26
#js-blob-ref-switcher{ data: { project_id: @project.id, ref: @ref, ref_type: @ref_type, namespace: "/-/find_file" } }
%ul.breadcrumb.repo-breadcrumb.gl-flex-nowrap
%li.breadcrumb-item.gl-white-space-nowrap
diff --git a/config/feature_flags/development/admin_group_member.yml b/config/feature_flags/development/admin_group_member.yml
deleted file mode 100644
index 2d4869375c4..00000000000
--- a/config/feature_flags/development/admin_group_member.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: admin_group_member
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131914
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/426580
-milestone: '16.5'
-type: development
-group: group::authentication
-default_enabled: false
diff --git a/config/feature_flags/development/bulk_import_details_page.yml b/config/feature_flags/development/bulk_import_details_page.yml
index 2e4576656c5..c8265161233 100644
--- a/config/feature_flags/development/bulk_import_details_page.yml
+++ b/config/feature_flags/development/bulk_import_details_page.yml
@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/429109
milestone: '16.6'
type: development
group: group::import and integrate
-default_enabled: false
+default_enabled: true
diff --git a/config/initializers/peek.rb b/config/initializers/peek.rb
index 6ac116f46f5..e1c59851fb1 100644
--- a/config/initializers/peek.rb
+++ b/config/initializers/peek.rb
@@ -16,7 +16,6 @@ Peek.into Peek::Views::Gitaly
Peek.into Peek::Views::RedisDetailed
Peek.into Peek::Views::Elasticsearch
Peek.into Peek::Views::Zoekt
-Peek.into Peek::Views::Rugged
Peek.into Peek::Views::ExternalHttp
Peek.into Peek::Views::ClickHouse
Peek.into Peek::Views::BulletDetailed if defined?(Bullet)
diff --git a/db/click_house/main/20230705124511_create_events.sql b/db/click_house/main/20230705124511_create_events.sql
deleted file mode 100644
index 8af45443e4c..00000000000
--- a/db/click_house/main/20230705124511_create_events.sql
+++ /dev/null
@@ -1,16 +0,0 @@
-CREATE TABLE events
-(
- id UInt64 DEFAULT 0,
- path String DEFAULT '',
- author_id UInt64 DEFAULT 0,
- target_id UInt64 DEFAULT 0,
- target_type LowCardinality(String) DEFAULT '',
- action UInt8 DEFAULT 0,
- deleted UInt8 DEFAULT 0,
- created_at DateTime64(6, 'UTC') DEFAULT now(),
- updated_at DateTime64(6, 'UTC') DEFAULT now()
-)
-ENGINE = ReplacingMergeTree(updated_at, deleted)
-PRIMARY KEY (id)
-ORDER BY (id)
-PARTITION BY toYear(created_at)
diff --git a/db/click_house/main/20230707151359_create_ci_finished_builds.sql b/db/click_house/main/20230707151359_create_ci_finished_builds.sql
deleted file mode 100644
index 9fd17e1968f..00000000000
--- a/db/click_house/main/20230707151359_create_ci_finished_builds.sql
+++ /dev/null
@@ -1,33 +0,0 @@
--- source table for CI analytics, almost useless on it's own, but it's a basis for creating materialized views
-CREATE TABLE ci_finished_builds
-(
- id UInt64 DEFAULT 0,
- project_id UInt64 DEFAULT 0,
- pipeline_id UInt64 DEFAULT 0,
- status LowCardinality(String) DEFAULT '',
-
- --- Fields to calculate timings
- created_at DateTime64(6, 'UTC') DEFAULT now(),
- queued_at DateTime64(6, 'UTC') DEFAULT now(),
- finished_at DateTime64(6, 'UTC') DEFAULT now(),
- started_at DateTime64(6, 'UTC') DEFAULT now(),
-
- runner_id UInt64 DEFAULT 0,
- runner_manager_system_xid String DEFAULT '',
-
- --- Runner fields
- runner_run_untagged Boolean DEFAULT FALSE,
- runner_type UInt8 DEFAULT 0,
- runner_manager_version LowCardinality(String) DEFAULT '',
- runner_manager_revision LowCardinality(String) DEFAULT '',
- runner_manager_platform LowCardinality(String) DEFAULT '',
- runner_manager_architecture LowCardinality(String) DEFAULT '',
-
- --- Materialized columns
- duration Int64 MATERIALIZED age('ms', started_at, finished_at),
- queueing_duration Int64 MATERIALIZED age('ms', queued_at, started_at)
- --- This table is incomplete, we'll add more fields before starting the data migration
-)
-ENGINE = ReplacingMergeTree -- Using ReplacingMergeTree just in case we accidentally insert the same data twice
-ORDER BY (status, runner_type, project_id, finished_at, id)
-PARTITION BY toYear(finished_at);
diff --git a/db/click_house/main/20230719101806_create_ci_finished_builds_aggregated_queueing_delay_percentiles.sql b/db/click_house/main/20230719101806_create_ci_finished_builds_aggregated_queueing_delay_percentiles.sql
deleted file mode 100644
index 0b05c3a37f6..00000000000
--- a/db/click_house/main/20230719101806_create_ci_finished_builds_aggregated_queueing_delay_percentiles.sql
+++ /dev/null
@@ -1,11 +0,0 @@
-CREATE TABLE ci_finished_builds_aggregated_queueing_delay_percentiles
-(
- status LowCardinality(String) DEFAULT '',
- runner_type UInt8 DEFAULT 0,
- started_at_bucket DateTime64(6, 'UTC') DEFAULT now(),
-
- count_builds AggregateFunction(count),
- queueing_duration_quantile AggregateFunction(quantile, Int64)
-)
-ENGINE = AggregatingMergeTree()
-ORDER BY (started_at_bucket, status, runner_type);
diff --git a/db/click_house/main/20230724064832_create_contribution_analytics_events.sql b/db/click_house/main/20230724064832_create_contribution_analytics_events.sql
deleted file mode 100644
index 7867897e897..00000000000
--- a/db/click_house/main/20230724064832_create_contribution_analytics_events.sql
+++ /dev/null
@@ -1,13 +0,0 @@
-CREATE TABLE contribution_analytics_events
-(
- id UInt64 DEFAULT 0,
- path String DEFAULT '',
- author_id UInt64 DEFAULT 0,
- target_type LowCardinality(String) DEFAULT '',
- action UInt8 DEFAULT 0,
- created_at Date DEFAULT toDate(now()),
- updated_at DateTime64(6, 'UTC') DEFAULT now()
-)
- ENGINE = MergeTree
- ORDER BY (path, created_at, author_id, id)
- PARTITION BY toYear(created_at);
diff --git a/db/click_house/main/20230724064918_contribution_analytics_events_materialized_view.sql b/db/click_house/main/20230724064918_contribution_analytics_events_materialized_view.sql
deleted file mode 100644
index 669b03ce0f3..00000000000
--- a/db/click_house/main/20230724064918_contribution_analytics_events_materialized_view.sql
+++ /dev/null
@@ -1,16 +0,0 @@
-CREATE MATERIALIZED VIEW contribution_analytics_events_mv
-TO contribution_analytics_events
-AS
-SELECT
- id,
- argMax(path, events.updated_at) as path,
- argMax(author_id, events.updated_at) as author_id,
- argMax(target_type, events.updated_at) as target_type,
- argMax(action, events.updated_at) as action,
- argMax(date(created_at), events.updated_at) as created_at,
- max(events.updated_at) as updated_at
-FROM events
-where (("events"."action" = 5 AND "events"."target_type" = '')
- OR ("events"."action" IN (1, 3, 7, 12)
- AND "events"."target_type" IN ('MergeRequest', 'Issue')))
-GROUP BY id
diff --git a/db/click_house/main/20230808070520_create_events_cursor.sql b/db/click_house/main/20230808070520_create_events_cursor.sql
deleted file mode 100644
index effc3c64f60..00000000000
--- a/db/click_house/main/20230808070520_create_events_cursor.sql
+++ /dev/null
@@ -1,9 +0,0 @@
-CREATE TABLE sync_cursors
-(
- table_name LowCardinality(String) DEFAULT '',
- primary_key_value UInt64 DEFAULT 0,
- recorded_at DateTime64(6, 'UTC') DEFAULT now()
-)
-ENGINE = ReplacingMergeTree(recorded_at)
-ORDER BY (table_name)
-PRIMARY KEY (table_name)
diff --git a/db/click_house/main/20230808140217_create_ci_finished_builds_aggregated_queueing_delay_percentiles_mv.sql b/db/click_house/main/20230808140217_create_ci_finished_builds_aggregated_queueing_delay_percentiles_mv.sql
deleted file mode 100644
index 504e2d87609..00000000000
--- a/db/click_house/main/20230808140217_create_ci_finished_builds_aggregated_queueing_delay_percentiles_mv.sql
+++ /dev/null
@@ -1,12 +0,0 @@
-CREATE MATERIALIZED VIEW ci_finished_builds_aggregated_queueing_delay_percentiles_mv
-TO ci_finished_builds_aggregated_queueing_delay_percentiles
-AS
-SELECT
- status,
- runner_type,
- toStartOfInterval(started_at, INTERVAL 5 minute) AS started_at_bucket,
-
- countState(*) as count_builds,
- quantileState(queueing_duration) AS queueing_duration_quantile
-FROM ci_finished_builds
-GROUP BY status, runner_type, started_at_bucket
diff --git a/db/click_house/migrate/20230705124511_create_events.rb b/db/click_house/migrate/20230705124511_create_events.rb
new file mode 100644
index 00000000000..cd60ade5d4d
--- /dev/null
+++ b/db/click_house/migrate/20230705124511_create_events.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+class CreateEvents < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE IF NOT EXISTS events
+ (
+ id UInt64 DEFAULT 0,
+ path String DEFAULT '',
+ author_id UInt64 DEFAULT 0,
+ target_id UInt64 DEFAULT 0,
+ target_type LowCardinality(String) DEFAULT '',
+ action UInt8 DEFAULT 0,
+ deleted UInt8 DEFAULT 0,
+ created_at DateTime64(6, 'UTC') DEFAULT now(),
+ updated_at DateTime64(6, 'UTC') DEFAULT now()
+ )
+ ENGINE = ReplacingMergeTree(updated_at, deleted)
+ PRIMARY KEY (id)
+ ORDER BY (id)
+ PARTITION BY toYear(created_at)
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ DROP TABLE events
+ SQL
+ end
+end
diff --git a/db/click_house/migrate/20230707151359_create_ci_finished_builds.rb b/db/click_house/migrate/20230707151359_create_ci_finished_builds.rb
new file mode 100644
index 00000000000..39521af8d99
--- /dev/null
+++ b/db/click_house/migrate/20230707151359_create_ci_finished_builds.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+class CreateCiFinishedBuilds < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ -- source table for CI analytics, almost useless on its own, but it's a basis for creating materialized views
+ CREATE TABLE IF NOT EXISTS ci_finished_builds
+ (
+ id UInt64 DEFAULT 0,
+ project_id UInt64 DEFAULT 0,
+ pipeline_id UInt64 DEFAULT 0,
+ status LowCardinality(String) DEFAULT '',
+
+ --- Fields to calculate timings
+ created_at DateTime64(6, 'UTC') DEFAULT now(),
+ queued_at DateTime64(6, 'UTC') DEFAULT now(),
+ finished_at DateTime64(6, 'UTC') DEFAULT now(),
+ started_at DateTime64(6, 'UTC') DEFAULT now(),
+
+ runner_id UInt64 DEFAULT 0,
+ runner_manager_system_xid String DEFAULT '',
+
+ --- Runner fields
+ runner_run_untagged Boolean DEFAULT FALSE,
+ runner_type UInt8 DEFAULT 0,
+ runner_manager_version LowCardinality(String) DEFAULT '',
+ runner_manager_revision LowCardinality(String) DEFAULT '',
+ runner_manager_platform LowCardinality(String) DEFAULT '',
+ runner_manager_architecture LowCardinality(String) DEFAULT '',
+
+ --- Materialized columns
+ duration Int64 MATERIALIZED age('ms', started_at, finished_at),
+ queueing_duration Int64 MATERIALIZED age('ms', queued_at, started_at)
+ --- This table is incomplete, we'll add more fields before starting the data migration
+ )
+ ENGINE = ReplacingMergeTree -- Using ReplacingMergeTree just in case we accidentally insert the same data twice
+ ORDER BY (status, runner_type, project_id, finished_at, id)
+ PARTITION BY toYear(finished_at)
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ DROP TABLE ci_finished_builds
+ SQL
+ end
+end
diff --git a/db/click_house/migrate/20230719101806_create_ci_finished_builds_aggregated_queueing_delay_percentiles.rb b/db/click_house/migrate/20230719101806_create_ci_finished_builds_aggregated_queueing_delay_percentiles.rb
new file mode 100644
index 00000000000..47934d8fe02
--- /dev/null
+++ b/db/click_house/migrate/20230719101806_create_ci_finished_builds_aggregated_queueing_delay_percentiles.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class CreateCiFinishedBuildsAggregatedQueueingDelayPercentiles < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE IF NOT EXISTS ci_finished_builds_aggregated_queueing_delay_percentiles
+ (
+ status LowCardinality(String) DEFAULT '',
+ runner_type UInt8 DEFAULT 0,
+ started_at_bucket DateTime64(6, 'UTC') DEFAULT now(),
+
+ count_builds AggregateFunction(count),
+ queueing_duration_quantile AggregateFunction(quantile, Int64)
+ )
+ ENGINE = AggregatingMergeTree()
+ ORDER BY (started_at_bucket, status, runner_type)
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ DROP TABLE ci_finished_builds_aggregated_queueing_delay_percentiles
+ SQL
+ end
+end
diff --git a/db/click_house/migrate/20230724064832_create_contribution_analytics_events.rb b/db/click_house/migrate/20230724064832_create_contribution_analytics_events.rb
new file mode 100644
index 00000000000..2606ae3adc9
--- /dev/null
+++ b/db/click_house/migrate/20230724064832_create_contribution_analytics_events.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class CreateContributionAnalyticsEvents < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE IF NOT EXISTS contribution_analytics_events
+ (
+ id UInt64 DEFAULT 0,
+ path String DEFAULT '',
+ author_id UInt64 DEFAULT 0,
+ target_type LowCardinality(String) DEFAULT '',
+ action UInt8 DEFAULT 0,
+ created_at Date DEFAULT toDate(now()),
+ updated_at DateTime64(6, 'UTC') DEFAULT now()
+ )
+ ENGINE = MergeTree
+ ORDER BY (path, created_at, author_id, id)
+ PARTITION BY toYear(created_at);
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ DROP TABLE contribution_analytics_events
+ SQL
+ end
+end
diff --git a/db/click_house/migrate/20230724064918_create_contribution_analytics_events_materialized_view.rb b/db/click_house/migrate/20230724064918_create_contribution_analytics_events_materialized_view.rb
new file mode 100644
index 00000000000..956a26d80f3
--- /dev/null
+++ b/db/click_house/migrate/20230724064918_create_contribution_analytics_events_materialized_view.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+class CreateContributionAnalyticsEventsMaterializedView < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE MATERIALIZED VIEW IF NOT EXISTS contribution_analytics_events_mv
+ TO contribution_analytics_events
+ AS
+ SELECT
+ id,
+ argMax(path, events.updated_at) as path,
+ argMax(author_id, events.updated_at) as author_id,
+ argMax(target_type, events.updated_at) as target_type,
+ argMax(action, events.updated_at) as action,
+ argMax(date(created_at), events.updated_at) as created_at,
+ max(events.updated_at) as updated_at
+ FROM events
+ WHERE (("events"."action" = 5 AND "events"."target_type" = '')
+ OR ("events"."action" IN (1, 3, 7, 12)
+ AND "events"."target_type" IN ('MergeRequest', 'Issue')))
+ GROUP BY id
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ DROP VIEW contribution_analytics_events_mv
+ SQL
+ end
+end
diff --git a/db/click_house/migrate/20230808070520_create_sync_cursors.rb b/db/click_house/migrate/20230808070520_create_sync_cursors.rb
new file mode 100644
index 00000000000..7583f8ec0c5
--- /dev/null
+++ b/db/click_house/migrate/20230808070520_create_sync_cursors.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+class CreateSyncCursors < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE IF NOT EXISTS sync_cursors
+ (
+ table_name LowCardinality(String) DEFAULT '',
+ primary_key_value UInt64 DEFAULT 0,
+ recorded_at DateTime64(6, 'UTC') DEFAULT now()
+ )
+ ENGINE = ReplacingMergeTree(recorded_at)
+ ORDER BY (table_name)
+ PRIMARY KEY (table_name)
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ DROP TABLE sync_cursors
+ SQL
+ end
+end
diff --git a/db/click_house/migrate/20230808140217_create_ci_finished_builds_aggregated_queueing_delay_percentiles_mv.rb b/db/click_house/migrate/20230808140217_create_ci_finished_builds_aggregated_queueing_delay_percentiles_mv.rb
new file mode 100644
index 00000000000..cc029d48436
--- /dev/null
+++ b/db/click_house/migrate/20230808140217_create_ci_finished_builds_aggregated_queueing_delay_percentiles_mv.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+class CreateCiFinishedBuildsAggregatedQueueingDelayPercentilesMv < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE MATERIALIZED VIEW IF NOT EXISTS ci_finished_builds_aggregated_queueing_delay_percentiles_mv
+ TO ci_finished_builds_aggregated_queueing_delay_percentiles
+ AS
+ SELECT
+ status,
+ runner_type,
+ toStartOfInterval(started_at, INTERVAL 5 minute) AS started_at_bucket,
+
+ countState(*) as count_builds,
+ quantileState(queueing_duration) AS queueing_duration_quantile
+ FROM ci_finished_builds
+ GROUP BY status, runner_type, started_at_bucket
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ DROP VIEW ci_finished_builds_aggregated_queueing_delay_percentiles_mv
+ SQL
+ end
+end
diff --git a/db/click_house/migrate/20231106202300_modify_ci_finished_builds_settings.rb b/db/click_house/migrate/20231106202300_modify_ci_finished_builds_settings.rb
new file mode 100644
index 00000000000..d9951725c9b
--- /dev/null
+++ b/db/click_house/migrate/20231106202300_modify_ci_finished_builds_settings.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class ModifyCiFinishedBuildsSettings < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ ALTER TABLE ci_finished_builds MODIFY SETTING use_async_block_ids_cache = true
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ ALTER TABLE ci_finished_builds MODIFY SETTING use_async_block_ids_cache = false
+ SQL
+ end
+end
diff --git a/db/migrate/20231106145853_add_product_analytics_enabled_to_namespace_settings.rb b/db/migrate/20231106145853_add_product_analytics_enabled_to_namespace_settings.rb
new file mode 100644
index 00000000000..45b617be6ca
--- /dev/null
+++ b/db/migrate/20231106145853_add_product_analytics_enabled_to_namespace_settings.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddProductAnalyticsEnabledToNamespaceSettings < Gitlab::Database::Migration[2.2]
+ milestone '16.6'
+
+ def change
+ add_column :namespace_settings, :product_analytics_enabled, :boolean, default: false, null: false
+ end
+end
diff --git a/db/schema_migrations/20231106145853 b/db/schema_migrations/20231106145853
new file mode 100644
index 00000000000..0c50f91529f
--- /dev/null
+++ b/db/schema_migrations/20231106145853
@@ -0,0 +1 @@
+daa117df4a6d8e9a39fcf12e2c64917b7c66429952343b65212fcb27ad30130a \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 1553694704b..ab1bf35fc90 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -19469,6 +19469,7 @@ CREATE TABLE namespace_settings (
third_party_ai_features_enabled boolean DEFAULT true NOT NULL,
default_branch_protection_defaults jsonb DEFAULT '{}'::jsonb NOT NULL,
service_access_tokens_expiration_enforced boolean DEFAULT true NOT NULL,
+ product_analytics_enabled boolean DEFAULT false NOT NULL,
CONSTRAINT check_0ba93c78c7 CHECK ((char_length(default_branch_name) <= 255)),
CONSTRAINT namespace_settings_unique_project_download_limit_alertlist_size CHECK ((cardinality(unique_project_download_limit_alertlist) <= 100)),
CONSTRAINT namespace_settings_unique_project_download_limit_allowlist_size CHECK ((cardinality(unique_project_download_limit_allowlist) <= 100))
diff --git a/doc/administration/audit_event_streaming/audit_event_types.md b/doc/administration/audit_event_streaming/audit_event_types.md
index a35ba4eb9c5..88212045d8e 100644
--- a/doc/administration/audit_event_streaming/audit_event_types.md
+++ b/doc/administration/audit_event_streaming/audit_event_types.md
@@ -45,6 +45,7 @@ Audit event types belong to the following product categories.
| [`audit_events_streaming_instance_headers_destroy`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127228) | Triggered when a streaming header for instance level external audit event destination is deleted| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [16.3](https://gitlab.com/gitlab-org/gitlab/-/issues/417433) |
| [`audit_events_streaming_instance_headers_update`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127228) | Triggered when a streaming header for instance level external audit event destination is updated| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [16.3](https://gitlab.com/gitlab-org/gitlab/-/issues/417433) |
| [`create_event_streaming_destination`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74632) | Event triggered when an external audit event destination is created| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [14.6](https://gitlab.com/gitlab-org/gitlab/-/issues/344664) |
+| [`create_http_namespace_filter`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/136047) | Event triggered when a namespace filter for an external audit event destination for a top-level group is created.| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [16.6](https://gitlab.com/gitlab-org/gitlab/-/issues/424176) |
| [`create_instance_event_streaming_destination`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123882) | Event triggered when an instance level external audit event destination is created| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [16.2](https://gitlab.com/gitlab-org/gitlab/-/issues/404730) |
| [`destroy_event_streaming_destination`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74632) | Event triggered when an external audit event destination is deleted| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [14.6](https://gitlab.com/gitlab-org/gitlab/-/issues/344664) |
| [`destroy_instance_event_streaming_destination`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/125846) | Event triggered when an instance level external audit event destination is deleted| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [16.2](https://gitlab.com/gitlab-org/gitlab/-/issues/404730) |
diff --git a/doc/administration/logs/log_parsing.md b/doc/administration/logs/log_parsing.md
index 3049e4ccc32..b281620fcf3 100644
--- a/doc/administration/logs/log_parsing.md
+++ b/doc/administration/logs/log_parsing.md
@@ -96,10 +96,10 @@ grep <PROJECT_NAME> <FILE> | jq .
jq 'select(.duration_s > 5000)' <FILE>
```
-#### Find all project requests with more than 5 rugged calls
+#### Find all project requests with more than 5 Gitaly calls
```shell
-grep <PROJECT_NAME> <FILE> | jq 'select(.rugged_calls > 5)'
+grep <PROJECT_NAME> <FILE> | jq 'select(.gitaly_calls > 5)'
```
#### Find all requests with a Gitaly duration > 10 seconds
diff --git a/doc/administration/monitoring/performance/performance_bar.md b/doc/administration/monitoring/performance/performance_bar.md
index 12fa79b3c13..95717f0c54f 100644
--- a/doc/administration/monitoring/performance/performance_bar.md
+++ b/doc/administration/monitoring/performance/performance_bar.md
@@ -17,6 +17,8 @@ For example:
## Available information
+> Rugged calls [removed](https://gitlab.com/gitlab-org/gitlab/-/issues/421591) in GitLab 16.6.
+
From left to right, the performance bar displays:
- **Current Host**: the current host serving the page.
@@ -37,8 +39,6 @@ From left to right, the performance bar displays:
- **Gitaly calls**: the time taken (in milliseconds) and the total number of
[Gitaly](../../gitaly/index.md) calls. Select to display a modal window with more
details.
-- **Rugged calls**: the time taken (in milliseconds) and the total number of
- Rugged calls. Select to display a modal window with more details.
- **Redis calls**: the time taken (in milliseconds) and the total number of
Redis calls. Select to display a modal window with more details.
- **Elasticsearch calls**: the time taken (in milliseconds) and the total number of
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 987a94736a3..11f2482de5f 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -136,7 +136,8 @@ Returns [`CiCatalogResource`](#cicatalogresource).
| Name | Type | Description |
| ---- | ---- | ----------- |
-| <a id="querycicatalogresourceid"></a>`id` | [`CiCatalogResourceID!`](#cicatalogresourceid) | CI/CD Catalog resource global ID. |
+| <a id="querycicatalogresourcefullpath"></a>`fullPath` | [`ID`](#id) | CI/CD Catalog resource full path. |
+| <a id="querycicatalogresourceid"></a>`id` | [`CiCatalogResourceID`](#cicatalogresourceid) | CI/CD Catalog resource global ID. |
### `Query.ciCatalogResources`
@@ -1565,6 +1566,27 @@ Input type: `AuditEventsStreamingHeadersUpdateInput`
| <a id="mutationauditeventsstreamingheadersupdateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationauditeventsstreamingheadersupdateheader"></a>`header` | [`AuditEventStreamingHeader`](#auditeventstreamingheader) | Updates header. |
+### `Mutation.auditEventsStreamingHttpNamespaceFiltersAdd`
+
+Input type: `AuditEventsStreamingHTTPNamespaceFiltersAddInput`
+
+#### Arguments
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="mutationauditeventsstreaminghttpnamespacefiltersaddclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+| <a id="mutationauditeventsstreaminghttpnamespacefiltersadddestinationid"></a>`destinationId` | [`AuditEventsExternalAuditEventDestinationID!`](#auditeventsexternalauditeventdestinationid) | Destination ID. |
+| <a id="mutationauditeventsstreaminghttpnamespacefiltersaddgrouppath"></a>`groupPath` | [`ID`](#id) | Full path of the group. |
+| <a id="mutationauditeventsstreaminghttpnamespacefiltersaddprojectpath"></a>`projectPath` | [`ID`](#id) | Full path of the project. |
+
+#### Fields
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="mutationauditeventsstreaminghttpnamespacefiltersaddclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+| <a id="mutationauditeventsstreaminghttpnamespacefiltersadderrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
+| <a id="mutationauditeventsstreaminghttpnamespacefiltersaddnamespacefilter"></a>`namespaceFilter` | [`AuditEventStreamingHTTPNamespaceFilter`](#auditeventstreaminghttpnamespacefilter) | Namespace filter created. |
+
### `Mutation.auditEventsStreamingInstanceHeadersCreate`
Input type: `AuditEventsStreamingInstanceHeadersCreateInput`
@@ -14074,6 +14096,18 @@ Represents a HTTP header key/value that belongs to an audit streaming destinatio
| <a id="auditeventstreamingheaderkey"></a>`key` | [`String!`](#string) | Key of the header. |
| <a id="auditeventstreamingheadervalue"></a>`value` | [`String!`](#string) | Value of the header. |
+### `AuditEventsStreamingHTTPNamespaceFiltersAddPayload`
+
+Autogenerated return type of AuditEventsStreamingHTTPNamespaceFiltersAdd.
+
+#### Fields
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="auditeventsstreaminghttpnamespacefiltersaddpayloadclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+| <a id="auditeventsstreaminghttpnamespacefiltersaddpayloaderrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
+| <a id="auditeventsstreaminghttpnamespacefiltersaddpayloadnamespacefilter"></a>`namespaceFilter` | [`AuditEventStreamingHTTPNamespaceFilter`](#auditeventstreaminghttpnamespacefilter) | Namespace filter created. |
+
### `AuditEventsStreamingInstanceHeader`
Represents a HTTP header key/value that belongs to an instance level audit streaming destination.
@@ -24936,8 +24970,14 @@ Pypi metadata.
| Name | Type | Description |
| ---- | ---- | ----------- |
+| <a id="pypimetadataauthoremail"></a>`authorEmail` | [`String`](#string) | Author email address(es) in RFC-822 format. |
+| <a id="pypimetadatadescription"></a>`description` | [`String`](#string) | Longer description that can run to several paragraphs. |
+| <a id="pypimetadatadescriptioncontenttype"></a>`descriptionContentType` | [`String`](#string) | Markup syntax used in the description field. |
| <a id="pypimetadataid"></a>`id` | [`PackagesPypiMetadatumID!`](#packagespypimetadatumid) | ID of the metadatum. |
+| <a id="pypimetadatakeywords"></a>`keywords` | [`String`](#string) | List of keywords, separated by commas. |
+| <a id="pypimetadatametadataversion"></a>`metadataVersion` | [`String`](#string) | Metadata version. |
| <a id="pypimetadatarequiredpython"></a>`requiredPython` | [`String`](#string) | Required Python version of the Pypi package. |
+| <a id="pypimetadatasummary"></a>`summary` | [`String`](#string) | One-line summary of the description. |
### `QueryComplexity`
diff --git a/doc/api/member_roles.md b/doc/api/member_roles.md
index 79f7bc2b3ad..cc50a8e225a 100644
--- a/doc/api/member_roles.md
+++ b/doc/api/member_roles.md
@@ -13,7 +13,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - [Read dependency added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/126247) in GitLab 16.3.
> - [Name and description fields added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/126423) in GitLab 16.3.
> - [Admin merge request introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128302) in GitLab 16.4 [with a flag](../administration/feature_flags.md) named `admin_merge_request`. Disabled by default.
-> - [Admin group members introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131914) in GitLab 16.5 [with a flag](../administration/feature_flags.md) named `admin_group_member`. Disabled by default.
+> - [Admin group members introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131914) in GitLab 16.5 [with a flag](../administration/feature_flags.md) named `admin_group_member`. Disabled by default. The feature flag has been removed in GitLab 16.6.
> - [Manage project access tokens introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132342) in GitLab 16.5 in [with a flag](../administration/feature_flags.md) named `manage_project_access_tokens`. Disabled by default.
FLAG:
diff --git a/doc/development/code_review.md b/doc/development/code_review.md
index b9f24c5615f..c2f2a7643ae 100644
--- a/doc/development/code_review.md
+++ b/doc/development/code_review.md
@@ -115,10 +115,10 @@ It picks reviewers and maintainers from the list at the
page, with these behaviors:
- It doesn't pick people whose Slack or [GitLab status](../user/profile/index.md#set-your-current-status):
- - Contains the string `OOO`, `PTO`, `Parental Leave`, or `Friends and Family`.
+ - Contains the string `OOO`, `PTO`, `Parental Leave`, `Friends and Family`, or `Conference`.
- GitLab user **Busy** indicator is set to `True`.
- Emoji is from one of these categories:
- - **On leave** - 🌴 `:palm_tree:`, 🏖️ `:beach:`, ⛱ `:beach_umbrella:`, 🏖 `:beach_with_umbrella:`, 🌞 `:sun_with_face:`, 🎡 `:ferris_wheel:`
+ - **On leave** - 🌴 `:palm_tree:`, 🏖️ `:beach:`, ⛱ `:beach_umbrella:`, 🏖 `:beach_with_umbrella:`, 🌞 `:sun_with_face:`, 🎡 `:ferris_wheel:`, 🏙 `:cityscape:`
- **Out sick** - 🌡️ `:thermometer:`, 🤒 `:face_with_thermometer:`
- **At capacity** - 🔴 `:red_circle:`
- **Focus mode** - 💡 `:bulb:` (focusing on their team's work)
diff --git a/doc/development/documentation/versions.md b/doc/development/documentation/versions.md
index dadae134f4c..bd83ed7eff2 100644
--- a/doc/development/documentation/versions.md
+++ b/doc/development/documentation/versions.md
@@ -119,9 +119,8 @@ To deprecate a page or topic:
You can add any additional context-specific details that might help users.
-1. Add the following HTML comments above and below the content.
- For `remove_date`, set a date three months after the release where it
- will be removed.
+1. Add the following HTML comments above and below the content. For `remove_date`,
+ set a date three months after the [release where it will be removed](https://about.gitlab.com/releases/).
```markdown
<!--- start_remove The following content will be removed on remove_date: 'YYYY-MM-DD' -->
diff --git a/doc/development/gitaly.md b/doc/development/gitaly.md
index d23e00748cd..ed7fb6325d6 100644
--- a/doc/development/gitaly.md
+++ b/doc/development/gitaly.md
@@ -41,8 +41,8 @@ To read or write Git data, a request has to be made to Gitaly. This means that
if you're developing a new feature where you need data that's not yet available
in `lib/gitlab/git` changes have to be made to Gitaly.
-There should be no new code that touches Git repositories via disk access (for example,
-Rugged, `git`, `rm -rf`) anywhere in the `gitlab` repository. Anything that
+There should be no new code that touches Git repositories via disk access
+anywhere in the `gitlab` repository. Anything that
needs direct access to the Git repository *must* be implemented in Gitaly, and
exposed via an RPC.
diff --git a/doc/development/github_importer.md b/doc/development/github_importer.md
index d4c51ad54dc..1c33febecf4 100644
--- a/doc/development/github_importer.md
+++ b/doc/development/github_importer.md
@@ -213,6 +213,21 @@ process. This is done by calling `ProjectImportState#refresh_jid_expiration`. By
refreshing this TTL we can ensure our import does not get marked as failed so
long we're still performing work.
+## GitHub rate limit
+
+GitHub has a rate limit of 5,000 API calls per hour. The number of requests
+needed to import a project is dominated by the number of unique users
+involved in the project (for example, issue authors), because we need each user's email address to map
+them to a GitLab user. Other data, such as issue pages and comments, typically requires only a few dozen requests to import.
+
+We handle the rate limit by doing the following:
+
+1. After we hit the rate limit, we automatically reschedule jobs so that they are not executed until the rate limit has been reset (see the sketch below).
+1. We cache the mapping of GitHub users to GitLab users in Redis.
+
+More information on user caching can be found below.
+
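+The following sketch is illustrative only and does not reflect the actual importer classes; it assumes a
+Sidekiq worker that can read the remaining rate limit, and the number of seconds until it resets, from the
+GitHub client:
+
+```ruby
+# Hypothetical worker; class, method, and helper names are illustrative, not the real importer code.
+require 'sidekiq'
+
+class ImportGithubIssuesWorker
+  include Sidekiq::Worker
+
+  def perform(project_id)
+    remaining, resets_in = fetch_rate_limit # remaining calls and seconds until the limit resets
+
+    if remaining.zero?
+      # Reschedule the job so that it runs only after the rate limit has been reset.
+      self.class.perform_in(resets_in + 60, project_id)
+      return
+    end
+
+    import_issues(project_id)
+  end
+
+  private
+
+  # Placeholders: a real worker would call the GitHub API client and the import logic here.
+  def fetch_rate_limit
+    [5000, 0]
+  end
+
+  def import_issues(project_id); end
+end
+```
+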
## Caching user lookups
When mapping GitHub users to GitLab users we need to (in the worst case)
diff --git a/doc/development/img/runner_fleet_dashboard.png b/doc/development/img/runner_fleet_dashboard.png
new file mode 100644
index 00000000000..242ebf4aea9
--- /dev/null
+++ b/doc/development/img/runner_fleet_dashboard.png
Binary files differ
diff --git a/doc/development/runner_fleet_dashboard.md b/doc/development/runner_fleet_dashboard.md
new file mode 100644
index 00000000000..2a7c7d05453
--- /dev/null
+++ b/doc/development/runner_fleet_dashboard.md
@@ -0,0 +1,245 @@
+---
+stage: Verify
+group: Runner
+info: >-
+ To determine the technical writer assigned to the Stage/Group associated with
+ this page, see
+ https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
+---
+# Runner Fleet Dashboard **(ULTIMATE BETA)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/424495) in GitLab 16.6 behind several [feature flags](#enable-feature-flags).
+
+This feature is in [BETA](../policy/experiment-beta-support.md).
+To join the list of users testing this feature, contact us in
+[epic 11180](https://gitlab.com/groups/gitlab-org/-/epics/11180).
+
+GitLab administrators can use the Runner Fleet Dashboard to assess the health of their instance runners.
+The Runner Fleet Dashboard shows:
+
+- Recent CI errors caused by runner infrastructure.
+- The number of concurrent jobs executed on the busiest runners.
+- A histogram of job queue times (available only with ClickHouse).
+
+There is a proposal to introduce [more features](#whats-next) to the Runner Fleet Dashboard.
+
+![Runner Fleet Dashboard](img/runner_fleet_dashboard.png)
+
+## View the Runner Fleet Dashboard
+
+Prerequisites:
+
+- You must be an administrator.
+
+To view the runner fleet dashboard:
+
+1. On the left sidebar, select **Search or go to**.
+1. Select **Admin Area**.
+1. Select **Runners**.
+1. Select **Fleet dashboard**.
+
+Most of the dashboard works without any additional setup, with the
+exception of the **Wait time to pick a job** chart and the [proposed features](#whats-next).
+These features require setting up additional infrastructure, as described on this page.
+
+To gather feedback, we have launched an early adopters program for some customers
+to test the Runner Fleet Dashboard.
+
+## Requirements
+
+To test the Runner Fleet Dashboard as part of the early adopters program, you must:
+
+- Run GitLab 16.6 or above.
+- Have an [Ultimate license](https://about.gitlab.com/pricing/).
+- Be able to run a ClickHouse database. We recommend using [ClickHouse Cloud](https://clickhouse.cloud/).
+
+## Setup
+
+To set up ClickHouse as the GitLab data store:
+
+1. [Run a ClickHouse cluster and configure the database](#run-and-configure-clickhouse).
+1. [Configure the GitLab connection to ClickHouse](#configure-the-gitlab-connection-to-clickhouse).
+1. [Enable the feature flags](#enable-feature-flags).
+
+### Run and configure ClickHouse
+
+The most straightforward way to run ClickHouse is with [ClickHouse Cloud](https://clickhouse.cloud/).
+You can also [run ClickHouse on your own server](https://clickhouse.com/docs/en/install). Refer to the ClickHouse
+documentation regarding [recommendations for self-managed instances](https://clickhouse.com/docs/en/install#recommendations-for-self-managed-clickhouse).
+
+When you run ClickHouse on a hosted server, resource consumption depends on several factors, such as the number
+of builds that run on your instance each month, the selected hardware, and the data center that hosts ClickHouse.
+Regardless, the cost should not be significant.
+
+NOTE:
+ClickHouse is a secondary data store for GitLab. All your data is still stored in PostgreSQL,
+and only duplicated in ClickHouse for analytics purposes.
+
+To create the necessary user and database objects:
+
+1. Generate a secure password and save it.
+1. Sign in to the ClickHouse SQL console.
+1. Execute the following commands. Replace `PASSWORD_HERE` with the generated password.
+
+ ```sql
+ CREATE DATABASE gitlab_clickhouse_main_production;
+ CREATE USER gitlab IDENTIFIED WITH sha256_password BY 'PASSWORD_HERE';
+ CREATE ROLE gitlab_app;
+ GRANT SELECT, INSERT, ALTER, CREATE, UPDATE, DROP, TRUNCATE, OPTIMIZE ON gitlab_clickhouse_main_production.* TO gitlab_app;
+ GRANT gitlab_app TO gitlab;
+ ```
+
+1. Connect to the `gitlab_clickhouse_main_production` database (or switch to it in the ClickHouse Cloud UI).
+
+1. To create the required database objects, execute:
+
+ ```sql
+ CREATE TABLE ci_finished_builds
+ (
+ id UInt64 DEFAULT 0,
+ project_id UInt64 DEFAULT 0,
+ pipeline_id UInt64 DEFAULT 0,
+ status LowCardinality(String) DEFAULT '',
+ created_at DateTime64(6, 'UTC') DEFAULT now(),
+ queued_at DateTime64(6, 'UTC') DEFAULT now(),
+ finished_at DateTime64(6, 'UTC') DEFAULT now(),
+ started_at DateTime64(6, 'UTC') DEFAULT now(),
+ runner_id UInt64 DEFAULT 0,
+ runner_manager_system_xid String DEFAULT '',
+ runner_run_untagged Boolean DEFAULT FALSE,
+ runner_type UInt8 DEFAULT 0,
+ runner_manager_version LowCardinality(String) DEFAULT '',
+ runner_manager_revision LowCardinality(String) DEFAULT '',
+ runner_manager_platform LowCardinality(String) DEFAULT '',
+ runner_manager_architecture LowCardinality(String) DEFAULT '',
+ duration Int64 MATERIALIZED age('ms', started_at, finished_at),
+ queueing_duration Int64 MATERIALIZED age('ms', queued_at, started_at)
+ )
+ ENGINE = ReplacingMergeTree
+ ORDER BY (status, runner_type, project_id, finished_at, id)
+ PARTITION BY toYear(finished_at);
+
+ CREATE TABLE ci_finished_builds_aggregated_queueing_delay_percentiles
+ (
+ status LowCardinality(String) DEFAULT '',
+ runner_type UInt8 DEFAULT 0,
+ started_at_bucket DateTime64(6, 'UTC') DEFAULT now(),
+
+ count_builds AggregateFunction(count),
+ queueing_duration_quantile AggregateFunction(quantile, Int64)
+ )
+ ENGINE = AggregatingMergeTree()
+ ORDER BY (started_at_bucket, status, runner_type);
+
+ CREATE MATERIALIZED VIEW ci_finished_builds_aggregated_queueing_delay_percentiles_mv
+ TO ci_finished_builds_aggregated_queueing_delay_percentiles
+ AS
+ SELECT
+ status,
+ runner_type,
+ toStartOfInterval(started_at, INTERVAL 5 minute) AS started_at_bucket,
+
+ countState(*) as count_builds,
+ quantileState(queueing_duration) AS queueing_duration_quantile
+ FROM ci_finished_builds
+ GROUP BY status, runner_type, started_at_bucket;
+ ```
+
+### Configure the GitLab connection to ClickHouse
+
+::Tabs
+
+:::TabTitle Linux package
+
+To provide GitLab with ClickHouse credentials:
+
+1. Edit `/etc/gitlab/gitlab.rb`:
+
+ ```ruby
+ gitlab_rails['clickhouse_databases']['main']['database'] = 'gitlab_clickhouse_main_production'
+ gitlab_rails['clickhouse_databases']['main']['url'] = 'https://example.com/path'
+ gitlab_rails['clickhouse_databases']['main']['username'] = 'gitlab'
+ gitlab_rails['clickhouse_databases']['main']['password'] = 'PASSWORD_HERE' # replace with the actual password
+ ```
+
+1. Save the file and reconfigure GitLab:
+
+ ```shell
+ sudo gitlab-ctl reconfigure
+ ```
+
+:::TabTitle Helm chart (Kubernetes)
+
+1. Save the ClickHouse password as a Kubernetes Secret:
+
+ ```shell
+ kubectl create secret generic gitlab-clickhouse-password --from-literal="main_password=PASSWORD_HERE"
+ ```
+
+1. Export the Helm values:
+
+ ```shell
+ helm get values gitlab > gitlab_values.yaml
+ ```
+
+1. Edit `gitlab_values.yaml`:
+
+ ```yaml
+ global:
+ clickhouse:
+ enabled: true
+ main:
+ username: default
+ password:
+ secret: gitlab-clickhouse-password
+ key: main_password
+ database: gitlab_clickhouse_main_production
+ url: 'http://example.com'
+ ```
+
+1. Save the file and apply the new values:
+
+ ```shell
+ helm upgrade -f gitlab_values.yaml gitlab gitlab/gitlab
+ ```
+
+::EndTabs
+
+To verify that your connection is set up successfully:
+
+1. Log in to the [Rails console](../administration/operations/rails_console.md#starting-a-rails-console-session).
+1. Execute the following:
+
+ ```ruby
+ ClickHouse::Client.select('SELECT 1', :main)
+ ```
+
+ If successful, the command returns `[{"1"=>1}]`.
+
+### Enable feature flags
+
+Features that use ClickHouse are currently under development and are disabled by feature flags.
+
+To enable these features, [enable](../administration/feature_flags.md#how-to-enable-and-disable-features-behind-flags)
+the following feature flags:
+
+| Feature flag name | Purpose |
+|------------------------------------|---------------------------------------------------------------------------|
+| `ci_data_ingestion_to_click_house` | Enables synchronization of new finished CI builds to the ClickHouse database. |
+| `clickhouse_ci_analytics` | Enables the **Wait time to pick a job** chart. |
+
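+For example, one way to enable both flags is from the [Rails console](../administration/operations/rails_console.md#starting-a-rails-console-session):
+
+```ruby
+Feature.enable(:ci_data_ingestion_to_click_house)
+Feature.enable(:clickhouse_ci_analytics)
+```
+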
+## What's next
+
+Support for usage and cost analysis is proposed in
+[epic 11183](https://gitlab.com/groups/gitlab-org/-/epics/11183).
+
+## Feedback
+
+To help us improve the Runner Fleet Dashboard, you can provide feedback in
+[issue 421737](https://gitlab.com/gitlab-org/gitlab/-/issues/421737).
+In particular:
+
+- How easy or difficult it was to set up GitLab to make the dashboard work.
+- How useful you found the dashboard.
+- What other information you would like to see on that dashboard.
+- Any other related thoughts and ideas.
diff --git a/doc/install/requirements.md b/doc/install/requirements.md
index 2a03d9deeda..d20a5ecc561 100644
--- a/doc/install/requirements.md
+++ b/doc/install/requirements.md
@@ -230,7 +230,6 @@ The recommended number of threads is dependent on several factors, including tot
- If the operating system has a maximum 2 GB of memory, the recommended number of threads is `1`.
A higher value results in excess swapping, and decrease performance.
-- If legacy Rugged code is in use, the recommended number of threads is `1`.
- In all other cases, the recommended number of threads is `4`. We don't recommend setting this
higher, due to how [Ruby MRI multi-threading](https://en.wikipedia.org/wiki/Global_interpreter_lock)
works.
diff --git a/doc/user/application_security/vulnerability_report/index.md b/doc/user/application_security/vulnerability_report/index.md
index 54fe54edc3f..e71aab5839e 100644
--- a/doc/user/application_security/vulnerability_report/index.md
+++ b/doc/user/application_security/vulnerability_report/index.md
@@ -175,7 +175,7 @@ Fields included are:
- Group name
- Project name
-- Scanner type
+- Tool
- Scanner name
- Status
- Vulnerability
@@ -189,6 +189,8 @@ Fields included are:
- Location
- Activity: Returns `true` if the vulnerability is resolved on the default branch, and `false` if not.
- Comments
+- Full Path
+- CVSS Vectors
NOTE:
Full details are available through our
diff --git a/doc/user/group/import/index.md b/doc/user/group/import/index.md
index e5d33534c86..24d5ca5b214 100644
--- a/doc/user/group/import/index.md
+++ b/doc/user/group/import/index.md
@@ -240,7 +240,16 @@ To view group import history:
1. On the left sidebar, at the top, select **Create new** (**{plus}**) and **New group**.
1. Select **Import group**.
1. In the upper-right corner, select **History**.
-1. If there are any errors for a particular import, you can see them by selecting **Details**.
+1. If there are any errors for a particular import, select **See failures** to see their details.
+
+### Review results of the import
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/429109) in GitLab 16.6 [with a flag](../../feature_flags.md) named `bulk_import_details_page`. Enabled by default.
+
+To review the results of an import:
+
+1. Go to the [Group import history page](#group-import-history).
+1. To see the details of a failed import, select the **See failures** link on any import with a **Failed** status.
### Migrated group items
diff --git a/lib/api/internal/base.rb b/lib/api/internal/base.rb
index 5ad058fd07d..87b3838fb85 100644
--- a/lib/api/internal/base.rb
+++ b/lib/api/internal/base.rb
@@ -55,6 +55,11 @@ module API
env = parse_env
Gitlab::Git::HookEnv.set(gl_repository, env) if container
+ # Snapshot repositories have a different relative path than the main repository. For access
+ # checks that need quarantined objects, the relative path is also sent as a header with
+ # Gitaly RPC calls.
+ populate_relative_path(params[:relative_path])
+
actor.update_last_used_at!
check_result = access_check_result
@@ -97,6 +102,12 @@ module API
end
# rubocop: enable Metrics/AbcSize
+ def populate_relative_path(relative_path)
+ return unless Gitlab::SafeRequestStore.active?
+
+ Gitlab::SafeRequestStore[:gitlab_git_relative_path] = relative_path
+ end
+
def validate_actor(actor)
return 'Could not find the given key' unless actor.key
@@ -121,6 +132,7 @@ module API
# username - user name for Git over SSH in keyless SSH cert mode
# protocol - Git access protocol being used, e.g. HTTP or SSH
# project - project full_path (not path on disk)
+ # relative_path - relative path of the repository on which access checks are performed.
# action - git action (git-upload-pack or git-receive-pack)
# changes - changes as "oldrev newrev ref", see Gitlab::ChangesList
# check_ip - optional, only in EE version, may limit access to
diff --git a/lib/api/invitations.rb b/lib/api/invitations.rb
index 6b4b7b355d0..d625b2c0fe6 100644
--- a/lib/api/invitations.rb
+++ b/lib/api/invitations.rb
@@ -42,11 +42,7 @@ module API
source = find_source(source_type, params[:id])
- if ::Feature.enabled?(:admin_group_member, source)
- authorize_admin_source_member!(source_type, source)
- else
- authorize_admin_source!(source_type, source)
- end
+ authorize_admin_source_member!(source_type, source)
create_service_params = params.merge(source: source)
@@ -69,11 +65,7 @@ module API
source = find_source(source_type, params[:id])
query = params[:query]
- if ::Feature.enabled?(:admin_group_member, source)
- authorize_admin_source_member!(source_type, source)
- else
- authorize_admin_source!(source_type, source)
- end
+ authorize_admin_source_member!(source_type, source)
invitations = paginate(retrieve_member_invitations(source, query))
@@ -95,11 +87,7 @@ module API
source = find_source(source_type, params.delete(:id))
invite_email = params[:email]
- if ::Feature.enabled?(:admin_group_member, source)
- authorize_admin_source_member!(source_type, source)
- else
- authorize_admin_source!(source_type, source)
- end
+ authorize_admin_source_member!(source_type, source)
invite = retrieve_member_invitations(source, invite_email).first
not_found! unless invite
@@ -137,11 +125,7 @@ module API
source = find_source(source_type, params[:id])
invite_email = params[:email]
- if ::Feature.enabled?(:admin_group_member, source)
- authorize_admin_source_member!(source_type, source)
- else
- authorize_admin_source!(source_type, source)
- end
+ authorize_admin_source_member!(source_type, source)
invite = retrieve_member_invitations(source, invite_email).first
not_found! unless invite
diff --git a/lib/api/members.rb b/lib/api/members.rb
index bdbdea70da0..56a15c41e1c 100644
--- a/lib/api/members.rb
+++ b/lib/api/members.rb
@@ -118,11 +118,8 @@ module API
post ":id/members", feature_category: feature_category do
source = find_source(source_type, params[:id])
- if ::Feature.enabled?(:admin_group_member, source)
- authorize_admin_source_member!(source_type, source)
- else
- authorize_admin_source!(source_type, source)
- end
+
+ authorize_admin_source_member!(source_type, source)
create_service_params = params.merge(source: source)
@@ -148,11 +145,7 @@ module API
source = find_source(source_type, params.delete(:id))
member = source_members(source).find_by!(user_id: params[:user_id])
- if ::Feature.enabled?(:admin_group_member, source)
- authorize_update_source_member!(source_type, member)
- else
- authorize_admin_source!(source_type, source)
- end
+ authorize_update_source_member!(source_type, member)
result = ::Members::UpdateService
.new(current_user, declared_params(include_missing: false))
diff --git a/lib/api/pypi_packages.rb b/lib/api/pypi_packages.rb
index 027a11738d3..3313b3a87cd 100644
--- a/lib/api/pypi_packages.rb
+++ b/lib/api/pypi_packages.rb
@@ -280,6 +280,13 @@ module API
optional :requires_python, type: String, documentation: { example: '>=3.7' }
optional :md5_digest, type: String, documentation: { example: '900150983cd24fb0d6963f7d28e17f72' }
optional :sha256_digest, type: String, regexp: Gitlab::Regex.sha256_regex, documentation: { example: 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad' }
+ optional :metadata_version, type: String, documentation: { example: '2.3' }
+ optional :author_email, type: String, documentation: { example: 'cschultz@example.com, snoopy@peanuts.com' }
+ optional :description, type: String
+ optional :description_content_type, type: String,
+ documentation: { example: 'text/markdown; charset=UTF-8; variant=GFM' }
+ optional :summary, type: String, documentation: { example: 'A module for collecting votes from beagles.' }
+ optional :keywords, type: String, documentation: { example: 'dog,puppy,voting,election' }
end
route_setting :authentication, deploy_token_allowed: true, basic_auth_personal_access_token: true, job_token_allowed: :basic_auth
diff --git a/lib/click_house/migration.rb b/lib/click_house/migration.rb
new file mode 100644
index 00000000000..410a7ec86bc
--- /dev/null
+++ b/lib/click_house/migration.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module ClickHouse
+ class Migration
+ cattr_accessor :verbose, :client_configuration
+ attr_accessor :name, :version
+
+ class << self
+ attr_accessor :delegate
+ end
+
+ def initialize(name = self.class.name, version = nil)
+ @name = name
+ @version = version
+ end
+
+ self.client_configuration = ClickHouse::Client.configuration
+ self.verbose = true
+ # instantiate the delegate object after initialize is defined
+ self.delegate = new
+
+ MIGRATION_FILENAME_REGEXP = /\A([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?\.rb\z/
+
+ def database
+ self.class.constants.include?(:SCHEMA) ? self.class.const_get(:SCHEMA, false) : :main
+ end
+
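+ # Runs the given SQL against this migration's database via ClickHouse::Client.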
+ def execute(query)
+ ClickHouse::Client.execute(query, database, self.class.client_configuration)
+ end
+
+ def up
+ self.class.delegate = self
+
+ return unless self.class.respond_to?(:up)
+
+ self.class.up
+ end
+
+ def down
+ self.class.delegate = self
+
+ return unless self.class.respond_to?(:down)
+
+ self.class.down
+ end
+
+ # Execute this migration in the named direction
+ def migrate(direction)
+ return unless respond_to?(direction)
+
+ case direction
+ when :up then announce 'migrating'
+ when :down then announce 'reverting'
+ end
+
+ time = Benchmark.measure do
+ exec_migration(direction)
+ end
+
+ case direction
+ when :up then announce format("migrated (%.4fs)", time.real)
+ write
+ when :down then announce format("reverted (%.4fs)", time.real)
+ write
+ end
+ end
+
+ private
+
+ def exec_migration(direction)
+ # noinspection RubyCaseWithoutElseBlockInspection
+ case direction
+ when :up then up
+ when :down then down
+ end
+ end
+
+ def write(text = '')
+ $stdout.puts(text) if verbose
+ end
+
+ def announce(message)
+ text = "#{version} #{name}: #{message}"
+ length = [0, 75 - text.length].max
+ write format('== %s %s', text, '=' * length)
+ end
+ end
+end
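For context, a migration built on this base class is just a subclass that overrides up (and optionally down) and calls execute; the fixture migrations added later in this diff follow the same shape. A minimal sketch, with an illustrative file name and the optional SCHEMA override for a non-default database:

# db/click_house/migrate/20231201000000_create_example_table.rb (illustrative)
class CreateExampleTable < ClickHouse::Migration
  # SCHEMA = :another_db  # optional; #database falls back to :main when unset

  def up
    execute <<~SQL
      CREATE TABLE example_table (
        id UInt64,
        created_at DateTime64(6, 'UTC') DEFAULT now64()
      ) ENGINE = MergeTree
      PRIMARY KEY(id)
    SQL
  end

  def down
    execute <<~SQL
      DROP TABLE example_table
    SQL
  end
end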
diff --git a/lib/click_house/migration_support/migration_context.rb b/lib/click_house/migration_support/migration_context.rb
new file mode 100644
index 00000000000..6e4dd2a97c2
--- /dev/null
+++ b/lib/click_house/migration_support/migration_context.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+module ClickHouse
+ module MigrationSupport
+ # MigrationContext sets the context in which a migration is run.
+ #
+ # A migration context requires the path to the migrations to be set
+ # in the +migrations_paths+ parameter. Optionally a +schema_migration+
+ # class can be provided. For most applications, +SchemaMigration+ is
+ # sufficient. Multiple database applications need a +SchemaMigration+
+ # per primary database.
+ class MigrationContext
+ attr_reader :migrations_paths, :schema_migration
+
+ def initialize(migrations_paths, schema_migration)
+ @migrations_paths = migrations_paths
+ @schema_migration = schema_migration
+ end
+
+ def up(target_version = nil, &block)
+ selected_migrations = block ? migrations.select(&block) : migrations
+
+ migrate(:up, selected_migrations, target_version)
+ end
+
+ def down(target_version = nil, &block)
+ selected_migrations = block ? migrations.select(&block) : migrations
+
+ migrate(:down, selected_migrations, target_version)
+ end
+
+ private
+
+ def migrate(direction, selected_migrations, target_version = nil)
+ ClickHouse::MigrationSupport::Migrator.new(
+ direction,
+ selected_migrations,
+ schema_migration,
+ target_version
+ ).migrate
+ end
+
+ def migrations
+ migrations = migration_files.map do |file|
+ version, name, scope = parse_migration_filename(file)
+
+ raise ClickHouse::MigrationSupport::IllegalMigrationNameError, file unless version
+
+ version = version.to_i
+ name = name.camelize
+
+ MigrationProxy.new(name, version, file, scope)
+ end
+
+ migrations.sort_by(&:version)
+ end
+
+ def migration_files
+ paths = Array(migrations_paths)
+ Dir[*paths.flat_map { |path| "#{path}/**/[0-9]*_*.rb" }]
+ end
+
+ def parse_migration_filename(filename)
+ File.basename(filename).scan(ClickHouse::Migration::MIGRATION_FILENAME_REGEXP).first
+ end
+ end
+
+ # MigrationProxy is used to defer loading of the actual migration classes
+ # until they are needed
+ MigrationProxy = Struct.new(:name, :version, :filename, :scope) do
+ def initialize(name, version, filename, scope)
+ super
+ @migration = nil
+ end
+
+ def basename
+ File.basename(filename)
+ end
+
+ delegate :migrate, :announce, :write, :database, to: :migration
+
+ private
+
+ def migration
+ @migration ||= load_migration
+ end
+
+ def load_migration
+ require(File.expand_path(filename))
+ name.constantize.new(name, version)
+ end
+ end
+ end
+end
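A rough sketch of driving this context directly, which is essentially what the gitlab:clickhouse rake tasks added later in this diff do (the path and version values here are illustrative):

migration_context = ClickHouse::MigrationSupport::MigrationContext.new(
  'db/click_house/migrate',
  ClickHouse::MigrationSupport::SchemaMigration
)

migration_context.up                    # run all pending migrations
migration_context.up(20231201000000)    # or stop at a target version
migration_context.down(0)               # roll back everything that has run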
diff --git a/lib/click_house/migration_support/migration_error.rb b/lib/click_house/migration_support/migration_error.rb
new file mode 100644
index 00000000000..0638d487e37
--- /dev/null
+++ b/lib/click_house/migration_support/migration_error.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module ClickHouse
+ module MigrationSupport
+ class MigrationError < StandardError
+ def initialize(message = nil)
+ message = "\n\n#{message}\n\n" if message
+ super
+ end
+ end
+
+ class IllegalMigrationNameError < MigrationError
+ def initialize(name = nil)
+ if name
+ super("Illegal name for migration file: #{name}\n\t(only lower case letters, numbers, and '_' allowed).")
+ else
+ super('Illegal name for migration.')
+ end
+ end
+ end
+
+ IrreversibleMigration = Class.new(MigrationError)
+
+ class DuplicateMigrationVersionError < MigrationError
+ def initialize(version = nil)
+ if version
+ super("Multiple migrations have the version number #{version}.")
+ else
+ super('Duplicate migration version error.')
+ end
+ end
+ end
+
+ class DuplicateMigrationNameError < MigrationError
+ def initialize(name = nil)
+ if name
+ super("Multiple migrations have the name #{name}.")
+ else
+ super('Duplicate migration name.')
+ end
+ end
+ end
+
+ class UnknownMigrationVersionError < MigrationError
+ def initialize(version = nil)
+ if version
+ super("No migration with version number #{version}.")
+ else
+ super('Unknown migration version.')
+ end
+ end
+ end
+ end
+end
diff --git a/lib/click_house/migration_support/migrator.rb b/lib/click_house/migration_support/migrator.rb
new file mode 100644
index 00000000000..5c67b3a5ff1
--- /dev/null
+++ b/lib/click_house/migration_support/migrator.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+module ClickHouse
+ module MigrationSupport
+ class Migrator
+ class << self
+ attr_accessor :migrations_paths
+ end
+
+ attr_accessor :logger
+
+ self.migrations_paths = ["db/click_house/migrate"]
+
+ def initialize(direction, migrations, schema_migration, target_version = nil, logger = Gitlab::AppLogger)
+ @direction = direction
+ @target_version = target_version
+ @migrated_versions = {}
+ @migrations = migrations
+ @schema_migration = schema_migration
+ @logger = logger
+
+ validate(@migrations)
+
+ migrations.map(&:database).uniq.each do |database|
+ @schema_migration.create_table(database)
+ end
+ end
+
+ def current_version
+ @migrated_versions.values.flatten.max || 0
+ end
+
+ def current_migration
+ migrations.detect { |m| m.version == current_version }
+ end
+ alias_method :current, :current_migration
+
+ def run
+ run_without_lock
+ end
+
+ def migrate
+ migrate_without_lock
+ end
+
+ def runnable
+ runnable = migrations[start..finish]
+
+ if up?
+ runnable.reject { |m| ran?(m) }
+ else
+ # skip the last migration if we're headed down, but not ALL the way down
+ runnable.pop if target
+ runnable.find_all { |m| ran?(m) }
+ end
+ end
+
+ def migrations
+ down? ? @migrations.reverse : @migrations.sort_by(&:version)
+ end
+
+ def pending_migrations(database)
+ already_migrated = migrated(database)
+
+ migrations.reject { |m| already_migrated.include?(m.version) }
+ end
+
+ def migrated(database)
+ @migrated_versions[database] || load_migrated(database)
+ end
+
+ def load_migrated(database)
+ @migrated_versions[database] = Set.new(@schema_migration.all_versions(database).map(&:to_i))
+ end
+
+ private
+
+ # Used for running a specific migration.
+ def run_without_lock
+ migration = migrations.detect { |m| m.version == @target_version }
+
+ raise ClickHouse::MigrationSupport::UnknownMigrationVersionError, @target_version if migration.nil?
+
+ execute_migration(migration)
+ end
+
+ # Used for running multiple migrations up to or down to a certain value.
+ def migrate_without_lock
+ raise ClickHouse::MigrationSupport::UnknownMigrationVersionError, @target_version if invalid_target?
+
+ runnable.each(&method(:execute_migration)) # rubocop: disable Performance/MethodObjectAsBlock -- Execute through proxy
+ end
+
+ def ran?(migration)
+ migrated(migration.database).include?(migration.version.to_i)
+ end
+
+ # Returns true if a non-zero target version was given but no migration has that version.
+ def invalid_target?
+ return unless @target_version
+ return if @target_version == 0
+
+ !target
+ end
+
+ def execute_migration(migration)
+ database = migration.database
+
+ return if down? && migrated(database).exclude?(migration.version.to_i)
+ return if up? && migrated(database).include?(migration.version.to_i)
+
+ logger.info "Migrating to #{migration.name} (#{migration.version})" if logger
+
+ migration.migrate(@direction)
+ record_version_state_after_migrating(database, migration.version)
+ rescue StandardError => e
+ msg = "An error has occurred, all later migrations canceled:\n\n#{e}"
+ raise StandardError, msg, e.backtrace
+ end
+
+ def target
+ migrations.detect { |m| m.version == @target_version }
+ end
+
+ def finish
+ migrations.index(target) || (migrations.size - 1)
+ end
+
+ def start
+ up? ? 0 : (migrations.index(current) || 0)
+ end
+
+ def validate(migrations)
+ name, = migrations.group_by(&:name).find { |_, v| v.length > 1 }
+ raise ClickHouse::MigrationSupport::DuplicateMigrationNameError, name if name
+
+ version, = migrations.group_by(&:version).find { |_, v| v.length > 1 }
+ raise ClickHouse::MigrationSupport::DuplicateMigrationVersionError, version if version
+ end
+
+ def record_version_state_after_migrating(database, version)
+ if down?
+ migrated(database).delete(version)
+ @schema_migration.create!(database, version: version.to_s, active: 0)
+ else
+ migrated(database) << version
+ @schema_migration.create!(database, version: version.to_s)
+ end
+ end
+
+ def up?
+ @direction == :up
+ end
+
+ def down?
+ @direction == :down
+ end
+ end
+ end
+end
diff --git a/lib/click_house/migration_support/schema_migration.rb b/lib/click_house/migration_support/schema_migration.rb
new file mode 100644
index 00000000000..e82debbad0d
--- /dev/null
+++ b/lib/click_house/migration_support/schema_migration.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module ClickHouse
+ module MigrationSupport
+ class SchemaMigration
+ class_attribute :table_name_prefix, instance_writer: false, default: ''
+ class_attribute :table_name_suffix, instance_writer: false, default: ''
+ class_attribute :schema_migrations_table_name, instance_accessor: false, default: 'schema_migrations'
+
+ class << self
+ TABLE_EXISTS_QUERY = <<~SQL.squish
+ SELECT 1 FROM system.tables
+ WHERE name = {table_name: String} AND database = {database_name: String}
+ SQL
+
+ def primary_key
+ 'version'
+ end
+
+ def table_name
+ "#{table_name_prefix}#{schema_migrations_table_name}#{table_name_suffix}"
+ end
+
+ def table_exists?(database, configuration = ClickHouse::Migration.client_configuration)
+ database_name = configuration.databases[database]&.database
+ return false unless database_name
+
+ placeholders = { table_name: table_name, database_name: database_name }
+ query = ClickHouse::Client::Query.new(raw_query: TABLE_EXISTS_QUERY, placeholders: placeholders)
+
+ ClickHouse::Client.select(query, database, configuration).any?
+ end
+
+ def create_table(database, configuration = ClickHouse::Migration.client_configuration)
+ return if table_exists?(database, configuration)
+
+ query = <<~SQL
+ CREATE TABLE #{table_name} (
+ version LowCardinality(String),
+ active UInt8 NOT NULL DEFAULT 1,
+ applied_at DateTime64(6, 'UTC') NOT NULL DEFAULT now64()
+ )
+ ENGINE = ReplacingMergeTree(applied_at)
+ PRIMARY KEY(version)
+ ORDER BY (version)
+ SQL
+
+ ClickHouse::Client.execute(query, database, configuration)
+ end
+
+ def all_versions(database)
+ query = <<~SQL
+ SELECT version FROM #{table_name} FINAL
+ WHERE active = 1
+ ORDER BY (version)
+ SQL
+
+ ClickHouse::Client.select(query, database, ClickHouse::Migration.client_configuration).pluck('version')
+ end
+
+ def create!(database, **args)
+ insert_sql = <<~SQL
+ INSERT INTO #{table_name} (#{args.keys.join(',')}) VALUES (#{args.values.join(',')})
+ SQL
+
+ ClickHouse::Client.execute(insert_sql, database, ClickHouse::Migration.client_configuration)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/parsers/security/common.rb b/lib/gitlab/ci/parsers/security/common.rb
index 9032faa66d4..be6c6c2558b 100644
--- a/lib/gitlab/ci/parsers/security/common.rb
+++ b/lib/gitlab/ci/parsers/security/common.rb
@@ -141,7 +141,7 @@ module Gitlab
project_id: @project.id,
found_by_pipeline: report.pipeline,
vulnerability_finding_signatures_enabled: @signatures_enabled,
- cvss: data['cvss'] || []
+ cvss: data['cvss_vectors'] || []
)
)
end
diff --git a/lib/gitlab/ci/templates/Cosign.gitlab-ci.yml b/lib/gitlab/ci/templates/Cosign.gitlab-ci.yml
index 356062c734e..324128678de 100644
--- a/lib/gitlab/ci/templates/Cosign.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Cosign.gitlab-ci.yml
@@ -12,9 +12,9 @@ include:
docker-build:
variables:
- COSIGN_YES: "true" # Used by Cosign to skip confirmation prompts for non-destructive operations
+ COSIGN_YES: "true" # Used by Cosign to skip confirmation prompts for non-destructive operations
id_tokens:
- SIGSTORE_ID_TOKEN: # Used by Cosign to get certificate from Fulcio
+ SIGSTORE_ID_TOKEN: # Used by Cosign to get certificate from Fulcio
aud: sigstore
after_script:
- apk add --update cosign
diff --git a/lib/gitlab/encrypted_command_base.rb b/lib/gitlab/encrypted_command_base.rb
index b35c28b85cd..679d9d8e31a 100644
--- a/lib/gitlab/encrypted_command_base.rb
+++ b/lib/gitlab/encrypted_command_base.rb
@@ -7,12 +7,12 @@ module Gitlab
EDIT_COMMAND_NAME = "base"
class << self
- def encrypted_secrets
+ def encrypted_secrets(**args)
raise NotImplementedError
end
- def write(contents)
- encrypted = encrypted_secrets
+ def write(contents, args: {})
+ encrypted = encrypted_secrets(**args)
return unless validate_config(encrypted)
validate_contents(contents)
@@ -25,8 +25,8 @@ module Gitlab
warn "Couldn't decrypt #{encrypted.content_path}. Perhaps you passed the wrong key?"
end
- def edit
- encrypted = encrypted_secrets
+ def edit(args: {})
+ encrypted = encrypted_secrets(**args)
return unless validate_config(encrypted)
if ENV["EDITOR"].blank?
@@ -58,8 +58,8 @@ module Gitlab
temp_file&.unlink
end
- def show
- encrypted = encrypted_secrets
+ def show(args: {})
+ encrypted = encrypted_secrets(**args)
return unless validate_config(encrypted)
puts encrypted.read.presence || "File '#{encrypted.content_path}' does not exist. Use `gitlab-rake #{self::EDIT_COMMAND_NAME}` to change that."
diff --git a/lib/gitlab/encrypted_ldap_command.rb b/lib/gitlab/encrypted_ldap_command.rb
index 5e1eabe7ec6..442c675f19e 100644
--- a/lib/gitlab/encrypted_ldap_command.rb
+++ b/lib/gitlab/encrypted_ldap_command.rb
@@ -1,6 +1,5 @@
# frozen_string_literal: true
-# rubocop:disable Rails/Output
module Gitlab
class EncryptedLdapCommand < EncryptedCommandBase
DISPLAY_NAME = "LDAP"
@@ -21,4 +20,3 @@ module Gitlab
end
end
end
-# rubocop:enable Rails/Output
diff --git a/lib/gitlab/encrypted_redis_command.rb b/lib/gitlab/encrypted_redis_command.rb
new file mode 100644
index 00000000000..608edcdb950
--- /dev/null
+++ b/lib/gitlab/encrypted_redis_command.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+# rubocop:disable Rails/Output
+module Gitlab
+ class EncryptedRedisCommand < EncryptedCommandBase
+ DISPLAY_NAME = "Redis"
+ EDIT_COMMAND_NAME = "gitlab:redis:secret:edit"
+
+ class << self
+ def all_redis_instance_class_names
+ Gitlab::Redis::ALL_CLASSES.map do |c|
+ normalized_instance_name(c)
+ end
+ end
+
+ def normalized_instance_name(instance)
+ if instance.is_a?(Class)
+ # Gitlab::Redis::SharedState => sharedstate
+ instance.name.demodulize.to_s.downcase
+ else
+ # Drop all hyphens, underscores, and spaces from the name
+ # e.g.: shared_state => sharedstate
+ instance.gsub(/[-_ ]/, '').downcase
+ end
+ end
+
+ def encrypted_secrets(**args)
+ if args[:instance_name]
+ instance_class = Gitlab::Redis::ALL_CLASSES.find do |instance|
+ normalized_instance_name(instance) == normalized_instance_name(args[:instance_name])
+ end
+
+ unless instance_class
+ error_message = <<~MSG
+ Specified instance name #{args[:instance_name]} does not exist.
+ The available instances are #{all_redis_instance_class_names.join(', ')}.
+ MSG
+
+ raise error_message
+ end
+ else
+ instance_class = Gitlab::Redis::Cache
+ end
+
+ instance_class.encrypted_secrets
+ end
+
+ def encrypted_file_template
+ <<~YAML
+ # password: '123'
+ YAML
+ end
+ end
+ end
+end
+# rubocop:enable Rails/Output
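As a rough illustration of the instance-name handling above (the return values follow the comments in the code and assume the standard Gitlab::Redis classes):

Gitlab::EncryptedRedisCommand.normalized_instance_name(Gitlab::Redis::SharedState) # => "sharedstate"
Gitlab::EncryptedRedisCommand.normalized_instance_name('shared_state')             # => "sharedstate"
Gitlab::EncryptedRedisCommand.encrypted_secrets(instance_name: 'shared_state')     # secrets for that instance
Gitlab::EncryptedRedisCommand.encrypted_secrets                                    # defaults to Gitlab::Redis::Cache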
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index 5ec58fc4f44..da38c11ebca 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -328,6 +328,8 @@ module Gitlab
'client_name' => CLIENT_NAME
}
+ relative_path = fetch_relative_path
+
context_data = Gitlab::ApplicationContext.current
feature_stack = Thread.current[:gitaly_feature_stack]
@@ -339,6 +341,7 @@ module Gitlab
metadata['username'] = context_data['meta.user'] if context_data&.fetch('meta.user', nil)
metadata['user_id'] = context_data['meta.user_id'].to_s if context_data&.fetch('meta.user_id', nil)
metadata['remote_ip'] = context_data['meta.remote_ip'] if context_data&.fetch('meta.remote_ip', nil)
+ metadata['relative-path-bin'] = relative_path if relative_path
metadata.merge!(Feature::Gitaly.server_feature_flags(**feature_flag_actors))
metadata.merge!(route_to_primary)
@@ -348,6 +351,17 @@ module Gitlab
{ metadata: metadata, deadline: deadline_info[:deadline] }
end
+ # The GitLab `internal/allowed/` API sets the :gitlab_git_relative_path
+ # variable. This provides the repository's relative path, which can be used
+ # to locate snapshot repositories in Gitaly that act as quarantine
+ # repositories until a transaction is committed.
+ def self.fetch_relative_path
+ return unless Gitlab::SafeRequestStore.active?
+ return if Gitlab::SafeRequestStore[:gitlab_git_relative_path].blank?
+
+ Gitlab::SafeRequestStore.fetch(:gitlab_git_relative_path)
+ end
+
# Gitlab::Git::HookEnv will set the :gitlab_git_env variable in case we're
# running in the context of a Gitaly hook call, which may make use of
# quarantined object directories. We thus need to pass along the path of
diff --git a/lib/gitlab/instrumentation_helper.rb b/lib/gitlab/instrumentation_helper.rb
index 2a3c4db5ffa..49078a7ccd0 100644
--- a/lib/gitlab/instrumentation_helper.rb
+++ b/lib/gitlab/instrumentation_helper.rb
@@ -12,7 +12,6 @@ module Gitlab
def add_instrumentation_data(payload)
instrument_gitaly(payload)
- instrument_rugged(payload)
instrument_redis(payload)
instrument_elasticsearch(payload)
instrument_zoekt(payload)
@@ -40,15 +39,6 @@ module Gitlab
payload[:gitaly_duration_s] = Gitlab::GitalyClient.query_time
end
- def instrument_rugged(payload)
- rugged_calls = Gitlab::RuggedInstrumentation.query_count
-
- return if rugged_calls == 0
-
- payload[:rugged_calls] = rugged_calls
- payload[:rugged_duration_s] = Gitlab::RuggedInstrumentation.query_time
- end
-
def instrument_redis(payload)
payload.merge! ::Gitlab::Instrumentation::Redis.payload
end
diff --git a/lib/gitlab/redis/wrapper.rb b/lib/gitlab/redis/wrapper.rb
index 2bcf4769b5a..d5470bc0016 100644
--- a/lib/gitlab/redis/wrapper.rb
+++ b/lib/gitlab/redis/wrapper.rb
@@ -19,7 +19,7 @@ module Gitlab
InvalidPathError = Class.new(StandardError)
class << self
- delegate :params, :url, :store, to: :new
+ delegate :params, :url, :store, :encrypted_secrets, to: :new
def with
pool.with { |redis| yield redis }
@@ -110,6 +110,14 @@ module Gitlab
raw_config_hash[:sentinels]
end
+ def secret_file
+ if raw_config_hash[:secret_file].blank?
+ File.join(Settings.encrypted_settings['path'], 'redis.yaml.enc')
+ else
+ Settings.absolute(raw_config_hash[:secret_file])
+ end
+ end
+
def sentinels?
sentinels && !sentinels.empty?
end
@@ -118,22 +126,44 @@ module Gitlab
::Redis::Store::Factory.create(redis_store_options.merge(extras))
end
+ def encrypted_secrets
+ # In rake tasks, we have to populate the encrypted_secrets even if the
+ # file does not exist, as it is the job of one of those tasks to create
+ # the file. In other cases, like when being loaded as part of spinning
+ # up the test environment via `scripts/setup-test-env`, we should gate on
+ # the presence of the specified secret file so that
+ # `Settings.encrypted`, which might not be loadable, does not get
+ # called.
+ Settings.encrypted(secret_file) if File.exist?(secret_file) || ::Gitlab::Runtime.rake?
+ end
+
private
def redis_store_options
config = raw_config_hash
config[:instrumentation_class] ||= self.class.instrumentation_class
- result = if config[:cluster].present?
- config[:db] = 0 # Redis Cluster only supports db 0
- config
+ decrypted_config = parse_encrypted_config(config)
+
+ result = if decrypted_config[:cluster].present?
+ decrypted_config[:db] = 0 # Redis Cluster only supports db 0
+ decrypted_config
else
- parse_redis_url(config)
+ parse_redis_url(decrypted_config)
end
parse_client_tls_options(result)
end
+ def parse_encrypted_config(encrypted_config)
+ encrypted_config.delete(:secret_file)
+
+ decrypted_secrets = encrypted_secrets&.config
+ encrypted_config.merge!(decrypted_secrets) if decrypted_secrets
+
+ encrypted_config
+ end
+
def parse_redis_url(config)
redis_url = config.delete(:url)
redis_uri = URI.parse(redis_url)
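The net effect of the wrapper changes is that a Redis instance's config can point at an encrypted secrets file instead of embedding the password. A minimal sketch, assuming a secret_file key in the raw config (or the default redis.yaml.enc under Settings.encrypted_settings['path']):

# Illustrative only: once the encrypted file exists and decrypts, its keys
# (e.g. `password`, per the template in EncryptedRedisCommand) are merged into
# the connection options before the URL and cluster settings are parsed.
Gitlab::Redis::Cache.encrypted_secrets   # encrypted configuration, when the file exists or in a rake task
Gitlab::Redis::Cache.params              # connection params now carry the decrypted password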
diff --git a/lib/gitlab/rugged_instrumentation.rb b/lib/gitlab/rugged_instrumentation.rb
deleted file mode 100644
index 36a3a491de6..00000000000
--- a/lib/gitlab/rugged_instrumentation.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module RuggedInstrumentation
- def self.query_time
- query_time = SafeRequestStore[:rugged_query_time] || 0
- query_time.round(Gitlab::InstrumentationHelper::DURATION_PRECISION)
- end
-
- def self.add_query_time(duration)
- SafeRequestStore[:rugged_query_time] ||= 0
- SafeRequestStore[:rugged_query_time] += duration
- end
-
- def self.query_time_ms
- (self.query_time * 1000).round(2)
- end
-
- def self.query_count
- SafeRequestStore[:rugged_call_count] ||= 0
- end
-
- def self.increment_query_count
- SafeRequestStore[:rugged_call_count] ||= 0
- SafeRequestStore[:rugged_call_count] += 1
- end
-
- def self.active?
- SafeRequestStore.active?
- end
-
- def self.add_call_details(details)
- return unless Gitlab::PerformanceBar.enabled_for_request?
-
- Gitlab::SafeRequestStore[:rugged_call_details] ||= []
- Gitlab::SafeRequestStore[:rugged_call_details] << details
- end
-
- def self.list_call_details
- return [] unless Gitlab::PerformanceBar.enabled_for_request?
-
- Gitlab::SafeRequestStore[:rugged_call_details] || []
- end
- end
-end
diff --git a/lib/peek/views/rugged.rb b/lib/peek/views/rugged.rb
deleted file mode 100644
index 3ed54a010f8..00000000000
--- a/lib/peek/views/rugged.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-module Peek
- module Views
- class Rugged < DetailedView
- def results
- return {} unless calls > 0
-
- super
- end
-
- private
-
- def duration
- ::Gitlab::RuggedInstrumentation.query_time_ms
- end
-
- def calls
- ::Gitlab::RuggedInstrumentation.query_count
- end
-
- def call_details
- ::Gitlab::RuggedInstrumentation.list_call_details
- end
-
- def format_call_details(call)
- super.merge(args: format_args(call[:args]))
- end
-
- def format_args(args)
- args.map do |arg|
- # ActiveSupport::JSON recursively calls as_json on all
- # instance variables, and if that instance variable points to
- # something that refers back to the same instance, we can wind
- # up in an infinite loop. Currently this only seems to happen with
- # Gitlab::Git::Repository and ::Repository.
- if arg.instance_variables.present?
- arg.to_s
- else
- arg
- end
- end
- end
- end
- end
-end
diff --git a/lib/tasks/gitlab/click_house/migration.rake b/lib/tasks/gitlab/click_house/migration.rake
new file mode 100644
index 00000000000..2c4bce65d80
--- /dev/null
+++ b/lib/tasks/gitlab/click_house/migration.rake
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+namespace :gitlab do
+ namespace :clickhouse do
+ task :prepare_schema_migration_table, [:database] => :environment do |_t, args|
+ require_relative '../../../../lib/click_house/migration_support/schema_migration'
+
+ ClickHouse::MigrationSupport::SchemaMigration.create_table(args.database&.to_sym || :main)
+ end
+
+ desc 'GitLab | ClickHouse | Migrate'
+ task migrate: [:prepare_schema_migration_table] do
+ migrate(:up)
+ end
+
+ desc 'GitLab | ClickHouse | Rollback'
+ task rollback: [:prepare_schema_migration_table] do
+ migrate(:down)
+ end
+
+ private
+
+ def check_target_version
+ return unless target_version
+
+ version = ENV['VERSION']
+
+ return if ClickHouse::Migration::MIGRATION_FILENAME_REGEXP.match?(version) || /\A\d+\z/.match?(version)
+
+ raise "Invalid format of target version: `VERSION=#{version}`"
+ end
+
+ def target_version
+ ENV['VERSION'].to_i if ENV['VERSION'] && !ENV['VERSION'].empty?
+ end
+
+ def verbose
+ ENV['VERBOSE'] ? ENV['VERBOSE'] != 'false' : true
+ end
+
+ def migrate(direction)
+ require_relative '../../../../lib/click_house/migration_support/schema_migration'
+ require_relative '../../../../lib/click_house/migration_support/migration_context'
+ require_relative '../../../../lib/click_house/migration_support/migrator'
+
+ check_target_version
+
+ scope = ENV['SCOPE']
+ verbose_was = ClickHouse::Migration.verbose
+ ClickHouse::Migration.verbose = verbose
+
+ migrations_paths = ClickHouse::MigrationSupport::Migrator.migrations_paths
+ schema_migration = ClickHouse::MigrationSupport::SchemaMigration
+ migration_context = ClickHouse::MigrationSupport::MigrationContext.new(migrations_paths, schema_migration)
+ migrations_ran = migration_context.public_send(direction, target_version) do |migration|
+ scope.blank? || scope == migration.scope
+ end
+
+ puts('No migrations ran.') unless migrations_ran&.any?
+ ensure
+ ClickHouse::Migration.verbose = verbose_was
+ end
+ end
+end
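Assuming these tasks run through the usual Rake entry point, usage would look roughly like this (version and scope values are illustrative; SCOPE matches the optional suffix in a migration's file name):

bundle exec rake gitlab:clickhouse:migrate
bundle exec rake gitlab:clickhouse:migrate VERSION=20231201000000
bundle exec rake gitlab:clickhouse:rollback VERSION=0 VERBOSE=false
bundle exec rake gitlab:clickhouse:migrate SCOPE=another_db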
diff --git a/lib/tasks/gitlab/redis.rake b/lib/tasks/gitlab/redis.rake
new file mode 100644
index 00000000000..6983c5fc318
--- /dev/null
+++ b/lib/tasks/gitlab/redis.rake
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+namespace :gitlab do
+ namespace :redis do
+ namespace :secret do
+ desc "GitLab | Redis | Secret | Show Redis secret"
+ task :show, [:instance_name] => [:environment] do |_t, args|
+ Gitlab::EncryptedRedisCommand.show(args: args)
+ end
+
+ desc "GitLab | Redis | Secret | Edit Redis secret"
+ task :edit, [:instance_name] => [:environment] do |_t, args|
+ Gitlab::EncryptedRedisCommand.edit(args: args)
+ end
+
+ desc "GitLab | Redis | Secret | Write Redis secret"
+ task :write, [:instance_name] => [:environment] do |_t, args|
+ content = $stdin.tty? ? $stdin.gets : $stdin.read
+ Gitlab::EncryptedRedisCommand.write(content, args: args)
+ end
+ end
+ end
+end
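Following the gitlab-rake convention referenced in EncryptedCommandBase, these tasks would be invoked roughly as follows (the instance name argument is optional and falls back to Gitlab::Redis::Cache):

gitlab-rake gitlab:redis:secret:show
gitlab-rake "gitlab:redis:secret:edit[sharedstate]"
echo 'password: mypassword' | gitlab-rake "gitlab:redis:secret:write[sharedstate]"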
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 44dd115c15f..38cadff29e0 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -24110,6 +24110,9 @@ msgstr ""
msgid "I accept the %{terms_link}"
msgstr ""
+msgid "I am sorry, I am unable to find what you are looking for."
+msgstr ""
+
msgid "I forgot my password"
msgstr ""
@@ -34532,9 +34535,6 @@ msgstr ""
msgid "PerformanceBar|Redis calls"
msgstr ""
-msgid "PerformanceBar|Rugged calls"
-msgstr ""
-
msgid "PerformanceBar|SQL queries"
msgstr ""
@@ -36233,6 +36233,9 @@ msgstr ""
msgid "Product analytics"
msgstr ""
+msgid "Product analytics requires Experiment and Beta features to be enabled."
+msgstr ""
+
msgid "ProductAnalytics|1. Add the NPM package to your package.json using your preferred package manager"
msgstr ""
@@ -36404,6 +36407,9 @@ msgstr ""
msgid "ProductAnalytics|Previous month"
msgstr ""
+msgid "ProductAnalytics|Product Analytics"
+msgstr ""
+
msgid "ProductAnalytics|Product analytics onboarding"
msgstr ""
@@ -36446,6 +36452,9 @@ msgstr ""
msgid "ProductAnalytics|Something went wrong while loading product analytics usage data. Refresh the page to try again."
msgstr ""
+msgid "ProductAnalytics|Store, query, and visualize quantitative data to get insights into user value."
+msgstr ""
+
msgid "ProductAnalytics|The connection string for your Snowplow configurator instance."
msgstr ""
@@ -36479,6 +36488,9 @@ msgstr ""
msgid "ProductAnalytics|Usage by project"
msgstr ""
+msgid "ProductAnalytics|Use product analytics"
+msgstr ""
+
msgid "ProductAnalytics|Used to retrieve dashboard data from the Cube instance."
msgstr ""
@@ -49101,6 +49113,9 @@ msgstr ""
msgid "This epic does not exist or you don't have sufficient permission."
msgstr ""
+msgid "This feature is only allowed in groups that enable this feature."
+msgstr ""
+
msgid "This feature requires local storage to be enabled"
msgstr ""
@@ -53143,6 +53158,9 @@ msgstr ""
msgid "VulnerabilityExport|CVE"
msgstr ""
+msgid "VulnerabilityExport|CVSS Vectors"
+msgstr ""
+
msgid "VulnerabilityExport|CWE"
msgstr ""
@@ -55307,6 +55325,9 @@ msgstr ""
msgid "You do not belong to any projects yet."
msgstr ""
+msgid "You do not have access to AI features."
+msgstr ""
+
msgid "You do not have access to any projects for creating incidents."
msgstr ""
diff --git a/rubocop/cop/gitlab/mark_used_feature_flags.rb b/rubocop/cop/gitlab/mark_used_feature_flags.rb
index 65a1731fc28..4c6cc6c6778 100644
--- a/rubocop/cop/gitlab/mark_used_feature_flags.rb
+++ b/rubocop/cop/gitlab/mark_used_feature_flags.rb
@@ -16,9 +16,6 @@ module RuboCop
EXPERIMENT_METHODS = %i[
experiment
].freeze
- RUGGED_METHODS = %i[
- use_rugged?
- ].freeze
WORKER_METHODS = %i[
data_consistency
deduplicate
@@ -28,7 +25,7 @@ module RuboCop
push_force_frontend_feature_flag
limit_feature_flag=
limit_feature_flag_for_override=
- ].freeze + EXPERIMENT_METHODS + RUGGED_METHODS + WORKER_METHODS
+ ].freeze + EXPERIMENT_METHODS + WORKER_METHODS
RESTRICT_ON_SEND = FEATURE_METHODS + SELF_METHODS
@@ -119,7 +116,7 @@ module RuboCop
pair.key.value == :feature_flag
end&.value
else
- arg_index = rugged_method?(node) ? 3 : 2
+ arg_index = 2
node.children[arg_index]
end
@@ -156,10 +153,6 @@ module RuboCop
class_caller(node) == "Feature::Gitaly"
end
- def rugged_method?(node)
- RUGGED_METHODS.include?(method_name(node))
- end
-
def feature_method?(node)
FEATURE_METHODS.include?(method_name(node)) && (caller_is_feature?(node) || caller_is_feature_gitaly?(node))
end
diff --git a/rubocop/rubocop-code_reuse.yml b/rubocop/rubocop-code_reuse.yml
index f96de5caf99..2bd3339368d 100644
--- a/rubocop/rubocop-code_reuse.yml
+++ b/rubocop/rubocop-code_reuse.yml
@@ -24,6 +24,7 @@ CodeReuse/ActiveRecord:
- danger/**/*.rb
- lib/backup/**/*.rb
- lib/banzai/**/*.rb
+ - lib/click_house/migration_support/**/*.rb
- lib/gitlab/background_migration/**/*.rb
- lib/gitlab/cycle_analytics/**/*.rb
- lib/gitlab/counters/**/*.rb
diff --git a/scripts/internal_events/monitor.rb b/scripts/internal_events/monitor.rb
index 0f7e347ded7..e9ba1dbfbb7 100644
--- a/scripts/internal_events/monitor.rb
+++ b/scripts/internal_events/monitor.rb
@@ -135,6 +135,26 @@ def generate_metrics_table
)
end
+def render_screen(paused)
+ metrics_table = generate_metrics_table
+ events_table = generate_snowplow_table
+
+ print TTY::Cursor.clear_screen
+ print TTY::Cursor.move_to(0, 0)
+
+ puts "Updated at #{Time.current} #{'[PAUSED]' if paused}"
+ puts "Monitored events: #{ARGV.join(', ')}"
+ puts
+
+ puts metrics_table
+
+ puts events_table
+
+ puts
+ puts "Press \"p\" to toggle refresh. (It makes it easier to select and copy the tables)"
+ puts "Press \"q\" to quit"
+end
+
begin
snowplow_data
rescue Errno::ECONNREFUSED
@@ -144,21 +164,20 @@ rescue Errno::ECONNREFUSED
exit 1
end
+reader = TTY::Reader.new
+paused = false
+
begin
loop do
- metrics_table = generate_metrics_table
- events_table = generate_snowplow_table
-
- print TTY::Cursor.clear_screen
- print TTY::Cursor.move_to(0, 0)
-
- puts "Updated at #{Time.current}"
- puts "Monitored events: #{ARGV.join(', ')}"
- puts
-
- puts metrics_table
-
- puts events_table
+ case reader.read_keypress(nonblock: true)
+ when 'p'
+ paused = !paused
+ render_screen(paused)
+ when 'q'
+ break
+ end
+
+ render_screen(paused) unless paused
sleep 1
end
diff --git a/scripts/lint-rugged b/scripts/lint-rugged
deleted file mode 100755
index 73708b52772..00000000000
--- a/scripts/lint-rugged
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-ALLOWED = [
- # https://gitlab.com/gitlab-org/gitaly/issues/760
- 'lib/elasticsearch/git/repository.rb',
-
- # Needed to avoid using the git binary to validate a branch name
- 'lib/gitlab/git_ref_validator.rb',
-
- # Reverted Rugged calls due to Gitaly atop NFS performance
- # See https://docs.gitlab.com/ee/development/gitaly.html#legacy-rugged-code.
- 'lib/gitlab/git/rugged_impl/',
- 'lib/gitlab/gitaly_client/storage_settings.rb',
-
- # Needed to detect Rugged enabled: https://gitlab.com/gitlab-org/gitlab/issues/35371
- 'lib/gitlab/config_checker/puma_rugged_checker.rb',
-
- # Needed for GPG/X509 commit signature API
- #
- 'app/models/commit.rb',
- 'lib/api/entities/commit_signature.rb',
-
- # Needed for logging
- 'config/initializers/peek.rb',
- 'config/initializers/lograge.rb',
- 'lib/gitlab/grape_logging/loggers/perf_logger.rb',
- 'lib/gitlab/instrumentation_helper.rb',
- 'lib/gitlab/sidekiq_middleware/instrumentation_logger.rb',
- 'lib/gitlab/rugged_instrumentation.rb',
- 'lib/peek/views/rugged.rb'
-].freeze
-
-rugged_lines = IO.popen(%w[git grep -i -n rugged -- app config lib], &:read).lines
-rugged_lines = rugged_lines.select { |l| /^[^:]*\.rb:/ =~ l }
-rugged_lines = rugged_lines.reject { |l| l.start_with?(*ALLOWED) }
-rugged_lines = rugged_lines.reject { |l| /(include|prepend) Gitlab::Git::RuggedImpl/ =~ l }
-rugged_lines = rugged_lines.reject { |l| l.include?('Gitlab::ConfigChecker::PumaRuggedChecker.check') }
-rugged_lines = rugged_lines.reject do |line|
- code, _comment = line.split('# ', 2)
- code !~ /rugged/i
-end
-
-exit if rugged_lines.empty?
-
-puts "Using Rugged is only allowed in test and #{ALLOWED}\n\n"
-
-puts rugged_lines
-
-exit(false)
diff --git a/scripts/static-analysis b/scripts/static-analysis
index 41583166e04..fa394ac46c4 100755
--- a/scripts/static-analysis
+++ b/scripts/static-analysis
@@ -50,7 +50,6 @@ class StaticAnalysis
Task.new(%w[scripts/lint-conflicts.sh], 1),
Task.new(%w[yarn run block-dependencies], 1),
Task.new(%w[yarn run check-dependencies], 1),
- Task.new(%w[scripts/lint-rugged], 1),
Task.new(%w[scripts/gemfile_lock_changed.sh], 1)
].compact.freeze
diff --git a/spec/click_house/migration_support/migration_context_spec.rb b/spec/click_house/migration_support/migration_context_spec.rb
new file mode 100644
index 00000000000..9df8391270d
--- /dev/null
+++ b/spec/click_house/migration_support/migration_context_spec.rb
@@ -0,0 +1,233 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_relative '../../../lib/click_house/migration_support/migration_error'
+
+RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
+ click_house: :without_migrations, feature_category: :database do
+ include ClickHouseHelpers
+
+ # We don't need to delete data since we don't modify Postgres data
+ self.use_transactional_tests = false
+
+ let_it_be(:schema_migration) { ClickHouse::MigrationSupport::SchemaMigration }
+
+ let(:migrations_base_dir) { 'click_house/migrations' }
+ let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
+ let(:migration_context) { described_class.new(migrations_dir, schema_migration) }
+ let(:target_version) { nil }
+
+ after do
+ clear_consts(expand_fixture_path(migrations_base_dir))
+ end
+
+ describe 'performs migrations' do
+ subject(:migration) { migrate(target_version, migration_context) }
+
+ describe 'when creating a table' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'creates a table' do
+ expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
+
+ table_schema = describe_table('some')
+ expect(schema_migrations).to contain_exactly(a_hash_including(version: '1', active: 1))
+ expect(table_schema).to match({
+ id: a_hash_including(type: 'UInt64'),
+ date: a_hash_including(type: 'Date')
+ })
+ end
+ end
+
+ describe 'when dropping a table' do
+ let(:migrations_dirname) { 'drop_table' }
+ let(:target_version) { 2 }
+
+ it 'drops table' do
+ migrate(1, migration_context)
+ expect(table_names).to include('some')
+
+ migration
+ expect(table_names).not_to include('some')
+ end
+ end
+
+ context 'when a migration raises an error' do
+ let(:migrations_dirname) { 'migration_with_error' }
+
+ it 'passes the error to caller as a StandardError' do
+ expect { migration }.to raise_error StandardError,
+ "An error has occurred, all later migrations canceled:\n\nA migration error happened"
+ expect(schema_migrations).to be_empty
+ end
+ end
+
+ context 'when a migration targets an unknown database' do
+ let(:migrations_dirname) { 'plain_table_creation_on_invalid_database' }
+
+ it 'raises ConfigurationError' do
+ expect { migration }.to raise_error ClickHouse::Client::ConfigurationError,
+ "The database 'unknown_database' is not configured"
+ end
+ end
+
+ context 'when migrations target multiple databases' do
+ let_it_be(:config) { ClickHouse::Client::Configuration.new }
+ let_it_be(:main_db_config) { [:main, config] }
+ let_it_be(:another_db_config) { [:another_db, config] }
+ let_it_be(:another_database_name) { 'gitlab_clickhouse_test_2' }
+
+ let(:migrations_dirname) { 'migrations_over_multiple_databases' }
+
+ before(:context) do
+ # Ensure we have a second database to run the test on
+ clone_database_configuration(:main, :another_db, another_database_name, config)
+
+ with_net_connect_allowed do
+ ClickHouse::Client.execute("CREATE DATABASE IF NOT EXISTS #{another_database_name}", :main, config)
+ end
+ end
+
+ after(:context) do
+ with_net_connect_allowed do
+ ClickHouse::Client.execute("DROP DATABASE #{another_database_name}", :another_db, config)
+ end
+ end
+
+ around do |example|
+ clear_db(configuration: config)
+
+ previous_config = ClickHouse::Migration.client_configuration
+ ClickHouse::Migration.client_configuration = config
+
+ example.run
+ ensure
+ ClickHouse::Migration.client_configuration = previous_config
+ end
+
+ def clone_database_configuration(source_db_identifier, target_db_identifier, target_db_name, target_config)
+ raw_config = Rails.application.config_for(:click_house)
+ raw_config.each do |database_identifier, db_config|
+ register_database(target_config, database_identifier, db_config)
+ end
+
+ target_db_config = raw_config[source_db_identifier].merge(database: target_db_name)
+ register_database(target_config, target_db_identifier, target_db_config)
+ target_config.http_post_proc = ClickHouse::Client.configuration.http_post_proc
+ target_config.json_parser = ClickHouse::Client.configuration.json_parser
+ target_config.logger = ::Logger.new(IO::NULL)
+ end
+
+ it 'registers migrations on respective database', :aggregate_failures do
+ expect { migrate(2, migration_context) }
+ .to change { active_schema_migrations_count(*main_db_config) }.from(0).to(1)
+ .and change { active_schema_migrations_count(*another_db_config) }.from(0).to(1)
+
+ expect(schema_migrations(*another_db_config)).to contain_exactly(a_hash_including(version: '2', active: 1))
+ expect(table_names(*main_db_config)).not_to include('some_on_another_db')
+ expect(table_names(*another_db_config)).not_to include('some')
+
+ expect(describe_table('some', *main_db_config)).to match({
+ id: a_hash_including(type: 'UInt64'),
+ date: a_hash_including(type: 'Date')
+ })
+ expect(describe_table('some_on_another_db', *another_db_config)).to match({
+ id: a_hash_including(type: 'UInt64'),
+ date: a_hash_including(type: 'Date')
+ })
+
+ expect { migrate(nil, migration_context) }
+ .to change { active_schema_migrations_count(*main_db_config) }.to(2)
+ .and not_change { active_schema_migrations_count(*another_db_config) }
+
+ expect(schema_migrations(*main_db_config)).to match([
+ a_hash_including(version: '1', active: 1),
+ a_hash_including(version: '3', active: 1)
+ ])
+ expect(schema_migrations(*another_db_config)).to match_array(a_hash_including(version: '2', active: 1))
+
+ expect(describe_table('some', *main_db_config)).to match({
+ id: a_hash_including(type: 'UInt64'),
+ timestamp: a_hash_including(type: 'Date')
+ })
+ end
+ end
+
+ context 'when target_version is incorrect' do
+ let(:target_version) { 2 }
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'raises UnknownMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::UnknownMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+
+ context 'when migrations with duplicate name exist' do
+ let(:migrations_dirname) { 'duplicate_name' }
+
+ it 'raises DuplicateMigrationNameError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::DuplicateMigrationNameError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+
+ context 'when migrations with duplicate version exist' do
+ let(:migrations_dirname) { 'duplicate_version' }
+
+ it 'raises DuplicateMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::DuplicateMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+ end
+
+ describe 'performs rollbacks' do
+ subject(:migration) { rollback(target_version, migration_context) }
+
+ before do
+ migrate(nil, migration_context)
+ end
+
+ context 'when migrating back all the way to 0' do
+ let(:target_version) { 0 }
+
+ context 'when down method is present' do
+ let(:migrations_dirname) { 'table_creation_with_down_method' }
+
+ it 'removes migration and performs down method' do
+ expect(table_names).to include('some')
+
+ expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
+
+ expect(table_names).not_to include('some')
+ expect(schema_migrations).to contain_exactly(a_hash_including(version: '1', active: 0))
+ end
+ end
+
+ context 'when down method is missing' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'removes migration ignoring missing down method' do
+ expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
+ .and not_change { table_names & %w[some] }.from(%w[some])
+ end
+ end
+ end
+
+ context 'when target_version is incorrect' do
+ let(:target_version) { -1 }
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'raises UnknownMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::UnknownMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 1
+ end
+ end
+ end
+end
diff --git a/spec/factories/ci/reports/security/findings.rb b/spec/factories/ci/reports/security/findings.rb
index 202c2789b45..670d833c1f8 100644
--- a/spec/factories/ci/reports/security/findings.rb
+++ b/spec/factories/ci/reports/security/findings.rb
@@ -10,7 +10,7 @@ FactoryBot.define do
metadata_version { 'sast:1.0' }
name { 'Cipher with no integrity' }
report_type { :sast }
- cvss { [{ vendor: "GitLab", vector_string: "CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:L/I:L/A:N" }] }
+ cvss { [{ vendor: "GitLab", vector: "CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:L/I:L/A:N" }] }
original_data do
{
description: "The cipher does not provide data integrity update 1",
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_pypi_metadata.json b/spec/fixtures/api/schemas/graphql/packages/package_pypi_metadata.json
index cecebe3a0e9..c9b941ed8fa 100644
--- a/spec/fixtures/api/schemas/graphql/packages/package_pypi_metadata.json
+++ b/spec/fixtures/api/schemas/graphql/packages/package_pypi_metadata.json
@@ -1,13 +1,51 @@
{
"type": "object",
"additionalProperties": false,
- "required": ["id"],
+ "required": [
+ "id"
+ ],
"properties": {
"id": {
"type": "string"
},
+ "authorEmail": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "description": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "descriptionContentType": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "keywords": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "metadataVersion": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
"requiredPython": {
"type": "string"
+ },
+ "summary": {
+ "type": [
+ "string",
+ "null"
+ ]
}
}
}
diff --git a/spec/fixtures/click_house/migrations/drop_table/1_create_some_table.rb b/spec/fixtures/click_house/migrations/drop_table/1_create_some_table.rb
new file mode 100644
index 00000000000..14ef80cbdb7
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/drop_table/1_create_some_table.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTable < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE some (
+ id UInt64,
+ date Date
+ ) ENGINE = Memory
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/drop_table/2_drop_some_table.rb b/spec/fixtures/click_house/migrations/drop_table/2_drop_some_table.rb
new file mode 100644
index 00000000000..82045b08e21
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/drop_table/2_drop_some_table.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class DropSomeTable < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ DROP TABLE some
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/duplicate_name/1_create_some_table.rb b/spec/fixtures/click_house/migrations/duplicate_name/1_create_some_table.rb
new file mode 100644
index 00000000000..14ef80cbdb7
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/duplicate_name/1_create_some_table.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTable < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE some (
+ id UInt64,
+ date Date
+ ) ENGINE = Memory
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/duplicate_name/2_create_some_table.rb b/spec/fixtures/click_house/migrations/duplicate_name/2_create_some_table.rb
new file mode 100644
index 00000000000..be6c1905502
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/duplicate_name/2_create_some_table.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTable2 < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE some (
+ id UInt64,
+ date Date
+ ) ENGINE = Memory
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/duplicate_version/1_create_some_table.rb b/spec/fixtures/click_house/migrations/duplicate_version/1_create_some_table.rb
new file mode 100644
index 00000000000..14ef80cbdb7
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/duplicate_version/1_create_some_table.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTable < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE some (
+ id UInt64,
+ date Date
+ ) ENGINE = Memory
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/duplicate_version/1_drop_some_table.rb b/spec/fixtures/click_house/migrations/duplicate_version/1_drop_some_table.rb
new file mode 100644
index 00000000000..82045b08e21
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/duplicate_version/1_drop_some_table.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class DropSomeTable < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ DROP TABLE some
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/migration_with_error/1_migration_with_error.rb b/spec/fixtures/click_house/migrations/migration_with_error/1_migration_with_error.rb
new file mode 100644
index 00000000000..b8ae3df2085
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/migration_with_error/1_migration_with_error.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class MigrationWithError < ClickHouse::Migration
+ def up
+ raise ClickHouse::Client::DatabaseError, 'A migration error happened'
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/1_create_some_table_on_main_db.rb b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/1_create_some_table_on_main_db.rb
new file mode 100644
index 00000000000..98d71d9507b
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/1_create_some_table_on_main_db.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTableOnMainDb < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE some (
+ id UInt64,
+ date Date
+ ) ENGINE = MergeTree
+ PRIMARY KEY(id)
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/2_create_some_table_on_another_db.rb b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/2_create_some_table_on_another_db.rb
new file mode 100644
index 00000000000..b8cd86a67f5
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/2_create_some_table_on_another_db.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTableOnAnotherDb < ClickHouse::Migration
+ SCHEMA = :another_db
+
+ def up
+ execute <<~SQL
+ CREATE TABLE some_on_another_db (
+ id UInt64,
+ date Date
+ ) ENGINE = Memory
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/3_change_some_table_on_main_db.rb b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/3_change_some_table_on_main_db.rb
new file mode 100644
index 00000000000..9112ab79fc5
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/migrations_over_multiple_databases/3_change_some_table_on_main_db.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class ChangeSomeTableOnMainDb < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ ALTER TABLE some RENAME COLUMN date to timestamp
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/plain_table_creation/1_create_some_table.rb b/spec/fixtures/click_house/migrations/plain_table_creation/1_create_some_table.rb
new file mode 100644
index 00000000000..14ef80cbdb7
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/plain_table_creation/1_create_some_table.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTable < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE some (
+ id UInt64,
+ date Date
+ ) ENGINE = Memory
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/plain_table_creation_on_invalid_database/1_create_some_table.rb b/spec/fixtures/click_house/migrations/plain_table_creation_on_invalid_database/1_create_some_table.rb
new file mode 100644
index 00000000000..ee900ef24c5
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/plain_table_creation_on_invalid_database/1_create_some_table.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTable < ClickHouse::Migration
+ SCHEMA = :unknown_database
+
+ def up
+ execute <<~SQL
+ CREATE TABLE some (
+ id UInt64,
+ date Date
+ ) ENGINE = Memory
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/click_house/migrations/table_creation_with_down_method/1_create_some_table.rb b/spec/fixtures/click_house/migrations/table_creation_with_down_method/1_create_some_table.rb
new file mode 100644
index 00000000000..7ac92b9ee38
--- /dev/null
+++ b/spec/fixtures/click_house/migrations/table_creation_with_down_method/1_create_some_table.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
+class CreateSomeTable < ClickHouse::Migration
+ def up
+ execute <<~SQL
+ CREATE TABLE some (
+ id UInt64,
+ date Date
+ ) ENGINE = Memory
+ SQL
+ end
+
+ def down
+ execute <<~SQL
+ DROP TABLE some
+ SQL
+ end
+end
+# rubocop: enable Gitlab/NamespacedClass
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report.json b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
index 47e2a503b02..35db4779920 100644
--- a/spec/fixtures/security_reports/master/gl-common-scanning-report.json
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
@@ -12,10 +12,10 @@
"id": "gemnasium",
"name": "Gemnasium"
},
- "cvss": [
+ "cvss_vectors": [
{
"vendor": "GitLab",
- "vector_string": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H"
+ "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H"
}
],
"location": {
diff --git a/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js b/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
index aef06a74fdd..086a4bc1ec0 100644
--- a/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
+++ b/spec/frontend/authentication/two_factor_auth/components/recovery_codes_spec.js
@@ -21,7 +21,7 @@ describe('RecoveryCodes', () => {
propsData: {
codes,
profileAccountPath,
- ...(options?.propsData || {}),
+ ...options?.propsData,
},
...options,
}),
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index f09003edc0c..63d9a2471b6 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -65,7 +65,7 @@ describe('diffs/components/app', () => {
const provide = {
...provisions,
glFeatures: {
- ...(provisions.glFeatures || {}),
+ ...provisions.glFeatures,
},
};
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index b0d98e0e4a6..d6539a5bffa 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -103,7 +103,7 @@ describe('DiffFileHeader component', () => {
const createComponent = ({ props, options = {} } = {}) => {
mockStoreConfig = cloneDeep(defaultMockStoreConfig);
- const store = new Vuex.Store({ ...mockStoreConfig, ...(options.store || {}) });
+ const store = new Vuex.Store({ ...mockStoreConfig, ...options.store });
wrapper = shallowMount(DiffFileHeader, {
propsData: {
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 51f8f04fc11..8cf376b13e3 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -521,7 +521,7 @@ describe('DiffsStoreActions', () => {
return testAction(
diffActions.fetchDiffFilesBatch,
{},
- { endpointBatch, diffViewType: 'inline', diffFiles: [] },
+ { endpointBatch, diffViewType: 'inline', diffFiles: [], perPage: 5 },
[
{ type: types.SET_BATCH_LOADING_STATE, payload: 'loading' },
{ type: types.SET_RETRIEVING_BATCHES, payload: true },
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
index 91dc02f8f39..6c03f91b73d 100644
--- a/spec/frontend/packages_and_registries/package_registry/mock_data.js
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -308,7 +308,7 @@ export const packageMetadataQuery = (packageType) => {
id: 'gid://gitlab/Packages::Package/111',
packageType,
metadata: {
- ...(packageTypeMetadataQueryMapping[packageType]?.() ?? {}),
+ ...packageTypeMetadataQueryMapping[packageType]?.(),
},
__typename: 'PackageDetailsType',
},
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index 50d09481b93..f6ecee4cd53 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -51,7 +51,7 @@ describe('Interval Pattern Input Component', () => {
beforeEach(() => {
oldWindowGl = window.gl;
window.gl = {
- ...(window.gl || {}),
+ ...window.gl,
pipelineScheduleFieldErrors: {
updateFormValidityState: jest.fn(),
},
diff --git a/spec/frontend/search/sidebar/components/label_filter_spec.js b/spec/frontend/search/sidebar/components/label_filter_spec.js
index 07b2e176610..9d2a0c5e739 100644
--- a/spec/frontend/search/sidebar/components/label_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/label_filter_spec.js
@@ -13,7 +13,11 @@ import Vue from 'vue';
import Vuex from 'vuex';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { MOCK_QUERY, MOCK_LABEL_AGGREGATIONS } from 'jest/search/mock_data';
+import {
+ MOCK_QUERY,
+ MOCK_LABEL_AGGREGATIONS,
+ MOCK_FILTERED_UNSELECTED_LABELS,
+} from 'jest/search/mock_data';
import LabelFilter from '~/search/sidebar/components/label_filter/index.vue';
import LabelDropdownItems from '~/search/sidebar/components/label_filter/label_dropdown_items.vue';
@@ -52,8 +56,15 @@ describe('GlobalSearchSidebarLabelFilter', () => {
let trackingSpy;
let config;
let store;
+ let state;
+
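+ // gettersStubs lets individual specs override store getters (for example, to return mock label pills).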
+ const createComponent = (initialState, gettersStubs) => {
+ state = createState({
+ query: MOCK_QUERY,
+ aggregations: MOCK_LABEL_AGGREGATIONS,
+ ...initialState,
+ });
- const createComponent = (initialState) => {
config = {
actions: {
...actions,
@@ -62,13 +73,12 @@ describe('GlobalSearchSidebarLabelFilter', () => {
setLabelFilterSearch: actionSpies.setLabelFilterSearch,
setQuery: actionSpies.setQuery,
},
- getters,
+ state,
+ getters: {
+ ...getters,
+ ...gettersStubs,
+ },
mutations,
- state: createState({
- query: MOCK_QUERY,
- aggregations: MOCK_LABEL_AGGREGATIONS,
- ...initialState,
- }),
};
store = new Vuex.Store(config);
@@ -95,6 +105,10 @@ describe('GlobalSearchSidebarLabelFilter', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findNoLabelsFoundMessage = () => wrapper.findComponentByTestId('no-labels-found-message');
+ const findLabelPills = () => wrapper.findAllComponentsByTestId('label');
+ const findSelectedUnappliedLabelPills = () => wrapper.findAllComponentsByTestId('unapplied-label');
+ const findClosedUnappliedPills = () => wrapper.findAllComponentsByTestId('unselected-label');
+
describe('Renders correctly closed', () => {
beforeEach(async () => {
createComponent();
@@ -349,5 +363,42 @@ describe('GlobalSearchSidebarLabelFilter', () => {
});
});
});
+
+ describe('newly selected and unapplied labels show as pills above dropdown', () => {
+ beforeEach(() => {
+ const mockGetters = { unappliedNewLabels: jest.fn(() => MOCK_FILTERED_UNSELECTED_LABELS) };
+ createComponent({}, mockGetters);
+ });
+
+ it('has correct pills', () => {
+ expect(findSelectedUnappliedLabelPills()).toHaveLength(2);
+ });
+ });
+
+ describe('applied labels show as pills above dropdown', () => {
+ beforeEach(() => {
+ const mockGetters = {
+ appliedSelectedLabels: jest.fn(() => MOCK_FILTERED_UNSELECTED_LABELS),
+ };
+ createComponent({}, mockGetters);
+ });
+
+ it('has correct pills', () => {
+ expect(findLabelPills()).toHaveLength(2);
+ });
+ });
+
+ describe('closed unapplied labels show as pills above dropdown', () => {
+ beforeEach(() => {
+ const mockGetters = {
+ unselectedLabels: jest.fn(() => MOCK_FILTERED_UNSELECTED_LABELS),
+ };
+ createComponent({}, mockGetters);
+ });
+
+ it('has correct pills', () => {
+ expect(findClosedUnappliedPills()).toHaveLength(2);
+ });
+ });
});
});
diff --git a/spec/frontend/search/store/getters_spec.js b/spec/frontend/search/store/getters_spec.js
index 571525bd025..8e988ce5c4a 100644
--- a/spec/frontend/search/store/getters_spec.js
+++ b/spec/frontend/search/store/getters_spec.js
@@ -134,4 +134,23 @@ describe('Global Search Store Getters', () => {
]);
});
});
+
+ describe('unselectedLabels', () => {
+ it('returns all labels that are not selected', () => {
+ state.query.labels = ['60'];
+ expect(getters.unselectedLabels(state)).toStrictEqual([MOCK_LABEL_SEARCH_RESULT]);
+ });
+ });
+
+ describe('unappliedNewLabels', () => {
+ it('returns all labels that are selected but not applied', () => {
+ // Labels already applied via the URL query
+ state.urlQuery.labels = ['37', '60'];
+ // Labels currently selected in the dropdown (applied plus newly selected)
+ state.query.labels = ['37', '6', '73', '60'];
+ // Only the newly selected, not-yet-applied labels are returned
+ expect(getters.unappliedNewLabels(state).map(({ key }) => key)).toStrictEqual(['6', '73']);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js b/spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js
index 3ce12caf95a..1f579a1e945 100644
--- a/spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js
+++ b/spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js
@@ -19,7 +19,7 @@ describe('GitLab Feature Flags Mixin', () => {
wrapper = shallowMount(component, {
provide: {
- glFeatures: { ...(gon.features || {}) },
+ glFeatures: { ...gon.features },
},
});
});
diff --git a/spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb b/spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb
index c49366527e1..19fc0c7fc4c 100644
--- a/spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/catalog/resource_resolver_spec.rb
@@ -11,37 +11,112 @@ RSpec.describe Resolvers::Ci::Catalog::ResourceResolver, feature_category: :pipe
let_it_be(:user) { create(:user) }
describe '#resolve' do
- context 'when the user can read code on the catalog resource project' do
- before_all do
- namespace.add_developer(user)
+ context 'when id argument is provided' do
+ context 'when the user is authorised to view the resource' do
+ before_all do
+ namespace.add_developer(user)
+ end
+
+ context 'when resource is found' do
+ it 'returns a single CI/CD Catalog resource' do
+ result = resolve(described_class, ctx: { current_user: user },
+ args: { id: resource.to_global_id.to_s })
+
+ expect(result.id).to eq(resource.id)
+ expect(result.class).to eq(Ci::Catalog::Resource)
+ end
+ end
+
+ context 'when resource is not found' do
+ it 'raises ResourceNotAvailable error' do
+ result = resolve(described_class, ctx: { current_user: user },
+ args: { id: "gid://gitlab/Ci::Catalog::Resource/not-a-real-id" })
+
+ expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
end
- context 'when resource is found' do
- it 'returns a single CI/CD Catalog resource' do
+ context 'when user is not authorised to view the resource' do
+ it 'raises ResourceNotAvailable error' do
result = resolve(described_class, ctx: { current_user: user },
args: { id: resource.to_global_id.to_s })
- expect(result.id).to eq(resource.id)
- expect(result.class).to eq(Ci::Catalog::Resource)
+ expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+
+ context 'when full_path argument is provided' do
+ context 'when the user is authorised to view the resource' do
+ before_all do
+ namespace.add_developer(user)
+ end
+
+ context 'when resource is found' do
+ it 'returns a single CI/CD Catalog resource' do
+ result = resolve(described_class, ctx: { current_user: user },
+ args: { full_path: resource.project.full_path })
+
+ expect(result.id).to eq(resource.id)
+ expect(result.class).to eq(Ci::Catalog::Resource)
+ end
+ end
+
+ context 'when resource is not found' do
+ it 'raises ResourceNotAvailable error' do
+ result = resolve(described_class, ctx: { current_user: user },
+ args: { full_path: "project/non_existing_resource" })
+
+ expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when project is not a catalog resource' do
+ let_it_be(:project) { create(:project, :private, namespace: namespace) }
+
+ it 'raises ResourceNotAvailable error' do
+ result = resolve(described_class, ctx: { current_user: user }, args: { full_path: project.full_path })
+
+ expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
end
end
- context 'when resource does not exist' do
+ context 'when user is not authorised to view the resource' do
it 'raises ResourceNotAvailable error' do
result = resolve(described_class, ctx: { current_user: user },
- args: { id: "gid://gitlab/Ci::Catalog::Resource/not-a-real-id" })
+ args: { full_path: resource.project.full_path })
expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end
- context 'when the user cannot read code on the catalog resource project' do
- it 'raises ResourceNotAvailable error' do
- result = resolve(described_class, ctx: { current_user: user },
- args: { id: resource.to_global_id.to_s })
+ context 'when neither id nor full_path argument is provided' do
+ before_all do
+ namespace.add_developer(user)
+ end
+
+ it 'raises ArgumentError' do
+ expect_graphql_error_to_be_created(::Gitlab::Graphql::Errors::ArgumentError,
+ "Exactly one of 'id' or 'full_path' arguments is required.") do
+ resolve(described_class, ctx: { current_user: user },
+ args: {})
+ end
+ end
+ end
+
+ context 'when both full_path and id arguments are provided' do
+ before_all do
+ namespace.add_developer(user)
+ end
- expect(result).to be_a(::Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'raises ArgumentError' do
+ expect_graphql_error_to_be_created(::Gitlab::Graphql::Errors::ArgumentError,
+ "Exactly one of 'id' or 'full_path' arguments is required.") do
+ resolve(described_class, ctx: { current_user: user },
+ args: { full_path: resource.project.full_path, id: resource.to_global_id.to_s })
+ end
end
end
end
diff --git a/spec/graphql/types/packages/pypi/metadatum_type_spec.rb b/spec/graphql/types/packages/pypi/metadatum_type_spec.rb
index 16fb3ef2098..831307490a9 100644
--- a/spec/graphql/types/packages/pypi/metadatum_type_spec.rb
+++ b/spec/graphql/types/packages/pypi/metadatum_type_spec.rb
@@ -5,7 +5,14 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['PypiMetadata'] do
it 'includes pypi metadatum fields' do
expected_fields = %w[
- id required_python
+ author_email
+ description
+ description_content_type
+ id
+ keywords
+ metadata_version
+ required_python
+ summary
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index 0073d2ebe80..00639d9574b 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -517,6 +517,44 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do
end
end
+ describe '.fetch_relative_path' do
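+ # gRPC metadata keys ending in '-bin' hold binary values that are base64-encoded on the wire,
+ # so the spec asserts only the raw value stored under the key.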
+ subject { described_class.request_kwargs('default', timeout: 1)[:metadata]['relative-path-bin'] }
+
+ let(:relative_path) { 'relative_path' }
+
+ context 'when RequestStore is disabled' do
+ it 'does not set a relative path' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when RequestStore is enabled', :request_store do
+ context 'when RequestStore is empty' do
+ it 'does not set a relative path' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when RequestStore contains a relative_path value' do
+ before do
+ Gitlab::SafeRequestStore[:gitlab_git_relative_path] = relative_path
+ end
+
+ it 'sets a base64 encoded version of relative_path' do
+ is_expected.to eq(relative_path)
+ end
+
+ context 'when relative_path is empty' do
+ let(:relative_path) { '' }
+
+ it 'does not set a relative path' do
+ is_expected.to be_nil
+ end
+ end
+ end
+ end
+ end
+
context 'gitlab_git_env' do
let(:policy) { 'gitaly-route-repository-accessor-policy' }
diff --git a/spec/lib/gitlab/rugged_instrumentation_spec.rb b/spec/lib/gitlab/rugged_instrumentation_spec.rb
deleted file mode 100644
index 393bb957aba..00000000000
--- a/spec/lib/gitlab/rugged_instrumentation_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::RuggedInstrumentation, :request_store do
- subject { described_class }
-
- describe '.query_time' do
- it 'increments query times' do
- subject.add_query_time(0.4510004)
- subject.add_query_time(0.3220004)
-
- expect(subject.query_time).to eq(0.773001)
- expect(subject.query_time_ms).to eq(773.0)
- end
- end
-
- describe '.increment_query_count' do
- it 'tracks query counts' do
- expect(subject.query_count).to eq(0)
-
- 2.times { subject.increment_query_count }
-
- expect(subject.query_count).to eq(2)
- end
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 172c4f27900..2e07fa100e8 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -238,13 +238,11 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
- context 'with Gitaly, Rugged, and Redis calls' do
+ context 'with Gitaly, and Redis calls' do
let(:timing_data) do
{
gitaly_calls: 10,
gitaly_duration_s: 10000,
- rugged_calls: 1,
- rugged_duration_s: 5000,
redis_calls: 3,
redis_duration_s: 1234
}
@@ -261,7 +259,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
- it 'logs with Gitaly and Rugged timing data', :aggregate_failures do
+ it 'logs with Gitaly timing data', :aggregate_failures do
travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(expected_end_payload).ordered
diff --git a/spec/lib/peek/views/rugged_spec.rb b/spec/lib/peek/views/rugged_spec.rb
deleted file mode 100644
index 31418b5fc81..00000000000
--- a/spec/lib/peek/views/rugged_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Peek::Views::Rugged, :request_store do
- subject { described_class.new }
-
- let(:project) { create(:project) }
-
- before do
- allow(Gitlab::PerformanceBar).to receive(:enabled_for_request?).and_return(true)
- end
-
- it 'returns no results' do
- expect(subject.results).to eq({})
- end
-
- it 'returns aggregated results' do
- ::Gitlab::RuggedInstrumentation.add_query_time(1.234)
- ::Gitlab::RuggedInstrumentation.increment_query_count
- ::Gitlab::RuggedInstrumentation.increment_query_count
-
- ::Gitlab::RuggedInstrumentation.add_call_details(feature: :rugged_test,
- args: [project.repository.raw, 'HEAD'],
- duration: 0.123)
- ::Gitlab::RuggedInstrumentation.add_call_details(feature: :rugged_test2,
- args: [project.repository, 'refs/heads/master'],
- duration: 0.456)
-
- results = subject.results
- expect(results[:calls]).to eq(2)
- expect(results[:duration]).to eq('1234.00ms')
- expect(results[:details].count).to eq(2)
-
- expected = [
- [project.repository.raw.to_s, "HEAD"],
- [project.repository.to_s, "refs/heads/master"]
- ]
-
- expect(results[:details].map { |data| data[:args] }).to match_array(expected)
- end
-end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 19087d00991..2e552c8d524 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -5229,16 +5229,34 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
subject { build.doom! }
let(:traits) { [] }
- let(:build) { create(:ci_build, *traits, pipeline: pipeline) }
+ let(:build) do
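+ # Create the build one minute in the past so doom! produces visibly newer timestamps.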
+ travel(-1.minute) do
+ create(:ci_build, *traits, pipeline: pipeline)
+ end
+ end
- it 'updates status and failure_reason', :aggregate_failures do
- subject
+ it 'updates status, failure_reason, finished_at and updated_at', :aggregate_failures do
+ old_timestamp = build.updated_at
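+ # Freeze time while dooming the build and capture the frozen timestamp for comparison.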
+ new_timestamp = \
+ freeze_time do
+ Time.current.tap do
+ subject
+ end
+ end
+
+ expect(old_timestamp).not_to eq(new_timestamp)
+ expect(build.updated_at).to eq(new_timestamp)
+ expect(build.finished_at).to eq(new_timestamp)
expect(build.status).to eq("failed")
expect(build.failure_reason).to eq("data_integrity_failure")
end
- it 'logs a message' do
+ it 'logs a message and increments the job failure counter', :aggregate_failures do
+ expect(::Gitlab::Ci::Pipeline::Metrics.job_failure_reason_counter)
+ .to(receive(:increment))
+ .with(reason: :data_integrity_failure)
+
expect(Gitlab::AppLogger)
.to receive(:info)
.with(a_hash_including(message: 'Build doomed', class: build.class.name, build_id: build.id))
@@ -5273,12 +5291,20 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
context 'with running builds' do
let(:traits) { [:picked] }
- it 'drops associated runtime metadata' do
+ it 'drops associated runtime metadata', :aggregate_failures do
subject
expect(build.reload.runtime_metadata).not_to be_present
end
end
+
+ context 'with finished builds' do
+ let(:traits) { [:finished] }
+
+ it 'does not update finished_at' do
+ expect { subject }.not_to change { build.reload.finished_at }
+ end
+ end
end
it 'does not generate cross DB queries when a record is created via FactoryBot' do
diff --git a/spec/models/packages/pypi/metadatum_spec.rb b/spec/models/packages/pypi/metadatum_spec.rb
index 6c83c4ed143..6c93f84124f 100644
--- a/spec/models/packages/pypi/metadatum_spec.rb
+++ b/spec/models/packages/pypi/metadatum_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Packages::Pypi::Metadatum, type: :model do
+RSpec.describe Packages::Pypi::Metadatum, type: :model, feature_category: :package_registry do
describe 'relationships' do
it { is_expected.to belong_to(:package) }
end
@@ -9,8 +9,29 @@ RSpec.describe Packages::Pypi::Metadatum, type: :model do
describe 'validations' do
it { is_expected.to validate_presence_of(:package) }
it { is_expected.to allow_value('').for(:required_python) }
- it { is_expected.not_to allow_value(nil).for(:required_python) }
- it { is_expected.not_to allow_value('a' * 256).for(:required_python) }
+ it { is_expected.to validate_length_of(:required_python).is_at_most(described_class::MAX_REQUIRED_PYTHON_LENGTH) }
+ it { is_expected.to allow_value('').for(:keywords) }
+ it { is_expected.to allow_value(nil).for(:keywords) }
+ it { is_expected.to validate_length_of(:keywords).is_at_most(described_class::MAX_KEYWORDS_LENGTH) }
+ it { is_expected.to allow_value('').for(:metadata_version) }
+ it { is_expected.to allow_value(nil).for(:metadata_version) }
+ it { is_expected.to validate_length_of(:metadata_version).is_at_most(described_class::MAX_METADATA_VERSION_LENGTH) }
+ it { is_expected.to allow_value('').for(:author_email) }
+ it { is_expected.to allow_value(nil).for(:author_email) }
+ it { is_expected.to validate_length_of(:author_email).is_at_most(described_class::MAX_AUTHOR_EMAIL_LENGTH) }
+ it { is_expected.to allow_value('').for(:summary) }
+ it { is_expected.to allow_value(nil).for(:summary) }
+ it { is_expected.to validate_length_of(:summary).is_at_most(described_class::MAX_SUMMARY_LENGTH) }
+ it { is_expected.to allow_value('').for(:description) }
+ it { is_expected.to allow_value(nil).for(:description) }
+ it { is_expected.to validate_length_of(:description).is_at_most(described_class::MAX_DESCRIPTION_LENGTH) }
+ it { is_expected.to allow_value('').for(:description_content_type) }
+ it { is_expected.to allow_value(nil).for(:description_content_type) }
+
+ it {
+ is_expected.to validate_length_of(:description_content_type)
+ .is_at_most(described_class::MAX_DESCRIPTION_CONTENT_TYPE)
+ }
describe '#pypi_package_type' do
it 'will not allow a package with a different package_type' do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index ededefebb6a..e59633b6d35 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -560,6 +560,20 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
end
end
+ context 'when Gitaly provides a relative_path argument', :request_store do
+ subject { push(key, project, relative_path: relative_path) }
+
+ let(:relative_path) { 'relative_path' }
+
+ it 'stores relative_path value in RequestStore' do
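+ # Allow all RequestStore writes to pass through while asserting the relative-path key is set.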
+ allow(Gitlab::SafeRequestStore).to receive(:[]=).and_call_original
+ expect(Gitlab::SafeRequestStore).to receive(:[]=).with(:gitlab_git_relative_path, relative_path)
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
context "git push with project.wiki" do
subject { push(key, project.wiki, env: env.to_json) }
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index 8dab9d555cf..feb24a4e73f 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -826,48 +826,20 @@ RSpec.describe API::Members, feature_category: :groups_and_projects do
end
end
- context 'with admin_group_member FF disabled' do
- before do
- stub_feature_flags(admin_group_member: false)
- end
-
- it_behaves_like 'POST /:source_type/:id/members', 'project' do
- let(:source) { project }
- end
-
- it_behaves_like 'POST /:source_type/:id/members', 'group' do
- let(:source) { group }
- end
-
- it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'project' do
- let(:source) { project }
- end
-
- it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'group' do
- let(:source) { group }
- end
+ it_behaves_like 'POST /:source_type/:id/members', 'project' do
+ let(:source) { project }
end
- context 'with admin_group_member FF enabled' do
- before do
- stub_feature_flags(admin_group_member: true)
- end
-
- it_behaves_like 'POST /:source_type/:id/members', 'project' do
- let(:source) { project }
- end
-
- it_behaves_like 'POST /:source_type/:id/members', 'group' do
- let(:source) { group }
- end
+ it_behaves_like 'POST /:source_type/:id/members', 'group' do
+ let(:source) { group }
+ end
- it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'project' do
- let(:source) { project }
- end
+ it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'project' do
+ let(:source) { project }
+ end
- it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'group' do
- let(:source) { group }
- end
+ it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'group' do
+ let(:source) { group }
end
it_behaves_like 'DELETE /:source_type/:id/members/:user_id', 'project' do
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index 0b2641b062c..9305155d285 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -207,7 +207,22 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
let(:url) { "/projects/#{project.id}/packages/pypi" }
let(:headers) { {} }
let(:requires_python) { '>=3.7' }
- let(:base_params) { { requires_python: requires_python, version: '1.0.0', name: 'sample-project', sha256_digest: '1' * 64, md5_digest: '1' * 32 } }
+ let(:base_params) do
+ {
+ requires_python: requires_python,
+ version: '1.0.0',
+ name: 'sample-project',
+ sha256_digest: '1' * 64,
+ md5_digest: '1' * 32,
+ metadata_version: '2.3',
+ author_email: 'cschultz@example.com, snoopy@peanuts.com',
+ description: 'Example description',
+ description_content_type: 'text/plain',
+ summary: 'A module for collecting votes from beagles.',
+ keywords: 'dog,puppy,voting,election'
+ }
+ end
+
let(:params) { base_params.merge(content: temp_file(file_name)) }
let(:send_rewritten_field) { true }
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, user: user, property: 'i_package_pypi_user' } }
diff --git a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
index 4b7ea6b72e5..8d80a554a2a 100644
--- a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
+++ b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
@@ -146,40 +146,6 @@ RSpec.describe RuboCop::Cop::Gitlab::MarkUsedFeatureFlags do
end
end
- %w[
- use_rugged?
- ].each do |feature_flag_method|
- context "#{feature_flag_method} method" do
- context 'a string feature flag' do
- include_examples 'sets flag as used', %|#{feature_flag_method}(arg, "baz")|, 'baz'
- end
-
- context 'a symbol feature flag' do
- include_examples 'sets flag as used', %|#{feature_flag_method}(arg, :baz)|, 'baz'
- end
-
- context 'an interpolated string feature flag with a string prefix' do
- include_examples 'sets flag as used', %|#{feature_flag_method}(arg, "foo_\#{bar}")|, %w[foo_hello foo_world]
- end
-
- context 'an interpolated symbol feature flag with a string prefix' do
- include_examples 'sets flag as used', %|#{feature_flag_method}(arg, :"foo_\#{bar}")|, %w[foo_hello foo_world]
- end
-
- context 'an interpolated string feature flag with a string prefix and suffix' do
- include_examples 'does not set any flags as used', %|#{feature_flag_method}(arg, :"foo_\#{bar}_baz")|
- end
-
- context 'a dynamic string feature flag as a variable' do
- include_examples 'does not set any flags as used', %|#{feature_flag_method}(a_variable, an_arg)|
- end
-
- context 'an integer feature flag' do
- include_examples 'does not set any flags as used', %|#{feature_flag_method}(arg, 123)|
- end
- end
- end
-
describe 'self.limit_feature_flag = :foo' do
include_examples 'sets flag as used', 'self.limit_feature_flag = :foo', 'foo'
end
diff --git a/spec/services/packages/pypi/create_package_service_spec.rb b/spec/services/packages/pypi/create_package_service_spec.rb
index 0d278e32e89..abff91d1878 100644
--- a/spec/services/packages/pypi/create_package_service_spec.rb
+++ b/spec/services/packages/pypi/create_package_service_spec.rb
@@ -69,6 +69,30 @@ RSpec.describe Packages::Pypi::CreatePackageService, :aggregate_failures, featur
end
end
+ context 'with additional metadata' do
+ before do
+ params.merge!(
+ metadata_version: '2.3',
+ author_email: 'cschultz@example.com, snoopy@peanuts.com',
+ description: 'Example description',
+ description_content_type: 'text/plain',
+ summary: 'A module for collecting votes from beagles.',
+ keywords: 'dog,puppy,voting,election'
+ )
+ end
+
+ it 'creates the package' do
+ expect { subject }.to change { Packages::Package.pypi.count }.by(1)
+
+ expect(created_package.pypi_metadatum.metadata_version).to eq('2.3')
+ expect(created_package.pypi_metadatum.author_email).to eq('cschultz@example.com, snoopy@peanuts.com')
+ expect(created_package.pypi_metadatum.description).to eq('Example description')
+ expect(created_package.pypi_metadatum.description_content_type).to eq('text/plain')
+ expect(created_package.pypi_metadatum.summary).to eq('A module for collecting votes from beagles.')
+ expect(created_package.pypi_metadatum.keywords).to eq('dog,puppy,voting,election')
+ end
+ end
+
context 'with an invalid metadata' do
let(:requires_python) { 'x' * 256 }
diff --git a/spec/support/database/click_house/hooks.rb b/spec/support/database/click_house/hooks.rb
index b970d3daf84..c13778f9c36 100644
--- a/spec/support/database/click_house/hooks.rb
+++ b/spec/support/database/click_house/hooks.rb
@@ -9,6 +9,8 @@ class ClickHouseTestRunner
"(SELECT '#{table}' AS table FROM #{table} LIMIT 1)"
end.join(' UNION ALL ')
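+ # Nothing to truncate when the database has no tables yet (for example, specs that skip migrations).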
+ next if query.empty?
+
tables_with_data = ClickHouse::Client.select(query, db).pluck('table')
tables_with_data.each do |table|
ClickHouse::Client.execute("TRUNCATE TABLE #{table}", db)
@@ -16,20 +18,27 @@ class ClickHouseTestRunner
end
end
- def ensure_schema
- return if @ensure_schema
-
- ClickHouse::Client.configuration.databases.each_key do |db|
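+ # Drops every table in each configured database and recreates the schema_migrations table.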
+ def clear_db(configuration = ClickHouse::Client.configuration)
+ configuration.databases.each_key do |db|
# drop all tables
- lookup_tables(db).each do |table|
- ClickHouse::Client.execute("DROP TABLE IF EXISTS #{table}", db)
+ lookup_tables(db, configuration).each do |table|
+ ClickHouse::Client.execute("DROP TABLE IF EXISTS #{table}", db, configuration)
end
- # run the schema SQL files
- Dir[Rails.root.join("db/click_house/#{db}/*.sql")].each do |file|
- ClickHouse::Client.execute(File.read(file), db)
- end
+ ClickHouse::MigrationSupport::SchemaMigration.create_table(db, configuration)
end
+ end
+
+ def ensure_schema
+ return if @ensure_schema
+
+ clear_db
+
+ # run all ClickHouse migrations to build the schema
+ migrations_paths = ClickHouse::MigrationSupport::Migrator.migrations_paths
+ schema_migration = ClickHouse::MigrationSupport::SchemaMigration
+ migration_context = ClickHouse::MigrationSupport::MigrationContext.new(migrations_paths, schema_migration)
+ migration_context.up
@ensure_schema = true
end
@@ -38,11 +47,11 @@ class ClickHouseTestRunner
def tables_for(db)
@tables ||= {}
- @tables[db] ||= lookup_tables(db)
+ @tables[db] ||= lookup_tables(db) - [ClickHouse::MigrationSupport::SchemaMigration.table_name]
end
- def lookup_tables(db)
- ClickHouse::Client.select('SHOW TABLES', db).pluck('name')
+ def lookup_tables(db, configuration = ClickHouse::Client.configuration)
+ ClickHouse::Client.select('SHOW TABLES', db, configuration).pluck('name')
end
end
# rubocop: enable Gitlab/NamespacedClass
@@ -52,10 +61,19 @@ RSpec.configure do |config|
config.around(:each, :click_house) do |example|
with_net_connect_allowed do
- click_house_test_runner.ensure_schema
- click_house_test_runner.truncate_tables
+ was_verbose = ClickHouse::Migration.verbose
+ ClickHouse::Migration.verbose = false
+
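+ # Specs tagged click_house: :without_migrations manage migrations themselves (for example, the migration rake task specs).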
+ if example.example.metadata[:click_house] == :without_migrations
+ click_house_test_runner.clear_db
+ else
+ click_house_test_runner.ensure_schema
+ click_house_test_runner.truncate_tables
+ end
example.run
+ ensure
+ ClickHouse::Migration.verbose = was_verbose
end
end
end
diff --git a/spec/support/helpers/api_internal_base_helpers.rb b/spec/support/helpers/api_internal_base_helpers.rb
index 0c334e164a6..d3ae1a5c3b2 100644
--- a/spec/support/helpers/api_internal_base_helpers.rb
+++ b/spec/support/helpers/api_internal_base_helpers.rb
@@ -41,18 +41,19 @@ module APIInternalBaseHelpers
)
end
- def push(key, container, protocol = 'ssh', env: nil, changes: nil)
+ def push(key, container, protocol = 'ssh', env: nil, changes: nil, relative_path: nil)
push_with_path(
key,
full_path: full_path_for(container),
gl_repository: gl_repository_for(container),
protocol: protocol,
env: env,
- changes: changes
+ changes: changes,
+ relative_path: relative_path
)
end
- def push_with_path(key, full_path:, gl_repository: nil, protocol: 'ssh', env: nil, changes: nil)
+ def push_with_path(key, full_path:, gl_repository: nil, protocol: 'ssh', env: nil, changes: nil, relative_path: nil)
changes ||= 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
params = {
@@ -61,7 +62,8 @@ module APIInternalBaseHelpers
project: full_path,
action: 'git-receive-pack',
protocol: protocol,
- env: env
+ env: env,
+ relative_path: relative_path
}
params[:gl_repository] = gl_repository if gl_repository
diff --git a/spec/support/helpers/click_house_helpers.rb b/spec/support/helpers/click_house_helpers.rb
new file mode 100644
index 00000000000..93d5b1d6e01
--- /dev/null
+++ b/spec/support/helpers/click_house_helpers.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module ClickHouseHelpers
+ private
+
+ def migrate(target_version, migration_context)
+ quietly { migration_context.up(target_version) }
+ end
+
+ def rollback(target_version, migration_context)
+ quietly { migration_context.down(target_version) }
+ end
+
+ def table_names(database = :main, configuration = ClickHouse::Client.configuration)
+ ClickHouse::Client.select('SHOW TABLES', database, configuration).pluck('name')
+ end
+
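+ # FINAL forces ClickHouse to return fully merged (deduplicated) rows, so each migration is counted once.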
+ def active_schema_migrations_count(database = :main, configuration = ClickHouse::Client.configuration)
+ query = <<~SQL
+ SELECT COUNT(*) AS count FROM schema_migrations FINAL WHERE active = 1
+ SQL
+
+ ClickHouse::Client.select(query, database, configuration).first['count']
+ end
+
+ def describe_table(table_name, database = :main, configuration = ClickHouse::Client.configuration)
+ ClickHouse::Client
+ .select("DESCRIBE TABLE #{table_name} FORMAT JSON", database, configuration)
+ .map(&:symbolize_keys)
+ .index_by { |h| h[:name].to_sym }
+ end
+
+ def schema_migrations(database = :main, configuration = ClickHouse::Client.configuration)
+ ClickHouse::Client
+ .select('SELECT * FROM schema_migrations FINAL ORDER BY version ASC', database, configuration)
+ .map(&:symbolize_keys)
+ end
+
+ def clear_db(configuration: ClickHouse::Client.configuration)
+ ClickHouseTestRunner.new.clear_db(configuration)
+ end
+
+ def register_database(config, database_identifier, db_config)
+ config.register_database(
+ database_identifier,
+ database: db_config[:database],
+ url: db_config[:url],
+ username: db_config[:username],
+ password: db_config[:password],
+ variables: db_config[:variables] || {}
+ )
+ end
+
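+ # Removes constants defined by fixture migrations so the same files can be reloaded in other examples.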
+ def clear_consts(fixtures_path)
+ $LOADED_FEATURES.select { |file| file.include? fixtures_path }.each do |file|
+ const = File.basename(file)
+ .scan(ClickHouse::Migration::MIGRATION_FILENAME_REGEXP)[0][1]
+ .camelcase
+ .safe_constantize
+
+ Object.send(:remove_const, const.to_s) if const
+ $LOADED_FEATURES.delete(file)
+ end
+ end
+
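+ # Silences ClickHouse migration output for the block and restores the previous verbosity afterwards.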
+ def quietly(&_block)
+ was_verbose = ClickHouse::Migration.verbose
+ ClickHouse::Migration.verbose = false
+
+ yield
+ ensure
+ ClickHouse::Migration.verbose = was_verbose
+ end
+end
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index cb1f147bdbd..da23f81e86e 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -6649,7 +6649,6 @@
- './spec/lib/gitlab/robots_txt/parser_spec.rb'
- './spec/lib/gitlab/route_map_spec.rb'
- './spec/lib/gitlab/routing_spec.rb'
-- './spec/lib/gitlab/rugged_instrumentation_spec.rb'
- './spec/lib/gitlab/runtime_spec.rb'
- './spec/lib/gitlab/saas_spec.rb'
- './spec/lib/gitlab/safe_request_loader_spec.rb'
@@ -6930,7 +6929,6 @@
- './spec/lib/peek/views/external_http_spec.rb'
- './spec/lib/peek/views/memory_spec.rb'
- './spec/lib/peek/views/redis_detailed_spec.rb'
-- './spec/lib/peek/views/rugged_spec.rb'
- './spec/lib/product_analytics/event_params_spec.rb'
- './spec/lib/prometheus/cleanup_multiproc_dir_service_spec.rb'
- './spec/lib/prometheus/pid_provider_spec.rb'
diff --git a/spec/support/shared_examples/redis/redis_shared_examples.rb b/spec/support/shared_examples/redis/redis_shared_examples.rb
index 1270efd4701..f184f678283 100644
--- a/spec/support/shared_examples/redis/redis_shared_examples.rb
+++ b/spec/support/shared_examples/redis/redis_shared_examples.rb
@@ -365,6 +365,21 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
+ describe '#secret_file' do
+ context 'when explicitly specified in config file' do
+ it 'returns the absolute path of specified file inside Rails root' do
+ allow(subject).to receive(:raw_config_hash).and_return({ secret_file: '/etc/gitlab/redis_secret.enc' })
+ expect(subject.send(:secret_file)).to eq('/etc/gitlab/redis_secret.enc')
+ end
+ end
+
+ context 'when not explicitly specified' do
+ it 'returns the default path in the encrypted settings shared directory' do
+ expect(subject.send(:secret_file)).to eq(Rails.root.join("shared/encrypted_settings/redis.yaml.enc").to_s)
+ end
+ end
+ end
+
describe "#parse_client_tls_options" do
let(:dummy_certificate) { OpenSSL::X509::Certificate.new }
let(:dummy_key) { OpenSSL::PKey::RSA.new }
diff --git a/spec/tasks/gitlab/click_house/migration_rake_spec.rb b/spec/tasks/gitlab/click_house/migration_rake_spec.rb
new file mode 100644
index 00000000000..6b834d52e9a
--- /dev/null
+++ b/spec/tasks/gitlab/click_house/migration_rake_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_category: :database do
+ include ClickHouseHelpers
+
+ # We don't need to delete data since we don't modify Postgres data
+ self.use_transactional_tests = false
+
+ let(:migrations_base_dir) { 'click_house/migrations' }
+ let(:migrations_dirname) { '' }
+ let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
+
+ before(:all) do
+ Rake.application.rake_require 'tasks/gitlab/click_house/migration'
+ end
+
+ before do
+ stub_env('VERBOSE', 'false')
+ end
+
+ describe 'migrate' do
+ subject(:migration) { run_rake_task('gitlab:clickhouse:migrate') }
+
+ let(:target_version) { nil }
+
+ around do |example|
+ ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
+
+ example.run
+
+ clear_consts(expand_fixture_path(migrations_base_dir))
+ end
+
+ before do
+ stub_env('VERSION', target_version) if target_version
+ end
+
+ describe 'when creating a table' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'creates a table' do
+ expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
+
+ expect(describe_table('some')).to match({
+ id: a_hash_including(type: 'UInt64'),
+ date: a_hash_including(type: 'Date')
+ })
+ end
+ end
+
+ describe 'when dropping a table' do
+ let(:migrations_dirname) { 'drop_table' }
+ let(:target_version) { 2 }
+
+ it 'drops table' do
+ stub_env('VERSION', 1)
+ run_rake_task('gitlab:clickhouse:migrate')
+
+ expect(table_names).to include('some')
+
+ stub_env('VERSION', target_version)
+ migration
+ expect(table_names).not_to include('some')
+ end
+ end
+
+ describe 'when VERSION is invalid' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+ let(:target_version) { 'invalid' }
+
+ it { expect { migration }.to raise_error RuntimeError, 'Invalid format of target version: `VERSION=invalid`' }
+ end
+ end
+
+ describe 'rollback' do
+ subject(:migration) { run_rake_task('gitlab:clickhouse:rollback') }
+
+ let(:schema_migration) { ClickHouse::MigrationSupport::SchemaMigration }
+
+ around do |example|
+ ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
+ migrate(nil, ClickHouse::MigrationSupport::MigrationContext.new(migrations_dir, schema_migration))
+
+ example.run
+
+ clear_consts(expand_fixture_path(migrations_base_dir))
+ end
+
+ context 'when migrating back all the way to 0' do
+ let(:target_version) { 0 }
+
+ context 'when down method is present' do
+ let(:migrations_dirname) { 'table_creation_with_down_method' }
+
+ it 'removes migration' do
+ expect(table_names).to include('some')
+
+ migration
+ expect(table_names).not_to include('some')
+ end
+ end
+ end
+ end
+
+ %w[gitlab:clickhouse:migrate].each do |task|
+ context "when running #{task}" do
+ it "runs gitlab:clickhouse:prepare_schema_migration_table first" do
+ expect(Rake::Task['gitlab:clickhouse:prepare_schema_migration_table']).to receive(:execute).and_return(true)
+ expect(Rake::Task[task]).to receive(:execute).and_return(true)
+
+ Rake::Task['gitlab:clickhouse:prepare_schema_migration_table'].reenable
+ run_rake_task(task)
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/redis_rake_spec.rb b/spec/tasks/gitlab/redis_rake_spec.rb
new file mode 100644
index 00000000000..bfad25be4fd
--- /dev/null
+++ b/spec/tasks/gitlab/redis_rake_spec.rb
@@ -0,0 +1,188 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'gitlab:redis:secret rake tasks', :silence_stdout, feature_category: :build do
+ let(:redis_secret_file) { 'tmp/tests/redisenc/redis_secret.yaml.enc' }
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/redis'
+ stub_env('EDITOR', 'cat')
+ stub_warn_user_is_not_gitlab
+ FileUtils.mkdir_p('tmp/tests/redisenc/')
+ allow(::Gitlab::Runtime).to receive(:rake?).and_return(true)
+ allow_next_instance_of(Gitlab::Redis::Cache) do |instance|
+ allow(instance).to receive(:secret_file).and_return(redis_secret_file)
+ end
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(SecureRandom.hex(64))
+ end
+
+ after do
+ FileUtils.rm_rf(Rails.root.join('tmp/tests/redisenc'))
+ end
+
+ describe ':show' do
+ it 'displays error when file does not exist' do
+ expect do
+ run_rake_task('gitlab:redis:secret:show')
+ end.to output(/File .* does not exist. Use `gitlab-rake gitlab:redis:secret:edit` to change that./).to_stdout
+ end
+
+ it 'displays error when key does not exist' do
+ Settings.encrypted(redis_secret_file).write('somevalue')
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
+ expect do
+ run_rake_task('gitlab:redis:secret:show')
+ end.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
+ end
+
+ it 'displays error when key is changed' do
+ Settings.encrypted(redis_secret_file).write('somevalue')
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(SecureRandom.hex(64))
+ expect do
+ run_rake_task('gitlab:redis:secret:show')
+ end.to output(/Couldn't decrypt .* Perhaps you passed the wrong key?/).to_stderr
+ end
+
+ it 'outputs the unencrypted content when present' do
+ encrypted = Settings.encrypted(redis_secret_file)
+ encrypted.write('somevalue')
+ expect { run_rake_task('gitlab:redis:secret:show') }.to output(/somevalue/).to_stdout
+ end
+ end
+
+ describe 'edit' do
+ it 'creates encrypted file' do
+ expect { run_rake_task('gitlab:redis:secret:edit') }.to output(/File encrypted and saved./).to_stdout
+ expect(File.exist?(redis_secret_file)).to be true
+ value = Settings.encrypted(redis_secret_file)
+ expect(value.read).to match(/password: '123'/)
+ end
+
+ it 'displays error when key does not exist' do
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
+ expect do
+ run_rake_task('gitlab:redis:secret:edit')
+ end.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
+ end
+
+ it 'displays error when key is changed' do
+ Settings.encrypted(redis_secret_file).write('somevalue')
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(SecureRandom.hex(64))
+ expect do
+ run_rake_task('gitlab:redis:secret:edit')
+ end.to output(/Couldn't decrypt .* Perhaps you passed the wrong key?/).to_stderr
+ end
+
+ it 'displays error when write directory does not exist' do
+ FileUtils.rm_rf(Rails.root.join('tmp/tests/redisenc'))
+ expect { run_rake_task('gitlab:redis:secret:edit') }.to output(/Directory .* does not exist./).to_stderr
+ end
+
+ it 'shows a warning when content is invalid' do
+ Settings.encrypted(redis_secret_file).write('somevalue')
+ expect do
+ run_rake_task('gitlab:redis:secret:edit')
+ end.to output(/WARNING: Content was not a valid Redis secret yml file/).to_stdout
+ value = Settings.encrypted(redis_secret_file)
+ expect(value.read).to match(/somevalue/)
+ end
+
+ it 'displays error when $EDITOR is not set' do
+ stub_env('EDITOR', nil)
+ expect do
+ run_rake_task('gitlab:redis:secret:edit')
+ end.to output(/No \$EDITOR specified to open file. Please provide one when running the command/).to_stderr
+ end
+ end
+
+ describe 'write' do
+ before do
+ allow($stdin).to receive(:tty?).and_return(false)
+ allow($stdin).to receive(:read).and_return('testvalue')
+ end
+
+ it 'creates encrypted file from stdin' do
+ expect { run_rake_task('gitlab:redis:secret:write') }.to output(/File encrypted and saved./).to_stdout
+ expect(File.exist?(redis_secret_file)).to be true
+ value = Settings.encrypted(redis_secret_file)
+ expect(value.read).to match(/testvalue/)
+ end
+
+ it 'displays error when key does not exist' do
+ allow(Gitlab::Application.secrets).to receive(:encrypted_settings_key_base).and_return(nil)
+ expect do
+ run_rake_task('gitlab:redis:secret:write')
+ end.to output(/Missing encryption key encrypted_settings_key_base./).to_stderr
+ end
+
+ it 'displays error when write directory does not exist' do
+ FileUtils.rm_rf('tmp/tests/redisenc/')
+ expect { run_rake_task('gitlab:redis:secret:write') }.to output(/Directory .* does not exist./).to_stderr
+ end
+
+ it 'shows a warning when content is invalid' do
+ Settings.encrypted(redis_secret_file).write('somevalue')
+ expect do
+ run_rake_task('gitlab:redis:secret:edit')
+ end.to output(/WARNING: Content was not a valid Redis secret yml file/).to_stdout
+ expect(Settings.encrypted(redis_secret_file).read).to match(/somevalue/)
+ end
+ end
+
+ context 'when an instance class is specified' do
+ before do
+ allow_next_instance_of(Gitlab::Redis::SharedState) do |instance|
+ allow(instance).to receive(:secret_file).and_return(redis_secret_file)
+ end
+ end
+
+ context 'when actual name is used' do
+ it 'uses the correct Redis class' do
+ expect(Gitlab::Redis::SharedState).to receive(:encrypted_secrets).and_call_original
+
+ run_rake_task('gitlab:redis:secret:edit', 'SharedState')
+ end
+ end
+
+ context 'when name in lowercase is used' do
+ it 'uses the correct Redis class' do
+ expect(Gitlab::Redis::SharedState).to receive(:encrypted_secrets).and_call_original
+
+ run_rake_task('gitlab:redis:secret:edit', 'sharedstate')
+ end
+ end
+
+ context 'when name with underscores is used' do
+ it 'uses the correct Redis class' do
+ expect(Gitlab::Redis::SharedState).to receive(:encrypted_secrets).and_call_original
+
+ run_rake_task('gitlab:redis:secret:edit', 'shared_state')
+ end
+ end
+
+ context 'when name with hyphens is used' do
+ it 'uses the correct Redis class' do
+ expect(Gitlab::Redis::SharedState).to receive(:encrypted_secrets).and_call_original
+
+ run_rake_task('gitlab:redis:secret:edit', 'shared-state')
+ end
+ end
+
+ context 'when name with spaces is used' do
+ it 'uses the correct Redis class' do
+ expect(Gitlab::Redis::SharedState).to receive(:encrypted_secrets).and_call_original
+
+ run_rake_task('gitlab:redis:secret:edit', 'shared state')
+ end
+ end
+
+ context 'when an invalid name is used' do
+ it 'raises error' do
+ expect do
+ run_rake_task('gitlab:redis:secret:edit', 'foobar')
+ end.to raise_error(/Specified instance name foobar does not exist./)
+ end
+ end
+ end
+end
diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index 6ccd2e46f7b..d7d04015b48 100644
--- a/spec/tooling/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Quality::TestLevel, feature_category: :tooling do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
- .to eq("spec/{bin,channels,components,config,contracts,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb")
+ .to eq("spec/{bin,channels,click_house,components,config,contracts,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb")
end
end
@@ -121,7 +121,7 @@ RSpec.describe Quality::TestLevel, feature_category: :tooling do
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
- .to eq(%r{spec/(bin|channels|components|config|contracts|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)/})
+ .to eq(%r{spec/(bin|channels|click_house|components|config|contracts|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)/})
end
end
diff --git a/spec/views/admin/application_settings/_repository_storage.html.haml_spec.rb b/spec/views/admin/application_settings/_repository_storage.html.haml_spec.rb
index 244157a3b14..34821149444 100644
--- a/spec/views/admin/application_settings/_repository_storage.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/_repository_storage.html.haml_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'admin/application_settings/_repository_storage.html.haml' do
let(:app_settings) { build(:application_setting, repository_storages_weighted: repository_storages_weighted) }
before do
- stub_storage_settings({ 'default': {}, 'mepmep': {}, 'foobar': {} })
+ stub_storage_settings({ default: {}, mepmep: {}, foobar: {} })
assign(:application_setting, app_settings)
end
diff --git a/tooling/quality/test_level.rb b/tooling/quality/test_level.rb
index 20e00763f65..050eb4f4daf 100644
--- a/tooling/quality/test_level.rb
+++ b/tooling/quality/test_level.rb
@@ -18,6 +18,7 @@ module Quality
unit: %w[
bin
channels
+ click_house
components
config
contracts