gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r--  .rubocop_todo/gitlab/namespaced_class.yml  1
-rw-r--r--  .rubocop_todo/layout/line_length.yml  3
-rw-r--r--  .rubocop_todo/performance/map_compact.yml  1
-rw-r--r--  .rubocop_todo/rspec/feature_category.yml  1
-rw-r--r--  .rubocop_todo/rspec/named_subject.yml  1
-rw-r--r--  .rubocop_todo/style/arguments_forwarding.yml  1
-rw-r--r--  .rubocop_todo/style/guard_clause.yml  1
-rw-r--r--  .rubocop_todo/style/if_unless_modifier.yml  1
-rw-r--r--  .rubocop_todo/style/inline_disable_annotation.yml  1
-rw-r--r--  GITALY_SERVER_VERSION  2
-rw-r--r--  Gemfile.checksum  8
-rw-r--r--  Gemfile.lock  8
-rw-r--r--  app/assets/javascripts/constants.js  2
-rw-r--r--  app/assets/javascripts/gl_form.js  16
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/components/checks/constants.js  17
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/components/checks/message.vue  17
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/components/merge_checks.vue  7
-rw-r--r--  app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/base_token.vue  57
-rw-r--r--  app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/user_token.vue  72
-rw-r--r--  app/controllers/projects/gcp/artifact_registry/base_controller.rb  43
-rw-r--r--  app/controllers/projects/gcp/artifact_registry/docker_images_controller.rb  135
-rw-r--r--  app/controllers/projects/gcp/artifact_registry/setup_controller.rb  11
-rw-r--r--  app/graphql/resolvers/namespace_projects_resolver.rb  6
-rw-r--r--  app/helpers/dashboard_helper.rb  4
-rw-r--r--  app/helpers/todos_helper.rb  4
-rw-r--r--  app/mailers/previews/notify_preview.rb  2
-rw-r--r--  app/models/namespace.rb  6
-rw-r--r--  app/models/namespaces/traversal/cached.rb  54
-rw-r--r--  app/models/namespaces/traversal/linear.rb  4
-rw-r--r--  app/models/namespaces/traversal/recursive.rb  6
-rw-r--r--  app/models/user_detail.rb  1
-rw-r--r--  app/services/groups/update_service.rb  3
-rw-r--r--  app/validators/json_schemas/user_detail_onboarding_status.json  17
-rw-r--r--  app/views/dashboard/issues.html.haml  2
-rw-r--r--  app/views/dashboard/merge_requests.html.haml  2
-rw-r--r--  app/views/dashboard/todos/index.html.haml  5
-rw-r--r--  app/views/layouts/devise.html.haml  6
-rw-r--r--  app/views/projects/gcp/artifact_registry/docker_images/_docker_image.html.haml  33
-rw-r--r--  app/views/projects/gcp/artifact_registry/docker_images/_docker_image_tag.html.haml  1
-rw-r--r--  app/views/projects/gcp/artifact_registry/docker_images/_pagination.html.haml  13
-rw-r--r--  app/views/projects/gcp/artifact_registry/docker_images/index.html.haml  23
-rw-r--r--  app/views/projects/gcp/artifact_registry/setup/new.html.haml  31
-rw-r--r--  app/views/shared/wikis/edit.html.haml  3
-rwxr-xr-x  bin/saas-feature.rb  381
-rw-r--r--  config/feature_flags/development/gcp_technical_demo.yml  8
-rw-r--r--  config/initializers/rest-client-hostname_override.rb  2
-rw-r--r--  config/routes/gcp.rb  13
-rw-r--r--  config/routes/project.rb  1
-rw-r--r--  db/migrate/20240106000000_migrate_data_from_workspaces_url_column.rb  2
-rw-r--r--  db/migrate/20240116212237_add_onboarding_status_to_user_details.rb  10
-rw-r--r--  db/migrate/20240123000000_update_workspaces_url_prefix_column.rb  20
-rw-r--r--  db/post_migrate/20240122071840_ensure_backfill_for_ci_builds_integer_columns_is_finished.rb  35
-rw-r--r--  db/post_migrate/20240123071840_prepare_async_indexes_for_p_ci_builds_auto_canceled_by_id.rb  27
-rw-r--r--  db/post_migrate/20240124081840_prepare_async_indexes_for_p_ci_builds_commit_id_part1.rb  32
-rw-r--r--  db/schema_migrations/20240116212237  1
-rw-r--r--  db/schema_migrations/20240122071840  1
-rw-r--r--  db/schema_migrations/20240123000000  1
-rw-r--r--  db/schema_migrations/20240123071840  1
-rw-r--r--  db/schema_migrations/20240124081840  1
-rw-r--r--  db/structure.sql  1
-rw-r--r--  doc/api/graphql/reference/index.md  2
-rw-r--r--  doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-as-a-service.png  bin 0 -> 153784 bytes
-rw-r--r--  doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-forecasting.png  bin 0 -> 155472 bytes
-rw-r--r--  doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-reporting.png  bin 0 -> 275988 bytes
-rw-r--r--  doc/architecture/blueprints/capacity_planning/images/tamland-as-a-service.png  bin 46896 -> 0 bytes
-rw-r--r--  doc/architecture/blueprints/capacity_planning/images/tamland-as-part-of-stack.png  bin 38591 -> 0 bytes
-rw-r--r--  doc/architecture/blueprints/capacity_planning/index.md  85
-rw-r--r--  doc/ci/runners/saas/macos_saas_runner.md  4
-rw-r--r--  doc/development/ee_features.md  23
-rw-r--r--  doc/development/permissions/custom_roles.md  166
-rw-r--r--  doc/development/secure_coding_guidelines.md  8
-rw-r--r--  gems/gitlab-http/spec/gitlab/stub_requests.rb  4
-rw-r--r--  lib/gitlab/ci/config/external/mapper.rb  1
-rw-r--r--  lib/gitlab/ci/config/external/mapper/normalizer.rb  4
-rw-r--r--  lib/gitlab/pages/url_builder.rb  34
-rw-r--r--  lib/gitlab/url_blocker.rb  429
-rw-r--r--  locale/gitlab.pot  6
-rw-r--r--  spec/bin/saas_feature_spec.rb  218
-rw-r--r--  spec/config/settings_spec.rb  15
-rw-r--r--  spec/factories/namespaces/descendants.rb  6
-rw-r--r--  spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js  2
-rw-r--r--  spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js  37
-rw-r--r--  spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js  25
-rw-r--r--  spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js  118
-rw-r--r--  spec/graphql/resolvers/namespace_projects_resolver_spec.rb  8
-rw-r--r--  spec/helpers/dashboard_helper_spec.rb  6
-rw-r--r--  spec/helpers/todos_helper_spec.rb  6
-rw-r--r--  spec/initializers/rest-client-hostname_override_spec.rb  2
-rw-r--r--  spec/lib/gitlab/ci/config/external/mapper/normalizer_spec.rb  9
-rw-r--r--  spec/lib/gitlab/ci/config/external/mapper_spec.rb  18
-rw-r--r--  spec/lib/gitlab/database/loose_foreign_keys_spec.rb  4
-rw-r--r--  spec/lib/gitlab/pages/url_builder_spec.rb  12
-rw-r--r--  spec/lib/gitlab/url_blocker_spec.rb  1009
-rw-r--r--  spec/models/namespaces/traversal/cached_spec.rb  196
-rw-r--r--  spec/models/user_detail_spec.rb  72
-rw-r--r--  spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb  137
-rw-r--r--  spec/requests/projects/gcp/artifact_registry/setup_controller_spec.rb  73
-rw-r--r--  spec/services/groups/update_service_spec.rb  2
-rw-r--r--  spec/support/helpers/stub_requests.rb  6
-rw-r--r--  spec/support/rspec_order_todo.yml  1
-rw-r--r--  spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb  8
-rw-r--r--  spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb  4
-rw-r--r--  spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb  2
-rw-r--r--  spec/support/shared_examples/namespaces/traversal_examples.rb  8
104 files changed, 1473 insertions, 2466 deletions
diff --git a/.rubocop_todo/gitlab/namespaced_class.yml b/.rubocop_todo/gitlab/namespaced_class.yml
index 4cc5f4ffef0..eff4a6506cf 100644
--- a/.rubocop_todo/gitlab/namespaced_class.yml
+++ b/.rubocop_todo/gitlab/namespaced_class.yml
@@ -1215,7 +1215,6 @@ Gitlab/NamespacedClass:
- 'lib/gitlab/untrusted_regexp.rb'
- 'lib/gitlab/untrusted_regexp/ruby_syntax.rb'
- 'lib/gitlab/uploads_transfer.rb'
- - 'lib/gitlab/url_blocker.rb'
- 'lib/gitlab/url_builder.rb'
- 'lib/gitlab/url_helpers.rb'
- 'lib/gitlab/url_sanitizer.rb'
diff --git a/.rubocop_todo/layout/line_length.yml b/.rubocop_todo/layout/line_length.yml
index 59f19430b57..6dd83f1aceb 100644
--- a/.rubocop_todo/layout/line_length.yml
+++ b/.rubocop_todo/layout/line_length.yml
@@ -2671,7 +2671,6 @@ Layout/LineLength:
- 'lib/gitlab/tracking.rb'
- 'lib/gitlab/tracking/destinations/snowplow.rb'
- 'lib/gitlab/tracking/event_definition.rb'
- - 'lib/gitlab/url_blocker.rb'
- 'lib/gitlab/usage/metric_definition.rb'
- 'lib/gitlab/usage/metrics/aggregates/aggregate.rb'
- 'lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection.rb'
@@ -3801,7 +3800,6 @@ Layout/LineLength:
- 'spec/lib/gitlab/tracking/event_definition_spec.rb'
- 'spec/lib/gitlab/tracking/standard_context_spec.rb'
- 'spec/lib/gitlab/tracking_spec.rb'
- - 'spec/lib/gitlab/url_blocker_spec.rb'
- 'spec/lib/gitlab/url_builder_spec.rb'
- 'spec/lib/gitlab/usage/metric_definition_spec.rb'
- 'spec/lib/gitlab/usage/metric_spec.rb'
@@ -4024,7 +4022,6 @@ Layout/LineLength:
- 'spec/models/todo_spec.rb'
- 'spec/models/upload_spec.rb'
- 'spec/models/uploads/fog_spec.rb'
- - 'spec/models/user_detail_spec.rb'
- 'spec/models/user_spec.rb'
- 'spec/models/wiki_page_spec.rb'
- 'spec/models/x509_certificate_spec.rb'
diff --git a/.rubocop_todo/performance/map_compact.yml b/.rubocop_todo/performance/map_compact.yml
index 576a610006e..4a3395ba823 100644
--- a/.rubocop_todo/performance/map_compact.yml
+++ b/.rubocop_todo/performance/map_compact.yml
@@ -103,7 +103,6 @@ Performance/MapCompact:
- 'lib/gitlab/git/commit.rb'
- 'lib/gitlab/git/conflict/file.rb'
- 'lib/gitlab/sql/pattern.rb'
- - 'lib/gitlab/url_blocker.rb'
- 'qa/qa/page/component/issuable/sidebar.rb'
- 'qa/qa/support/formatters/test_metrics_formatter.rb'
- 'qa/qa/tools/ci/ff_changes.rb'
diff --git a/.rubocop_todo/rspec/feature_category.yml b/.rubocop_todo/rspec/feature_category.yml
index ff159a848f2..de0f4213855 100644
--- a/.rubocop_todo/rspec/feature_category.yml
+++ b/.rubocop_todo/rspec/feature_category.yml
@@ -4679,7 +4679,6 @@ RSpec/FeatureCategory:
- 'spec/models/user_agent_detail_spec.rb'
- 'spec/models/user_canonical_email_spec.rb'
- 'spec/models/user_custom_attribute_spec.rb'
- - 'spec/models/user_detail_spec.rb'
- 'spec/models/user_highest_role_spec.rb'
- 'spec/models/user_mentions/commit_user_mention_spec.rb'
- 'spec/models/user_mentions/issue_user_mention_spec.rb'
diff --git a/.rubocop_todo/rspec/named_subject.yml b/.rubocop_todo/rspec/named_subject.yml
index 13ea468885a..77ed1246b7a 100644
--- a/.rubocop_todo/rspec/named_subject.yml
+++ b/.rubocop_todo/rspec/named_subject.yml
@@ -2416,7 +2416,6 @@ RSpec/NamedSubject:
- 'spec/lib/gitlab/tree_summary_spec.rb'
- 'spec/lib/gitlab/unicode_spec.rb'
- 'spec/lib/gitlab/untrusted_regexp_spec.rb'
- - 'spec/lib/gitlab/url_blocker_spec.rb'
- 'spec/lib/gitlab/url_builder_spec.rb'
- 'spec/lib/gitlab/usage/metric_definition_spec.rb'
- 'spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb'
diff --git a/.rubocop_todo/style/arguments_forwarding.yml b/.rubocop_todo/style/arguments_forwarding.yml
index f578b75e5f0..b72751ed279 100644
--- a/.rubocop_todo/style/arguments_forwarding.yml
+++ b/.rubocop_todo/style/arguments_forwarding.yml
@@ -115,7 +115,6 @@ Style/ArgumentsForwarding:
- 'lib/gitlab/redis/multi_store.rb'
- 'lib/gitlab/repository_cache.rb'
- 'lib/gitlab/tracking.rb'
- - 'lib/gitlab/url_blocker.rb'
- 'lib/gitlab/url_builder.rb'
- 'lib/gitlab/usage/metrics/query.rb'
- 'lib/gitlab_settings/settings.rb'
diff --git a/.rubocop_todo/style/guard_clause.yml b/.rubocop_todo/style/guard_clause.yml
index 933b33999e3..9896356b1f6 100644
--- a/.rubocop_todo/style/guard_clause.yml
+++ b/.rubocop_todo/style/guard_clause.yml
@@ -538,7 +538,6 @@ Style/GuardClause:
- 'lib/gitlab/sidekiq_config/cli_methods.rb'
- 'lib/gitlab/sidekiq_middleware/size_limiter/compressor.rb'
- 'lib/gitlab/sql/set_operator.rb'
- - 'lib/gitlab/url_blocker.rb'
- 'lib/gitlab/usage/metrics/instrumentations/count_bulk_imports_entities_metric.rb'
- 'lib/gitlab/utils/override.rb'
- 'lib/gitlab/webpack/manifest.rb'
diff --git a/.rubocop_todo/style/if_unless_modifier.yml b/.rubocop_todo/style/if_unless_modifier.yml
index cb5e5134dcf..72189f27bd0 100644
--- a/.rubocop_todo/style/if_unless_modifier.yml
+++ b/.rubocop_todo/style/if_unless_modifier.yml
@@ -881,7 +881,6 @@ Style/IfUnlessModifier:
- 'lib/gitlab/suggestions/suggestion_set.rb'
- 'lib/gitlab/template_parser/eval_state.rb'
- 'lib/gitlab/untrusted_regexp.rb'
- - 'lib/gitlab/url_blocker.rb'
- 'lib/gitlab/usage_data_counters/base_counter.rb'
- 'lib/gitlab/usage_data_counters/hll_redis_counter.rb'
- 'lib/gitlab/utils/delegator_override.rb'
diff --git a/.rubocop_todo/style/inline_disable_annotation.yml b/.rubocop_todo/style/inline_disable_annotation.yml
index 470ede21304..58650799d8d 100644
--- a/.rubocop_todo/style/inline_disable_annotation.yml
+++ b/.rubocop_todo/style/inline_disable_annotation.yml
@@ -2620,7 +2620,6 @@ Style/InlineDisableAnnotation:
- 'lib/gitlab/throttle.rb'
- 'lib/gitlab/tracking.rb'
- 'lib/gitlab/uploads/migration_helper.rb'
- - 'lib/gitlab/url_blocker.rb'
- 'lib/gitlab/url_builder.rb'
- 'lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric.rb'
- 'lib/gitlab/usage/metrics/instrumentations/database_metric.rb'
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 08cc8e9bddf..255d283413d 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-7eb79ebcb084d4e881777f44ca5055cce6e60ccf
+4261fb1dccde53037494d42745c59583f6644553
diff --git a/Gemfile.checksum b/Gemfile.checksum
index dcd0f1e82fe..2124bcd11d0 100644
--- a/Gemfile.checksum
+++ b/Gemfile.checksum
@@ -71,8 +71,8 @@
{"name":"capybara-screenshot","version":"1.0.26","platform":"ruby","checksum":"816b9370a07752097c82a05f568aaf5d3b7f45c3db5d3aab2014071e1b3c0c77"},
{"name":"carrierwave","version":"1.3.4","platform":"ruby","checksum":"81772dabd1830edbd7f4526d2ae2c79f974f1d48900c3f03f7ecb7c657463a21"},
{"name":"cbor","version":"0.5.9.6","platform":"ruby","checksum":"434a147658dd1df24ec9e7b3297c1fd4f8a691c97d0e688b3049df8e728b2114"},
-{"name":"character_set","version":"1.4.1","platform":"java","checksum":"38b632136b40e02fecba2898497b07ac640cc121f17ac536eaf19873d50053d0"},
-{"name":"character_set","version":"1.4.1","platform":"ruby","checksum":"f71b1ac35b21c4c6f9f26b8a67c7eec8e10bdf0da17488ac7f8fae756d9f8062"},
+{"name":"character_set","version":"1.8.0","platform":"java","checksum":"2d94ac33d6569434cf1ba464012b5e98010f5dafbd7b750e8d7db79f4c8eb8f7"},
+{"name":"character_set","version":"1.8.0","platform":"ruby","checksum":"2b7317462adaedff0bd1576ae86d71bc5efe133a5d0b7c257021b00fe3153f51"},
{"name":"charlock_holmes","version":"0.7.7","platform":"ruby","checksum":"1790eca3f661ffa6bbf5866c53c7191e4b8472626fc4997ff9dbe7c425e2cb43"},
{"name":"chef-config","version":"18.3.0","platform":"ruby","checksum":"c183a2ff41da8d63b1e4a60853c9c701a053ab9afe13df767a578db5f07072df"},
{"name":"chef-utils","version":"18.3.0","platform":"ruby","checksum":"827f7aace26ba9f5f8aca45059644205cc715baded80229f1fd5518d21970701"},
@@ -502,7 +502,6 @@
{"name":"rb-inotify","version":"0.10.1","platform":"ruby","checksum":"050062d4f31d307cca52c3f6a7f4b946df8de25fc4bd373e1a5142e41034a7ca"},
{"name":"rb_sys","version":"0.9.86","platform":"ruby","checksum":"65d35ad5f2f2e7257607310186d6a178f34d0fee807d3b1af5611db6a5503a8c"},
{"name":"rbtrace","version":"0.4.14","platform":"ruby","checksum":"162bbf89cecabfc4f09c869b655f6f3a679c4870ebb7cbdcadf7393a81cc1769"},
-{"name":"rbtree","version":"0.4.6","platform":"ruby","checksum":"14eea4469b24fd2472542e5f3eb105d6344c8ccf36f0b56d55fdcfeb4e0f10fc"},
{"name":"rchardet","version":"1.8.0","platform":"ruby","checksum":"693acd5253d5ade81a51940697955f6dd4bb2f0d245bda76a8e23deec70a52c7"},
{"name":"re2","version":"2.7.0","platform":"aarch64-linux","checksum":"778921298b6e8aba26a6230dd298c9b361b92e45024f81fa6aee788060fa307c"},
{"name":"re2","version":"2.7.0","platform":"arm-linux","checksum":"d328b5286d83ae265e13b855da8e348a976f80f91b748045b52073a570577954"},
@@ -592,7 +591,6 @@
{"name":"sentry-raven","version":"3.1.2","platform":"ruby","checksum":"103d3b122958810d34898ce2e705bcf549ddb9d855a70ce9a3970ee2484f364a"},
{"name":"sentry-ruby","version":"5.10.0","platform":"ruby","checksum":"115c24c0aee1309210f3a2988fb118e2bec1f11609feeda90e694388b1183619"},
{"name":"sentry-sidekiq","version":"5.10.0","platform":"ruby","checksum":"cc81018d0733fb1be3fb5641c9e0b61030bbeaa1d0b23ca64797d70def7aea1a"},
-{"name":"set","version":"1.0.2","platform":"ruby","checksum":"02ffa4de1f2621495e05b72326040dd014d7abbcb02fea698bc600a389992c02"},
{"name":"sexp_processor","version":"4.17.1","platform":"ruby","checksum":"91110946720307f30bf1d549e90d9a529fef40d1fc471c069c8cca7667015da0"},
{"name":"shellany","version":"0.0.1","platform":"ruby","checksum":"0e127a9132698766d7e752e82cdac8250b6adbd09e6c0a7fbbb6f61964fedee7"},
{"name":"shoulda-matchers","version":"5.1.0","platform":"ruby","checksum":"a01d20589989e9653ab4a28c67d9db2b82bcf0a2496cf01d5e1a95a4aaaf5b07"},
@@ -613,8 +611,6 @@
{"name":"snowplow-tracker","version":"0.8.0","platform":"ruby","checksum":"7ba6f4f1443a829845fd28e63eda72d9d3d247f485310ddcccaebbc52b734a38"},
{"name":"solargraph","version":"0.47.2","platform":"ruby","checksum":"87ca4b799b9155c2c31c15954c483e952fdacd800f52d6709b901dd447bcac6a"},
{"name":"sorbet-runtime","version":"0.5.11144","platform":"ruby","checksum":"cb36dfc4ede6d206fa6f7587d4be7c8b4fcd3cc9fd5792614fb9b6c7030548a0"},
-{"name":"sorted_set","version":"1.0.3","platform":"java","checksum":"996283f2e5c6e838825bcdcee31d6306515ae5f24bcb0ee4ce09dfff32919b8c"},
-{"name":"sorted_set","version":"1.0.3","platform":"ruby","checksum":"4f2b8bee6e8c59cbd296228c0f1f81679357177a8b6859dcc2a99e86cce6372f"},
{"name":"spamcheck","version":"1.3.0","platform":"ruby","checksum":"a46082752257838d8484c844736e309ec499f85dcc51283a5f973b33f1c994f5"},
{"name":"spring","version":"4.1.0","platform":"ruby","checksum":"f17f080fb0df558d663c897a6229ed3d5cc54819ab51876ea6eef49a67f0a3cb"},
{"name":"spring-commands-rspec","version":"1.0.4","platform":"ruby","checksum":"6202e54fa4767452e3641461a83347645af478bf45dddcca9737b43af0dd1a2c"},
diff --git a/Gemfile.lock b/Gemfile.lock
index 2f7c295dde6..9dfcf3ac1f4 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -383,8 +383,7 @@ GEM
mime-types (>= 1.16)
ssrf_filter (~> 1.0, < 1.1.0)
cbor (0.5.9.6)
- character_set (1.4.1)
- sorted_set (~> 1.0)
+ character_set (1.8.0)
charlock_holmes (0.7.7)
chef-config (18.3.0)
addressable
@@ -1350,7 +1349,6 @@ GEM
ffi (>= 1.0.6)
msgpack (>= 0.4.3)
optimist (>= 3.0.0)
- rbtree (0.4.6)
rchardet (1.8.0)
re2 (2.7.0)
mini_portile2 (~> 2.8.5)
@@ -1547,7 +1545,6 @@ GEM
sentry-sidekiq (5.10.0)
sentry-ruby (~> 5.10.0)
sidekiq (>= 3.0)
- set (1.0.2)
sexp_processor (4.17.1)
shellany (0.0.1)
shoulda-matchers (5.1.0)
@@ -1601,9 +1598,6 @@ GEM
tilt (~> 2.0)
yard (~> 0.9, >= 0.9.24)
sorbet-runtime (0.5.11144)
- sorted_set (1.0.3)
- rbtree
- set (~> 1.0)
spamcheck (1.3.0)
grpc (~> 1.0)
spring (4.1.0)
diff --git a/app/assets/javascripts/constants.js b/app/assets/javascripts/constants.js
index 631968ff531..f43a2d5d8ff 100644
--- a/app/assets/javascripts/constants.js
+++ b/app/assets/javascripts/constants.js
@@ -3,5 +3,3 @@ export const getModifierKey = (removeSuffix = false) => {
const winKey = `Ctrl${removeSuffix ? '' : '+'}`;
return window.gl?.client?.isMac ? '⌘' : winKey;
};
-
-export const PRELOAD_THROTTLE_TIMEOUT_MS = 4000;
diff --git a/app/assets/javascripts/gl_form.js b/app/assets/javascripts/gl_form.js
index 776f27a8583..f4008fe3cc9 100644
--- a/app/assets/javascripts/gl_form.js
+++ b/app/assets/javascripts/gl_form.js
@@ -5,7 +5,6 @@ import GfmAutoComplete, { defaultAutocompleteConfig } from 'ee_else_ce/gfm_auto_
import { disableButtonIfEmptyField } from '~/lib/utils/common_utils';
import dropzoneInput from './dropzone_input';
import { addMarkdownListeners, removeMarkdownListeners } from './lib/utils/text_markdown';
-import { PRELOAD_THROTTLE_TIMEOUT_MS } from './constants';
export default class GLForm {
/**
@@ -69,21 +68,6 @@ export default class GLForm {
);
this.autoComplete = new GfmAutoComplete(dataSources);
this.autoComplete.setup(this.form.find('.js-gfm-input'), this.enableGFM);
-
- if (this.preloadMembers && dataSources?.members) {
- // for now the preload is only implemented for the members
- // timeout helping to trottle the preloads in the case content_editor
- // is set as main comment editor and support for rspec tests
- // https://gitlab.com/gitlab-org/gitlab/-/issues/427437
-
- requestIdleCallback(() =>
- setTimeout(
- () => this.autoComplete?.fetchData($('.js-gfm-input'), '@'),
- PRELOAD_THROTTLE_TIMEOUT_MS,
- ),
- );
- }
-
this.formDropzone = dropzoneInput(this.form, { parallelUploads: 1 });
if (this.form.is(':not(.js-no-autosize)')) {
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/checks/constants.js b/app/assets/javascripts/vue_merge_request_widget/components/checks/constants.js
index 24bc7017e06..88efcfa46e7 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/checks/constants.js
+++ b/app/assets/javascripts/vue_merge_request_widget/components/checks/constants.js
@@ -1,3 +1,5 @@
+import { __ } from '~/locale';
+
export const COMPONENTS = {
conflict: () => import('./conflicts.vue'),
discussions_not_resolved: () => import('./unresolved_discussions.vue'),
@@ -5,3 +7,18 @@ export const COMPONENTS = {
need_rebase: () => import('./rebase.vue'),
default: () => import('./message.vue'),
};
+
+export const FAILURE_REASONS = {
+ broken_status: __('Cannot merge the source into the target branch, due to a conflict.'),
+ ci_must_pass: __('Pipeline must succeed.'),
+ conflict: __('Merge conflicts must be resolved.'),
+ discussions_not_resolved: __('Unresolved discussions must be resolved.'),
+ draft_status: __('Merge request must not be draft.'),
+ not_open: __('Merge request must be open.'),
+ need_rebase: __('Merge request must be rebased, because a fast-forward merge is not possible.'),
+ not_approved: __('All required approvals must be given.'),
+ policies_denied: __('Denied licenses must be removed or approved.'),
+ merge_request_blocked: __('Merge request is blocked by another merge request.'),
+ status_checks_must_pass: __('Status checks must pass.'),
+ jira_association_missing: __('Either the title or description must reference a Jira issue.'),
+};
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/checks/message.vue b/app/assets/javascripts/vue_merge_request_widget/components/checks/message.vue
index 7f21445559a..da3cb1397dd 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/checks/message.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/checks/message.vue
@@ -1,6 +1,6 @@
<script>
-import { __ } from '~/locale';
import StatusIcon from '../widget/status_icon.vue';
+import { FAILURE_REASONS } from './constants';
const ICON_NAMES = {
failed: 'failed',
@@ -8,21 +8,6 @@ const ICON_NAMES = {
success: 'success',
};
-export const FAILURE_REASONS = {
- broken_status: __('Cannot merge the source into the target branch, due to a conflict.'),
- ci_must_pass: __('Pipeline must succeed.'),
- conflict: __('Merge conflicts must be resolved.'),
- discussions_not_resolved: __('Unresolved discussions must be resolved.'),
- draft_status: __('Merge request must not be draft.'),
- not_open: __('Merge request must be open.'),
- need_rebase: __('Merge request must be rebased, because a fast-forward merge is not possible.'),
- not_approved: __('All required approvals must be given.'),
- policies_denied: __('Denied licenses must be removed or approved.'),
- merge_request_blocked: __('Merge request is blocked by another merge request.'),
- status_checks_must_pass: __('Status checks must pass.'),
- jira_association_missing: __('Either the title or description must reference a Jira issue.'),
-};
-
export default {
name: 'MergeChecksMessage',
components: {
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/merge_checks.vue b/app/assets/javascripts/vue_merge_request_widget/components/merge_checks.vue
index 9afed170097..016278db4ca 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/merge_checks.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/merge_checks.vue
@@ -3,7 +3,10 @@ import { GlSkeletonLoader } from '@gitlab/ui';
import { __, n__, sprintf } from '~/locale';
import { TYPENAME_MERGE_REQUEST } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
-import { COMPONENTS } from '~/vue_merge_request_widget/components/checks/constants';
+import {
+ COMPONENTS,
+ FAILURE_REASONS,
+} from '~/vue_merge_request_widget/components/checks/constants';
import mergeRequestQueryVariablesMixin from '../mixins/merge_request_query_variables';
import mergeChecksQuery from '../queries/merge_checks.query.graphql';
import mergeChecksSubscription from '../queries/merge_checks.subscription.graphql';
@@ -102,7 +105,7 @@ export default {
const order = ['FAILED', 'SUCCESS'];
return [...this.checks]
- .filter((s) => s.status !== 'INACTIVE')
+ .filter((s) => s.status !== 'INACTIVE' && FAILURE_REASONS[s.identifier.toLowerCase()])
.sort((a, b) => order.indexOf(a.status) - order.indexOf(b.status));
},
failedChecks() {
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/base_token.vue b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/base_token.vue
index 5d72ac34e73..8ea97ad73b4 100644
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/base_token.vue
+++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/base_token.vue
@@ -78,16 +78,12 @@ export default {
required: false,
default: undefined,
},
- multiSelectValues: {
- type: Array,
- required: false,
- default: () => [],
- },
},
data() {
return {
hasFetched: false, // use this to avoid flash of `No suggestions found` before fetching
searchKey: '',
+ selectedTokens: [],
recentSuggestions: this.config.recentSuggestionsStorageKey
? getRecentlyUsedSuggestions(this.config.recentSuggestionsStorageKey) ?? []
: [],
@@ -197,6 +193,30 @@ export default {
}
},
},
+ value: {
+ deep: true,
+ immediate: true,
+ handler(newValue) {
+ const { data } = newValue;
+
+ if (!this.multiSelectEnabled) {
+ return;
+ }
+
+      // don't add empty values to selectedTokens
+ if (!data) {
+ return;
+ }
+
+ if (Array.isArray(data)) {
+ this.selectedTokens = data;
+ // !active so we don't add strings while searching, e.g. r, ro, roo
+        // !includes so we don't add the same value twice (if @input is emitted twice)
+ } else if (!this.active && !this.selectedTokens.includes(data)) {
+ this.selectedTokens = this.selectedTokens.concat(data);
+ }
+ },
+ },
},
methods: {
handleInput: debounce(function debouncedSearch({ data, operator }) {
@@ -222,7 +242,15 @@ export default {
}, DEBOUNCE_DELAY),
handleTokenValueSelected(selectedValue) {
if (this.multiSelectEnabled) {
- this.$emit('token-selected', selectedValue);
+ const index = this.selectedTokens.indexOf(selectedValue);
+ if (index > -1) {
+ this.selectedTokens.splice(index, 1);
+ } else {
+ this.selectedTokens.push(selectedValue);
+ }
+
+ // need to clear search
+ this.$emit('input', { ...this.value, data: '' });
}
const activeTokenValue = this.getActiveTokenValue(this.suggestions, selectedValue);
@@ -253,7 +281,7 @@ export default {
:config="validatedConfig"
:value="value"
:active="active"
- :multi-select-values="multiSelectValues"
+ :multi-select-values="selectedTokens"
v-bind="$attrs"
v-on="$listeners"
@input="handleInput"
@@ -265,6 +293,7 @@ export default {
:view-token-props="/* eslint-disable @gitlab/vue-no-new-non-primitive-in-template */ {
...viewTokenProps,
activeTokenValue,
+ selectedTokens,
} /* eslint-enable @gitlab/vue-no-new-non-primitive-in-template */"
></slot>
</template>
@@ -274,6 +303,7 @@ export default {
:view-token-props="/* eslint-disable @gitlab/vue-no-new-non-primitive-in-template */ {
...viewTokenProps,
activeTokenValue,
+ selectedTokens,
} /* eslint-enable @gitlab/vue-no-new-non-primitive-in-template */"
></slot>
</template>
@@ -290,17 +320,26 @@ export default {
</template>
<template v-if="showRecentSuggestions">
<gl-dropdown-section-header>{{ __('Recently used') }}</gl-dropdown-section-header>
- <slot name="suggestions-list" :suggestions="recentSuggestions"></slot>
+ <slot
+ name="suggestions-list"
+ :suggestions="recentSuggestions"
+ :selections="selectedTokens"
+ ></slot>
<gl-dropdown-divider />
</template>
<slot
v-if="showPreloadedSuggestions"
name="suggestions-list"
:suggestions="preloadedSuggestions"
+ :selections="selectedTokens"
></slot>
<gl-loading-icon v-if="suggestionsLoading" size="sm" />
<template v-else-if="showAvailableSuggestions">
- <slot name="suggestions-list" :suggestions="availableSuggestions"></slot>
+ <slot
+ name="suggestions-list"
+ :suggestions="availableSuggestions"
+ :selections="selectedTokens"
+ ></slot>
</template>
<gl-dropdown-text v-else-if="showNoMatchesText">
{{ __('No matches found') }}
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/user_token.vue b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/user_token.vue
index 87e295d00dd..8cf4759d419 100644
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/user_token.vue
+++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/user_token.vue
@@ -6,8 +6,7 @@ import { __ } from '~/locale';
import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import usersAutocompleteQuery from '~/graphql_shared/queries/users_autocomplete.query.graphql';
-import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
-import { OPERATORS_TO_GROUP, OPTIONS_NONE_ANY } from '../constants';
+import { OPTIONS_NONE_ANY } from '../constants';
import BaseToken from './base_token.vue';
@@ -19,7 +18,6 @@ export default {
GlIntersperse,
GlFilteredSearchSuggestion,
},
- mixins: [glFeatureFlagMixin()],
props: {
config: {
type: Object,
@@ -40,7 +38,6 @@ export default {
users: this.config.initialUsers || [],
allUsers: this.config.initialUsers || [],
loading: false,
- selectedUsernames: [],
};
},
computed: {
@@ -56,39 +53,6 @@ export default {
fetchUsersQuery() {
return this.config.fetchUsers ? this.config.fetchUsers : this.fetchUsersBySearchTerm;
},
- multiSelectEnabled() {
- return (
- this.config.multiSelect &&
- this.glFeatures.groupMultiSelectTokens &&
- OPERATORS_TO_GROUP.includes(this.value.operator)
- );
- },
- },
- watch: {
- value: {
- deep: true,
- immediate: true,
- handler(newValue) {
- const { data } = newValue;
-
- if (!this.multiSelectEnabled) {
- return;
- }
-
- // don't add empty values to selectedUsernames
- if (!data) {
- return;
- }
-
- if (Array.isArray(data)) {
- this.selectedUsernames = data;
- // !active so we don't add strings while searching, e.g. r, ro, roo
- // !includes so we don't add the same usernames (if @input is emitted twice)
- } else if (!this.active && !this.selectedUsernames.includes(data)) {
- this.selectedUsernames = this.selectedUsernames.concat(data);
- }
- },
- },
},
methods: {
getActiveUser(users, data) {
@@ -104,26 +68,6 @@ export default {
const user = this.getActiveUser(this.allUsers, username);
return this.getAvatarUrl(user);
},
- addCheckIcon(username) {
- return this.multiSelectEnabled && this.selectedUsernames.includes(username);
- },
- addPadding(username) {
- return this.multiSelectEnabled && !this.selectedUsernames.includes(username);
- },
- handleSelected(username) {
- if (!this.multiSelectEnabled) {
- return;
- }
-
- const index = this.selectedUsernames.indexOf(username);
- if (index > -1) {
- this.selectedUsernames.splice(index, 1);
- } else {
- this.selectedUsernames.push(username);
- }
-
- this.$emit('input', { ...this.value, data: '' });
- },
fetchUsersBySearchTerm(search) {
return this.$apollo
.query({
@@ -171,16 +115,14 @@ export default {
:get-active-token-value="getActiveUser"
:default-suggestions="defaultUsers"
:preloaded-suggestions="preloadedUsers"
- :multi-select-values="selectedUsernames"
v-bind="$attrs"
@fetch-suggestions="fetchUsers"
- @token-selected="handleSelected"
v-on="$listeners"
>
- <template #view="{ viewTokenProps: { inputValue, activeTokenValue } }">
- <gl-intersperse v-if="multiSelectEnabled" separator=",">
+ <template #view="{ viewTokenProps: { inputValue, activeTokenValue, selectedTokens } }">
+ <gl-intersperse v-if="selectedTokens.length > 0" separator=",">
<span
- v-for="(username, index) in selectedUsernames"
+ v-for="(username, index) in selectedTokens"
:key="username"
:class="{ 'gl-ml-2': index > 0 }"
><gl-avatar :size="16" :src="avatarFor(username)" class="gl-mr-1" />{{
@@ -198,7 +140,7 @@ export default {
{{ activeTokenValue ? activeTokenValue.name : inputValue }}
</template>
</template>
- <template #suggestions-list="{ suggestions }">
+ <template #suggestions-list="{ suggestions, selections = [] }">
<gl-filtered-search-suggestion
v-for="user in suggestions"
:key="user.username"
@@ -206,10 +148,10 @@ export default {
>
<div
class="gl-display-flex gl-align-items-center"
- :class="{ 'gl-pl-6': addPadding(user.username) }"
+ :class="{ 'gl-pl-6': !selections.includes(user.username) }"
>
<gl-icon
- v-if="addCheckIcon(user.username)"
+ v-if="selections.includes(user.username)"
name="check"
class="gl-mr-3 gl-text-secondary gl-flex-shrink-0"
/>
diff --git a/app/controllers/projects/gcp/artifact_registry/base_controller.rb b/app/controllers/projects/gcp/artifact_registry/base_controller.rb
deleted file mode 100644
index 4084427f3e5..00000000000
--- a/app/controllers/projects/gcp/artifact_registry/base_controller.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- module Gcp
- module ArtifactRegistry
- class BaseController < ::Projects::ApplicationController
- before_action :ensure_feature_flag
- before_action :ensure_saas
- before_action :authorize_read_container_image!
- before_action :ensure_private_project
-
- feature_category :container_registry
- urgency :low
-
- private
-
- def ensure_feature_flag
- return if Feature.enabled?(:gcp_technical_demo, project)
-
- @error = 'Feature flag disabled'
-
- render
- end
-
- def ensure_saas
- return if Gitlab.com_except_jh? # rubocop: disable Gitlab/AvoidGitlabInstanceChecks -- demo requirement
-
- @error = "Can't run here"
-
- render
- end
-
- def ensure_private_project
- return if project.private?
-
- @error = 'Can only run on private projects'
-
- render
- end
- end
- end
- end
-end
diff --git a/app/controllers/projects/gcp/artifact_registry/docker_images_controller.rb b/app/controllers/projects/gcp/artifact_registry/docker_images_controller.rb
deleted file mode 100644
index 60adbbe6e5d..00000000000
--- a/app/controllers/projects/gcp/artifact_registry/docker_images_controller.rb
+++ /dev/null
@@ -1,135 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- module Gcp
- module ArtifactRegistry
- class DockerImagesController < Projects::Gcp::ArtifactRegistry::BaseController
- before_action :require_gcp_params
- before_action :handle_pagination
-
- REPO_NAME_REGEX = %r{/repositories/(.*)/dockerImages/}
-
- def index
- result = service.execute(page_token: params[:page_token])
-
- if result.success?
- @docker_images = process_docker_images(result.payload[:images] || [])
- @next_page_token = result.payload[:next_page_token]
- @artifact_repository_name = artifact_repository_name
- @error = @docker_images.blank? ? 'No docker images' : false
- else
- @error = result.message
- end
- end
-
- private
-
- def service
- ::GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService.new(
- project: @project,
- current_user: current_user,
- params: {
- gcp_project_id: gcp_project_id,
- gcp_location: gcp_location,
- gcp_repository: gcp_ar_repository,
- gcp_wlif: gcp_wlif_url
- }
- )
- end
-
- def process_docker_images(raw_images)
- raw_images.map { |r| process_docker_image(r) }
- end
-
- def process_docker_image(raw_image)
- DockerImage.new(
- name: raw_image[:name],
- uri: raw_image[:uri],
- tags: raw_image[:tags],
- image_size_bytes: raw_image[:size_bytes],
- media_type: raw_image[:media_type],
- upload_time: raw_image[:uploaded_at],
- build_time: raw_image[:built_at],
- update_time: raw_image[:updated_at]
- )
- end
-
- def artifact_repository_name
- return unless @docker_images.present?
-
- (@docker_images.first.name || '')[REPO_NAME_REGEX, 1]
- end
-
- def handle_pagination
- @page = Integer(params[:page] || 1)
- @page_tokens = {}
- @previous_page_token = nil
-
- if params[:page_tokens]
- @page_tokens = ::Gitlab::Json.parse(Base64.decode64(params[:page_tokens]))
- @previous_page_token = @page_tokens[(@page - 1).to_s]
- end
-
- @page_tokens[@page.to_s] = params[:page_token]
- @page_tokens = Base64.encode64(::Gitlab::Json.dump(@page_tokens.compact))
- end
-
- def require_gcp_params
- return unless gcp_project_id.blank? || gcp_location.blank? || gcp_ar_repository.blank? || gcp_wlif_url.blank?
-
- redirect_to new_namespace_project_gcp_artifact_registry_setup_path
- end
-
- def gcp_project_id
- params[:gcp_project_id]
- end
-
- def gcp_location
- params[:gcp_location]
- end
-
- def gcp_ar_repository
- params[:gcp_ar_repository]
- end
-
- def gcp_wlif_url
- params[:gcp_wlif_url]
- end
-
- class DockerImage
- include ActiveModel::API
-
- attr_accessor :name, :uri, :tags, :image_size_bytes, :upload_time, :media_type, :build_time, :update_time
-
- SHORT_NAME_REGEX = %r{dockerImages/(.*)$}
-
- def short_name
- (name || '')[SHORT_NAME_REGEX, 1]
- end
-
- def updated_at
- return unless update_time
-
- Time.zone.parse(update_time)
- end
-
- def built_at
- return unless build_time
-
- Time.zone.parse(build_time)
- end
-
- def uploaded_at
- return unless upload_time
-
- Time.zone.parse(upload_time)
- end
-
- def details_url
- "https://#{uri}"
- end
- end
- end
- end
- end
-end
diff --git a/app/controllers/projects/gcp/artifact_registry/setup_controller.rb b/app/controllers/projects/gcp/artifact_registry/setup_controller.rb
deleted file mode 100644
index e90304ce593..00000000000
--- a/app/controllers/projects/gcp/artifact_registry/setup_controller.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- module Gcp
- module ArtifactRegistry
- class SetupController < ::Projects::Gcp::ArtifactRegistry::BaseController
- def new; end
- end
- end
- end
-end
diff --git a/app/graphql/resolvers/namespace_projects_resolver.rb b/app/graphql/resolvers/namespace_projects_resolver.rb
index f0781058bea..1e8a7365fc0 100644
--- a/app/graphql/resolvers/namespace_projects_resolver.rb
+++ b/app/graphql/resolvers/namespace_projects_resolver.rb
@@ -7,6 +7,11 @@ module Resolvers
default_value: false,
description: 'Include also subgroup projects.'
+ argument :include_archived, GraphQL::Types::Boolean,
+ required: false,
+ default_value: true,
+ description: 'Include also archived projects.'
+
argument :not_aimed_for_deletion, GraphQL::Types::Boolean,
required: false,
default_value: false,
@@ -65,6 +70,7 @@ module Resolvers
def finder_params(args)
{
include_subgroups: args.dig(:include_subgroups),
+ include_archived: args.dig(:include_archived),
not_aimed_for_deletion: args.dig(:not_aimed_for_deletion),
sort: args.dig(:sort),
search: args.dig(:search),
diff --git a/app/helpers/dashboard_helper.rb b/app/helpers/dashboard_helper.rb
index 3756584e3b3..89f6d61ef44 100644
--- a/app/helpers/dashboard_helper.rb
+++ b/app/helpers/dashboard_helper.rb
@@ -33,6 +33,10 @@ module DashboardHelper
end
end
end
+
+ def user_groups_requiring_reauth
+ []
+ end
end
DashboardHelper.prepend_mod_with('DashboardHelper')
diff --git a/app/helpers/todos_helper.rb b/app/helpers/todos_helper.rb
index fc4d69dcdbc..7d29cd7a877 100644
--- a/app/helpers/todos_helper.rb
+++ b/app/helpers/todos_helper.rb
@@ -254,6 +254,10 @@ module TodosHelper
!todo.build_failed? && !todo.unmergeable?
end
+ def todo_groups_requiring_saml_reauth(_todos)
+ []
+ end
+
private
def todos_design_path(todo, path_options)
diff --git a/app/mailers/previews/notify_preview.rb b/app/mailers/previews/notify_preview.rb
index c7d6f2843de..1b083c70bba 100644
--- a/app/mailers/previews/notify_preview.rb
+++ b/app/mailers/previews/notify_preview.rb
@@ -381,7 +381,7 @@ class NotifyPreview < ActionMailer::Preview
def custom_email_credential
@custom_email_credential ||= project.service_desk_custom_email_credential || ServiceDesk::CustomEmailCredential.create!(
project: project,
- smtp_address: 'smtp.gmail.com', # Use gmail, because Gitlab::UrlBlocker resolves DNS
+ smtp_address: 'smtp.gmail.com', # Use gmail, because Gitlab::HTTP_V2::UrlBlocker resolves DNS
smtp_port: 587,
smtp_username: 'user@gmail.com',
smtp_password: 'supersecret'
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index 238556f0cf0..9020f90fd3c 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -472,12 +472,8 @@ class Namespace < ApplicationRecord
false
end
- def all_project_ids
- all_projects.pluck(:id)
- end
-
def all_project_ids_except(ids)
- all_projects.where.not(id: ids).pluck(:id)
+ all_project_ids.where.not(id: ids)
end
# Deprecated, use #licensed_feature_available? instead. Remove once Namespace#feature_available? isn't used anymore.
diff --git a/app/models/namespaces/traversal/cached.rb b/app/models/namespaces/traversal/cached.rb
index 55eaaa4667e..b962038d039 100644
--- a/app/models/namespaces/traversal/cached.rb
+++ b/app/models/namespaces/traversal/cached.rb
@@ -10,8 +10,62 @@ module Namespaces
after_destroy :invalidate_descendants_cache
end
+ override :self_and_descendant_ids
+ def self_and_descendant_ids
+ return super unless attempt_to_use_cached_data?
+
+ scope_with_cached_ids(
+ super,
+ self.class,
+ Namespaces::Descendants.arel_table[:self_and_descendant_group_ids]
+ )
+ end
+
+ override :all_project_ids
+ def all_project_ids
+ return super unless attempt_to_use_cached_data?
+
+ scope_with_cached_ids(
+ all_projects.select(:id),
+ Project,
+ Namespaces::Descendants.arel_table[:all_project_ids]
+ )
+ end
+
private
+    # This method implements an OR-based cache lookup using COALESCE, similar to what you would do in Ruby:
+ # return cheap_cached_data || expensive_uncached_data
+ def scope_with_cached_ids(consistent_ids_scope, model, cached_ids_column)
+ # Look up the cached ids and unnest them into rows if the cache is up to date.
+ cache_lookup_query = Namespaces::Descendants
+ .where(outdated_at: nil, namespace_id: id)
+ .select(cached_ids_column.as('ids'))
+
+ # Invoke the consistent lookup query and collect the ids as a single array value
+ consistent_descendant_ids_scope = model
+ .from(consistent_ids_scope.arel.as(model.table_name))
+ .reselect(Arel::Nodes::NamedFunction.new('ARRAY_AGG', [model.arel_table[:id]]).as('ids'))
+ .unscope(where: :type)
+
+ from = <<~SQL
+ UNNEST(
+ COALESCE(
+ (SELECT ids FROM (#{cache_lookup_query.to_sql}) cached_query),
+ (SELECT ids FROM (#{consistent_descendant_ids_scope.to_sql}) consistent_query))
+ ) AS #{model.table_name}(id)
+ SQL
+
+ model
+ .from(from)
+ .unscope(where: :type)
+ .select(:id)
+ end
+
+ def attempt_to_use_cached_data?
+ Feature.enabled?(:group_hierarchy_optimization, self, type: :beta)
+ end
+
override :sync_traversal_ids
def sync_traversal_ids
super
diff --git a/app/models/namespaces/traversal/linear.rb b/app/models/namespaces/traversal/linear.rb
index c3348c49ea1..5779b777fd7 100644
--- a/app/models/namespaces/traversal/linear.rb
+++ b/app/models/namespaces/traversal/linear.rb
@@ -106,6 +106,10 @@ module Namespaces
end
end
+ def all_project_ids
+ all_projects.select(:id)
+ end
+
def self_and_descendants
return super unless use_traversal_ids?
diff --git a/app/models/namespaces/traversal/recursive.rb b/app/models/namespaces/traversal/recursive.rb
index 1c5d395cb3c..3d551243cfb 100644
--- a/app/models/namespaces/traversal/recursive.rb
+++ b/app/models/namespaces/traversal/recursive.rb
@@ -19,6 +19,12 @@ module Namespaces
end
alias_method :recursive_root_ancestor, :root_ancestor
+ def all_project_ids
+ namespace = user_namespace? ? self : recursive_self_and_descendant_ids
+ Project.where(namespace: namespace).select(:id)
+ end
+ alias_method :recursive_all_project_ids, :all_project_ids
+
# Returns all ancestors, self, and descendants of the current namespace.
def self_and_hierarchy
object_hierarchy(self.class.where(id: id))
diff --git a/app/models/user_detail.rb b/app/models/user_detail.rb
index bbb08ed5774..e6dc99d114b 100644
--- a/app/models/user_detail.rb
+++ b/app/models/user_detail.rb
@@ -39,6 +39,7 @@ class UserDetail < MainClusterwide::ApplicationRecord
validates :skype, length: { maximum: DEFAULT_FIELD_LENGTH }, allow_blank: true
validates :twitter, length: { maximum: DEFAULT_FIELD_LENGTH }, allow_blank: true
validates :website_url, length: { maximum: DEFAULT_FIELD_LENGTH }, url: true, allow_blank: true, if: :website_url_changed?
+ validates :onboarding_status, json_schema: { filename: 'user_detail_onboarding_status' }
before_validation :sanitize_attrs
before_save :prevent_nil_fields
diff --git a/app/services/groups/update_service.rb b/app/services/groups/update_service.rb
index a6ef8c8743b..bdf943091e9 100644
--- a/app/services/groups/update_service.rb
+++ b/app/services/groups/update_service.rb
@@ -61,7 +61,8 @@ module Groups
params[:namespace_descendants_attributes] = {
traversal_ids: group.traversal_ids,
all_project_ids: [],
- self_and_descendant_group_ids: []
+ self_and_descendant_group_ids: [],
+ outdated_at: Time.current
}
else
return unless group.namespace_descendants
diff --git a/app/validators/json_schemas/user_detail_onboarding_status.json b/app/validators/json_schemas/user_detail_onboarding_status.json
new file mode 100644
index 00000000000..548e81f1955
--- /dev/null
+++ b/app/validators/json_schemas/user_detail_onboarding_status.json
@@ -0,0 +1,17 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Onboarding Status",
+ "description": "Onboarding Status items recorded during onboarding/registration",
+ "type": "object",
+ "properties": {
+ "step_url": {
+ "description": "Onboarding step the user is currently on or last step before finishing",
+ "type": "string"
+ },
+ "email_opt_in": {
+ "description": "Setting to guide marketing email opt-ins outside of the product. See https://gitlab.com/gitlab-org/gitlab/-/issues/435741",
+ "type": "boolean"
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/app/views/dashboard/issues.html.haml b/app/views/dashboard/issues.html.haml
index 78c3270114e..f7b2ba59549 100644
--- a/app/views/dashboard/issues.html.haml
+++ b/app/views/dashboard/issues.html.haml
@@ -6,6 +6,8 @@
= auto_discovery_link_tag(:atom, safe_params.merge(rss_url_options).to_h, title: "#{current_user.name} issues")
= render_dashboard_ultimate_trial(current_user)
+= render_if_exists 'shared/dashboard/saml_reauth_notice',
+ groups_requiring_saml_reauth: user_groups_requiring_reauth
.page-title-holder.gl-display-flex.gl-align-items-center
%h1.page-title.gl-font-size-h-display= _('Issues')
diff --git a/app/views/dashboard/merge_requests.html.haml b/app/views/dashboard/merge_requests.html.haml
index 91cec50226b..d29cb56db07 100644
--- a/app/views/dashboard/merge_requests.html.haml
+++ b/app/views/dashboard/merge_requests.html.haml
@@ -11,6 +11,8 @@
add_page_specific_style 'page_bundles/issuable_list'
= render_dashboard_ultimate_trial(current_user)
+= render_if_exists 'shared/dashboard/saml_reauth_notice',
+ groups_requiring_saml_reauth: user_groups_requiring_reauth
.page-title-holder.d-flex.align-items-start.flex-column.flex-sm-row.align-items-sm-center
%h1.page-title.gl-font-size-h-display= title
diff --git a/app/views/dashboard/todos/index.html.haml b/app/views/dashboard/todos/index.html.haml
index 1b0bd10db77..0587ba61db4 100644
--- a/app/views/dashboard/todos/index.html.haml
+++ b/app/views/dashboard/todos/index.html.haml
@@ -2,7 +2,10 @@
= render_two_factor_auth_recovery_settings_check
= render_dashboard_ultimate_trial(current_user)
-= render_if_exists 'dashboard/todos/saml_reauth_notice'
+
+= render_if_exists 'shared/dashboard/saml_reauth_notice',
+ groups_requiring_saml_reauth: todo_groups_requiring_saml_reauth(@todos)
+
- add_page_specific_style 'page_bundles/todos'
- add_page_specific_style 'page_bundles/issuable'
- filter_by_done = params[:state] == 'done'
diff --git a/app/views/layouts/devise.html.haml b/app/views/layouts/devise.html.haml
index 0ae2e5337f5..a7e078b074a 100644
--- a/app/views/layouts/devise.html.haml
+++ b/app/views/layouts/devise.html.haml
@@ -2,7 +2,7 @@
- custom_text = custom_sign_in_description
!!! 5
%html.html-devise-layout{ class: user_application_theme, lang: I18n.locale }
- = render "layouts/head", { startup_filename: 'signin' }
+ = render "layouts/head"
%body.gl-h-full.login-page.navless{ class: "#{system_message_class} #{client_class_list}", data: { page: body_data_page, testid: 'login-page' } }
= header_message
= render "layouts/init_client_detection_flags"
@@ -15,12 +15,12 @@
.row.gl-mt-5.gl-row-gap-5
.col-md.order-12.sm-bg-gray
.col-sm-12
- %h1.mb-3.gl-font-size-h2
+ %h1.gl-mb-5.gl-font-size-h2
= brand_title
= custom_text
.col-md.order-md-12
.col-sm-12.bar
- .gl-text-center
+ .gl-text-center.gl-mb-5
= brand_image
= yield
- else
diff --git a/app/views/projects/gcp/artifact_registry/docker_images/_docker_image.html.haml b/app/views/projects/gcp/artifact_registry/docker_images/_docker_image.html.haml
deleted file mode 100644
index 750dea9896f..00000000000
--- a/app/views/projects/gcp/artifact_registry/docker_images/_docker_image.html.haml
+++ /dev/null
@@ -1,33 +0,0 @@
-.gl-display-flex.gl-flex-direction-column
- .gl-display-flex.gl-flex-direction-column.gl-border-b-solid.gl-border-t-solid.gl-border-t-1.gl-border-b-1.gl-border-t-transparent.gl-border-b-gray-100
- .gl-display-flex.gl-align-items-center.gl-py-3
- .gl-display-flex.gl-flex-direction-column.gl-sm-flex-direction-row.gl-justify-content-space-between.gl-align-items-stretch.gl-flex-grow-1
- .gl-display-flex.gl-flex-direction-column.gl-mb-3.gl-sm-mb-0.gl-min-w-0.gl-flex-grow-1
- .gl-display-flex.gl-align-items-center.gl-text-body.gl-font-weight-bold.gl-font-size-h2
- %span.gl-text-body.gl-font-weight-bold= docker_image.short_name
- .gl-bg-gray-50.gl-inset-border-1-gray-100.gl-rounded-base.gl-pt-6
- .gl-display-flex.gl-align-items-top.gl-font-monospace.gl-font-sm.gl-word-break-all.gl-p-4.gl-border-b-solid.gl-border-gray-100.gl-border-b-1
- = sprite_icon('information-o', css_class: 'gl-text-gray-500 gl-mr-3 gl-icon s16')
- Full name: #{docker_image.name}
- .gl-display-flex.gl-align-items-top.gl-font-monospace.gl-font-sm.gl-word-break-all.gl-p-4.gl-border-b-solid.gl-border-gray-100.gl-border-b-1
- = sprite_icon('earth', css_class: 'gl-text-gray-500 gl-mr-3 gl-icon s16')
- %a{ href: docker_image.details_url, target: 'blank', rel: 'noopener noreferrer' }
- Artifact Registry details page
- .gl-display-flex.gl-align-items-top.gl-font-monospace.gl-font-sm.gl-word-break-all.gl-p-4.gl-border-b-solid.gl-border-gray-100.gl-border-b-1
- = sprite_icon('doc-code', css_class: 'gl-text-gray-500 gl-mr-3 gl-icon s16')
- Media Type: #{docker_image.media_type}
- .gl-display-flex.gl-align-items-top.gl-font-monospace.gl-font-sm.gl-word-break-all.gl-p-4.gl-border-b-solid.gl-border-gray-100.gl-border-b-1
- = sprite_icon('archive', css_class: 'gl-text-gray-500 gl-mr-3 gl-icon s16')
- Size: #{number_to_human_size(docker_image.image_size_bytes)}
- .gl-display-flex.gl-align-items-top.gl-font-monospace.gl-font-sm.gl-word-break-all.gl-p-4.gl-border-b-solid.gl-border-gray-100.gl-border-b-1
- = sprite_icon('calendar', css_class: 'gl-text-gray-500 gl-mr-3 gl-icon s16')
- Built at: #{docker_image.built_at&.to_fs}
- .gl-display-flex.gl-align-items-top.gl-font-monospace.gl-font-sm.gl-word-break-all.gl-p-4.gl-border-b-solid.gl-border-gray-100.gl-border-b-1
- = sprite_icon('calendar', css_class: 'gl-text-gray-500 gl-mr-3 gl-icon s16')
- Uploaded at: #{docker_image.uploaded_at&.to_fs}
- .gl-display-flex.gl-align-items-top.gl-font-monospace.gl-font-sm.gl-word-break-all.gl-p-4.gl-border-b-solid.gl-border-gray-100.gl-border-b-1
- = sprite_icon('calendar', css_class: 'gl-text-gray-500 gl-mr-3 gl-icon s16')
- Updated at: #{docker_image.updated_at&.to_fs}
- - if docker_image.tags.present?
- .gl-display-flex.gl-align-items-center.gl-text-gray-500.gl-min-h-6.gl-min-w-0.gl-flex-grow-1.gl-pt-4
- = render partial: 'docker_image_tag', collection: docker_image.tags
diff --git a/app/views/projects/gcp/artifact_registry/docker_images/_docker_image_tag.html.haml b/app/views/projects/gcp/artifact_registry/docker_images/_docker_image_tag.html.haml
deleted file mode 100644
index a030cd7d634..00000000000
--- a/app/views/projects/gcp/artifact_registry/docker_images/_docker_image_tag.html.haml
+++ /dev/null
@@ -1 +0,0 @@
-%a.gl-button.btn.btn-md.btn-default.gl-mr-3!= docker_image_tag
diff --git a/app/views/projects/gcp/artifact_registry/docker_images/_pagination.html.haml b/app/views/projects/gcp/artifact_registry/docker_images/_pagination.html.haml
deleted file mode 100644
index df98ba8d68e..00000000000
--- a/app/views/projects/gcp/artifact_registry/docker_images/_pagination.html.haml
+++ /dev/null
@@ -1,13 +0,0 @@
-.gl-display-flex.gl-justify-content-center
- %nav.gl-pagination.gl-mt-3
- .gl-keyset-pagination.btn-group
- - if @page > 1
- = link_to 'Prev', namespace_project_gcp_artifact_registry_docker_images_path(params[:namespace_id], params[:project_id], page_token: @previous_page_token, page_tokens: @page_tokens, page: @page - 1, gcp_project_id: params[:gcp_project_id], gcp_location: params[:gcp_location], gcp_ar_repository: params[:gcp_ar_repository], gcp_wlif_url: params[:gcp_wlif_url]), class: 'btn btn-default btn-md gl-button'
- - else
- %span.btn.btn-default.btn-md.gl-button.disabled= 'Prev'
- - if @next_page_token.present?
- = link_to 'Next', namespace_project_gcp_artifact_registry_docker_images_path(params[:namespace_id], params[:project_id], page_token: @next_page_token, page_tokens: @page_tokens, page: @page + 1, gcp_project_id: params[:gcp_project_id], gcp_location: params[:gcp_location], gcp_ar_repository: params[:gcp_ar_repository], gcp_wlif_url: params[:gcp_wlif_url]), class: 'btn btn-default btn-md gl-button'
- - else
- %span.btn.btn-default.btn-md.gl-button.disabled= 'Next'
-
-
diff --git a/app/views/projects/gcp/artifact_registry/docker_images/index.html.haml b/app/views/projects/gcp/artifact_registry/docker_images/index.html.haml
deleted file mode 100644
index b487a175691..00000000000
--- a/app/views/projects/gcp/artifact_registry/docker_images/index.html.haml
+++ /dev/null
@@ -1,23 +0,0 @@
-- page_title 'Artifact Registry Docker Images'
-
-- unless @error
- .gl-display-flex.gl-flex-direction-column
- .gl-display-flex.gl-justify-content-space-between.gl-py-3
- .gl-flex-direction-column.gl-flex-grow-1
- .gl-display-flex
- .gl-display-flex.gl-flex-direction-column
- %h2.gl-font-size-h1.gl-mt-3.gl-mb-0 Docker Images of #{@artifact_repository_name}
- = render partial: 'pagination'
- = render partial: 'docker_image', collection: @docker_images
- = render partial: 'pagination'
-- else
- .flash-container.flash-container-page.sticky
- .gl-alert.flash-notice.gl-alert-info
- .gl-alert-icon-container
- = sprite_icon('information-o', css_class: 's16 gl-alert-icon gl-alert-icon-no-title')
- .gl-alert-content
- .gl-alert-body
- - if @error
- = @error
- - else
- Nothing to show here.
diff --git a/app/views/projects/gcp/artifact_registry/setup/new.html.haml b/app/views/projects/gcp/artifact_registry/setup/new.html.haml
deleted file mode 100644
index 39ce0093372..00000000000
--- a/app/views/projects/gcp/artifact_registry/setup/new.html.haml
+++ /dev/null
@@ -1,31 +0,0 @@
-- page_title 'Artifact Registry Setup'
-
-- if @error.present?
- .flash-container.flash-container-page.sticky
- .gl-alert.flash-notice.gl-alert-info
- .gl-alert-icon-container
- = sprite_icon('information-o', css_class: 's16 gl-alert-icon gl-alert-icon-no-title')
- .gl-alert-content
- .gl-alert-body= @error
-- else
- %p
-
- = form_tag namespace_project_gcp_artifact_registry_docker_images_path , method: :get do
- .form-group.row
- = label_tag :gcp_project_id, 'Google Project ID', class: 'col-form-label col-md-2'
- .col-md-4
- = text_field_tag :gcp_project_id, nil, class: 'form-control gl-form-input gl-mr-3'
- .form-group.row
- = label_tag :gcp_location, 'Google Project Location', class: 'col-form-label col-md-2'
- .col-md-4
- = text_field_tag :gcp_location, nil, class: 'form-control gl-form-input gl-mr-3'
- .form-group.row
- = label_tag :gcp_ar_repository, 'Artifact Registry Repository Name', class: 'col-form-label col-md-2'
- .col-md-4
- = text_field_tag :gcp_ar_repository, nil, class: 'form-control gl-form-input gl-mr-3'
- .form-group.row
- = label_tag :gcp_wlif_url, 'Worflow Identity Federation url', class: 'col-form-label col-md-2'
- .col-md-4
- = text_field_tag :gcp_wlif_url, nil, class: 'form-control gl-form-input gl-mr-3'
- .form-actions
- = submit_tag 'Setup', class: 'gl-button btn btn-confirm'
diff --git a/app/views/shared/wikis/edit.html.haml b/app/views/shared/wikis/edit.html.haml
index ce8c7782c7f..ffe479329b4 100644
--- a/app/views/shared/wikis/edit.html.haml
+++ b/app/views/shared/wikis/edit.html.haml
@@ -1,3 +1,4 @@
+- breadcrumb_title(s_("Wiki|New Page")) unless @page.persisted?
- wiki_page_title @page, @page.persisted? ? _('Edit') : _('New')
- add_page_specific_style 'page_bundles/wiki'
- @gfm_form = true
@@ -16,7 +17,7 @@
&middot;
= s_("Wiki|Edit Page")
- else
- = s_("Wiki|Create New Page")
+ = s_("Wiki|New Page")
.nav-controls.pb-md-3.pb-lg-0
- if @page.persisted?
diff --git a/bin/saas-feature.rb b/bin/saas-feature.rb
new file mode 100755
index 00000000000..878c204d381
--- /dev/null
+++ b/bin/saas-feature.rb
@@ -0,0 +1,381 @@
+#!/usr/bin/env ruby
+#
+# Generate a SaaS feature entry file in the correct location.
+#
+# Automatically stages the file and amends the previous commit if the `--amend`
+# argument is used.
+
+require 'fileutils'
+require 'httparty'
+require 'json'
+require 'optparse'
+require 'readline'
+require 'shellwords'
+require 'uri'
+require 'yaml'
+
+require_relative '../lib/gitlab/popen'
+
+module SaasFeatureHelpers
+ Abort = Class.new(StandardError)
+ Done = Class.new(StandardError)
+
+ def capture_stdout(cmd)
+ output = IO.popen(cmd, &:read)
+ fail_with "command failed: #{cmd.join(' ')}" unless $?.success?
+ output
+ end
+
+ def fail_with(message)
+ raise Abort, "\e[31merror\e[0m #{message}"
+ end
+end
+
+class SaasFeatureOptionParser
+ extend SaasFeatureHelpers
+
+ WWW_GITLAB_COM_SITE = 'https://about.gitlab.com'
+ WWW_GITLAB_COM_GROUPS_JSON = "#{WWW_GITLAB_COM_SITE}/groups.json".freeze
+ COPY_COMMANDS = [
+ 'pbcopy', # macOS
+ 'xclip -selection clipboard', # Linux
+ 'xsel --clipboard --input', # Linux
+ 'wl-copy' # Wayland
+ ].freeze
+ OPEN_COMMANDS = [
+ 'open', # macOS
+ 'xdg-open' # Linux
+ ].freeze
+
+ Options = Struct.new(
+ :name,
+ :group,
+ :milestone,
+ :amend,
+ :dry_run,
+ :force,
+ :introduced_by_url,
+ keyword_init: true
+ )
+
+ class << self
+ def parse(argv)
+ options = Options.new
+
+ parser = OptionParser.new do |opts|
+ opts.banner = "Usage: #{__FILE__} [options] <saas-feature>\n\n"
+
+ # Note: We do not provide a shorthand for this in order to match the `git
+ # commit` interface
+ opts.on('--amend', 'Amend the previous commit') do |value|
+ options.amend = value
+ end
+
+ opts.on('-f', '--force', 'Overwrite an existing entry') do |value|
+ options.force = value
+ end
+
+ opts.on('-m', '--introduced-by-url [string]', String, 'URL of merge request introducing the SaaS feature') do |value|
+ options.introduced_by_url = value
+ end
+
+ opts.on('-M', '--milestone [string]', String, 'Milestone in which the SaaS feature was introduced') do |value|
+ options.milestone = value
+ end
+
+ opts.on('-n', '--dry-run', "Don't actually write anything, just print") do |value|
+ options.dry_run = value
+ end
+
+ opts.on('-g', '--group [string]', String, 'The group introducing a SaaS feature, like: `group::project management`') do |value|
+ options.group = value if group_labels.include?(value)
+ end
+
+ opts.on('-h', '--help', 'Print help message') do
+ $stdout.puts opts
+ raise Done.new
+ end
+ end
+
+ parser.parse!(argv)
+
+ unless argv.one?
+ $stdout.puts parser.help
+ $stdout.puts
+ raise Abort, 'SaaS feature name is required'
+ end
+
+      # The name is taken from the first positional argument
+ options.name = argv.first.downcase.tr('-', '_')
+
+ options
+ end
+
+ def groups
+ @groups ||= fetch_json(WWW_GITLAB_COM_GROUPS_JSON)
+ end
+
+ def group_labels
+ @group_labels ||= groups.map { |_, group| group['label'] }.sort
+ end
+
+ def find_group_by_label(label)
+ groups.find { |_, group| group['label'] == label }[1]
+ end
+
+ def group_list
+ group_labels.map.with_index do |group_label, index|
+ "#{index + 1}. #{group_label}"
+ end
+ end
+
+ def fzf_available?
+ find_compatible_command(%w[fzf])
+ end
+
+ def prompt_readline(prompt:)
+ Readline.readline('?> ', false)&.strip
+ end
+
+ def prompt_fzf(list:, prompt:)
+ arr = list.join("\n")
+
+ selection = IO.popen(%W[fzf --tac --prompt #{prompt}], "r+") do |pipe|
+ pipe.puts(arr)
+ pipe.close_write
+ pipe.readlines
+ end.join.strip
+
+ selection[/(\d+)\./, 1]
+ end
+
+ def print_list(list)
+ return if list.empty?
+
+ $stdout.puts list.join("\n")
+ end
+
+ def print_prompt(prompt)
+ $stdout.puts
+ $stdout.puts ">> #{prompt}:"
+ $stdout.puts
+ end
+
+ def prompt_list(prompt:, list: nil)
+ if fzf_available?
+ prompt_fzf(list: list, prompt: prompt)
+ else
+ prompt_readline(prompt: prompt)
+ end
+ end
+
+ def fetch_json(json_url)
+ json = with_retries { HTTParty.get(json_url, format: :plain) }
+ JSON.parse(json)
+ end
+
+ def with_retries(attempts: 3)
+ yield
+ rescue Errno::ECONNRESET, OpenSSL::SSL::SSLError, Net::OpenTimeout
+ retry if (attempts -= 1).positive?
+ raise
+ end
+
+ def read_group
+ prompt = 'Specify the group label to which the SaaS feature belongs, from the following list'
+
+ unless fzf_available?
+ print_prompt(prompt)
+ print_list(group_list)
+ end
+
+ loop do
+ group = prompt_list(prompt: prompt, list: group_list)
+ group = group_labels[group.to_i - 1] unless group.to_i.zero?
+
+ if group_labels.include?(group)
+ $stdout.puts "You picked the group '#{group}'"
+ return group
+ else
+ $stderr.puts "The group label isn't in the above labels list"
+ end
+
+ end
+ end
+
+ def read_introduced_by_url
+ read_url('URL of the MR introducing the SaaS feature (enter to skip and let Danger provide a suggestion directly in the MR):')
+ end
+
+ def read_milestone
+ milestone = File.read('VERSION')
+ milestone.gsub(/^(\d+\.\d+).*$/, '\1').chomp
+ end
+
+ def read_url(prompt)
+ $stdout.puts
+ $stdout.puts ">> #{prompt}"
+
+ loop do
+ url = Readline.readline('?> ', false)&.strip
+ url = nil if url.empty?
+ return url if url.nil? || valid_url?(url)
+ end
+ end
+
+ def valid_url?(url)
+ unless url.start_with?('https://')
+ $stderr.puts 'URL needs to start with https://'
+ return false
+ end
+
+ response = HTTParty.head(url)
+
+ return true if response.success?
+
+ $stderr.puts "URL '#{url}' isn't valid!"
+ end
+
+ def open_url!(url)
+ _, open_url_status = Gitlab::Popen.popen([open_command, url])
+
+ open_url_status
+ end
+
+ def copy_to_clipboard!(text)
+ IO.popen(copy_to_clipboard_command.shellsplit, 'w') do |pipe|
+ pipe.print(text)
+ end
+ end
+
+ def copy_to_clipboard_command
+ find_compatible_command(COPY_COMMANDS)
+ end
+
+ def open_command
+ find_compatible_command(OPEN_COMMANDS)
+ end
+
+ def find_compatible_command(commands)
+ commands.find do |command|
+ Gitlab::Popen.popen(%W[which #{command.split(' ')[0]}])[1] == 0
+ end
+ end
+ end
+end
+
+class SaasFeatureCreator
+ include SaasFeatureHelpers
+
+ attr_reader :options
+
+ def initialize(options)
+ @options = options
+ end
+
+ def execute
+ assert_feature_branch!
+ assert_name!
+ assert_existing_saas_feature!
+
+ options.group ||= SaasFeatureOptionParser.read_group
+ options.introduced_by_url ||= SaasFeatureOptionParser.read_introduced_by_url
+ options.milestone ||= SaasFeatureOptionParser.read_milestone
+
+ $stdout.puts "\e[32mcreate\e[0m #{file_path}"
+ $stdout.puts contents
+
+ unless options.dry_run
+ write
+ amend_commit if options.amend
+ end
+
+ if editor
+ system(editor, file_path)
+ end
+ end
+
+ private
+
+ def contents
+ config_hash.to_yaml
+ end
+
+ def config_hash
+ {
+ 'name' => options.name,
+ 'introduced_by_url' => options.introduced_by_url,
+ 'milestone' => options.milestone,
+ 'group' => options.group
+ }
+ end
+
+ def write
+ FileUtils.mkdir_p(File.dirname(file_path))
+ File.write(file_path, contents)
+ end
+
+ def editor
+ ENV['EDITOR']
+ end
+
+ def amend_commit
+ fail_with 'git add failed' unless system(*%W[git add #{file_path}])
+
+ system('git commit --amend')
+ end
+
+ def assert_feature_branch!
+ return unless branch_name == 'master'
+
+ fail_with 'Create a branch first!'
+ end
+
+ def assert_existing_saas_feature!
+ existing_path = all_saas_feature_names[options.name]
+ return unless existing_path
+ return if options.force
+
+ fail_with "#{existing_path} already exists! Use `--force` to overwrite."
+ end
+
+ def assert_name!
+ return if options.name.match(/\A[a-z0-9_-]+\Z/)
+
+ fail_with 'Provide a name for the SaaS feature that is [a-z0-9_-]'
+ end
+
+ def file_path
+ saas_features_path.sub('*.yml', options.name + '.yml')
+ end
+
+ def all_saas_feature_names
+    # Map existing SaaS feature names to their definition file paths.
+ @all_saas_feature_names ||=
+ Dir.glob(saas_features_path).map do |path|
+ [File.basename(path, '.yml'), path]
+ end.to_h
+ end
+
+ def saas_features_path
+ File.join('ee', 'config', 'saas_features', '*.yml')
+ end
+
+ def branch_name
+ @branch_name ||= capture_stdout(%w[git symbolic-ref --short HEAD]).strip
+ end
+end
+
+if $0 == __FILE__
+ begin
+ options = SaasFeatureOptionParser.parse(ARGV)
+ SaasFeatureCreator.new(options).execute
+ rescue SaasFeatureHelpers::Abort => ex
+ $stderr.puts ex.message
+ exit 1
+ rescue SaasFeatureHelpers::Done
+ exit
+ end
+end
+
+# vim: ft=ruby
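For reference, the entry this script writes is plain YAML serialized from `config_hash`; below is a minimal sketch of that serialization with illustrative values only (the feature name, MR URL, milestone, and group are placeholders, not taken from a real definition).

```ruby
require 'yaml'

# Mirrors SaasFeatureCreator#config_hash above; every value here is a placeholder.
config = {
  'name'              => 'my_saas_feature',
  'introduced_by_url' => 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1',
  'milestone'         => '16.9',
  'group'             => 'group::container registry'
}

puts config.to_yaml
# ---
# name: my_saas_feature
# introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1
# milestone: '16.9'
# group: group::container registry
```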
diff --git a/config/feature_flags/development/gcp_technical_demo.yml b/config/feature_flags/development/gcp_technical_demo.yml
deleted file mode 100644
index 98e4529ed33..00000000000
--- a/config/feature_flags/development/gcp_technical_demo.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: gcp_technical_demo
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/139802
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/434685
-milestone: '16.7'
-type: development
-group: group::container registry
-default_enabled: false
diff --git a/config/initializers/rest-client-hostname_override.rb b/config/initializers/rest-client-hostname_override.rb
index c12c0d431fd..6a8c7285de5 100644
--- a/config/initializers/rest-client-hostname_override.rb
+++ b/config/initializers/rest-client-hostname_override.rb
@@ -20,7 +20,7 @@ module RestClient
raise ArgumentError, "URL is blocked: #{e.message}"
end
- # Gitlab::UrlBlocker returns a Addressable::URI which we need to coerce
+      # Gitlab::HTTP_V2::UrlBlocker returns an Addressable::URI which we need to coerce
# to URI so that rest-client can use it to determine if it's a
# URI::HTTPS or not. It uses it to set `net.use_ssl` to true or not:
#
diff --git a/config/routes/gcp.rb b/config/routes/gcp.rb
deleted file mode 100644
index b5e7c561782..00000000000
--- a/config/routes/gcp.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-# All routing related to gcp
-# that is already under /-/ scope only
-
-scope format: false do
- namespace :gcp do
- namespace :artifact_registry do
- resources :docker_images, only: :index
- resources :setup, only: :new
- end
- end
-end
diff --git a/config/routes/project.rb b/config/routes/project.rb
index 23733bbe821..c8d81535da8 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -408,7 +408,6 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
# its preferable to keep it below all other project routes
draw :repository
draw :wiki
- draw :gcp
namespace :import do
resource :jira, only: [:show], controller: :jira
diff --git a/db/migrate/20240106000000_migrate_data_from_workspaces_url_column.rb b/db/migrate/20240106000000_migrate_data_from_workspaces_url_column.rb
index e5f5ffc376e..369c262be4e 100644
--- a/db/migrate/20240106000000_migrate_data_from_workspaces_url_column.rb
+++ b/db/migrate/20240106000000_migrate_data_from_workspaces_url_column.rb
@@ -13,7 +13,7 @@ class MigrateDataFromWorkspacesUrlColumn < Gitlab::Database::Migration[2.2]
each_batch_range('workspaces', scope: ->(table) { table.all }, of: BATCH_SIZE) do |min, max|
execute(<<~SQL)
UPDATE workspaces
- SET url_prefix = CONCAT('https://#{DEFAULT_PORT}-', name),
+ SET url_prefix = CONCAT('#{DEFAULT_PORT}-', name),
dns_zone = remote_development_agent_configs.dns_zone,
url_query_string = CASE
WHEN POSITION('?' IN url) > 0
diff --git a/db/migrate/20240116212237_add_onboarding_status_to_user_details.rb b/db/migrate/20240116212237_add_onboarding_status_to_user_details.rb
new file mode 100644
index 00000000000..9bb2e85f8b9
--- /dev/null
+++ b/db/migrate/20240116212237_add_onboarding_status_to_user_details.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class AddOnboardingStatusToUserDetails < Gitlab::Database::Migration[2.2]
+ milestone '16.9'
+ enable_lock_retries!
+
+ def change
+ add_column :user_details, :onboarding_status, :jsonb, default: {}, null: false
+ end
+end
diff --git a/db/migrate/20240123000000_update_workspaces_url_prefix_column.rb b/db/migrate/20240123000000_update_workspaces_url_prefix_column.rb
new file mode 100644
index 00000000000..084a7e4b0b9
--- /dev/null
+++ b/db/migrate/20240123000000_update_workspaces_url_prefix_column.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+class UpdateWorkspacesUrlPrefixColumn < Gitlab::Database::Migration[2.2]
+ milestone '16.8'
+ disable_ddl_transaction!
+
+ restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+ def up
+ execute(<<~SQL)
+ UPDATE workspaces
+ SET url_prefix=REPLACE(url_prefix, 'https://', '')
+ WHERE url_prefix LIKE 'https://%'
+ SQL
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/post_migrate/20240122071840_ensure_backfill_for_ci_builds_integer_columns_is_finished.rb b/db/post_migrate/20240122071840_ensure_backfill_for_ci_builds_integer_columns_is_finished.rb
new file mode 100644
index 00000000000..874dcd0b640
--- /dev/null
+++ b/db/post_migrate/20240122071840_ensure_backfill_for_ci_builds_integer_columns_is_finished.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+class EnsureBackfillForCiBuildsIntegerColumnsIsFinished < Gitlab::Database::Migration[2.2]
+ include Gitlab::Database::MigrationHelpers::ConvertToBigint
+ milestone '16.9'
+
+ restrict_gitlab_migration gitlab_schema: :gitlab_ci
+ disable_ddl_transaction!
+
+ TABLE_NAME = :ci_builds
+ COLUMN_NAMES = %w[
+ auto_canceled_by_id
+ commit_id
+ erased_by_id
+ project_id
+ runner_id
+ trigger_request_id
+ upstream_pipeline_id
+ user_id
+ ]
+ BIGINT_COLUMN_NAMES = COLUMN_NAMES.map { |name| "#{name}_convert_to_bigint" }
+
+ def up
+ ensure_batched_background_migration_is_finished(
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: TABLE_NAME,
+ column_name: 'id',
+ job_arguments: [COLUMN_NAMES, BIGINT_COLUMN_NAMES]
+ )
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/post_migrate/20240123071840_prepare_async_indexes_for_p_ci_builds_auto_canceled_by_id.rb b/db/post_migrate/20240123071840_prepare_async_indexes_for_p_ci_builds_auto_canceled_by_id.rb
new file mode 100644
index 00000000000..d894d29e721
--- /dev/null
+++ b/db/post_migrate/20240123071840_prepare_async_indexes_for_p_ci_builds_auto_canceled_by_id.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class PrepareAsyncIndexesForPCiBuildsAutoCanceledById < Gitlab::Database::Migration[2.2]
+ include Gitlab::Database::PartitioningMigrationHelpers
+
+ milestone '16.9'
+ disable_ddl_transaction!
+
+ INDEX_NAME = "p_ci_builds_auto_canceled_by_id_bigint_idx"
+ TABLE_NAME = :p_ci_builds
+ COLUMN_NAME = :auto_canceled_by_id_convert_to_bigint
+ WHERE_CLAUSE = "auto_canceled_by_id_convert_to_bigint IS NOT NULL"
+
+ def up
+ Gitlab::Database::PostgresPartitionedTable.each_partition(TABLE_NAME) do |partition|
+ index_name = generated_index_name(partition.identifier, INDEX_NAME)
+ prepare_async_index partition.identifier, COLUMN_NAME, name: index_name, where: WHERE_CLAUSE
+ end
+ end
+
+ def down
+ Gitlab::Database::PostgresPartitionedTable.each_partition(TABLE_NAME) do |partition|
+ index_name = generated_index_name(partition.identifier, INDEX_NAME)
+ unprepare_async_index partition.identifier, COLUMN_NAME, name: index_name, where: WHERE_CLAUSE
+ end
+ end
+end
diff --git a/db/post_migrate/20240124081840_prepare_async_indexes_for_p_ci_builds_commit_id_part1.rb b/db/post_migrate/20240124081840_prepare_async_indexes_for_p_ci_builds_commit_id_part1.rb
new file mode 100644
index 00000000000..a6d2ef157a2
--- /dev/null
+++ b/db/post_migrate/20240124081840_prepare_async_indexes_for_p_ci_builds_commit_id_part1.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+class PrepareAsyncIndexesForPCiBuildsCommitIdPart1 < Gitlab::Database::Migration[2.2]
+ include Gitlab::Database::PartitioningMigrationHelpers
+
+ milestone '16.9'
+ disable_ddl_transaction!
+
+ INDEXES = [
+ [[:commit_id_convert_to_bigint, :status, :type], "p_ci_builds_commit_id_bigint_status_type_idx"],
+ [[:commit_id_convert_to_bigint, :type, :name, :ref], "p_ci_builds_commit_id_bigint_type_name_ref_idx"]
+ ]
+ TABLE_NAME = :p_ci_builds
+
+ def up
+ Gitlab::Database::PostgresPartitionedTable.each_partition(TABLE_NAME) do |partition|
+ INDEXES.each do |columns, routing_table_index_name|
+ index_name = generated_index_name(partition.identifier, routing_table_index_name)
+ prepare_async_index partition.identifier, columns, name: index_name
+ end
+ end
+ end
+
+ def down
+ Gitlab::Database::PostgresPartitionedTable.each_partition(TABLE_NAME) do |partition|
+ INDEXES.each do |columns, routing_table_index_name|
+ index_name = generated_index_name(partition.identifier, routing_table_index_name)
+ unprepare_async_index partition.identifier, columns, name: index_name
+ end
+ end
+ end
+end
diff --git a/db/schema_migrations/20240116212237 b/db/schema_migrations/20240116212237
new file mode 100644
index 00000000000..24059535076
--- /dev/null
+++ b/db/schema_migrations/20240116212237
@@ -0,0 +1 @@
+d58f59f84c1d9c08f8ba3466c844b01a1ab8ea429de9b0fb43dcd53e7611e2d6 \ No newline at end of file
diff --git a/db/schema_migrations/20240122071840 b/db/schema_migrations/20240122071840
new file mode 100644
index 00000000000..b3363bc4b78
--- /dev/null
+++ b/db/schema_migrations/20240122071840
@@ -0,0 +1 @@
+a404dcf1d57886af351b696f8cbc451e7e81d69618a6bd3c30e1676a99172ca6 \ No newline at end of file
diff --git a/db/schema_migrations/20240123000000 b/db/schema_migrations/20240123000000
new file mode 100644
index 00000000000..ab013634ab0
--- /dev/null
+++ b/db/schema_migrations/20240123000000
@@ -0,0 +1 @@
+554281098ec89db0f0bc0402e34c1fe413003afa63b0001da8d5ad772dd9725c \ No newline at end of file
diff --git a/db/schema_migrations/20240123071840 b/db/schema_migrations/20240123071840
new file mode 100644
index 00000000000..e68e60a9ce8
--- /dev/null
+++ b/db/schema_migrations/20240123071840
@@ -0,0 +1 @@
+3feac709d7c1e6d911068d096e876fcbdace5c3e47036a3c8d9bb347a4d6f4ba \ No newline at end of file
diff --git a/db/schema_migrations/20240124081840 b/db/schema_migrations/20240124081840
new file mode 100644
index 00000000000..608c0f2ed71
--- /dev/null
+++ b/db/schema_migrations/20240124081840
@@ -0,0 +1 @@
+93164c045f7581f7dbcd606f2217514344bf2fdb72a6ece05241214e7a7f3c9d \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index c7651cd9dee..9879415c0e1 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -24956,6 +24956,7 @@ CREATE TABLE user_details (
email_reset_offered_at timestamp with time zone,
mastodon text DEFAULT ''::text NOT NULL,
project_authorizations_recalculated_at timestamp with time zone DEFAULT '2010-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
+ onboarding_status jsonb DEFAULT '{}'::jsonb NOT NULL,
CONSTRAINT check_245664af82 CHECK ((char_length(webauthn_xid) <= 100)),
CONSTRAINT check_444573ee52 CHECK ((char_length(skype) <= 500)),
CONSTRAINT check_466a25be35 CHECK ((char_length(twitter) <= 500)),
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index b21e82c9b54..92363f5dad8 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -20240,6 +20240,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="groupprojectshascodecoverage"></a>`hasCodeCoverage` | [`Boolean`](#boolean) | Returns only the projects which have code coverage. |
| <a id="groupprojectshasvulnerabilities"></a>`hasVulnerabilities` | [`Boolean`](#boolean) | Returns only the projects which have vulnerabilities. |
| <a id="groupprojectsids"></a>`ids` | [`[ID!]`](#id) | Filter projects by IDs. |
+| <a id="groupprojectsincludearchived"></a>`includeArchived` | [`Boolean`](#boolean) | Include also archived projects. |
| <a id="groupprojectsincludesubgroups"></a>`includeSubgroups` | [`Boolean`](#boolean) | Include also subgroup projects. |
| <a id="groupprojectsnotaimedfordeletion"></a>`notAimedForDeletion` | [`Boolean`](#boolean) | Include projects that are not aimed for deletion. |
| <a id="groupprojectssbomcomponentid"></a>`sbomComponentId` | [`ID`](#id) | Return only the projects related to the specified SBOM component. |
@@ -23297,6 +23298,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="namespaceprojectshascodecoverage"></a>`hasCodeCoverage` | [`Boolean`](#boolean) | Returns only the projects which have code coverage. |
| <a id="namespaceprojectshasvulnerabilities"></a>`hasVulnerabilities` | [`Boolean`](#boolean) | Returns only the projects which have vulnerabilities. |
| <a id="namespaceprojectsids"></a>`ids` | [`[ID!]`](#id) | Filter projects by IDs. |
+| <a id="namespaceprojectsincludearchived"></a>`includeArchived` | [`Boolean`](#boolean) | Include also archived projects. |
| <a id="namespaceprojectsincludesubgroups"></a>`includeSubgroups` | [`Boolean`](#boolean) | Include also subgroup projects. |
| <a id="namespaceprojectsnotaimedfordeletion"></a>`notAimedForDeletion` | [`Boolean`](#boolean) | Include projects that are not aimed for deletion. |
| <a id="namespaceprojectssbomcomponentid"></a>`sbomComponentId` | [`ID`](#id) | Return only the projects related to the specified SBOM component. |
diff --git a/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-as-a-service.png b/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-as-a-service.png
new file mode 100644
index 00000000000..c30fa2970eb
--- /dev/null
+++ b/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-as-a-service.png
Binary files differ
diff --git a/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-forecasting.png b/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-forecasting.png
new file mode 100644
index 00000000000..25d959560e2
--- /dev/null
+++ b/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-forecasting.png
Binary files differ
diff --git a/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-reporting.png b/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-reporting.png
new file mode 100644
index 00000000000..9d9a207571c
--- /dev/null
+++ b/doc/architecture/blueprints/capacity_planning/images/dedicated-capacity-planning-reporting.png
Binary files differ
diff --git a/doc/architecture/blueprints/capacity_planning/images/tamland-as-a-service.png b/doc/architecture/blueprints/capacity_planning/images/tamland-as-a-service.png
deleted file mode 100644
index fa8f1223917..00000000000
--- a/doc/architecture/blueprints/capacity_planning/images/tamland-as-a-service.png
+++ /dev/null
Binary files differ
diff --git a/doc/architecture/blueprints/capacity_planning/images/tamland-as-part-of-stack.png b/doc/architecture/blueprints/capacity_planning/images/tamland-as-part-of-stack.png
deleted file mode 100644
index 0b47d91e133..00000000000
--- a/doc/architecture/blueprints/capacity_planning/images/tamland-as-part-of-stack.png
+++ /dev/null
Binary files differ
diff --git a/doc/architecture/blueprints/capacity_planning/index.md b/doc/architecture/blueprints/capacity_planning/index.md
index 31740d50368..0d1ad84c914 100644
--- a/doc/architecture/blueprints/capacity_planning/index.md
+++ b/doc/architecture/blueprints/capacity_planning/index.md
@@ -3,7 +3,7 @@ status: proposed
creation-date: "2023-09-11"
authors: [ "@abrandl" ]
coach: "@andrewn"
-approvers: [ "@swiskow", "@rnienaber", "@o-lluch" ]
+approvers: [ "@swiskow", "@lmcandrew", "@o-lluch" ]
---
<!-- Blueprints often contain forward-looking statements -->
@@ -19,7 +19,7 @@ We make use of [Tamland](https://gitlab.com/gitlab-com/gl-infra/tamland), a tool
We propose to include Tamland as a part of the GitLab Dedicated stack and execute forecasting from within the tenant environments.
Tamland predicts SLO violations and their respective dates, which need to be reviewed and acted upon.
-In terms of team organisation, the Dedicated team is proposed to own the tenant-side setup for Tamland and to own the predicted SLO violations, with the help and guidance of the Scalability::Projections team, which drives further development, documentation and overall guidance for capacity planning, including for Dedicated.
+In terms of team organisation, the Dedicated team is proposed to own the tenant-side setup for Tamland and to own the predicted SLO violations, with the help and guidance of the Scalability::Observability team, which drives further development, documentation and overall guidance for capacity planning, including for Dedicated.
With this setup, we aim to turn Tamland into a more generic tool, which can be used in various environments including but not limited to Dedicated tenants.
Long-term, we think of including Tamland in self-managed installations and think of Tamland as a candidate for open source release.
@@ -32,8 +32,8 @@ Long-term, we think of including Tamland in self-managed installations and think
It implements [capacity planning](https://about.gitlab.com/handbook/engineering/infrastructure/capacity-planning/) for GitLab.com, which is a [controlled activity covered by SOC 2](https://gitlab.com/gitlab-com/gl-security/security-assurance/security-compliance-commercial-and-dedicated/observation-management/-/issues/604).
As of today, it is used exclusively for GitLab.com to predict upcoming SLO violations across hundreds of monitored infrastructure components.
-Tamland produces a [report](https://gitlab-com.gitlab.io/gl-infra/tamland/intro.html) (hosted on GitLab Pages) containing forecast plots, information around predicted violations and other information around the components monitored.
-Any predicted SLO violation result in a capacity warning issue being created in the [issue tracker for capacity planning](https://gitlab.com/gitlab-com/gl-infra/capacity-planning/-/boards/2816983) on GitLab.com.
+Tamland produces a [report](https://gitlab-com.gitlab.io/gl-infra/tamland/intro.html) (internal link, hosted on GitLab Pages) containing forecast plots, details of predicted violations, and other information about the monitored components.
+Any predicted SLO violation results in a capacity warning issue being created in the [issue tracker for capacity planning](https://gitlab.com/gitlab-com/gl-infra/capacity-planning/-/boards/2816983) on GitLab.com.
At present, Tamland is quite tailor made and specific for GitLab.com:
@@ -44,13 +44,13 @@ At present, Tamland is quite tailor made and specific for GitLab.com:
For illustration, we can see a saturation forecast plot below for the `disk_space` resource for a PostgreSQL service called `patroni-ci`.
Within the 90 days forecast horizon, we predict a violation of the `soft` SLO (set at 85% saturation) and this resulted in the creation of a [capacity planning issue](https://gitlab.com/gitlab-com/gl-infra/capacity-planning/-/issues/1219) for further review and potential actions.
-At present, the Scalability::Projections group reviews those issues and engages with the respective DRI for the service in question to remedy a saturation concern.
+At present, the Scalability::Observability group reviews those issues and engages with the respective DRI for the service in question to remedy a saturation concern.
<img src="images/image-20230911144743188.png" alt="image-20230911144743188" style="zoom:67%;" />
For GitLab.com capacity planning, we operate Tamland from a scheduled CI pipeline with access to the central Thanos, which provides saturation and utilization metrics for GitLab.com.
The CI pipeline produces the desired report, exposes it on GitLab Pages and also creates capacity planning issues.
-Scalability::Projections runs a capacity planning triage rotation which entails reviewing and prioritizing any open issues and their respective saturation concerns.
+Scalability::Observability runs a capacity planning triage rotation which entails reviewing and prioritizing any open issues and their respective saturation concerns.
### Problem Statement
@@ -62,7 +62,7 @@ These metrics are standardized in the [metrics catalog](https://gitlab.com/gitla
In order to provide capacity planning and forecasts for saturation metrics for each tenant, we'd like to get Tamland set up for GitLab Dedicated.
-While Tamland is developed by the Scalability::Projections and this team also owns the capacity planning process for GitLab.com, they don't have access to any of the Dedicated infrastructure as we have strong isolation implemented for Dedicated environments.
+While Tamland is developed by the Scalability::Observability group, which also owns the capacity planning process for GitLab.com, this team doesn't have access to any of the Dedicated infrastructure because we have strong isolation implemented for Dedicated environments.
As such, the technical design choices are going to affect how those teams interact and vice versa. We include this consideration into this documentation as we think the organisational aspect is a crucial part of it.
### Key questions
@@ -79,21 +79,27 @@ As such, the technical design choices are going to affect how those teams intera
1. Tamland's output is forecasting data only (plots, SLO violation dates, etc. - no report, no issue management - see below)
1. Tamland stores the output data in a S3 bucket for further inspection
-#### Non-goals
+### Goals: Iteration 1
-##### Reporting
+In Iteration 0, we've integrated Tamland into GitLab Dedicated environments and started to generate forecasting data for each tenant regularly.
-As of today, it's not quite clear yet how we'd like to consume forecasting data across tenants.
-In contrast to GitLab.com, we generate forecasts across a potentially large number of tenants.
-At this point, we suspect that we're more interested in an aggregate report across tenants rather than individual, very detailed saturation forecasts.
-As such, this is subject to refinement in a further iteration once we have the underlying data available and gathered practical insight in how we consume this information.
+In order to consume this data and make it actionable, this iteration is about providing reporting functionality for GitLab Dedicated:
+We generate a static site, deployed to GitLab Pages, that contains individual Tamland reports for all tenants.
-##### Issue management
+We use the default Tamland report to generate the per-tenant report.
+In a future iteration, we may want to provide another type of report specifically tailored for GitLab Dedicated needs.
-While each predicted SLO violation results in the creation of a GitLab issue, this may not be the right mode of raising awareness for Dedicated.
-Similar to the reporting side, this is subject to further discussion once we have data to look at.
+### Goals: Iteration 2
-##### Customizing forecasting models
+In order to raise awareness of a predicted SLO violation, Tamland has functionality to manage a GitLab issue tracker and create an issue for a capacity warning.
+We use this, for example, to manage capacity warnings for GitLab.com using the [`gitlab-com` capacity planning tracker](https://gitlab.com/gitlab-com/gl-infra/capacity-planning-trackers/gitlab-com/-/issues).
+
+For GitLab Dedicated tenants, we suggest using the `gitlab-dedicated` capacity planning tracker in a similar fashion:
+For each predicted SLO violation with reasonable confidence, we create a capacity warning issue on this tracker and use a scoped label to distinguish warnings for different tenants (see below for more details).
+
+### Non-goals
+
+#### Customizing forecasting models
Forecasting models can and should be tuned and informed with domain knowledge to produce accurate forecasts.
This information is a part of the Tamland manifest.
@@ -105,9 +111,11 @@ Dedicated environments are fully isolated and run their own Prometheus instance
Tamland will run from each individual Dedicated tenant environment, consume metrics from Prometheus and store the resulting data in S3.
From there, we consume forecast data and act on it.
-![tamland-as-part-of-stack](images/tamland-as-part-of-stack.png)
+![dedicated-capacity-planning-forecasting](images/dedicated-capacity-planning-forecasting.png)
-### Storage for output and cache
+### Generating forecasts
+
+#### Storage for output and cache
Any data Tamland relies on is stored in a S3 bucket.
We use one bucket per tenant to clearly separate data between tenants.
@@ -117,7 +125,7 @@ We use one bucket per tenant to clearly separate data between tenants.
There is no need for a persistent state across Tamland runs aside from the S3 bucket.
-### Benefits of executing inside tenant environments
+#### Benefits of executing inside tenant environments
Each Tamland run for a single environment (tenant) can take a few hours to execute.
With the number of tenants expected to increase significantly, we need to consider scaling the execution environment for Tamland.
@@ -125,11 +133,11 @@ With the number of tenants expected to increase significantly, we need to consid
In this design, Tamland becomes a part of the Dedicated stack and a component of the individual tenant environment.
As such, scaling the execution environment for Tamland is solved by design, because tenant forecasts execute inherently parallel in their respective environments.
-### Distribution model: Docker
+#### Distribution model: Docker + Helm chart
-Tamland is released as a Docker image, see [Tamland's README](https://gitlab.com/gitlab-com/gl-infra/tamland/-/blob/main/README.md) for further details.
+Tamland is released as a Docker image along with a Helm chart, see [Tamland's README](https://gitlab.com/gitlab-com/gl-infra/tamland/-/blob/main/README.md) for further details.
-### Tamland manifest
+#### Tamland manifest
The manifest contains information about which saturation metrics to forecast on (see this [manifest example](https://gitlab.com/gitlab-com/gl-infra/tamland/-/blob/62854e1afbc2ed3160a55a738ea587e0cf7f994f/saturation.json) for GitLab.com).
This will be generated from the metrics catalog and will be the same for all tenants for starters.
@@ -139,10 +147,33 @@ On a regular basis, a scheduled pipeline grabs the metrics catalog, generates th
On the Dedicated tenants, we download the latest version of the committed JSON manifest from `tamland-dedicated` and use this as input to execute Tamland.
-### Acting on forecast insights
+### Capacity planning reports and capacity warnings
+
+Based on Tamland's forecasting data, we generate reports to display forecasting information and enable teams to act on predicted SLO violations by creating capacity warning issues in a GitLab issue tracker.
+
+![dedicated-capacity-planning-reporting](images/dedicated-capacity-planning-reporting.png)
+
+The Scalability::Observability team maintains an [internal GitLab project called `gitlab-dedicated`](https://gitlab.com/gitlab-com/gl-infra/capacity-planning-trackers/gitlab-dedicated).
+This project contains a scheduled CI pipeline to regularly produce a [static site deployed to GitLab Pages (only available internally)](https://gitlab-com.gitlab.io/gl-infra/capacity-planning-trackers/gitlab-dedicated/).
+It also contains functionality to create and manage capacity warnings in the issue tracker of this project.
+
+CI configuration for this project contains a list of tenants along with their respective metadata (e.g. AWS account, codename, etc.).
+
+For each configured tenant, the CI pipeline uses a central IAM role in the amp account.
+With this role, a tenant-specific IAM role can be assumed, which has read-only access to the respective S3 bucket containing the tenant's forecasting data.
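To make this concrete, here is a minimal Ruby sketch of the role-assumption flow using the AWS SDK; the region, role ARN, session name, and bucket name are illustrative placeholders, not the real Dedicated configuration.

```ruby
require 'aws-sdk-s3' # also pulls in aws-sdk-core, which provides the STS client

# Assume the tenant-specific, read-only role from the central pipeline role.
# All identifiers below are illustrative placeholders.
tenant_credentials = Aws::AssumeRoleCredentials.new(
  client: Aws::STS::Client.new(region: 'us-east-1'),
  role_arn: 'arn:aws:iam::111111111111:role/tamland-forecast-read-only',
  role_session_name: 'dedicated-capacity-planning-report'
)

s3 = Aws::S3::Client.new(region: 'us-east-1', credentials: tenant_credentials)

# List the tenant's forecast artifacts so the report job can download them.
s3.list_objects_v2(bucket: 'tamland-forecasts-tenant-c1').contents.each do |object|
  puts object.key
end
```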
+
+The CI pipeline produces a standard Tamland report for each tenant and integrates all individual reports into a single static site.
+This site provides unified access to capacity forecasting insights across tenant environments.
+
+Along with the report, the CI pipeline also reacts to predicted SLO violations and creates a capacity warning issue in the project's issue tracker.
+As the tracker is being used for *all* GitLab Dedicated tenants, we employ a `~tenant:CN` label to distinguish tenant environments (e.g. we use `~tenant:C1` for the tenant with codename C1).
+These issues contain further information about the tenant and component affected, along with forecasts and status information.
+The intention here is to create visibility into predicted SLO violations and provide a way for the Dedicated team to engage with capacity warnings directly (e.g. for discussion, work scheduling, etc.).
+
+Overall, the Dedicated teams and operators use the Tamland report and issue tracker to act on capacity warnings.
-When Tamland forecast data is available for a tenant, the Dedicated teams consume this data and act on it accordingly.
-The Scalability::Observability group is going to support and guide this process to get started and help interpret data, along with implementing Tamland features required to streamline this process for Dedicated in further iterations.
+In order to get started, we suggest that the Dedicated group takes a regular pass across the capacity warnings and triages them.
+For additional visibility, we may want to consider sending Slack notifications for newly created capacity warnings.
## Alternative Solution
@@ -150,7 +181,7 @@ The Scalability::Observability group is going to support and guide this process
An alternative design, we don't consider an option at this point, is to setup Tamland as a Service and run it fully **outside** of tenant environments.
-![tamland-as-a-service](images/tamland-as-a-service.png)
+![dedicated-capacity-planning-as-a-service](images/dedicated-capacity-planning-as-a-service.png)
In this design, a central Prometheus/Thanos instance is needed to provide the metrics data for Tamland.
Dedicated tenants use remote-write to push their Prometheus data to the central Thanos instance.
diff --git a/doc/ci/runners/saas/macos_saas_runner.md b/doc/ci/runners/saas/macos_saas_runner.md
index dcf81158e82..55deced783a 100644
--- a/doc/ci/runners/saas/macos_saas_runner.md
+++ b/doc/ci/runners/saas/macos_saas_runner.md
@@ -38,8 +38,8 @@ in your `.gitlab-ci.yml` file. Each image runs a specific version of macOS and X
| VM image | Status | |
|----------------------------|--------|--------------|
| `macos-12-xcode-14` | `Deprecated` | (Removal in GitLab 16.10) |
-| `macos-13-xcode-14` | `GA` | [Preinstalled Software](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/job-images/-/blob/main/toolchain/macos-13.yml) |
-| `macos-14-xcode-15` | `GA` | [Preinstalled Software](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/job-images/-/blob/main/toolchain/macos-14.yml) |
+| `macos-13-xcode-14` | `GA` | [Preinstalled Software](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/job-images/-/blob/36d443841732f2d4f7e3de1bce63f530edef1676/toolchain/macos-13.yml) |
+| `macos-14-xcode-15` | `GA` | [Preinstalled Software](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/job-images/-/blob/36d443841732f2d4f7e3de1bce63f530edef1676/toolchain/macos-14.yml) |
If no image is specified, the macOS runner uses `macos-13-xcode-14`.
diff --git a/doc/development/ee_features.md b/doc/development/ee_features.md
index 78177612aa9..fd00b8b86cb 100644
--- a/doc/development/ee_features.md
+++ b/doc/development/ee_features.md
@@ -63,6 +63,29 @@ Each SaaS feature is defined in a separate YAML file consisting of a number of f
| `milestone` | no | Milestone in which the SaaS feature was created. |
| `group` | no | The [group](https://about.gitlab.com/handbook/product/categories/#devops-stages) that owns the feature flag. |
+#### Create a new SaaS feature file definition
+
+The GitLab codebase provides [`bin/saas-feature.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/bin/saas-feature.rb),
+a dedicated tool to create new SaaS feature definitions.
+The tool asks various questions about the new SaaS feature, then creates
+a YAML definition in `ee/config/saas_features`.
+
+Only SaaS features that have a YAML definition file can be used when running the development or testing environments.
+
+```shell
+❯ bin/saas-feature.rb my_saas_feature
+You picked the group 'group::acquisition'
+
+>> URL of the MR introducing the SaaS feature (enter to skip and let Danger provide a suggestion directly in the MR):
+?> https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38602
+create ee/config/saas_features/my_saas_feature.yml
+---
+name: my_saas_feature
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38602
+milestone: '16.8'
+group: group::acquisition
+```
+
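As a usage sketch only (not an excerpt from the codebase): once the definition exists, code can gate behaviour on it through `Gitlab::Saas.feature_available?`, the method discussed in the next section. The helper name below is hypothetical; only the feature name must match the YAML definition created above.

```ruby
# Hypothetical helper; :my_saas_feature corresponds to
# ee/config/saas_features/my_saas_feature.yml generated by the tool.
def my_saas_feature_available?
  ::Gitlab::Saas.feature_available?(:my_saas_feature)
end
```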
### Opting out of a SaaS-only feature on another SaaS instance (JiHu)
Prepend the `ee/lib/ee/gitlab/saas.rb` module and override the `Gitlab::Saas.feature_available?` method.
diff --git a/doc/development/permissions/custom_roles.md b/doc/development/permissions/custom_roles.md
index 7a0d325d09a..457fe5a5d8b 100644
--- a/doc/development/permissions/custom_roles.md
+++ b/doc/development/permissions/custom_roles.md
@@ -168,74 +168,87 @@ For example, you see in `GroupPolicy` that there is an ability called
`read_project_security_dashboard`. You'd like to make both customizable. Rather
than adding a row to the `member_roles` table for each ability, consider
renaming them to `read_security_dashboard` and adding `read_security_dashboard`
-to the `member_roles` table. This is more expected because it means that
-enabling `read_security_dashboard` on the parent group will enable the custom role.
-For example, `GroupPolicy` has an ability called `read_group_security_dashboard` and `ProjectPolicy` has an ability
-called `read_project_security_dashboard`. If you would like to make both customizable, rather than adding a row to the
-`member_roles` table for each ability, consider renaming them to `read_security_dashboard` and adding
-`read_security_dashboard` to the `member_roles` table. This convention means that enabling `read_security_dashboard` on
+to the `member_roles` table. Enabling `read_security_dashboard` on
the parent group will allow the custom role to access the group security dashboard and the project security dashboard
for each project in that group. Enabling the same permission on a specific project will allow access to that project's
security dashboard.
### Implement a new ability
-To add a new ability to a custom role:
+#### Step 1: Generate a configuration file
-- Generate YAML file by running `./ee/bin/custom-ability` generator
-- Add a new column to `member_roles` table, either manually or by running `custom_roles:code` generator, eg. by running `rails generate gitlab:custom_roles:code --ability new_ability_name`. The ability parameter is case sensitive and has to exactly match the permission name from the YAML file.
-- Add the ability to the respective Policy for example in [this change in merge request 114734](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114734/diffs#diff-content-edcbe28bdecbd848d4d9efdc5b5e9bddd2a7299e).
-- Update the specs. Don't forget to add a spec to `ee/spec/requests/custom_roles` - the spec template file was pre-generated if you used the code generator
-- Compile the documentation by running `bundle exec rake gitlab:custom_roles:compile_docs`
-- Update the GraphQL documentation by running `bundle exec rake gitlab:graphql:compile_docs`
-
-Examples of merge requests adding new abilities to custom roles:
+- Run `./ee/bin/custom-ability <ABILITY_NAME>` to generate a configuration file for the new ability.
+- This will generate a YAML file in `ee/config/custom_abilities` that follows this schema:
-- [Read code](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/106256)
-- [Read vulnerability](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114734)
-- [Admin vulnerability](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121534)
+| Field | Required | Description |
+| ----- | -------- |--------------|
+| `name` | yes | Unique, lowercase and underscored name describing the custom ability. Must match the filename. |
+| `description` | yes | Human-readable description of the custom ability. |
+| `feature_category` | yes | Name of the feature category. For example, `vulnerability_management`. |
+| `introduced_by_issue` | yes | Issue URL that proposed the addition of this custom ability. |
+| `introduced_by_mr` | yes | MR URL that added this custom ability. |
+| `milestone` | yes | Milestone in which this custom ability was added. |
+| `group_ability` | yes | Boolean value to indicate whether this ability is checked on group level. |
+| `project_ability` | yes | Boolean value to indicate whether this ability is checked on project level. |
+| `requirements` | no | The list of custom permissions this ability is dependent on. For instance, `admin_vulnerability` is dependent on `read_vulnerability`. If none, then enter `[]`. |
-The above merge requests don't use YAML files and code generators. Some of the changes are not needed anymore. We will update the documentation once we have a permission implemented using the generators.
+#### Step 2: Create a migration file
-If you have any concerns, put the new ability behind a feature flag.
+- Run `bundle exec rails generate gitlab:custom_roles:code --ability <ABILITY_NAME>`, which generates a migration file that adds the ability as a boolean column to the `member_roles` table.
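For orientation, the generated migration is expected to add a boolean column to `member_roles`. A hedged sketch follows; the class name, ability, and milestone are chosen purely for illustration, and the generator writes the real file.

```ruby
# Sketch only: names, ability, and milestone below are illustrative.
class AddReadDependencyToMemberRoles < Gitlab::Database::Migration[2.2]
  milestone '16.9'
  enable_lock_retries!

  def change
    # Each custom ability is stored as a boolean flag on member_roles.
    add_column :member_roles, :read_dependency, :boolean, default: false, null: false
  end
end
```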
-#### Documenting handling the feature flag
+#### Step 3: Update policies
-- When you introduce a new custom ability under a feature flag, add the `feature_flag` attribute to the appropriate ability YAML file.
-- When you enable the ability by default, add the `feature_flag_enabled_milestone` and `feature_flag_enabled_mr` attributes to the appropriate ability YAML file and regenerate the documentation.
-- You do not have to include these attributes in the YAML file if the feature flag is enabled by default in the same release as the ability is introduced.
+- If the ability is checked on a group level, add rule(s) to GroupPolicy to enable the ability.
+- For example, if the ability we would like to add is `read_dependency`, then an update to `ee/app/policies/ee/group_policy.rb` would look as follows:
-#### Testing
+```ruby
+desc "Custom role on group that enables read dependency"
+condition(:role_enables_read_dependency) do
+ ::Auth::MemberRoleAbilityLoader.new(
+ user: @user,
+ resource: @subject,
+ ability: :read_dependency
+ ).has_ability?
+end
+
+rule { custom_roles_allowed & role_enables_read_dependency }.policy do
+ enable :read_dependency
+end
+```
-Unit tests are preferred to test out changes to any policies affected by the
-addition of new custom permissions. Custom Roles is an Ultimate tier feature so
-these tests can be found in the `ee/spec/policies` directory. The [spec file](https://gitlab.com/gitlab-org/gitlab/-/blob/13baa4e8c92a56260591a5bf0a58d3339890ee10/ee/spec/policies/project_policy_spec.rb#L2726-2740) for
-the `ProjectPolicy` contains shared examples that can be used to test out the
-following conditions:
+- Similarly, if the ability is checked on a project level, add rule(s) to ProjectPolicy to enable the ability.
+- For example, if the ability we would like to add is `read_dependency`, then an update to `ee/app/policies/ee/project_policy.rb` would look as follows:
-- when the `custom_roles` licensed feature is not enabled
-- when the `custom_roles` licensed feature is enabled
- - when a user is a member of a custom role via an inherited group member
- - when a user is a member of a custom role via a direct group member
- - when a user is a member of a custom role via a direct project membership
+```ruby
+desc "Custom role on project that enables read dependency"
+condition(:role_enables_read_dependency) do
+ ::Auth::MemberRoleAbilityLoader.new(
+ user: @user,
+ resource: @subject,
+ ability: :read_dependency
+ ).has_ability?
+end
+
+rule { custom_roles_allowed & role_enables_read_dependency }.policy do
+ enable :read_dependency
+end
+```
-Below is an example for testing out `ProjectPolicy` related changes.
+- Not all abilities need to be enabled on both levels. For instance, `admin_terraform_state` allows users to manage a project's Terraform state; it only needs to be enabled on the project level and not the group level, and thus only needs to be configured in `ee/app/policies/ee/project_policy.rb`.
-```ruby
- context 'for a role with `custom_permission` enabled' do
- let(:member_role_abilities) { { custom_permission: true } }
- let(:allowed_abilities) { [:custom_permission] }
+#### Step 4: Verify
- it_behaves_like 'custom roles abilities'
- end
-```
+- Ensure SaaS mode is enabled with `GITLAB_SIMULATE_SAAS=1`.
+- Navigate to any Group that you are an owner of, then go to `Settings -> Roles and Permissions`.
+- Click on `Add new role` and create a custom role with the permission you have just created.
+- Navigate to the Group's `Manage -> Members` page and assign a member to this newly created custom role.
+- Next, log in as that member and ensure that you are able to access the page that the custom ability is intended for.
-Request specs are preferred to test out any endpoint that allow access via a custom role permission.
-This includes controllers, REST API, and GraphQL. Examples of request specs can be found in `ee/spec/requests/custom_roles/`. In this directory you will find a sub-directory named after each permission that can be enabled via a custom role.
-The `custom_roles` licensed feature must be enabled to test this functionality.
+#### Step 5: Add specs
-Below is an example of the typical setup that is required to test out a
-Rails Controller endpoint.
+- Add the ability as a trait in the `MemberRoles` factory, `ee/spec/factories/member_roles.rb`.
+- Add tests to `ee/spec/requests/custom_roles/<ABILITY_NAME>/request_spec.rb` to ensure that once the user has been assigned the custom ability, they can successfully access the controllers, REST API endpoints and GraphQL API endpoints.
+- Below is an example of the typical setup that is required to test a Rails Controller endpoint.
```ruby
let_it_be(:user) { create(:user) }
@@ -245,6 +258,7 @@ Rails Controller endpoint.
before do
stub_licensed_features(custom_roles: true)
+
sign_in(user)
end
@@ -260,8 +274,7 @@ Rails Controller endpoint.
end
```
-Below is an example of the typical setup that is required to test out a GraphQL
-mutation.
+- Below is an example of the typical setup that is required to test a GraphQL mutation.
```ruby
let_it_be(:user) { create(:user) }
@@ -271,57 +284,36 @@ mutation.
before do
stub_licensed_features(custom_roles: true)
+
+ sign_in(user)
end
describe MyMutation do
include GraphqlHelpers
describe '#show' do
- it 'allows access' do
- post_graphql_mutation(graphql_mutation(:my_mutation, {
- example: "Example"
- }), current_user: user)
-
- expect(response).to have_gitlab_http_status(:success)
- mutation_response = graphql_mutation_response(:my_mutation)
- expect(mutation_response).to be_present
- expect(mutation_response["errors"]).to be_empty
- end
+ let(:mutation) { graphql_mutation(:my_mutation) }
+
+ it_behaves_like 'a working graphql query'
end
end
```
-[`GITLAB_DEBUG_POLICIES=true`](#finding-existing-abilities-checks) can be used
-to troubleshoot runtime policy decisions.
-
-## Custom abilities definition
-
-All new custom abilities must have a type definition stored in `ee/config/custom_abilities` that contains a single source of truth for every ability that is part of custom roles feature.
+- Add tests to `ProjectPolicy` and/or `GroupPolicy`. Below is an example for testing `ProjectPolicy` related changes.
-### Add a new custom ability definition
-
-To add a new custom ability:
+```ruby
+ context 'for a member role with read_dependency true' do
+ let(:member_role_abilities) { { read_dependency: true } }
+ let(:allowed_abilities) { [:read_dependency] }
-1. Create the YAML definition. You can either:
- - Use the `ee/bin/custom-ability` CLI to create the YAML definition automatically.
- - Perform manual steps to create a new file in `ee/config/custom_abilities/` with the filename matching the name of the ability name.
-1. Add contents to the file that conform to the [schema](#schema) defined in `ee/config/custom_abilities/types/type_schema.json`.
-1. Add [tests](#testing) for the new ability in `ee/spec/requests/custom_roles/` with a new directory named after the ability name.
+ it_behaves_like 'custom roles abilities'
+ end
+```
-### Schema
+#### Step 6: Update documentation
-| Field | Required | Description |
-| ----- | -------- |--------------|
-| `name` | yes | Unique, lowercase and underscored name describing the custom ability. Must match the filename. |
-| `description` | yes | Human-readable description of the custom ability. |
-| `feature_category` | yes | Name of the feature category. For example, `vulnerability_management`. |
-| `introduced_by_issue` | yes | Issue URL that proposed the addition of this custom ability. |
-| `introduced_by_mr` | no | MR URL that added this custom ability. |
-| `milestone` | yes | Milestone in which this custom ability was added. |
-| `group_ability` | yes | Indicate whether this ability is checked on group level. |
-| `project_ability` | yes | Indicate whether this ability is checked on project level. |
-| `requirements` | no | The custom abilities that need to be enabled for this ability. |
-| `skip_seat_consumption` | yes | Indicate wheter this ability should be skiped when counting licensed users. |
+- Update the list of custom abilities by running `bundle exec rake gitlab:custom_roles:compile_docs`
+- Update the GraphQL documentation by running `bundle exec rake gitlab:graphql:compile_docs`
### Privilege escalation consideration
diff --git a/doc/development/secure_coding_guidelines.md b/doc/development/secure_coding_guidelines.md
index d8fad6deb9c..75df9a67aff 100644
--- a/doc/development/secure_coding_guidelines.md
+++ b/doc/development/secure_coding_guidelines.md
@@ -258,7 +258,7 @@ the mitigations for a new feature.
#### URL blocker & validation libraries
-[`Gitlab::UrlBlocker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/url_blocker.rb) can be used to validate that a
+[`Gitlab::HTTP_V2::UrlBlocker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/gems/gitlab-http/lib/gitlab/http_v2/url_blocker.rb) can be used to validate that a
provided URL meets a set of constraints. Importantly, when `dns_rebind_protection` is `true`, the method returns a known-safe URI where the hostname
has been replaced with an IP address. This prevents DNS rebinding attacks, because the DNS record has been resolved. However, if we ignore this returned
value, we **will not** be protected against DNS rebinding.
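A hedged sketch of acting on that return value follows. The keyword options and the two-element return shape are assumptions drawn from the description above rather than a verbatim excerpt, so check the gem for the exact signature; `user_supplied_url` stands in for the untrusted input.

```ruby
# Assumed shape: validate! returns the resolved URI plus the original hostname.
uri, original_hostname = Gitlab::HTTP_V2::UrlBlocker.validate!(
  user_supplied_url,
  schemes: %w[http https],
  dns_rebind_protection: true
)

# Use the returned URI (hostname already replaced with a resolved IP) for the
# outbound request, and keep the original hostname for the Host header, instead
# of re-resolving the user-supplied URL and reopening the DNS rebinding window.
Gitlab::HTTP.get(uri.to_s, headers: { 'Host' => original_hostname })
```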
@@ -1234,7 +1234,7 @@ These types of bugs are often seen in environments which allow multi-threading a
**Example 1:** you have a model which accepts a URL as input. When the model is created you verify that the URL host resolves to a public IP address, to prevent attackers making internal network calls. But DNS records can change ([DNS rebinding](#server-side-request-forgery-ssrf)]). An attacker updates the DNS record to `127.0.0.1`, and when your code resolves those URL host it results in sending a potentially malicious request to a server on the internal network. The property was valid at the "time of check", but invalid and malicious at "time of use".
-GitLab-specific example can be found in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/214401) where, although `Gitlab::UrlBlocker.validate!` was called, the returned value was not used. This made it vulnerable to TOCTOU bug and SSRF protection bypass through [DNS rebinding](#server-side-request-forgery-ssrf). The fix was to [use the validated IP address](https://gitlab.com/gitlab-org/gitlab/-/commit/7af8abd4df9a98f7a1ae7c4ec9840d0a7a8c684d).
+A GitLab-specific example can be found in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/214401) where, although `Gitlab::HTTP_V2::UrlBlocker.validate!` was called, the returned value was not used. This made it vulnerable to a TOCTOU bug and SSRF protection bypass through [DNS rebinding](#server-side-request-forgery-ssrf). The fix was to [use the validated IP address](https://gitlab.com/gitlab-org/gitlab/-/commit/85c6a73598e72ab104ab29b72bf83661cd961646).
**Example 2:** you have a feature which schedules jobs. When the user schedules the job, they have permission to do so. But imagine if, between the time they schedule the job and the time it is run, their permissions are restricted. Unless you re-check permissions at time of use, you could inadvertently allow unauthorized activity.
@@ -1264,9 +1264,9 @@ end
- Use your framework's validations and database features to impose constraints and atomic reads and writes.
- Read about [Server Side Request Forgery (SSRF) and DNS rebinding](#server-side-request-forgery-ssrf)
-An example of well implemented `Gitlab::UrlBlocker.validate!` call that prevents TOCTOU bug:
+An example of a well-implemented `Gitlab::HTTP_V2::UrlBlocker.validate!` call that prevents a TOCTOU bug:
-1. [Preventing DNS rebinding in Gitea importer](https://gitlab.com/gitlab-org/gitlab/-/commit/7af8abd4df9a98f7a1ae7c4ec9840d0a7a8c684d)
+1. [Preventing DNS rebinding in Gitea importer](https://gitlab.com/gitlab-org/gitlab/-/commit/85c6a73598e72ab104ab29b72bf83661cd961646)
### Resources
diff --git a/gems/gitlab-http/spec/gitlab/stub_requests.rb b/gems/gitlab-http/spec/gitlab/stub_requests.rb
index ea4a6865251..048d41a625a 100644
--- a/gems/gitlab-http/spec/gitlab/stub_requests.rb
+++ b/gems/gitlab-http/spec/gitlab/stub_requests.rb
@@ -23,7 +23,7 @@ module Gitlab
socket = Socket.sockaddr_in(port, ip_address)
addr = Addrinfo.new(socket)
- # See Gitlab::UrlBlocker
+ # See Gitlab::HTTP_V2::UrlBlocker
allow(Addrinfo).to receive(:getaddrinfo)
.with(url.hostname, url.port, nil, :STREAM)
.and_return([addr])
@@ -35,7 +35,7 @@ module Gitlab
socket = Socket.sockaddr_in(port, ip_address)
addr = Addrinfo.new(socket)
- # See Gitlab::UrlBlocker
+ # See Gitlab::HTTP_V2::UrlBlocker
allow(Addrinfo).to receive(:getaddrinfo).and_call_original
allow(Addrinfo).to receive(:getaddrinfo)
.with(url.hostname, anything, nil, :STREAM)
diff --git a/lib/gitlab/ci/config/external/mapper.rb b/lib/gitlab/ci/config/external/mapper.rb
index cff7954235f..fcd65b2f2d7 100644
--- a/lib/gitlab/ci/config/external/mapper.rb
+++ b/lib/gitlab/ci/config/external/mapper.rb
@@ -11,6 +11,7 @@ module Gitlab
AmbigiousSpecificationError = Class.new(Error)
TooManyIncludesError = Class.new(Error)
TooMuchDataInPipelineTreeError = Class.new(Error)
+ InvalidTypeError = Class.new(Error)
def initialize(values, context)
@locations = Array.wrap(values.fetch(:include, [])).compact
diff --git a/lib/gitlab/ci/config/external/mapper/normalizer.rb b/lib/gitlab/ci/config/external/mapper/normalizer.rb
index 8fc798e78a0..b07726f7c11 100644
--- a/lib/gitlab/ci/config/external/mapper/normalizer.rb
+++ b/lib/gitlab/ci/config/external/mapper/normalizer.rb
@@ -25,8 +25,10 @@ module Gitlab
location = variables_expander.expand(location)
normalize_location_string(location)
- else
+ elsif location.is_a?(Hash)
location.deep_symbolize_keys
+ else
+ raise Mapper::InvalidTypeError, 'Each include must be a hash or a string'
end
end
end
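For readers of this hunk, a simplified, hypothetical helper that mirrors the new type check added to `Mapper::Normalizer` above: only `String` and `Hash` include entries are accepted, and anything else now raises instead of being passed through silently.

```ruby
# Hypothetical standalone version of the branch above (the real normalizer also
# expands variables and deep-symbolizes keys).
def normalize_include_entry(location)
  case location
  when String
    location.strip
  when Hash
    location.transform_keys(&:to_sym)
  else
    raise ArgumentError, 'Each include must be a hash or a string'
  end
end

normalize_include_entry('templates/.gitlab-ci.yml')               # => "templates/.gitlab-ci.yml"
normalize_include_entry('remote' => 'https://example.com/ci.yml') # => { remote: "https://example.com/ci.yml" }
normalize_include_entry(true)                                     # raises ArgumentError
```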
diff --git a/lib/gitlab/pages/url_builder.rb b/lib/gitlab/pages/url_builder.rb
index 4d025af4ce5..f01ec54b853 100644
--- a/lib/gitlab/pages/url_builder.rb
+++ b/lib/gitlab/pages/url_builder.rb
@@ -14,7 +14,18 @@ module Gitlab
end
def pages_url(with_unique_domain: false)
- find_url(with_unique_domain).downcase
+ return namespace_in_path_url(with_unique_domain && unique_domain_enabled?) if config.namespace_in_path
+ return unique_url if with_unique_domain && unique_domain_enabled?
+
+ project_path_url = "#{config.protocol}://#{project_path}".downcase
+
+      # If the project path is the same as the host, we serve it as a group page
+      # In development, we ignore the URL port so this also works on the GDK
+ return namespace_url if Rails.env.development? && portless(namespace_url) == project_path_url
+      # If the project path is the same as the host, we serve it as a group page
+ return namespace_url if namespace_url == project_path_url
+
+ "#{namespace_url}/#{project_path}"
end
def unique_host
@@ -52,21 +63,6 @@ module Gitlab
attr_reader :project, :project_path
- def find_url(with_unique_domain)
- return namespace_in_path_url(with_unique_domain && unique_domain_enabled?) if config.namespace_in_path
- return unique_url if with_unique_domain && unique_domain_enabled?
-
- project_path_url = "#{config.protocol}://#{project_path}"
-
- # If the project path is the same as host, we serve it as group page
- # On development we ignore the URL port to make it work on GDK
- return namespace_url if Rails.env.development? && portless(namespace_url) == project_path_url
- # If the project path is the same as host, we serve it as group page
- return namespace_url if namespace_url == project_path_url
-
- "#{namespace_url}/#{project_path}"
- end
-
def namespace_url
@namespace_url ||= url_for(project_namespace)
end
@@ -79,13 +75,14 @@ module Gitlab
@pages_url ||= URI(config.url)
.tap { |url| url.port = config.port }
.to_s
+ .downcase
end
def namespace_in_path_url(with_unique_domain)
if with_unique_domain
- "#{pages_base_url}/#{project.project_setting.pages_unique_domain}"
+ "#{pages_base_url}/#{project.project_setting.pages_unique_domain}".downcase
else
- "#{pages_base_url}/#{project_namespace}/#{project_path}"
+ "#{pages_base_url}/#{project_namespace}/#{project_path}".downcase
end
end
@@ -94,6 +91,7 @@ module Gitlab
.tap { |url| url.port = config.port }
.tap { |url| url.host.prepend("#{subdomain}.") }
.to_s
+ .downcase
end
def portless(url)
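A hypothetical reduction of the rule now inlined into `#pages_url` above (names and protocol are illustrative): when the lowercased project path matches the namespace's Pages URL, the project is served as the group page; otherwise the project path is appended.

```ruby
def pages_url_for(namespace_url, project_path, protocol: 'http')
  # The namespace URL is assumed to be lowercased already, as in the builder.
  project_path_url = "#{protocol}://#{project_path}".downcase

  return namespace_url if namespace_url == project_path_url

  "#{namespace_url}/#{project_path}"
end

pages_url_for('http://group.example.com', 'group.example.com') # => "http://group.example.com"
pages_url_for('http://group.example.com', 'my-project')        # => "http://group.example.com/my-project"
```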
diff --git a/lib/gitlab/url_blocker.rb b/lib/gitlab/url_blocker.rb
deleted file mode 100644
index 8164cc4524a..00000000000
--- a/lib/gitlab/url_blocker.rb
+++ /dev/null
@@ -1,429 +0,0 @@
-# frozen_string_literal: true
-
-#
-# IMPORTANT: With the new development of the 'gitlab-http' gem (https://gitlab.com/gitlab-org/gitlab/-/issues/415686),
-# no additional change should be implemented in this class. This class will be removed after migrating all
-# the usages to the new gem.
-#
-
-require 'resolv'
-require 'ipaddress'
-
-module Gitlab
- class UrlBlocker
- GETADDRINFO_TIMEOUT_SECONDS = 15
- DENY_ALL_REQUESTS_EXCEPT_ALLOWED_DEFAULT = proc { deny_all_requests_except_allowed_app_setting }.freeze
-
- # Result stores the validation result:
- # uri - The original URI requested
- # hostname - The hostname that should be used to connect. For DNS
- # rebinding protection, this will be the resolved IP address of
- # the hostname.
- # use_proxy -
- # If true, this means that the proxy server specified in the
- # http_proxy/https_proxy environment variables should be used.
- #
- # If false, this either means that no proxy server was specified
- # or that the hostname in the URL is exempt via the no_proxy
- # environment variable. This allows the caller to disable usage
- # of a proxy since the IP address may be used to
- # connect. Otherwise, Net::HTTP may erroneously compare the IP
- # address against the no_proxy list.
- Result = Struct.new(:uri, :hostname, :use_proxy)
-
- class << self
- # Validates the given url according to the constraints specified by arguments.
- #
- # ports - Raises error if the given URL port is not between given ports.
- # allow_localhost - Raises error if URL resolves to a localhost IP address and argument is false.
- # allow_local_network - Raises error if URL resolves to a link-local address and argument is false.
- # allow_object_storage - Avoid raising an error if URL resolves to an object storage endpoint and argument is true.
- # ascii_only - Raises error if URL has unicode characters and argument is true.
- # enforce_user - Raises error if URL user doesn't start with alphanumeric characters and argument is true.
- # enforce_sanitization - Raises error if URL includes any HTML/CSS/JS tags and argument is true.
- # deny_all_requests_except_allowed - Raises error if URL is not in the allow list and argument is true. Can be Boolean or Proc. Defaults to instance app setting.
- #
- # Returns a Result object.
- # rubocop:disable Metrics/ParameterLists
- def validate_url_with_proxy!(
- url,
- schemes:,
- ports: [],
- allow_localhost: false,
- allow_local_network: true,
- allow_object_storage: false,
- ascii_only: false,
- enforce_user: false,
- enforce_sanitization: false,
- deny_all_requests_except_allowed: DENY_ALL_REQUESTS_EXCEPT_ALLOWED_DEFAULT,
- dns_rebind_protection: true)
- # rubocop:enable Metrics/ParameterLists
-
- return Result.new(nil, nil, true) if url.nil?
-
- raise ArgumentError, 'The schemes is a required argument' if schemes.blank?
-
- # Param url can be a string, URI or Addressable::URI
- uri = parse_url(url)
-
- validate_uri(
- uri: uri,
- schemes: schemes,
- ports: ports,
- enforce_sanitization: enforce_sanitization,
- enforce_user: enforce_user,
- ascii_only: ascii_only
- )
-
- begin
- address_info = get_address_info(uri)
- rescue SocketError
- proxy_in_use = uri_under_proxy_setting?(uri, nil)
-
- return Result.new(uri, nil, proxy_in_use) unless enforce_address_info_retrievable?(uri, dns_rebind_protection, deny_all_requests_except_allowed)
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, 'Host cannot be resolved or invalid'
- end
-
- ip_address = ip_address(address_info)
- proxy_in_use = uri_under_proxy_setting?(uri, ip_address)
-
- # Ignore DNS rebind protection when a proxy is being used, as DNS
- # rebinding is expected behavior.
- dns_rebind_protection &&= !proxy_in_use
- return Result.new(uri, nil, proxy_in_use) if domain_in_allow_list?(uri)
-
- protected_uri_with_hostname = enforce_uri_hostname(ip_address, uri, dns_rebind_protection, proxy_in_use)
-
- return protected_uri_with_hostname if ip_in_allow_list?(ip_address, port: get_port(uri))
-
- # Allow url from the GitLab instance itself but only for the configured hostname and ports
- return protected_uri_with_hostname if internal?(uri)
-
- return protected_uri_with_hostname if allow_object_storage && object_storage_endpoint?(uri)
-
- validate_deny_all_requests_except_allowed!(deny_all_requests_except_allowed)
-
- validate_local_request(
- address_info: address_info,
- allow_localhost: allow_localhost,
- allow_local_network: allow_local_network
- )
-
- protected_uri_with_hostname
- end
-
- def blocked_url?(url, **kwargs)
- validate!(url, **kwargs)
-
- false
- rescue Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError
- true
- end
-
- # For backwards compatibility, Returns an array with [<uri>, <original-hostname>].
- # Issue for refactoring: https://gitlab.com/gitlab-org/gitlab/-/issues/410890
- def validate!(...)
- result = validate_url_with_proxy!(...)
- [result.uri, result.hostname]
- end
-
- private
-
- # Returns the given URI with IP address as hostname and the original hostname respectively
- # in an Array.
- #
- # It checks whether the resolved IP address matches with the hostname. If not, it changes
- # the hostname to the resolved IP address.
- #
- # The original hostname is used to validate the SSL, given in that scenario
- # we'll be making the request to the IP address, instead of using the hostname.
- def enforce_uri_hostname(ip_address, uri, dns_rebind_protection, proxy_in_use)
- return Result.new(uri, nil, proxy_in_use) unless dns_rebind_protection && ip_address && ip_address != uri.hostname
-
- new_uri = uri.dup
- new_uri.hostname = ip_address
- Result.new(new_uri, uri.hostname, proxy_in_use)
- end
-
- def ip_address(address_info)
- address_info.first&.ip_address
- end
-
- def validate_uri(uri:, schemes:, ports:, enforce_sanitization:, enforce_user:, ascii_only:)
- validate_html_tags(uri) if enforce_sanitization
-
- return if internal?(uri)
-
- validate_scheme(uri.scheme, schemes)
- validate_port(get_port(uri), ports) if ports.any?
- validate_user(uri.user) if enforce_user
- validate_hostname(uri.hostname)
- validate_unicode_restriction(uri) if ascii_only
- end
-
- def uri_under_proxy_setting?(uri, ip_address)
- return false unless Gitlab.http_proxy_env?
- # `no_proxy|NO_PROXY` specifies addresses for which the proxy is not
- # used. If it's empty, there are no exceptions and this URI
- # will be under proxy settings.
- return true if no_proxy_env.blank?
-
- # `no_proxy|NO_PROXY` is being used. We must check whether it
- # applies to this specific URI.
- ::URI::Generic.use_proxy?(uri.hostname, ip_address, get_port(uri), no_proxy_env)
- end
-
- # Returns addrinfo object for the URI.
- #
- # @param uri [Addressable::URI]
- #
- # @raise [Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, ArgumentError] - raised if host is too long.
- #
- # @return [Array<Addrinfo>]
- def get_address_info(uri)
- Timeout.timeout(GETADDRINFO_TIMEOUT_SECONDS) do
- Addrinfo.getaddrinfo(uri.hostname, get_port(uri), nil, :STREAM).map do |addr|
- addr.ipv6_v4mapped? ? addr.ipv6_to_ipv4 : addr
- end
- end
- rescue Timeout::Error => e
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, e.message
- rescue ArgumentError => e
- # Addrinfo.getaddrinfo errors if the domain exceeds 1024 characters.
- raise unless e.message.include?('hostname too long')
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Host is too long (maximum is 1024 characters)"
- end
-
- def enforce_address_info_retrievable?(uri, dns_rebind_protection, deny_all_requests_except_allowed)
- # Do not enforce if URI is in the allow list
- return false if domain_in_allow_list?(uri)
-
- # Enforce if the instance should block requests
- return true if deny_all_requests_except_allowed?(deny_all_requests_except_allowed)
-
- # Do not enforce if DNS rebinding protection is disabled
- return false unless dns_rebind_protection
-
- # Do not enforce if proxy is used
- return false if Gitlab.http_proxy_env?
-
- # In the test suite we use a lot of mocked urls that are either invalid or
- # don't exist. In order to avoid modifying a ton of tests and factories
- # we allow invalid urls unless the environment variable RSPEC_ALLOW_INVALID_URLS
- # is not true
- return false if Rails.env.test? && ENV['RSPEC_ALLOW_INVALID_URLS'] == 'true'
-
- true
- end
-
- def validate_local_request(
- address_info:,
- allow_localhost:,
- allow_local_network:)
- return if allow_local_network && allow_localhost
-
- unless allow_localhost
- validate_localhost(address_info)
- validate_loopback(address_info)
- end
-
- unless allow_local_network
- validate_local_network(address_info)
- validate_link_local(address_info)
- validate_shared_address(address_info)
- validate_limited_broadcast_address(address_info)
- end
- end
-
- def validate_shared_address(addrs_info)
- netmask = IPAddr.new('100.64.0.0/10')
- return unless addrs_info.any? { |addr| netmask.include?(addr.ip_address) }
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Requests to the shared address space are not allowed"
- end
-
- def get_port(uri)
- uri.port || uri.default_port
- end
-
- def validate_html_tags(uri)
- uri_str = uri.to_s
- sanitized_uri = ActionController::Base.helpers.sanitize(uri_str, tags: [])
- if sanitized_uri != uri_str
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, 'HTML/CSS/JS tags are not allowed'
- end
- end
-
- def parse_url(url)
- Addressable::URI.parse(url).tap do |parsed_url|
- raise Addressable::URI::InvalidURIError if multiline_blocked?(parsed_url)
- end
- rescue Addressable::URI::InvalidURIError, URI::InvalidURIError
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, 'URI is invalid'
- end
-
- def multiline_blocked?(parsed_url)
- url = parsed_url.to_s
-
- return true if /\n|\r/.match?(url)
- # Google Cloud Storage uses a multi-line, encoded Signature query string
- return false if %w[http https].include?(parsed_url.scheme&.downcase)
-
- CGI.unescape(url) =~ /\n|\r/
- end
-
- def validate_port(port, ports)
- return if port.blank?
- # Only ports under 1024 are restricted
- return if port >= 1024
- return if ports.include?(port)
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError,
- "Only allowed ports are #{ports.join(', ')}, and any over 1024"
- end
-
- def validate_scheme(scheme, schemes)
- if scheme.blank? || (schemes.any? && schemes.exclude?(scheme))
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Only allowed schemes are #{schemes.join(', ')}"
- end
- end
-
- def validate_user(value)
- return if value.blank?
- return if /\A\p{Alnum}/.match?(value)
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Username needs to start with an alphanumeric character"
- end
-
- def validate_hostname(value)
- return if value.blank?
- return if IPAddress.valid?(value)
- return if /\A\p{Alnum}/.match?(value)
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Hostname or IP address invalid"
- end
-
- def validate_unicode_restriction(uri)
- return if uri.to_s.ascii_only?
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "URI must be ascii only #{uri.to_s.dump}"
- end
-
- def validate_localhost(addrs_info)
- local_ips = ["::", "0.0.0.0"]
- local_ips.concat(Socket.ip_address_list.map(&:ip_address))
-
- return if (local_ips & addrs_info.map(&:ip_address)).empty?
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Requests to localhost are not allowed"
- end
-
- def validate_loopback(addrs_info)
- return unless addrs_info.any? { |addr| addr.ipv4_loopback? || addr.ipv6_loopback? }
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Requests to loopback addresses are not allowed"
- end
-
- def validate_local_network(addrs_info)
- return unless addrs_info.any? { |addr| addr.ipv4_private? || addr.ipv6_sitelocal? || addr.ipv6_unique_local? }
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Requests to the local network are not allowed"
- end
-
- def validate_link_local(addrs_info)
- netmask = IPAddr.new('169.254.0.0/16')
- return unless addrs_info.any? { |addr| addr.ipv6_linklocal? || netmask.include?(addr.ip_address) }
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Requests to the link local network are not allowed"
- end
-
- # Raises a Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError if the instance is configured to deny all requests.
- #
- # This should only be called after allow list checks have been made.
- def validate_deny_all_requests_except_allowed!(should_deny)
- return unless deny_all_requests_except_allowed?(should_deny)
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError,
- "Requests to hosts and IP addresses not on the Allow List are denied"
- end
-
- # Raises a Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError if any IP in `addrs_info` is the limited
- # broadcast address.
- # https://datatracker.ietf.org/doc/html/rfc919#section-7
- def validate_limited_broadcast_address(addrs_info)
- blocked_ips = ["255.255.255.255"]
-
- return if (blocked_ips & addrs_info.map(&:ip_address)).empty?
-
- raise Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "Requests to the limited broadcast address are not allowed"
- end
-
- def internal?(uri)
- internal_web?(uri) || internal_shell?(uri)
- end
-
- def internal_web?(uri)
- uri.scheme == config.gitlab.protocol &&
- uri.hostname == config.gitlab.host &&
- get_port(uri) == config.gitlab.port
- end
-
- def internal_shell?(uri)
- uri.scheme == 'ssh' &&
- uri.hostname == config.gitlab_shell.ssh_host &&
- get_port(uri) == config.gitlab_shell.ssh_port
- end
-
- def enabled_object_storage_endpoints
- ObjectStoreSettings::SUPPORTED_TYPES.collect do |type|
- section_setting = config.try(type)
-
- next unless section_setting && section_setting['enabled']
-
- object_store_setting = section_setting['object_store']
-
- next unless object_store_setting && object_store_setting['enabled']
-
- object_store_setting.dig('connection', 'endpoint')
- end.compact.uniq
- end
-
- def deny_all_requests_except_allowed?(should_deny)
- should_deny.is_a?(Proc) ? should_deny.call : should_deny
- end
-
- def deny_all_requests_except_allowed_app_setting
- Gitlab::CurrentSettings.current_application_settings? &&
- Gitlab::CurrentSettings.deny_all_requests_except_allowed?
- end
-
- def object_storage_endpoint?(uri)
- enabled_object_storage_endpoints.any? do |endpoint|
- endpoint_uri = URI(endpoint)
-
- uri.scheme == endpoint_uri.scheme &&
- uri.hostname == endpoint_uri.hostname &&
- get_port(uri) == get_port(endpoint_uri)
- end
- end
-
- def domain_in_allow_list?(uri)
- Gitlab::UrlBlockers::UrlAllowlist.domain_allowed?(uri.normalized_host, port: get_port(uri))
- end
-
- def ip_in_allow_list?(ip_address, port: nil)
- Gitlab::UrlBlockers::UrlAllowlist.ip_allowed?(ip_address, port: port)
- end
-
- def config
- Gitlab.config
- end
-
- def no_proxy_env
- ENV['no_proxy'] || ENV['NO_PROXY']
- end
- end
- end
-end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 4becc3dc9f4..2432d343ec3 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -23595,7 +23595,7 @@ msgstr ""
msgid "GroupSAML|Some branches are inaccessible because your SAML session has expired. To access the branches, select the group’s path to reauthenticate."
msgstr ""
-msgid "GroupSAML|Some to-do items may be hidden because your SAML session has expired. Select the group’s path to reauthenticate and view the hidden to-do items."
+msgid "GroupSAML|Some items may be hidden because your SAML session has expired. Select the group’s path to reauthenticate and view any hidden items."
msgstr ""
msgid "GroupSAML|The SCIM token is now hidden. To see the value of the token again, you need to %{linkStart}reset it%{linkEnd}."
@@ -55631,10 +55631,10 @@ msgstr ""
msgid "WikiPage|Write your content or drag files here…"
msgstr ""
-msgid "Wiki|Create New Page"
+msgid "Wiki|Edit Page"
msgstr ""
-msgid "Wiki|Edit Page"
+msgid "Wiki|New Page"
msgstr ""
msgid "Wiki|New page"
diff --git a/spec/bin/saas_feature_spec.rb b/spec/bin/saas_feature_spec.rb
new file mode 100644
index 00000000000..c648db5885a
--- /dev/null
+++ b/spec/bin/saas_feature_spec.rb
@@ -0,0 +1,218 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+require_relative '../../bin/saas-feature'
+
+RSpec.describe 'bin/saas-feature', feature_category: :feature_flags do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:groups) { { geo: { label: 'group::geo' } } }
+
+ before do
+ allow(HTTParty)
+ .to receive(:get)
+ .with(SaasFeatureOptionParser::WWW_GITLAB_COM_GROUPS_JSON, format: :plain)
+ .and_return(groups.to_json)
+ end
+
+ describe SaasFeatureCreator do
+ let(:argv) { %w[saas-feature-name -g group::geo -m http://url -M 16.6] }
+ let(:options) { SaasFeatureOptionParser.parse(argv) }
+ let(:creator) { described_class.new(options) }
+ let(:existing_saas_features) do
+ { 'existing_saas_feature' => File.join('ee', 'config', 'saas_features', 'existing_saas_feature.yml') }
+ end
+
+ before do
+ allow(creator).to receive(:all_saas_feature_names) { existing_saas_features }
+ allow(creator).to receive(:branch_name).and_return('feature-branch')
+ allow(creator).to receive(:editor).and_return(nil)
+
+ # ignore writes
+ allow(File).to receive(:write).and_return(true)
+
+ # ignore stdin
+ allow(Readline).to receive(:readline).and_raise('EOF')
+ end
+
+ subject(:execute) { creator.execute }
+
+ it 'properly creates a SaaS feature' do
+ expect(File).to receive(:write).with(
+ File.join('ee', 'config', 'saas_features', 'saas_feature_name.yml'),
+ anything)
+
+ expect { execute }.to output(/name: saas_feature_name/).to_stdout
+ end
+
+ context 'when running on master' do
+ it 'requires feature branch' do
+ expect(creator).to receive(:branch_name).and_return('master')
+
+ expect { execute }.to raise_error(SaasFeatureHelpers::Abort, /Create a branch first/)
+ end
+ end
+
+ context 'with SaaS feature name validation' do
+ where(:argv, :ex) do
+ %w[.invalid.saas.feature] | /Provide a name for the SaaS feature that is/
+ %w[existing-saas-feature] | /already exists!/
+ end
+
+ with_them do
+ it do
+ expect { execute }.to raise_error(ex)
+ end
+ end
+ end
+ end
+
+ describe SaasFeatureOptionParser do
+ describe '.parse' do
+ where(:param, :argv, :result) do
+ :name | %w[foo] | 'foo'
+ :amend | %w[foo --amend] | true
+ :force | %w[foo -f] | true
+ :force | %w[foo --force] | true
+ :introduced_by_url | %w[foo -m https://url] | 'https://url'
+ :introduced_by_url | %w[foo --introduced-by-url https://url] | 'https://url'
+ :dry_run | %w[foo -n] | true
+ :dry_run | %w[foo --dry-run] | true
+ :group | %w[foo -g group::geo] | 'group::geo'
+ :group | %w[foo --group group::geo] | 'group::geo'
+ :group | %w[foo -g invalid] | nil
+ :group | %w[foo --group invalid] | nil
+ end
+
+ with_them do
+ it do
+ options = described_class.parse(Array(argv))
+
+ expect(options.public_send(param)).to eq(result)
+ end
+ end
+
+ it 'missing SaaS feature name' do
+ expect do
+ expect { described_class.parse(%w[--amend]) }.to output(/SaaS feature name is required/).to_stdout
+ end.to raise_error(SaasFeatureHelpers::Abort)
+ end
+
+ it 'parses -h' do
+ expect do
+ expect { described_class.parse(%w[foo -h]) }.to output(/Usage:/).to_stdout
+ end.to raise_error(SaasFeatureHelpers::Done)
+ end
+ end
+
+ describe '.read_group' do
+ before do
+ allow(described_class).to receive(:fzf_available?).and_return(false)
+ end
+
+ context 'when valid group is given' do
+ let(:group) { 'group::geo' }
+
+ it 'reads group from stdin' do
+ expect(Readline).to receive(:readline).and_return(group)
+ expect do
+ expect(described_class.read_group).to eq('group::geo')
+ end.to output(/Specify the group label to which the SaaS feature belongs, from the following list/).to_stdout
+ end
+ end
+
+ context 'when valid index is given' do
+ it 'picks the group successfully' do
+ expect(Readline).to receive(:readline).and_return('1')
+ expect do
+ expect(described_class.read_group).to eq('group::geo')
+ end.to output(/Specify the group label to which the SaaS feature belongs, from the following list/).to_stdout
+ end
+ end
+
+ context 'with invalid group given' do
+ let(:type) { 'invalid' }
+
+ it 'shows error message and retries' do
+ expect(Readline).to receive(:readline).and_return(type)
+ expect(Readline).to receive(:readline).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_group }.to raise_error(/EOF/)
+ end.to output(/Specify the group label to which the SaaS feature belongs, from the following list/).to_stdout
+ .and output(/The group label isn't in the above labels list/).to_stderr
+ end
+ end
+
+ context 'when invalid index is given' do
+ it 'shows error message and retries' do
+ expect(Readline).to receive(:readline).and_return('12')
+ expect(Readline).to receive(:readline).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_group }.to raise_error(/EOF/)
+ end.to output(/Specify the group label to which the SaaS feature belongs, from the following list/).to_stdout
+ .and output(/The group label isn't in the above labels list/).to_stderr
+ end
+ end
+ end
+
+ describe '.read_introduced_by_url' do
+ context 'with valid URL given' do
+ let(:url) { 'https://merge-request' }
+
+ it 'reads URL from stdin' do
+ expect(Readline).to receive(:readline).and_return(url)
+ expect(HTTParty).to receive(:head).with(url).and_return(instance_double(HTTParty::Response, success?: true))
+
+ expect do
+ expect(described_class.read_introduced_by_url).to eq('https://merge-request')
+ end.to output(/URL of the MR introducing the SaaS feature/).to_stdout
+ end
+ end
+
+ context 'with invalid URL given' do
+ let(:url) { 'https://invalid' }
+
+ it 'shows error message and retries' do
+ expect(Readline).to receive(:readline).and_return(url)
+ expect(HTTParty).to receive(:head).with(url).and_return(instance_double(HTTParty::Response, success?: false))
+ expect(Readline).to receive(:readline).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_introduced_by_url }.to raise_error(/EOF/)
+ end.to output(/URL of the MR introducing the SaaS feature/).to_stdout
+ .and output(/URL '#{url}' isn't valid/).to_stderr
+ end
+ end
+
+ context 'with empty URL given' do
+ let(:url) { '' }
+
+ it 'skips entry' do
+ expect(Readline).to receive(:readline).and_return(url)
+
+ expect do
+ expect(described_class.read_introduced_by_url).to be_nil
+ end.to output(/URL of the MR introducing the SaaS feature/).to_stdout
+ end
+ end
+
+ context 'with a non-URL given' do
+ let(:url) { 'malformed' }
+
+ it 'shows error message and retries' do
+ expect(Readline).to receive(:readline).and_return(url)
+ expect(Readline).to receive(:readline).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_introduced_by_url }.to raise_error(/EOF/)
+ end.to output(/URL of the MR introducing the SaaS feature/).to_stdout
+ .and output(/URL needs to start with/).to_stderr
+ end
+ end
+ end
+ end
+end
diff --git a/spec/config/settings_spec.rb b/spec/config/settings_spec.rb
index 60c460a036a..cee3359cd15 100644
--- a/spec/config/settings_spec.rb
+++ b/spec/config/settings_spec.rb
@@ -31,6 +31,21 @@ RSpec.describe Settings, feature_category: :system_access do
end
end
+ describe 'cron_jobs cron syntax is correct' do
+ it 'all cron entries are correct' do
+ Settings.cron_jobs.each_value do |job_config|
+ next unless job_config
+
+ job_class = job_config['job_class']
+ cron = job_config['cron']
+
+ next unless cron
+
+ expect(Fugit.parse_cron(cron)).not_to eq(nil), "The defined cron schedule (within #{job_class}) is invalid: '#{cron}'."
+ end
+ end
+ end
+
describe '.build_ci_component_fqdn' do
subject(:fqdn) { described_class.build_ci_component_fqdn }
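The new cron spec above relies on `Fugit.parse_cron` returning `nil` for an invalid expression; a quick standalone check of that behaviour, assuming the `fugit` gem is available (the spec already calls it):

```ruby
require 'fugit'

['0 4 * * *', 'definitely not a cron'].each do |cron|
  schedule = Fugit.parse_cron(cron)
  puts "#{cron.inspect} -> #{schedule ? schedule.class : 'invalid (nil)'}"
end
# "0 4 * * *" -> Fugit::Cron
# "definitely not a cron" -> invalid (nil)
```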
diff --git a/spec/factories/namespaces/descendants.rb b/spec/factories/namespaces/descendants.rb
index 6325481294a..8251c396168 100644
--- a/spec/factories/namespaces/descendants.rb
+++ b/spec/factories/namespaces/descendants.rb
@@ -8,5 +8,11 @@ FactoryBot.define do
traversal_ids { namespace.traversal_ids }
outdated_at { nil }
calculated_at { Time.current }
+
+ trait :up_to_date do
+ after(:create) do |record|
+ record.reload.update!(outdated_at: nil)
+ end
+ end
end
end
diff --git a/spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js b/spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js
index cc605c8c83d..a7695431a02 100644
--- a/spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/checks/draft_spec.js
@@ -16,7 +16,7 @@ import {
DRAFT_CHECK_READY,
DRAFT_CHECK_ERROR,
} from '~/vue_merge_request_widget/components/checks/i18n';
-import { FAILURE_REASONS } from '~/vue_merge_request_widget/components/checks/message.vue';
+import { FAILURE_REASONS } from '~/vue_merge_request_widget/components/checks/constants';
import draftQuery from '~/vue_merge_request_widget/queries/states/draft.query.graphql';
import getStateQuery from '~/vue_merge_request_widget/queries/get_state.query.graphql';
diff --git a/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js b/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js
index 48c01e3efad..054e90640c5 100644
--- a/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js
@@ -136,19 +136,19 @@ describe('Merge request merge checks component', () => {
});
it.each`
- identifier
- ${'conflict'}
- ${'discussions_not_resolved'}
- ${'need_rebase'}
- ${'default'}
- `('renders $identifier merge check', async ({ identifier }) => {
+ identifier | componentName
+ ${'conflict'} | ${'conflict'}
+ ${'discussions_not_resolved'} | ${'discussions_not_resolved'}
+ ${'need_rebase'} | ${'need_rebase'}
+ ${'policies_denied'} | ${'default'}
+ `('renders $identifier merge check', async ({ identifier, componentName }) => {
shallowMountComponent({ mergeabilityChecks: [{ status: 'failed', identifier }] });
wrapper.findComponent(StateContainer).vm.$emit('toggle');
await waitForPromises();
- const { default: component } = await COMPONENTS[identifier]();
+ const { default: component } = await COMPONENTS[componentName]();
expect(wrapper.findComponent(component).exists()).toBe(true);
});
@@ -166,9 +166,9 @@ describe('Merge request merge checks component', () => {
it('sorts merge checks', async () => {
mountComponent({
mergeabilityChecks: [
- { identifier: 'discussions', status: 'SUCCESS' },
- { identifier: 'discussions', status: 'INACTIVE' },
- { identifier: 'rebase', status: 'FAILED' },
+ { identifier: 'discussions_not_resolved', status: 'SUCCESS' },
+ { identifier: 'status_checks_must_pass', status: 'INACTIVE' },
+ { identifier: 'need_rebase', status: 'FAILED' },
],
});
@@ -184,4 +184,21 @@ describe('Merge request merge checks component', () => {
expect.objectContaining({ status: 'SUCCESS' }),
);
});
+
+ it('does not render check component if no message exists', async () => {
+ mountComponent({
+ mergeabilityChecks: [
+ { identifier: 'discussions_not_resolved', status: 'SUCCESS' },
+ { identifier: 'fakemessage', status: 'FAILED' },
+ ],
+ });
+
+ await waitForPromises();
+
+ await wrapper.findByTestId('widget-toggle').trigger('click');
+
+ const mergeChecks = wrapper.findAllByTestId('merge-check');
+
+ expect(mergeChecks.length).toBe(1);
+ });
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index 1d6834a5604..3c621b1ad4a 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -122,6 +122,7 @@ describe('BaseToken', () => {
const findGlFilteredSearchToken = () => wrapper.findComponent(GlFilteredSearchToken);
const findMockSuggestionList = () => wrapper.findByTestId(mockSuggestionListTestId);
+
const getMockSuggestionListSuggestions = () =>
JSON.parse(findMockSuggestionList().attributes('data-suggestions'));
@@ -410,30 +411,6 @@ describe('BaseToken', () => {
expect(setTokenValueToRecentlyUsed).not.toHaveBeenCalled();
});
-
- it('emits token-selected event when groupMultiSelectTokens: true', () => {
- const config = { ...mockConfig, multiSelect: true };
- wrapper = createComponent({
- props: { suggestions: mockLabels, config, value: { operator: '||' } },
- groupMultiSelectTokens: true,
- });
-
- findGlFilteredSearchToken().vm.$emit('select', mockTokenValue.title);
-
- expect(wrapper.emitted('token-selected')).toEqual([[mockTokenValue.title]]);
- });
-
- it('does not emit token-selected event when groupMultiSelectTokens: false', () => {
- const config = { ...mockConfig, multiSelect: true };
- wrapper = createComponent({
- props: { suggestions: mockLabels, config, value: { operator: '||' } },
- groupMultiSelectTokens: false,
- });
-
- findGlFilteredSearchToken().vm.$emit('select', mockTokenValue.title);
-
- expect(wrapper.emitted('token-selected')).toBeUndefined();
- });
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
index decf843091e..581adef7e32 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
@@ -1,4 +1,5 @@
import {
+ GlFilteredSearchToken,
GlFilteredSearchTokenSegment,
GlFilteredSearchSuggestion,
GlDropdownDivider,
@@ -310,6 +311,34 @@ describe('UserToken', () => {
expect(mockInput).toHaveBeenLastCalledWith([{ data: 'mockData', operator: '=' }]);
});
+ describe('when loading', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ active: true,
+ config: {
+ ...mockAuthorToken,
+ preloadedUsers: mockPreloadedUsers,
+ defaultUsers: [],
+ },
+ stubs: { Portal: true },
+ });
+ });
+
+ it('shows current user', () => {
+ const firstSuggestion = wrapper.findComponent(GlFilteredSearchSuggestion).text();
+ expect(firstSuggestion).toContain('Administrator');
+ expect(firstSuggestion).toContain('@root');
+ });
+
+ it('does not show non-matching user while searching', async () => {
+ wrapper.findComponent(GlFilteredSearchToken).vm.$emit('input', { data: 'foo' });
+
+ await nextTick();
+
+ expect(wrapper.findComponent(GlFilteredSearchSuggestion).exists()).toBe(false);
+ });
+ });
+
describe('multiSelect', () => {
it('renders check icons in suggestions when multiSelect is true', async () => {
wrapper = createComponent({
@@ -322,15 +351,11 @@ describe('UserToken', () => {
stubs: { Portal: true },
groupMultiSelectTokens: true,
});
-
await activateSuggestionsList();
-
const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
-
expect(findIconAtSuggestion(0).props('name')).toBe('check');
expect(findIconAtSuggestion(1).props('name')).toBe('check');
expect(findIconAtSuggestion(2).exists()).toBe(false);
-
// test for left padding on unchecked items (so alignment is correct)
expect(suggestions.at(2).find('.gl-pl-6').exists()).toBe(true);
});
@@ -344,101 +369,16 @@ describe('UserToken', () => {
config: { ...mockAuthorToken, multiSelect: true, initialUsers: mockUsers },
groupMultiSelectTokens: true,
});
-
await nextTick();
const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
-
expect(tokenSegments).toHaveLength(3); // Author, =, "Administrator"
-
const tokenValue = tokenSegments.at(2);
-
const [user1, user2] = mockUsers;
-
expect(tokenValue.findAllComponents(GlAvatar).at(1).props('src')).toBe(
mockUsers[1].avatar_url,
);
expect(tokenValue.text()).toBe(`${user1.name},${user2.name}`);
});
-
- it('adds new user to multi-select-values', () => {
- wrapper = createComponent({
- value: { data: [mockUsers[0].username], operator: '||' },
- data: {
- users: mockUsers,
- },
- config: { ...mockAuthorToken, multiSelect: true, initialUsers: mockUsers },
- active: true,
- groupMultiSelectTokens: true,
- });
-
- findBaseToken().vm.$emit('token-selected', mockUsers[1].username);
-
- expect(findBaseToken().props().multiSelectValues).toEqual([
- mockUsers[0].username,
- mockUsers[1].username,
- ]);
- });
-
- it('removes existing user from array', () => {
- const initialUsers = [mockUsers[0].username, mockUsers[1].username];
- wrapper = createComponent({
- value: { data: initialUsers, operator: '||' },
- data: {
- users: mockUsers,
- },
- config: { ...mockAuthorToken, multiSelect: true, initialUsers: mockUsers },
- active: true,
- groupMultiSelectTokens: true,
- });
-
- findBaseToken().vm.$emit('token-selected', mockUsers[0].username);
-
- expect(findBaseToken().props().multiSelectValues).toEqual([mockUsers[1].username]);
- });
-
- it('clears input field after token selected', () => {
- wrapper = createComponent({
- value: { data: [mockUsers[0].username, mockUsers[1].username], operator: '||' },
- data: {
- users: mockUsers,
- },
- config: { ...mockAuthorToken, multiSelect: true, initialUsers: mockUsers },
- active: true,
- groupMultiSelectTokens: true,
- });
-
- findBaseToken().vm.$emit('token-selected', 'test');
-
- expect(wrapper.emitted('input')).toEqual([[{ operator: '||', data: '' }]]);
- });
- });
-
- describe('when loading', () => {
- beforeEach(() => {
- wrapper = createComponent({
- active: true,
- config: {
- ...mockAuthorToken,
- preloadedUsers: mockPreloadedUsers,
- defaultUsers: [],
- },
- stubs: { Portal: true },
- });
- });
-
- it('shows current user', () => {
- const firstSuggestion = wrapper.findComponent(GlFilteredSearchSuggestion).text();
- expect(firstSuggestion).toContain('Administrator');
- expect(firstSuggestion).toContain('@root');
- });
-
- it('does not show current user while searching', async () => {
- wrapper.findComponent(BaseToken).vm.handleInput({ data: 'foo' });
-
- await nextTick();
-
- expect(wrapper.findComponent(GlFilteredSearchSuggestion).exists()).toBe(false);
- });
});
});
});
diff --git a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
index 3ae19078c30..5b93605e1d9 100644
--- a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :groups_a
let(:group) { create(:group) }
let(:namespace) { group }
let(:project1) { create(:project, namespace: namespace) }
- let(:project2) { create(:project, namespace: namespace) }
+ let(:project2) { create(:project, :archived, namespace: namespace) }
let(:project3) { create(:project, namespace: namespace, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
let(:nested_group) { create(:group, parent: group) }
let(:nested_project) { create(:project, group: nested_group) }
@@ -54,6 +54,12 @@ RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :groups_a
expect(resolve_projects(arg)).to contain_exactly(project1, project2)
end
+ it 'can filter out archived projects' do
+ arg = { include_archived: false }
+
+ expect(resolve_projects(arg)).to contain_exactly(project1, project3)
+ end
+
it 'finds all projects not aimed for deletion including the subgroups' do
args[:not_aimed_for_deletion] = true
diff --git a/spec/helpers/dashboard_helper_spec.rb b/spec/helpers/dashboard_helper_spec.rb
index d52b3c9abb3..95e69ebc070 100644
--- a/spec/helpers/dashboard_helper_spec.rb
+++ b/spec/helpers/dashboard_helper_spec.rb
@@ -75,4 +75,10 @@ RSpec.describe DashboardHelper do
it { is_expected.to eq(false) }
end
+
+ describe '.user_groups_requiring_reauth', feature_category: :system_access do
+ it 'returns an empty array' do
+ expect(helper.user_groups_requiring_reauth).to match_array([])
+ end
+ end
end
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index bffb240dae4..92df9e1581c 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -443,4 +443,10 @@ RSpec.describe TodosHelper do
end
end
end
+
+ describe '.todo_groups_requiring_saml_reauth', feature_category: :system_access do
+ it 'returns an empty array' do
+ expect(helper.todo_groups_requiring_saml_reauth([])).to match_array([])
+ end
+ end
end
diff --git a/spec/initializers/rest-client-hostname_override_spec.rb b/spec/initializers/rest-client-hostname_override_spec.rb
index 187f18a6b1d..18fd9d3bdb2 100644
--- a/spec/initializers/rest-client-hostname_override_spec.rb
+++ b/spec/initializers/rest-client-hostname_override_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'rest-client dns rebinding protection' do
- it_behaves_like 'a request using Gitlab::UrlBlocker' do
+ it_behaves_like 'a request using Gitlab::HTTP_V2::UrlBlocker' do
let(:http_method) { :get }
let(:url_blocked_error_class) { ArgumentError }
diff --git a/spec/lib/gitlab/ci/config/external/mapper/normalizer_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/normalizer_spec.rb
index 09212833d84..6c80df084d1 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/normalizer_spec.rb
@@ -40,5 +40,14 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Normalizer, feature_categor
{ remote: 'https://example.com/.gitlab-ci.yml' }]
)
end
+
+ context 'when the location value is an invalid type' do
+ let(:locations) { [123] }
+
+ it 'raises an error' do
+ expect { process }.to raise_error(
+ Gitlab::Ci::Config::External::Mapper::InvalidTypeError, /Each include must be a hash or a string/)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index d67b0ff8895..d6aa39c4849 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -174,6 +174,15 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 1
end
+
+ context 'when the include value is a Boolean' do
+ let(:values) { { include: true } }
+
+ it 'raises an error' do
+ expect { process }.to raise_error(
+ Gitlab::Ci::Config::External::Mapper::InvalidTypeError, /Each include must be a hash or a string/)
+ end
+ end
end
context "when 'include' is defined as an array" do
@@ -186,6 +195,15 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
expect(subject).to all(respond_to(:valid?))
expect(subject).to all(respond_to(:content))
end
+
+ context 'when an include value is an Array' do
+ let(:values) { { include: [remote_url, [local_file]] } }
+
+ it 'raises an error' do
+ expect { process }.to raise_error(
+ Gitlab::Ci::Config::External::Mapper::InvalidTypeError, /Each include must be a hash or a string/)
+ end
+ end
end
context "when 'include' is defined as an array of hashes" do
diff --git a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
index 1824a50cb28..297103e550d 100644
--- a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
@@ -143,6 +143,10 @@ RSpec.describe Gitlab::Database::LooseForeignKeys do
described_class.instance_variable_set(:@loose_foreign_keys_yaml, loose_foreign_keys_yaml)
end
+ after do
+ described_class.instance_variable_set(:@loose_foreign_keys_yaml, nil)
+ end
+
it 'raises Gitlab::Database::GitlabSchema::UnknownSchemaError error' do
expect { subject }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
end
diff --git a/spec/lib/gitlab/pages/url_builder_spec.rb b/spec/lib/gitlab/pages/url_builder_spec.rb
index 863c4481c9e..1a97ca01c3e 100644
--- a/spec/lib/gitlab/pages/url_builder_spec.rb
+++ b/spec/lib/gitlab/pages/url_builder_spec.rb
@@ -63,12 +63,6 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
it { is_expected.to eq('http://group.example.com/project') }
end
- context 'when project is upper cased' do
- let(:full_path) { 'group/Project' }
-
- it { is_expected.to eq('http://group.example.com/project') }
- end
-
context 'when project is in a nested group page' do
let(:full_path) { 'group/subgroup/project' }
@@ -133,12 +127,6 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
it { is_expected.to eq('http://example.com/group/project') }
end
- context 'when project is upper cased' do
- let(:full_path) { 'group/Project' }
-
- it { is_expected.to eq('http://example.com/group/project') }
- end
-
context 'when project is in a nested group page' do
let(:full_path) { 'group/subgroup/project' }
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
deleted file mode 100644
index 9e98cdc05eb..00000000000
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ /dev/null
@@ -1,1009 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :shared do
- include StubRequests
-
- let(:schemes) { %w[http https] }
-
- # This test ensures backward compatibliity for the validate! method.
- # We shoud refactor all callers of validate! to handle a Result object:
- # https://gitlab.com/gitlab-org/gitlab/-/issues/410890
- describe '#validate!' do
- let(:options) { { schemes: schemes } }
-
- subject { described_class.validate!(import_url, **options) }
-
- shared_examples 'validates URI and hostname' do
- it 'runs the url validations' do
- uri, hostname = subject
-
- expect(uri).to eq(Addressable::URI.parse(expected_uri))
- expect(hostname).to eq(expected_hostname)
- end
- end
-
- context 'when the URL hostname is a domain' do
- context 'when domain can be resolved' do
- let(:import_url) { 'https://example.org' }
-
- before do
- stub_dns(import_url, ip_address: '93.184.216.34')
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { 'https://93.184.216.34' }
- let(:expected_hostname) { 'example.org' }
- let(:expected_use_proxy) { false }
- end
- end
- end
- end
-
- describe '#validate_url_with_proxy!' do
- let(:options) { { schemes: schemes } }
-
- subject { described_class.validate_url_with_proxy!(import_url, **options) }
-
- shared_examples 'validates URI and hostname' do
- it 'runs the url validations' do
- expect(subject.uri).to eq(Addressable::URI.parse(expected_uri))
- expect(subject.hostname).to eq(expected_hostname)
- expect(subject.use_proxy).to eq(expected_use_proxy)
- end
- end
-
- shared_context 'when instance configured to deny all requests' do
- before do
- allow(Gitlab::CurrentSettings).to receive(:current_application_settings?).and_return(true)
- stub_application_setting(deny_all_requests_except_allowed: true)
- end
- end
-
- shared_examples 'a URI denied by `deny_all_requests_except_allowed`' do
- context 'when instance setting is enabled' do
- include_context 'when instance configured to deny all requests'
-
- it 'blocks the request' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
-
- context 'when instance setting is not enabled' do
- it 'does not block the request' do
- expect { subject }.not_to raise_error
- end
- end
-
- context 'when passed as an argument' do
- let(:options) { super().merge(deny_all_requests_except_allowed: arg_value) }
-
- context 'when argument is a proc that evaluates to true' do
- let(:arg_value) { proc { true } }
-
- it 'blocks the request' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
-
- context 'when argument is a proc that evaluates to false' do
- let(:arg_value) { proc { false } }
-
- it 'does not block the request' do
- expect { subject }.not_to raise_error
- end
- end
-
- context 'when argument is true' do
- let(:arg_value) { true }
-
- it 'blocks the request' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
-
- context 'when argument is false' do
- let(:arg_value) { false }
-
- it 'does not block the request' do
- expect { subject }.not_to raise_error
- end
- end
- end
- end
-
- shared_examples 'a URI exempt from `deny_all_requests_except_allowed`' do
- include_context 'when instance configured to deny all requests'
-
- it 'does not block the request' do
- expect { subject }.not_to raise_error
- end
- end
-
- context 'when URI is nil' do
- let(:import_url) { nil }
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { nil }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { true }
- end
-
- it_behaves_like 'a URI exempt from `deny_all_requests_except_allowed`'
- end
-
- context 'when URI is internal' do
- let(:import_url) { 'http://localhost' }
-
- before do
- stub_dns(import_url, ip_address: '127.0.0.1')
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { 'http://127.0.0.1' }
- let(:expected_hostname) { 'localhost' }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI exempt from `deny_all_requests_except_allowed`'
- end
-
- context 'when URI is for a local object storage' do
- let(:import_url) { "#{host}/external-diffs/merge_request_diffs/mr-1/diff-1" }
- let(:enabled_object_storage_setting) do
- {
- 'enabled' => true,
- 'object_store' =>
- {
- 'enabled' => true,
- 'connection' => {
- 'endpoint' => host
- }
- }
- }
- end
-
- before do
- allow(Settings).to receive(:external_diffs).and_return(enabled_object_storage_setting)
- end
-
- context 'when allow_object_storage is true' do
- let(:options) { { allow_object_storage: true, schemes: schemes } }
-
- context 'with a local domain name' do
- let(:host) { 'http://review-minio-svc.svc:9000' }
-
- before do
- stub_dns(host, ip_address: '127.0.0.1')
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { 'http://127.0.0.1:9000/external-diffs/merge_request_diffs/mr-1/diff-1' }
- let(:expected_hostname) { 'review-minio-svc.svc' }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI exempt from `deny_all_requests_except_allowed`'
- end
-
- context 'with an IP address' do
- let(:host) { 'http://127.0.0.1:9000' }
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { 'http://127.0.0.1:9000/external-diffs/merge_request_diffs/mr-1/diff-1' }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI exempt from `deny_all_requests_except_allowed`'
- end
-
- context 'when LFS object storage is enabled' do
- let(:lfs_config) do
- {
- 'enabled' => lfs_enabled,
- # This nesting of settings is necessary to trigger the bug
- 'object_store' => GitlabSettings::Options.build({ 'enabled' => true })
- }
- end
-
- let(:config) do
- {
- 'gitlab' => Gitlab.config.gitlab,
- 'repositories' => { 'storages' => { 'default' => 'test' } },
- 'lfs' => GitlabSettings::Options.build(lfs_config)
- }
- end
-
- let(:host) { 'http://127.0.0.1:9000' }
- let(:settings) { GitlabSettings::Options.build(config) }
-
- before do
- allow(Gitlab).to receive(:config).and_return(settings)
- settings.repositories.storages.default
- end
-
- context 'when LFS is disabled' do
- let(:lfs_enabled) { false }
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
-
- context 'when LFS is enabled with no connection endpoint' do
- let(:lfs_enabled) { true }
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
- end
- end
-
- context 'when allow_object_storage is false' do
- context 'with a local domain name' do
- let(:host) { 'http://review-minio-svc.svc:9000' }
-
- before do
- stub_dns(host, ip_address: '127.0.0.1')
- end
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
-
- context 'with an IP address' do
- let(:host) { 'http://127.0.0.1:9000' }
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
- end
- end
-
- context 'when resolving runs into a timeout' do
- let(:import_url) { 'http://example.com' }
-
- before do
- stub_const("#{described_class}::GETADDRINFO_TIMEOUT_SECONDS", 1)
- allow(Addrinfo).to receive(:getaddrinfo) { sleep 2 }
- end
-
- it 'raises an error due to DNS timeout' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "execution expired")
- end
- end
-
- context 'when the URL hostname is a domain' do
- context 'when domain can be resolved' do
- let(:import_url) { 'https://example.org' }
-
- before do
- stub_dns(import_url, ip_address: '93.184.216.34')
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { 'https://93.184.216.34' }
- let(:expected_hostname) { 'example.org' }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI denied by `deny_all_requests_except_allowed`'
- end
-
- context 'when domain cannot be resolved' do
- let(:import_url) { 'http://foobar.x' }
-
- before do
- stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
- end
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
-
- context 'with HTTP_PROXY' do
- let(:import_url) { 'http://foobar.x' }
-
- before do
- stub_env('http_proxy', 'http://proxy.example.com')
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { true }
- end
-
- context 'with no_proxy' do
- before do
- stub_env('no_proxy', 'foobar.x')
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { false }
- end
- end
- end
- end
-
- context 'when domain is too long' do
- let(:import_url) { "https://example#{'a' * 1024}.com" }
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
- end
-
- context 'when the URL hostname is an IP address' do
- let(:import_url) { 'https://93.184.216.34' }
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI denied by `deny_all_requests_except_allowed`'
-
- context 'when the address is invalid' do
- let(:import_url) { 'http://1.1.1.1.1' }
-
- it 'raises an error' do
- stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
-
- expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- end
- end
- end
-
- context 'when DNS rebinding protection with IP allowed' do
- let(:import_url) { 'http://a.192.168.0.120.3times.127.0.0.1.1time.repeat.rebind.network:9121/scrape?target=unix:///var/opt/gitlab/redis/redis.socket&amp;check-keys=*' }
-
- before do
- stub_dns(import_url, ip_address: '192.168.0.120')
-
- allow(Gitlab::UrlBlockers::UrlAllowlist).to receive(:ip_allowed?).and_return(true)
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { 'http://192.168.0.120:9121/scrape?target=unix:///var/opt/gitlab/redis/redis.socket&amp;check-keys=*' }
- let(:expected_hostname) { 'a.192.168.0.120.3times.127.0.0.1.1time.repeat.rebind.network' }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI exempt from `deny_all_requests_except_allowed`'
-
- context 'with HTTP_PROXY' do
- before do
- stub_env('http_proxy', 'http://proxy.example.com')
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { true }
- end
-
- context 'when domain is in no_proxy env' do
- before do
- stub_env('no_proxy', 'a.192.168.0.120.3times.127.0.0.1.1time.repeat.rebind.network')
- end
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { 'http://192.168.0.120:9121/scrape?target=unix:///var/opt/gitlab/redis/redis.socket&amp;check-keys=*' }
- let(:expected_hostname) { 'a.192.168.0.120.3times.127.0.0.1.1time.repeat.rebind.network' }
- let(:expected_use_proxy) { false }
- end
- end
- end
- end
-
- context 'with disabled DNS rebinding protection' do
- let(:options) { { dns_rebind_protection: false, schemes: schemes } }
-
- context 'when URI is internal' do
- let(:import_url) { 'http://localhost' }
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI exempt from `deny_all_requests_except_allowed`'
- end
-
- context 'when the URL hostname is a domain' do
- let(:import_url) { 'https://example.org' }
-
- before do
- stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
- end
-
- context 'when domain can be resolved' do
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI denied by `deny_all_requests_except_allowed`'
- end
-
- context 'when domain cannot be resolved' do
- let(:import_url) { 'http://foobar.x' }
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI denied by `deny_all_requests_except_allowed`'
- end
- end
-
- context 'when the URL hostname is an IP address' do
- let(:import_url) { 'https://93.184.216.34' }
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI denied by `deny_all_requests_except_allowed`'
-
- context 'when it is invalid' do
- let(:import_url) { 'http://1.1.1.1.1' }
-
- it_behaves_like 'validates URI and hostname' do
- let(:expected_uri) { import_url }
- let(:expected_hostname) { nil }
- let(:expected_use_proxy) { false }
- end
-
- it_behaves_like 'a URI denied by `deny_all_requests_except_allowed`'
- end
- end
- end
- end
-
- describe '#blocked_url?' do
- let(:ports) { Project::VALID_IMPORT_PORTS }
-
- it 'allows imports from configured web host and port' do
- import_url = "http://#{Gitlab.host_with_port}/t.git"
- expect(described_class.blocked_url?(import_url, schemes: schemes)).to be false
- end
-
- it 'allows mirroring from configured SSH host and port' do
- import_url = "ssh://#{Gitlab.config.gitlab_shell.ssh_host}:#{Gitlab.config.gitlab_shell.ssh_port}/t.git"
- expect(described_class.blocked_url?(import_url, schemes: schemes)).to be false
- end
-
- it 'returns true for bad localhost hostname' do
- expect(described_class.blocked_url?('https://localhost:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for bad port' do
- expect(described_class.blocked_url?('https://gitlab.com:25/foo/foo.git', ports: ports, schemes: schemes)).to be true
- end
-
- it 'returns true for bad scheme' do
- expect(described_class.blocked_url?('https://gitlab.com/foo/foo.git', schemes: ['https'])).to be false
- expect(described_class.blocked_url?('https://gitlab.com/foo/foo.git', schemes: ['http'])).to be true
- end
-
- it 'returns true for bad protocol on configured web/SSH host and ports' do
- web_url = "javascript://#{Gitlab.host_with_port}/t.git%0aalert(1)"
- expect(described_class.blocked_url?(web_url, schemes: schemes)).to be true
-
- ssh_url = "javascript://#{Gitlab.config.gitlab_shell.ssh_host}:#{Gitlab.config.gitlab_shell.ssh_port}/t.git%0aalert(1)"
- expect(described_class.blocked_url?(ssh_url, schemes: schemes)).to be true
- end
-
- it 'returns true for localhost IPs' do
- expect(described_class.blocked_url?('https://[0:0:0:0:0:0:0:0]/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://0.0.0.0/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[::]/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for loopback IP' do
- expect(described_class.blocked_url?('https://127.0.0.2/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://127.0.0.1/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[::1]/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for alternative version of 127.0.0.1 (0177.1)' do
- expect(described_class.blocked_url?('https://0177.1:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for alternative version of 127.0.0.1 (017700000001)' do
- expect(described_class.blocked_url?('https://017700000001:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for alternative version of 127.0.0.1 (0x7f.1)' do
- expect(described_class.blocked_url?('https://0x7f.1:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for alternative version of 127.0.0.1 (0x7f.0.0.1)' do
- expect(described_class.blocked_url?('https://0x7f.0.0.1:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for alternative version of 127.0.0.1 (0x7f000001)' do
- expect(described_class.blocked_url?('https://0x7f000001:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for alternative version of 127.0.0.1 (2130706433)' do
- expect(described_class.blocked_url?('https://2130706433:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for alternative version of 127.0.0.1 (127.000.000.001)' do
- expect(described_class.blocked_url?('https://127.000.000.001:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for alternative version of 127.0.0.1 (127.0.1)' do
- expect(described_class.blocked_url?('https://127.0.1:65535/foo/foo.git', schemes: schemes)).to be true
- end
-
- context 'with ipv6 mapped address' do
- it 'returns true for localhost IPs' do
- expect(described_class.blocked_url?('https://[0:0:0:0:0:ffff:0.0.0.0]/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[::ffff:0.0.0.0]/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[::ffff:0:0]/foo/foo.git', schemes: schemes)).to be true
- end
-
- it 'returns true for loopback IPs' do
- expect(described_class.blocked_url?('https://[0:0:0:0:0:ffff:127.0.0.1]/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[::ffff:127.0.0.1]/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[::ffff:7f00:1]/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[0:0:0:0:0:ffff:127.0.0.2]/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[::ffff:127.0.0.2]/foo/foo.git', schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://[::ffff:7f00:2]/foo/foo.git', schemes: schemes)).to be true
- end
- end
-
- it 'returns true for a non-alphanumeric hostname' do
- aggregate_failures do
- expect(described_class).to be_blocked_url('ssh://-oProxyCommand=whoami/a', schemes: ['ssh'])
-
- # The leading character here is a Unicode "soft hyphen"
- expect(described_class).to be_blocked_url('ssh://­oProxyCommand=whoami/a', schemes: ['ssh'])
-
- # Unicode alphanumerics are allowed
- expect(described_class).not_to be_blocked_url('ssh://ğitlab.com/a', schemes: ['ssh'])
- end
- end
-
- it 'returns true for invalid URL' do
- expect(described_class.blocked_url?('http://:8080', schemes: schemes)).to be true
- end
-
- it 'returns false for legitimate URL' do
- expect(described_class.blocked_url?('https://gitlab.com/foo/foo.git', schemes: schemes)).to be false
- end
-
- describe 'allow_local_network' do
- let(:shared_address_space_ips) { ['100.64.0.0', '100.64.127.127', '100.64.255.255'] }
-
- let(:local_ips) do
- [
- '192.168.1.2',
- '[0:0:0:0:0:ffff:192.168.1.2]',
- '[::ffff:c0a8:102]',
- '10.0.0.2',
- '[0:0:0:0:0:ffff:10.0.0.2]',
- '[::ffff:a00:2]',
- '172.16.0.2',
- '[0:0:0:0:0:ffff:172.16.0.2]',
- '[::ffff:ac10:20]',
- '[feef::1]',
- '[fee2::]',
- '[fc00:bf8b:e62c:abcd:abcd:aaaa:aaaa:aaaa]',
- *shared_address_space_ips
- ]
- end
-
- let(:limited_broadcast_address_variants) do
- [
- '255.255.255.255', # "normal" dotted decimal
- '0377.0377.0377.0377', # Octal
- '0377.00000000377.00377.0000377', # Still octal
- '0xff.0xff.0xff.0xff', # hex
- '0xffffffff', # still hex
- '0xBaaaaaaaaaaaaaaaaffffffff', # padded hex
- '255.255.255.255:65535', # with a port
- '4294967295', # as an integer / dword
- '[::ffff:ffff:ffff]', # short IPv6
- '[0000:0000:0000:0000:0000:ffff:ffff:ffff]' # long IPv6
- ]
- end
-
- let(:fake_domain) { 'www.fakedomain.fake' }
-
- shared_examples 'allows local requests' do |url_blocker_attributes|
- it 'does not block urls from private networks' do
- local_ips.each do |ip|
- stub_domain_resolv(fake_domain, ip) do
- expect(described_class).not_to be_blocked_url("http://#{fake_domain}", **url_blocker_attributes)
- end
-
- expect(described_class).not_to be_blocked_url("http://#{ip}", **url_blocker_attributes)
- end
- end
-
- it 'allows localhost endpoints' do
- expect(described_class).not_to be_blocked_url('http://0.0.0.0', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://localhost', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://127.0.0.1', **url_blocker_attributes)
- end
-
- it 'allows loopback endpoints' do
- expect(described_class).not_to be_blocked_url('http://127.0.0.2', **url_blocker_attributes)
- end
-
- it 'allows IPv4 link-local endpoints' do
- expect(described_class).not_to be_blocked_url('http://169.254.169.254', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://169.254.168.100', **url_blocker_attributes)
- end
-
- it 'allows IPv6 link-local endpoints' do
- expect(described_class).not_to be_blocked_url('http://[0:0:0:0:0:ffff:169.254.169.254]', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[::ffff:169.254.169.254]', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[::ffff:a9fe:a9fe]', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[0:0:0:0:0:ffff:169.254.168.100]', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[::ffff:169.254.168.100]', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[::ffff:a9fe:a864]', **url_blocker_attributes)
- expect(described_class).not_to be_blocked_url('http://[fe80::c800:eff:fe74:8]', **url_blocker_attributes)
- end
-
- it 'allows limited broadcast address 255.255.255.255 and variants' do
- limited_broadcast_address_variants.each do |variant|
- expect(described_class).not_to be_blocked_url("https://#{variant}", **url_blocker_attributes), "Expected #{variant} to be allowed"
- end
- end
- end
-
- context 'when true (default)' do
- it_behaves_like 'allows local requests', { allow_localhost: true, allow_local_network: true, schemes: %w[http https] }
- end
-
- context 'when false' do
- it 'blocks urls from private networks' do
- local_ips.each do |ip|
- stub_domain_resolv(fake_domain, ip) do
- expect(described_class).to be_blocked_url("http://#{fake_domain}", allow_local_network: false, schemes: schemes)
- end
-
- expect(described_class).to be_blocked_url("http://#{ip}", allow_local_network: false, schemes: schemes)
- end
- end
-
- it 'blocks IPv4 link-local endpoints' do
- expect(described_class).to be_blocked_url('http://169.254.169.254', allow_local_network: false, schemes: schemes)
- expect(described_class).to be_blocked_url('http://169.254.168.100', allow_local_network: false, schemes: schemes)
- end
-
- it 'blocks IPv6 link-local endpoints' do
- expect(described_class).to be_blocked_url('http://[0:0:0:0:0:ffff:169.254.169.254]', allow_local_network: false, schemes: schemes)
- expect(described_class).to be_blocked_url('http://[::ffff:169.254.169.254]', allow_local_network: false, schemes: schemes)
- expect(described_class).to be_blocked_url('http://[::ffff:a9fe:a9fe]', allow_local_network: false, schemes: schemes)
- expect(described_class).to be_blocked_url('http://[0:0:0:0:0:ffff:169.254.168.100]', allow_local_network: false, schemes: schemes)
- expect(described_class).to be_blocked_url('http://[::ffff:169.254.168.100]', allow_local_network: false, schemes: schemes)
- expect(described_class).to be_blocked_url('http://[::ffff:a9fe:a864]', allow_local_network: false, schemes: schemes)
- expect(described_class).to be_blocked_url('http://[fe80::c800:eff:fe74:8]', allow_local_network: false, schemes: schemes)
- end
-
- it 'blocks limited broadcast address 255.255.255.255 and variants' do
- # Raise BlockedUrlError for invalid URLs.
- # The padded hex version, for example, is a valid URL on Mac but
- # not on Ubuntu.
- stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
-
- limited_broadcast_address_variants.each do |variant|
- expect(described_class).to be_blocked_url("https://#{variant}", allow_local_network: false, schemes: schemes), "Expected #{variant} to be blocked"
- end
- end
-
- context 'when local domain/IP is allowed' do
- let(:url_blocker_attributes) do
- {
- allow_localhost: false,
- allow_local_network: false,
- schemes: schemes
- }
- end
-
- before do
- allow(ApplicationSetting).to receive(:current).and_return(ApplicationSetting.new)
- stub_application_setting(outbound_local_requests_whitelist: allowlist)
- end
-
- context 'with IPs in allowlist' do
- let(:allowlist) do
- [
- '0.0.0.0',
- '127.0.0.1',
- '127.0.0.2',
- '192.168.1.1',
- *local_ips,
- '0:0:0:0:0:ffff:169.254.169.254',
- '::ffff:a9fe:a9fe',
- '::ffff:169.254.168.100',
- '::ffff:a9fe:a864',
- 'fe80::c800:eff:fe74:8',
- '255.255.255.255',
-
- # garbage IPs
- '45645632345',
- 'garbage456:more345gar:bage'
- ]
- end
-
- it_behaves_like 'allows local requests', { allow_localhost: false, allow_local_network: false, schemes: %w[http https] }
-
- it 'allows IP when dns_rebind_protection is disabled' do
- url = "http://example.com"
- attrs = url_blocker_attributes.merge(dns_rebind_protection: false)
-
- stub_domain_resolv('example.com', '192.168.1.2') do
- expect(described_class).not_to be_blocked_url(url, **attrs)
- end
-
- stub_domain_resolv('example.com', '192.168.1.3') do
- expect(described_class).to be_blocked_url(url, **attrs)
- end
- end
-
- it 'allows the limited broadcast address 255.255.255.255' do
- expect(described_class).not_to be_blocked_url('http://255.255.255.255', **url_blocker_attributes)
- end
- end
-
- context 'with domains in allowlist' do
- let(:allowlist) do
- [
- 'www.example.com',
- 'example.com',
- 'xn--itlab-j1a.com',
- 'garbage$^$%#$^&$'
- ]
- end
-
- it 'allows domains present in allowlist' do
- domain = 'example.com'
- subdomain1 = 'www.example.com'
- subdomain2 = 'subdomain.example.com'
-
- stub_domain_resolv(domain, '192.168.1.1') do
- expect(described_class).not_to be_blocked_url("http://#{domain}",
- **url_blocker_attributes)
- end
-
- stub_domain_resolv(subdomain1, '192.168.1.1') do
- expect(described_class).not_to be_blocked_url("http://#{subdomain1}",
- **url_blocker_attributes)
- end
-
- # subdomain2 is not part of the allowlist so it should be blocked
- stub_domain_resolv(subdomain2, '192.168.1.1') do
- expect(described_class).to be_blocked_url("http://#{subdomain2}",
- **url_blocker_attributes)
- end
- end
-
- it 'works with unicode and idna encoded domains' do
- unicode_domain = 'ğitlab.com'
- idna_encoded_domain = 'xn--itlab-j1a.com'
-
- stub_domain_resolv(unicode_domain, '192.168.1.1') do
- expect(described_class).not_to be_blocked_url("http://#{unicode_domain}",
- **url_blocker_attributes)
- end
-
- stub_domain_resolv(idna_encoded_domain, '192.168.1.1') do
- expect(described_class).not_to be_blocked_url("http://#{idna_encoded_domain}",
- **url_blocker_attributes)
- end
- end
-
- shared_examples 'dns rebinding checks' do
- shared_examples 'allowlists the domain' do
- let(:allowlist) { [domain] }
- let(:url) { "http://#{domain}" }
-
- before do
- stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
- end
-
- it do
- expect(described_class).not_to be_blocked_url(url, dns_rebind_protection: dns_rebind_value, schemes: schemes)
- end
- end
-
- describe 'dns_rebinding_setting' do
- context 'when enabled' do
- let(:dns_rebind_value) { true }
-
- it_behaves_like 'allowlists the domain'
- end
-
- context 'when disabled' do
- let(:dns_rebind_value) { false }
-
- it_behaves_like 'allowlists the domain'
- end
- end
- end
-
- context 'when the domain cannot be resolved' do
- let(:domain) { 'foobar.x' }
-
- it_behaves_like 'dns rebinding checks'
- end
-
- context 'when the domain can be resolved' do
- let(:domain) { 'example.com' }
-
- before do
- stub_dns(url, ip_address: '93.184.216.34')
- end
-
- it_behaves_like 'dns rebinding checks'
- end
- end
-
- context 'with ports' do
- let(:allowlist) do
- ["127.0.0.1:2000"]
- end
-
- it 'allows domain with port when resolved ip has port allowed' do
- stub_domain_resolv("www.resolve-domain.com", '127.0.0.1') do
- expect(described_class).not_to be_blocked_url("http://www.resolve-domain.com:2000", **url_blocker_attributes)
- end
- end
- end
- end
- end
- end
-
- describe 'enforce_user' do
- context 'when false (default)' do
- it 'does not block urls with a non-alphanumeric username' do
- expect(described_class).not_to be_blocked_url('ssh://-oProxyCommand=whoami@example.com/a', schemes: ['ssh'])
-
- # The leading character here is a Unicode "soft hyphen"
- expect(described_class).not_to be_blocked_url('ssh://­oProxyCommand=whoami@example.com/a', schemes: ['ssh'])
-
- # Unicode alphanumerics are allowed
- expect(described_class).not_to be_blocked_url('ssh://ğitlab@example.com/a', schemes: ['ssh'])
- end
- end
-
- context 'when true' do
- it 'blocks urls with a non-alphanumeric username' do
- aggregate_failures do
- expect(described_class).to be_blocked_url('ssh://-oProxyCommand=whoami@example.com/a', enforce_user: true, schemes: ['ssh'])
-
- # The leading character here is a Unicode "soft hyphen"
- expect(described_class).to be_blocked_url('ssh://­oProxyCommand=whoami@example.com/a', enforce_user: true, schemes: ['ssh'])
-
- # Unicode alphanumerics are allowed
- expect(described_class).not_to be_blocked_url('ssh://ğitlab@example.com/a', enforce_user: true, schemes: ['ssh'])
- end
- end
- end
- end
-
- context 'when ascii_only is true' do
- it 'returns true for unicode domain' do
- expect(described_class.blocked_url?('https://𝕘itⅼαƄ.com/foo/foo.bar', ascii_only: true, schemes: schemes)).to be true
- end
-
- it 'returns true for unicode tld' do
- expect(described_class.blocked_url?('https://gitlab.ᴄοm/foo/foo.bar', ascii_only: true, schemes: schemes)).to be true
- end
-
- it 'returns true for unicode path' do
- expect(described_class.blocked_url?('https://gitlab.com/𝒇οο/𝒇οο.Ƅαꮁ', ascii_only: true, schemes: schemes)).to be true
- end
-
- it 'returns true for IDNA deviations' do
- expect(described_class.blocked_url?('https://mißile.com/foo/foo.bar', ascii_only: true, schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://miςςile.com/foo/foo.bar', ascii_only: true, schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://git‍lab.com/foo/foo.bar', ascii_only: true, schemes: schemes)).to be true
- expect(described_class.blocked_url?('https://git‌lab.com/foo/foo.bar', ascii_only: true, schemes: schemes)).to be true
- end
- end
-
- it 'blocks urls with invalid ip address' do
- stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
-
- expect(described_class).to be_blocked_url('http://8.8.8.8.8', schemes: schemes)
- end
-
- it 'blocks urls whose hostname cannot be resolved' do
- stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
-
- expect(described_class).to be_blocked_url('http://foobar.x', schemes: schemes)
- end
-
- context 'when gitlab is running on a non-default port' do
- let(:gitlab_port) { 3000 }
-
- before do
- stub_config(gitlab: { protocol: 'http', host: 'gitlab.local', port: gitlab_port })
- end
-
- it 'returns true for url targeting the wrong port' do
- stub_domain_resolv('gitlab.local', '127.0.0.1') do
- expect(described_class).to be_blocked_url("http://gitlab.local/foo", schemes: schemes)
- end
- end
-
- it 'does not block url on gitlab port' do
- stub_domain_resolv('gitlab.local', '127.0.0.1') do
- expect(described_class).not_to be_blocked_url("http://gitlab.local:#{gitlab_port}/foo", schemes: schemes)
- end
- end
- end
-
- def stub_domain_resolv(domain, ip, port = 80)
- address = instance_double(Addrinfo,
- ip_address: ip,
- ipv4_private?: true,
- ipv6_linklocal?: false,
- ipv4_loopback?: false,
- ipv6_loopback?: false,
- ipv4?: false,
- ip_port: port
- )
- allow(Addrinfo).to receive(:getaddrinfo).with(domain, port, any_args).and_return([address])
- allow(address).to receive(:ipv6_v4mapped?).and_return(false)
-
- yield
-
- allow(Addrinfo).to receive(:getaddrinfo).and_call_original
- end
- end
-
- describe '#validate_hostname' do
- let(:ip_addresses) do
- [
- '2001:db8:1f70::999:de8:7648:6e8',
- 'FE80::C800:EFF:FE74:8',
- '::ffff:127.0.0.1',
- '::ffff:169.254.168.100',
- '::ffff:7f00:1',
- '0:0:0:0:0:ffff:0.0.0.0',
- 'localhost',
- '127.0.0.1',
- '127.000.000.001',
- '0x7f000001',
- '0x7f.0.0.1',
- '0x7f.0.0.1',
- '017700000001',
- '0177.1',
- '2130706433',
- '::',
- '::1'
- ]
- end
-
- it 'does not raise an error for valid IP addresses' do
- ip_addresses.each do |ip|
- expect { described_class.send(:validate_hostname, ip) }.not_to raise_error
- end
- end
- end
-end
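
The deleted spec above exercises Gitlab::HTTP_V2::UrlBlocker.blocked_url? through many option combinations. A minimal usage sketch, illustrative only and limited to the keyword arguments the examples above actually pass (anything else would be an assumption), as it would run in a spec or console context:

    schemes = %w[http https]

    Gitlab::HTTP_V2::UrlBlocker.blocked_url?('https://gitlab.com/foo/foo.git', schemes: schemes)
    # => false -- a routable public host on an allowed scheme

    Gitlab::HTTP_V2::UrlBlocker.blocked_url?('https://127.0.0.1/foo/foo.git', schemes: schemes)
    # => true -- loopback addresses (and their octal/hex/integer spellings) are rejected

    Gitlab::HTTP_V2::UrlBlocker.blocked_url?(
      'http://169.254.169.254',
      schemes: schemes,
      allow_localhost: false,      # also reject loopback/localhost
      allow_local_network: false,  # reject link-local and private ranges
      dns_rebind_protection: true  # resolve the hostname and validate the resolved IP
    )
    # => true -- the link-local metadata address is blocked
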
diff --git a/spec/models/namespaces/traversal/cached_spec.rb b/spec/models/namespaces/traversal/cached_spec.rb
index 8263e28bb98..dd52f9c3d70 100644
--- a/spec/models/namespaces/traversal/cached_spec.rb
+++ b/spec/models/namespaces/traversal/cached_spec.rb
@@ -3,101 +3,175 @@
require 'spec_helper'
RSpec.describe Namespaces::Traversal::Cached, feature_category: :database do
- let_it_be_with_refind(:old_parent) { create(:group) }
- let_it_be_with_refind(:new_parent) { create(:group) }
- let_it_be_with_refind(:group) { create(:group, parent: old_parent) }
- let_it_be_with_refind(:subgroup) { create(:group, parent: group) }
+ describe 'callbacks' do
+ let_it_be_with_refind(:old_parent) { create(:group) }
+ let_it_be_with_refind(:new_parent) { create(:group) }
+ let_it_be_with_refind(:group) { create(:group, parent: old_parent) }
+ let_it_be_with_refind(:subgroup) { create(:group, parent: group) }
- context 'when the namespace_descendants_cache_expiration feature flag is off' do
- let!(:cache) { create(:namespace_descendants, namespace: group) }
+ context 'when the namespace_descendants_cache_expiration feature flag is off' do
+ let!(:cache) { create(:namespace_descendants, namespace: group) }
- before do
- stub_feature_flags(namespace_descendants_cache_expiration: false)
- end
+ before do
+ stub_feature_flags(namespace_descendants_cache_expiration: false)
+ end
- it 'does not invalidate the cache' do
- expect { group.update!(parent: new_parent) }.not_to change { cache.reload.outdated_at }
- end
+ it 'does not invalidate the cache' do
+ expect { group.update!(parent: new_parent) }.not_to change { cache.reload.outdated_at }
+ end
- context 'when the group is deleted' do
- it 'does not invalidate the cache' do
- expect { group.destroy! }.not_to change { cache.reload.outdated_at }
+ context 'when the group is deleted' do
+ it 'does not invalidate the cache' do
+ expect { group.destroy! }.not_to change { cache.reload.outdated_at }
+ end
end
end
- end
- context 'when no cached records are present' do
- it 'does nothing' do
- group.parent = new_parent
+ context 'when no cached records are present' do
+ it 'does nothing' do
+ group.parent = new_parent
- expect { group.save! }.not_to change { Namespaces::Descendants.all.to_a }
+ expect { group.save! }.not_to change { Namespaces::Descendants.all.to_a }
+ end
end
- end
- context 'when the namespace record is UserNamespace' do
- it 'does nothing' do
- # we won't use the optimization for UserNamespace
- namespace = create(:user_namespace)
- cache = create(:namespace_descendants, namespace: namespace)
+ context 'when the namespace record is UserNamespace' do
+ it 'does nothing' do
+ # we won't use the optimization for UserNamespace
+ namespace = create(:user_namespace)
+ cache = create(:namespace_descendants, namespace: namespace)
- expect { namespace.destroy! }.not_to change { cache.reload.outdated_at }
+ expect { namespace.destroy! }.not_to change { cache.reload.outdated_at }
+ end
end
- end
- context 'when cached record is present' do
- let!(:cache) { create(:namespace_descendants, namespace: group) }
+ context 'when cached record is present' do
+ let!(:cache) { create(:namespace_descendants, namespace: group) }
- it 'invalidates the cache' do
- expect { group.update!(parent: new_parent) }.to change { cache.reload.outdated_at }.from(nil)
- end
+ it 'invalidates the cache' do
+ expect { group.update!(parent: new_parent) }.to change { cache.reload.outdated_at }.from(nil)
+ end
- it 'does not invalidate the cache of subgroups' do
- subgroup_cache = create(:namespace_descendants, namespace: subgroup)
+ it 'does not invalidate the cache of subgroups' do
+ subgroup_cache = create(:namespace_descendants, namespace: subgroup)
- expect { group.update!(parent: new_parent) }.not_to change { subgroup_cache.reload.outdated_at }
+ expect { group.update!(parent: new_parent) }.not_to change { subgroup_cache.reload.outdated_at }
+ end
+
+ context 'when a new subgroup is added' do
+ it 'invalidates the cache' do
+ expect { create(:group, parent: group) }.to change { cache.reload.outdated_at }
+ end
+ end
+
+ context 'when a new project is added' do
+ it 'invalidates the cache' do
+ expect { create(:project, group: group) }.to change { cache.reload.outdated_at }
+ end
+ end
end
- context 'when a new subgroup is added' do
- it 'invalidates the cache' do
- expect { create(:group, parent: group) }.to change { cache.reload.outdated_at }
+ context 'when parent group has cached record' do
+ it 'invalidates the parent cache' do
+ old_parent_cache = create(:namespace_descendants, namespace: old_parent)
+ new_parent_cache = create(:namespace_descendants, namespace: new_parent)
+
+ group.update!(parent: new_parent)
+
+ expect(old_parent_cache.reload.outdated_at).not_to be_nil
+ expect(new_parent_cache.reload.outdated_at).not_to be_nil
end
end
- context 'when a new project is added' do
+ context 'when group is destroyed' do
it 'invalidates the cache' do
- expect { create(:project, group: group) }.to change { cache.reload.outdated_at }
+ cache = create(:namespace_descendants, namespace: group)
+
+ expect { group.destroy! }.to change { cache.reload.outdated_at }.from(nil)
end
- end
- end
- context 'when parent group has cached record' do
- it 'invalidates the parent cache' do
- old_parent_cache = create(:namespace_descendants, namespace: old_parent)
- new_parent_cache = create(:namespace_descendants, namespace: new_parent)
+ context 'when parent group has cached record' do
+ it 'invalidates the parent cache' do
+ old_parent_cache = create(:namespace_descendants, namespace: old_parent)
+ new_parent_cache = create(:namespace_descendants, namespace: new_parent)
- group.update!(parent: new_parent)
+ group.destroy!
- expect(old_parent_cache.reload.outdated_at).not_to be_nil
- expect(new_parent_cache.reload.outdated_at).not_to be_nil
+ expect(old_parent_cache.reload.outdated_at).not_to be_nil
+ expect(new_parent_cache.reload.outdated_at).to be_nil # no change
+ end
+ end
end
end
- context 'when group is destroyed' do
- it 'invalidates the cache' do
- cache = create(:namespace_descendants, namespace: group)
+ describe 'query methods' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subsubgroup) { create(:group, parent: subgroup) }
+
+ let_it_be(:project1) { create(:project, group: group) }
+ let_it_be(:project2) { create(:project, group: subsubgroup) }
+
+ # deliberately making self_and_descendant_group_ids different from the actual
+ # self_and_descendant_ids so we can verify that the cached query is running.
+ let_it_be_with_refind(:namespace_descendants) do
+ create(:namespace_descendants,
+ :up_to_date,
+ namespace: group,
+ self_and_descendant_group_ids: [group.id, subgroup.id],
+ all_project_ids: [project1.id]
+ )
+ end
+
+ describe '#self_and_descendant_ids' do
+ subject(:ids) { group.self_and_descendant_ids.pluck(:id) }
+
+ it 'returns the cached values' do
+ expect(ids).to eq(namespace_descendants.self_and_descendant_group_ids)
+ end
+
+ context 'when the cache is outdated' do
+ it 'returns the values from the uncached self_and_descendant_ids query' do
+ namespace_descendants.update!(outdated_at: Time.current)
+
+ expect(ids.sort).to eq([group.id, subgroup.id, subsubgroup.id])
+ end
+ end
+
+ context 'when the group_hierarchy_optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(group_hierarchy_optimization: false)
+ end
- expect { group.destroy! }.to change { cache.reload.outdated_at }.from(nil)
+ it 'returns the values from the uncached self_and_descendant_ids query' do
+ expect(ids.sort).to eq([group.id, subgroup.id, subsubgroup.id])
+ end
+ end
end
- context 'when parent group has cached record' do
- it 'invalidates the parent cache' do
- old_parent_cache = create(:namespace_descendants, namespace: old_parent)
- new_parent_cache = create(:namespace_descendants, namespace: new_parent)
+ describe '#all_project_ids' do
+ subject(:ids) { group.all_project_ids.pluck(:id) }
- group.destroy!
+ it 'returns the cached values' do
+ expect(ids).to eq(namespace_descendants.all_project_ids)
+ end
- expect(old_parent_cache.reload.outdated_at).not_to be_nil
- expect(new_parent_cache.reload.outdated_at).to be_nil # no change
+ context 'when the cache is outdated' do
+ it 'returns the values from the uncached all_project_ids query' do
+ namespace_descendants.update!(outdated_at: Time.current)
+
+ expect(ids.sort).to eq([project1.id, project2.id])
+ end
+ end
+
+ context 'when the group_hierarchy_optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(group_hierarchy_optimization: false)
+ end
+
+ it 'returns the values from the uncached all_project_ids query' do
+ expect(ids.sort).to eq([project1.id, project2.id])
+ end
end
end
end
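
In rough terms, the new 'query methods' examples pin down the read path sketched below. This is illustrative only: the find_by lookup and the fallback mechanics are assumptions, while the method and column names come straight from the spec.

    # Sketch of the behaviour asserted above, not the real implementation.
    cache = Namespaces::Descendants.find_by(namespace_id: group.id) # assumed lookup

    if cache && cache.outdated_at.nil?
      # Served from the denormalised arrays on the cache row:
      group.self_and_descendant_ids.pluck(:id) # == cache.self_and_descendant_group_ids
      group.all_project_ids.pluck(:id)         # == cache.all_project_ids
    else
      # Cache outdated (or group_hierarchy_optimization disabled): the same methods
      # recompute the ids from the namespace hierarchy instead.
      group.self_and_descendant_ids.pluck(:id)
      group.all_project_ids.pluck(:id)
    end
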
diff --git a/spec/models/user_detail_spec.rb b/spec/models/user_detail_spec.rb
index b443988cde9..81cc59cec49 100644
--- a/spec/models/user_detail_spec.rb
+++ b/spec/models/user_detail_spec.rb
@@ -2,11 +2,76 @@
require 'spec_helper'
-RSpec.describe UserDetail do
+RSpec.describe UserDetail, feature_category: :system_access do
it { is_expected.to belong_to(:user) }
- it { is_expected.to define_enum_for(:registration_objective).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other, :joining_team]).with_suffix }
+
+ specify do
+ values = [:basics, :move_repository, :code_storage, :exploring, :ci, :other, :joining_team]
+ is_expected.to define_enum_for(:registration_objective).with_values(values).with_suffix
+ end
describe 'validations' do
+ context 'for onboarding_status json schema' do
+ let(:step_url) { '_some_string_' }
+ let(:email_opt_in) { true }
+ let(:onboarding_status) do
+ {
+ step_url: step_url,
+ email_opt_in: email_opt_in
+ }
+ end
+
+ it { is_expected.to allow_value(onboarding_status).for(:onboarding_status) }
+
+ context 'for step_url' do
+ let(:onboarding_status) do
+ {
+ step_url: step_url
+ }
+ end
+
+ it { is_expected.to allow_value(onboarding_status).for(:onboarding_status) }
+
+ context "when 'step_url' is invalid" do
+ let(:step_url) { [] }
+
+ it { is_expected.not_to allow_value(onboarding_status).for(:onboarding_status) }
+ end
+ end
+
+ context 'for email_opt_in' do
+ let(:onboarding_status) do
+ {
+ email_opt_in: email_opt_in
+ }
+ end
+
+ it { is_expected.to allow_value(onboarding_status).for(:onboarding_status) }
+
+ context "when 'email_opt_in' is invalid" do
+ let(:email_opt_in) { 'true' }
+
+ it { is_expected.not_to allow_value(onboarding_status).for(:onboarding_status) }
+ end
+ end
+
+ context 'when there is no data' do
+ let(:onboarding_status) { {} }
+
+ it { is_expected.to allow_value(onboarding_status).for(:onboarding_status) }
+ end
+
+ context 'when trying to store an unsupported key' do
+ let(:onboarding_status) do
+ {
+ unsupported_key: '_some_value_'
+ }
+ end
+
+ it { is_expected.not_to allow_value(onboarding_status).for(:onboarding_status) }
+ end
+ end
+
describe '#job_title' do
it { is_expected.not_to validate_presence_of(:job_title) }
it { is_expected.to validate_length_of(:job_title).is_at_most(200) }
@@ -75,7 +140,8 @@ RSpec.describe UserDetail do
user_detail.mastodon = '@robin'
expect(user_detail).not_to be_valid
- expect(user_detail.errors.full_messages).to match_array([_('Mastodon must contain only a mastodon username.')])
+ expect(user_detail.errors.full_messages)
+ .to match_array([_('Mastodon must contain only a mastodon username.')])
end
end
end
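
The new onboarding_status cases boil down to a small JSON schema: step_url must be a string, email_opt_in a boolean, and unknown keys are rejected. A console-style sketch of the same checks; the factory name and the concrete values are assumptions, not taken from the spec.

    detail = build(:user_detail) # assumed factory

    detail.onboarding_status = { step_url: '/users/sign_up/welcome', email_opt_in: true }
    detail.validate
    detail.errors[:onboarding_status].empty? # => true -- supported keys, correct types

    detail.onboarding_status = { email_opt_in: 'true' } # boolean expected, string given
    detail.validate
    detail.errors[:onboarding_status].any? # => true

    detail.onboarding_status = { unsupported_key: '_some_value_' } # key outside the schema
    detail.validate
    detail.errors[:onboarding_status].any? # => true
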
diff --git a/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb b/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb
deleted file mode 100644
index f1c87243516..00000000000
--- a/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb
+++ /dev/null
@@ -1,137 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Gcp::ArtifactRegistry::DockerImagesController, feature_category: :container_registry do
- let_it_be(:project) { create(:project, :private) }
-
- let(:user) { project.owner }
- let(:gcp_project_id) { 'gcp_project_id' }
- let(:gcp_location) { 'gcp_location' }
- let(:gcp_ar_repository) { 'gcp_ar_repository' }
- let(:gcp_wlif_url) { 'gcp_wlif_url' }
-
- describe '#index' do
- let(:service_response) { ServiceResponse.success(payload: dummy_client_payload) }
- let(:service_double) do
- instance_double('GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService')
- end
-
- subject(:get_index_page) do
- get(
- project_gcp_artifact_registry_docker_images_path(
- project,
- gcp_project_id: gcp_project_id,
- gcp_location: gcp_location,
- gcp_ar_repository: gcp_ar_repository,
- gcp_wlif_url: gcp_wlif_url
- )
- )
- end
-
- before do
- allow_next_instance_of(GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService) do |service|
- allow(service).to receive(:execute).and_return(service_response)
- end
- end
-
- shared_examples 'returning the error message' do |message|
- it 'displays an error message' do
- sign_in(user)
-
- get_index_page
-
- expect(response).to have_gitlab_http_status(:success)
- expect(response.body).to include(message)
- end
- end
-
- context 'when on saas', :saas do
- it 'returns the images' do
- sign_in(user)
-
- get_index_page
-
- expect(response).to have_gitlab_http_status(:success)
- expect(response.body).to include('image@sha256:6a')
- expect(response.body).to include('tag1')
- expect(response.body).to include('tag2')
- expect(response.body).to include('Prev')
- expect(response.body).to include('Next')
- expect(response.body).to include('https://location.pkg.dev/project/repo/image@sha256:6a')
- end
-
- context 'when the service returns an error response' do
- let(:service_response) { ServiceResponse.error(message: 'boom') }
-
- it_behaves_like 'returning the error message', 'boom'
- end
-
- %i[gcp_project_id gcp_location gcp_ar_repository gcp_wlif_url].each do |field|
- context "when a gcp parameter #{field} is missing" do
- let(field) { nil }
-
- it 'redirects to setup page' do
- sign_in(user)
-
- get_index_page
-
- expect(response).to redirect_to new_project_gcp_artifact_registry_setup_path(project)
- end
- end
- end
-
- context 'with the feature flag disabled' do
- before do
- stub_feature_flags(gcp_technical_demo: false)
- end
-
- it_behaves_like 'returning the error message', 'Feature flag disabled'
- end
-
- context 'with non private project' do
- before do
- allow_next_found_instance_of(Project) do |project|
- allow(project).to receive(:private?).and_return(false)
- end
- end
-
- it_behaves_like 'returning the error message', 'Can only run on private projects'
- end
-
- context 'with unauthorized user' do
- let_it_be(:user) { create(:user) }
-
- it 'returns success' do
- sign_in(user)
-
- get_index_page
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'when not on saas' do
- it_behaves_like 'returning the error message', "Can&#39;t run here"
- end
-
- def dummy_client_payload
- {
- images: [
- {
- built_at: '2023-11-30T23:23:11.980068941Z',
- media_type: 'application/vnd.docker.distribution.manifest.v2+json',
- name: 'projects/project/locations/location/repositories/repo/dockerImages/image@sha256:6a',
- size_bytes: 2827903,
- tags: %w[tag1 tag2],
- updated_at: '2023-12-07T11:48:50.840751Z',
- uploaded_at: '2023-12-07T11:48:47.598511Z',
- uri: 'location.pkg.dev/project/repo/image@sha256:6a'
- }
- ],
- next_page_token: 'next_page_token'
- }
- end
- end
-end
diff --git a/spec/requests/projects/gcp/artifact_registry/setup_controller_spec.rb b/spec/requests/projects/gcp/artifact_registry/setup_controller_spec.rb
deleted file mode 100644
index 20d7969a05f..00000000000
--- a/spec/requests/projects/gcp/artifact_registry/setup_controller_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Gcp::ArtifactRegistry::SetupController, feature_category: :container_registry do
- let_it_be(:project) { create(:project, :private) }
-
- let(:user) { project.owner }
-
- describe '#new' do
- subject(:get_setup_page) { get(new_project_gcp_artifact_registry_setup_path(project)) }
-
- shared_examples 'returning the error message' do |message|
- it 'displays an error message' do
- sign_in(user)
-
- get_setup_page
-
- expect(response).to have_gitlab_http_status(:success)
- expect(response.body).to include(message)
- end
- end
-
- context 'when on saas', :saas do
- it 'returns the setup page' do
- sign_in(user)
-
- get_setup_page
-
- expect(response).to have_gitlab_http_status(:success)
- expect(response.body).to include('Google Project ID')
- expect(response.body).to include('Google Project Location')
- expect(response.body).to include('Artifact Registry Repository Name')
- expect(response.body).to include('Worflow Identity Federation url')
- expect(response.body).to include('Setup')
- end
-
- context 'with the feature flag disabled' do
- before do
- stub_feature_flags(gcp_technical_demo: false)
- end
-
- it_behaves_like 'returning the error message', 'Feature flag disabled'
- end
-
- context 'with non private project' do
- before do
- allow_next_found_instance_of(Project) do |project|
- allow(project).to receive(:private?).and_return(false)
- end
- end
-
- it_behaves_like 'returning the error message', 'Can only run on private projects'
- end
-
- context 'with unauthorized user' do
- let_it_be(:user) { create(:user) }
-
- it 'returns success' do
- sign_in(user)
-
- get_setup_page
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'when not on saas' do
- it_behaves_like 'returning the error message', "Can&#39;t run here"
- end
- end
-end
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index f50163041f8..ceb0f5c45b4 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -457,6 +457,8 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
context 'when enabling the setting' do
it 'creates the initial Namespaces::Descendants record' do
expect { result }.to change { public_group.reload.namespace_descendants.present? }.from(false).to(true)
+
+ expect(public_group.namespace_descendants.outdated_at).to be_present
end
end
diff --git a/spec/support/helpers/stub_requests.rb b/spec/support/helpers/stub_requests.rb
index a3810323fee..f9fe12fbfbb 100644
--- a/spec/support/helpers/stub_requests.rb
+++ b/spec/support/helpers/stub_requests.rb
@@ -8,7 +8,7 @@ module StubRequests
#
# It expects the final request to go to the `ip_address` instead of the given url.
# That's primarily a DNS rebind attack prevention of Gitlab::HTTP
- # (see: Gitlab::UrlBlocker).
+ # (see: Gitlab::HTTP_V2::UrlBlocker).
#
def stub_full_request(url, ip_address: IP_ADDRESS_STUB, port: 80, method: :get)
stub_dns(url, ip_address: ip_address, port: port)
@@ -22,7 +22,7 @@ module StubRequests
socket = Socket.sockaddr_in(port, ip_address)
addr = Addrinfo.new(socket)
- # See Gitlab::UrlBlocker
+ # See Gitlab::HTTP_V2::UrlBlocker
allow(Addrinfo).to receive(:getaddrinfo)
.with(url.hostname, url.port, nil, :STREAM)
.and_return([addr])
@@ -34,7 +34,7 @@ module StubRequests
socket = Socket.sockaddr_in(port, ip_address)
addr = Addrinfo.new(socket)
- # See Gitlab::UrlBlocker
+ # See Gitlab::HTTP_V2::UrlBlocker
allow(Addrinfo).to receive(:getaddrinfo).and_call_original
allow(Addrinfo).to receive(:getaddrinfo)
.with(url.hostname, anything, nil, :STREAM)
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index 4be24d43363..dae6ffe89c4 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -6714,7 +6714,6 @@
- './spec/lib/gitlab/uploads_transfer_spec.rb'
- './spec/lib/gitlab/url_blockers/domain_allowlist_entry_spec.rb'
- './spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb'
-- './spec/lib/gitlab/url_blocker_spec.rb'
- './spec/lib/gitlab/url_blockers/url_allowlist_spec.rb'
- './spec/lib/gitlab/url_builder_spec.rb'
- './spec/lib/gitlab/url_sanitizer_spec.rb'
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
index dfad11f3170..6280a61f024 100644
--- a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -44,7 +44,7 @@ RSpec.shared_examples 'User creates wiki page' do
click_link("link test")
- expect(page).to have_content("Create New Page")
+ expect(page).to have_content("New Page")
end
it "shows non-escaped link in the pages list", :js do
@@ -85,7 +85,7 @@ RSpec.shared_examples 'User creates wiki page' do
expect(page).to have_current_path(wiki_page_path(wiki, "test"), ignore_query: true)
page.within(:css, ".wiki-page-header") do
- expect(page).to have_content("Create New Page")
+ expect(page).to have_content("New Page")
end
click_link("Home")
@@ -97,7 +97,7 @@ RSpec.shared_examples 'User creates wiki page' do
expect(page).to have_current_path(wiki_page_path(wiki, "api"), ignore_query: true)
page.within(:css, ".wiki-page-header") do
- expect(page).to have_content("Create")
+ expect(page).to have_content("New Page")
end
click_link("Home")
@@ -109,7 +109,7 @@ RSpec.shared_examples 'User creates wiki page' do
expect(page).to have_current_path(wiki_page_path(wiki, "raketasks"), ignore_query: true)
page.within(:css, ".wiki-page-header") do
- expect(page).to have_content("Create")
+ expect(page).to have_content("New Page")
end
end
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
index c6454f62f34..7c7a6942155 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
@@ -117,7 +117,7 @@ RSpec.shared_examples 'User views a wiki page' do
click_on('image')
expect(page).to have_current_path(%r{wikis/#{path}})
- expect(page).to have_content('Create New Page')
+ expect(page).to have_content('New Page')
end
end
@@ -281,6 +281,6 @@ RSpec.shared_examples 'User views a wiki page' do
click_link "Create your first page"
- expect(page).to have_content('Create New Page')
+ expect(page).to have_content('New Page')
end
end
diff --git a/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb b/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb
index 3119a03b1cb..3d8ffd4fae2 100644
--- a/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb
+++ b/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'a request using Gitlab::UrlBlocker' do
+RSpec.shared_examples 'a request using Gitlab::HTTP_V2::UrlBlocker' do
# Written to test internal patches against 3rd party libraries
#
# Expects the following to be available in the example contexts:
diff --git a/spec/support/shared_examples/namespaces/traversal_examples.rb b/spec/support/shared_examples/namespaces/traversal_examples.rb
index 960160395f8..65f1abe0355 100644
--- a/spec/support/shared_examples/namespaces/traversal_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_examples.rb
@@ -284,6 +284,14 @@ RSpec.shared_examples 'namespace traversal' do
end
end
+ describe 'all_project_ids' do
+ it 'is an AR relation' do
+ expect(group.all_project_ids).to be_kind_of(ActiveRecord::Relation)
+ end
+
+ it_behaves_like 'recursive version', :all_project_ids
+ end
+
describe '#self_and_descendant_ids' do
subject { group.self_and_descendant_ids.pluck(:id) }